[med-svn] [python-ruffus] 01/03: Imported Upstream version 2.6.3+dfsg

Andreas Tille tille at debian.org
Fri Jul 17 21:04:38 UTC 2015


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository python-ruffus.

commit 65b49ab723facc50f535056620580124ef7a2412
Author: Andreas Tille <tille at debian.org>
Date:   Fri Jul 17 22:03:46 2015 +0200

    Imported Upstream version 2.6.3+dfsg
---
 PKG-INFO                                           |   2 +-
 README.rst                                         | 127 ++++
 doc/_build/latex/ruffus.pdf                        | Bin 4688129 -> 4690982 bytes
 doc/history.rst                                    |  22 +-
 doc/implementation_notes.rst                       |  27 -
 doc/static_data/ruffus.pdf                         | Bin 4688129 -> 4690982 bytes
 doc/todo.rst                                       |  50 +-
 doc/tutorials/new_syntax_worked_example_code.rst   | 186 ++++--
 ruffus/drmaa_wrapper.py                            |  87 ++-
 ruffus/file_name_parameters.py                     |  35 +-
 ruffus/proxy_logger.py                             |  45 --
 ruffus/ruffus_exceptions.py                        |   4 +
 ruffus/ruffus_utility.py                           |  64 +-
 ruffus/ruffus_version.py                           |   2 +-
 ruffus/task.py                                     | 642 ++++++++++++---------
 .../auto_generated_pipeline_examples/parallel.py   | 228 --------
 .../auto_generated_pipeline_examples/simple.py     | 253 --------
 .../auto_generated_pipeline_examples/simpler.py    | 269 ---------
 ruffus/test/complicated_example.py                 |   6 +-
 .../create_test_script_from_dependency_tree.py     |   3 +-
 ruffus/test/manual_test_ctrl_c_exceptions.py       |   2 +-
 ruffus/test/play_with_colours.py                   |   3 +-
 ruffus/test/run_all_unit_tests.cmd                 |  93 +++
 ruffus/test/run_all_unit_tests3.cmd                |  93 +++
 ruffus/test/simpler.py                             | 234 --------
 ruffus/test/simpler_with_shared_logging.py         |   9 +-
 ruffus/test/slow_process_for_testing.py            |  22 +
 ruffus/test/test_N_x_M_and_collate.py              |  27 +-
 ruffus/test/test_active_if.py                      |  23 +-
 ruffus/test/test_branching_dependencies.py         |  22 +-
 ruffus/test/test_cmdline.py                        |  20 +-
 ruffus/test/test_collate.py                        |  17 +-
 ruffus/test/test_combinatorics.py                  | 160 +++--
 ruffus/test/test_drmaa_wrapper_run_job_locally.py  |  86 +++
 ruffus/test/test_empty_files_decorator.py          |  14 +-
 ruffus/test/test_exceptions.py                     |   9 +-
 ruffus/test/test_file_name_parameters.py           | 510 ++++++++--------
 ruffus/test/test_files_decorator.py                |  28 +-
 ruffus/test/test_filesre_combine.py                |  21 +-
 ruffus/test/test_filesre_split_and_combine.py      |  21 +-
 ruffus/test/test_follows_mkdir.py                  |  32 +-
 ruffus/test/test_graphviz.py                       |  18 +-
 ..._inputs_with_multiple_args_raising_exception.py |  31 +-
 ruffus/test/test_job_completion_checksums.py       |  86 ++-
 ruffus/test/test_job_history_with_exceptions.py    |  57 +-
 ruffus/test/test_mkdir.py                          |  43 +-
 ruffus/test/test_newstyle_combinatorics.py         | 125 ++--
 ruffus/test/test_newstyle_proxy.py                 |  39 +-
 ruffus/test/test_newstyle_regex_error_messages.py  |  37 +-
 ruffus/test/test_pausing.py                        |  18 +-
 ruffus/test/test_pipeline_printout_graph.py        |  19 +-
 ruffus/test/test_posttask_merge.py                 |  21 +-
 ruffus/test/test_proxy_logger.py                   |  78 +++
 ruffus/test/test_regex_error_messages.py           |  39 +-
 ruffus/test/test_ruffus_utility.py                 |  20 +-
 .../test_ruffus_utility_parse_task_arguments.py    |  51 +-
 ruffus/test/test_runtime_data.py                   |  35 +-
 ruffus/test/test_softlink_uptodate.py              |  24 +-
 ruffus/test/test_split_and_combine.py              |  13 +-
 ruffus/test/test_split_regex_and_collate.py        |  25 +-
 ruffus/test/test_split_subdivide_checkpointing.py  |  24 +-
 ruffus/test/test_subpipeline.py                    |  33 +-
 ..._subpipeline.py => test_subpipeline_cmdline.py} | 134 +++--
 ruffus/test/test_suffix_output_dir.py              | 204 ++++---
 ruffus/test/test_task_file_dependencies.py         |  16 +-
 ruffus/test/test_task_misc.py                      |   7 +-
 ruffus/test/test_transform_add_inputs.py           |   8 +-
 ruffus/test/test_transform_formatter.py            | 147 +++++
 ruffus/test/test_transform_inputs.py               |  19 +-
 ruffus/test/test_transform_with_no_re_matches.py   |  33 +-
 ruffus/test/test_tutorial7.py                      |  44 +-
 ruffus/test/test_unicode_filenames.py              |  59 +-
 ruffus/test/test_verbosity.py                      |  55 +-
 ruffus/test/test_with_logger.py                    | 214 +++++++
 74 files changed, 2665 insertions(+), 2609 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 08e8ac3..a36e7c8 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: ruffus
-Version: 2.6.2
+Version: 2.6.3
 Summary: Light-weight Python Computational Pipeline Management
 Home-page: http://www.ruffus.org.uk
 Author: Leo Goodstadt
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..d9e1922
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,127 @@
+***************************************
+Overview
+***************************************
+
+The ruffus module is a lightweight way to add support
+for running computational pipelines.
+
+Computational pipelines are often conceptually quite simple, especially
+if we break down the process into simple stages, or separate **tasks**.
+
+Each stage or **task** in a computational pipeline is represented by a python function.
+Each python function can be called in parallel to run multiple **jobs**.
+
+Ruffus was originally designed for use in bioinformatics to analyse multiple genome
+data sets.
+
+***************************************
+Documentation
+***************************************
+
+Ruffus documentation can be found `here <http://www.ruffus.org.uk/>`_ ,
+with `installation notes <http://www.ruffus.org.uk/installation.html>`_ ,
+a `short tutorial <http://www.ruffus.org.uk/tutorials/simple_tutorial/simple_tutorial.html>`_ and
+an `in-depth manual <http://www.ruffus.org.uk/tutorials/manual/manual_introduction.html>`_ .
+
+
+***************************************
+Background
+***************************************
+
+The purpose of a pipeline is to determine automatically which parts of a multi-stage
+process need to be run, and in what order, to reach an objective ("targets").
+
+Computational pipelines, especially for analysing large scientific datasets, are
+in widespread use.
+However, even a conceptually simple series of steps can be difficult to set up and
+to maintain, perhaps because the right tools are not available.
+
+***************************************
+Design
+***************************************
+
+The ruffus module has the following design goals:
+
+* Simplicity. Can be picked up in 10 minutes
+* Elegance
+* Lightweight
+* Unintrusive
+* Flexible/Powerful
+
+***************************************
+Features
+***************************************
+
+Automatic support for
+
+* Managing dependencies
+* Parallel jobs
+* Re-starting from arbitrary points, especially after errors
+* Display of the pipeline as a flowchart
+* Reporting
+
+***************************************
+A Simple example
+***************************************
+
+Use the **@transform(...)** python decorator before the function definitions:
+
+  .. code-block:: python
+
+    from ruffus import *
+
+    # make 10 dummy DNA data files
+    data_files = [(prefix + ".fastq") for prefix in "abcdefghij"]
+    for df in data_files:
+        open(df, "w").close()
+
+
+    @transform(data_files, suffix(".fastq"), ".bam")
+    def run_bwa(input_file, output_file):
+        print "Align DNA sequences in %s to a genome -> %s " % (input_file, output_file)
+        # make dummy output file
+        open(output_file, "w").close()
+
+
+    @transform(run_bwa, suffix(".bam"), ".sorted.bam")
+    def sort_bam(input_file, output_file):
+        print "Sort DNA sequences in %s -> %s " % (input_file, output_file)
+        # make dummy output file
+        open(output_file, "w").close()
+
+    pipeline_run([sort_bam], multithread = 5)
+
+
+The ``@transform`` decorator indicates that the data flows from the ``run_bwa`` function to ``sort_bam`` down
+the pipeline.
+
+********
+Usage
+********
+
+Each stage or **task** in a computational pipeline is represented by a python function.
+Each python function can be called in parallel to run multiple **jobs**.
+
+1. Import module::
+
+        import ruffus
+
+
+2. Annotate functions with python decorators
+
+3. Print the dependency graph if necessary
+
+    - For a graphical flowchart in ``jpg``, ``svg``, ``dot``, ``png``, ``ps``, ``gif`` formats::
+
+        pipeline_printout_graph ("flowchart.svg")
+
+    This requires ``dot`` to be installed
+
+    - For a text printout of all jobs::
+
+        pipeline_printout(sys.stdout)
+
+
+4. Run the pipeline::
+
+    pipeline_run(list_of_target_tasks, verbose = NNN, [multithread | multiprocess = NNN])
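
A minimal sketch tying the numbered steps above together, reusing the
``sort_bam`` task from the earlier example (the task name is from that
example, not part of the Ruffus API):

    import sys
    from ruffus import pipeline_printout, pipeline_printout_graph, pipeline_run

    # step 3: show what would be run, as text and as a flowchart (needs "dot")
    pipeline_printout(sys.stdout)
    pipeline_printout_graph("flowchart.svg")

    # step 4: run the pipeline, up to 5 jobs in parallel
    pipeline_run([sort_bam], multithread = 5)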
diff --git a/doc/_build/latex/ruffus.pdf b/doc/_build/latex/ruffus.pdf
index 0c4877e..c917f6b 100644
Binary files a/doc/_build/latex/ruffus.pdf and b/doc/_build/latex/ruffus.pdf differ
diff --git a/doc/history.rst b/doc/history.rst
index 2eb2031..1c9c656 100644
--- a/doc/history.rst
+++ b/doc/history.rst
@@ -17,7 +17,27 @@ Major Features added to Ruffus
     See :ref:`To do list <todo>` for future enhancements to Ruffus
 
 ********************************************************************
-version 2.6
+version 2.6.3
+********************************************************************
+    25th April 2015
+
+=====================================================================================================================
+1) Bug fixes and minor enhancements
+=====================================================================================================================
+
+    * `@transform(..., suffix("xxx"),` :red:`output_dir` `= "/new/output/path")` works even when the output has more than one file  `(github)  <https://github.com/bunbun/ruffus/issues/43>`__
+    * `@subdivide( ..., suffix("xxx"),` :red:`output_dir` `= "/new/output/path")` works in exactly the same way as `@transform(..., output_dir="xxx")`  `(github)  <https://github.com/bunbun/ruffus/issues/42>`__
+    * `ruffus.drmaa_wrapper.run_job()` works with python3 `(github)  <https://github.com/bunbun/ruffus/issues/46>`__
+    * `ruffus.drmaa_wrapper.run_job_locally()` allows the job environment (env) to be set `(github)  <https://github.com/bunbun/ruffus/issues/44>`__
+    * New object-orientated style syntax and `ruffus.cmdline.run` `(github)  <https://github.com/bunbun/ruffus/issues/48>`__
+
+
+
+
+
+
+********************************************************************
+version 2.6.2
 ********************************************************************
 
     12th March 2015
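
The ``output_dir`` parameter mentioned in the 2.6.3 changelog above can be
sketched as follows (a minimal example; the file names and task are
hypothetical):

    from ruffus import transform, suffix, pipeline_run

    # outputs land in "processed/" instead of next to the inputs:
    #   raw/a.fastq -> processed/a.bam
    # (assumes raw/a.fastq and raw/b.fastq already exist)
    @transform(["raw/a.fastq", "raw/b.fastq"], suffix(".fastq"), ".bam",
               output_dir = "processed")
    def align(input_file, output_file):
        open(output_file, "w").close()

    pipeline_run([align])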
diff --git a/doc/implementation_notes.rst b/doc/implementation_notes.rst
index 4a87582..afc0be6 100644
--- a/doc/implementation_notes.rst
+++ b/doc/implementation_notes.rst
@@ -2,33 +2,6 @@
 Implementation Tips
 ##########################################
 
-******************************************************************************
-Items remaining for current release
-******************************************************************************
-======================================================================================================
-Code
-======================================================================================================
-    #. update_checksum_level_on_tasks(checksum_level) is non reentrant
-    #. ``Task.description_with_args_placeholder`` needs to only fill in placeholders at the last minute
-       Otherwise cloned pipelines will have the wrong name
-
-======================================================================================================
-Unit tests
-======================================================================================================
-    #. output_dir for @mkdir
-    #. When are things defined / linked up
-    #. When can we join up Pipelines / tasks / set_input()?
-    #.  Sub pipeline
-    #.  Whether setup occurs ``pipeline_run()`` where ``target_tasks`` and ``forcedtorun_tasks`` are in different linked or unlinked pipelines
-    #. name lookup
-    #. Task (dependency) parsing inside @transform, pipeline.transform(input = , add_inputs, replace_inputs =), pipeline.split(..., output=)
-    #. ``mkdir()`` should not be allowed inside input parameters apart from @follows
-    #. Cannot dependency cannot be self
-    #. ``Pipeline.clone()``
-    #. ``Task.set_input()``
-    #. ``@product`` ``set_input`` should take (``input``, ``input2``...)
-
-
 
 
 ******************************************************************************
diff --git a/doc/static_data/ruffus.pdf b/doc/static_data/ruffus.pdf
index 0c4877e..c917f6b 100644
Binary files a/doc/static_data/ruffus.pdf and b/doc/static_data/ruffus.pdf differ
diff --git a/doc/todo.rst b/doc/todo.rst
index 36c5320..363fcff 100644
--- a/doc/todo.rst
+++ b/doc/todo.rst
@@ -40,6 +40,34 @@ In up coming release:
 ##########################################
 
 ****************************************************************************************
+Todo: Mention python3.2 multiprocessing import and proxies bug in FAQ
+****************************************************************************************
+
+****************************************************************************************
+Todo: Refactor Error Messages
+****************************************************************************************
+
+    When are messages indented?
+    When are messages wrapped / extended across new lines
+
+****************************************************************************************
+Todo: More documentation for formatter()
+****************************************************************************************
+
+    Needs to discuss how to escape. Also in FAQ?
+
+****************************************************************************************
+Todo: OOP syntax taking strings
+****************************************************************************************
+
+****************************************************************************************
+Todo: Extra unit tests
+****************************************************************************************
+    #. ``@product`` ``set_input`` should take (``input``, ``input2``...)
+    #. bioinformatics pipelines (complicated example)
+    #. ``output_from`` and Pipeline names
+
+****************************************************************************************
 Todo: document ``output_from()``
 ****************************************************************************************
 
@@ -56,6 +84,23 @@ Todo: Log the progress through the pipeline in a machine parsable format
     * Timestamped text file
     * Timestamped Database
 
+    Unit tests depending on topology output:
+
+    * ``Pipeline.clone()``
+    * Whether setup occurs in ``pipeline_run()`` where ``target_tasks`` and ``forcedtorun_tasks`` are in different linked or unlinked pipelines
+    * pipeline in separate module
+    * self dependency -> errors
+
+
+****************************************************************************************
+Todo: Check non-reentrant / global variables
+****************************************************************************************
+    #. update_checksum_level_on_tasks(checksum_level) is non-reentrant
+
+****************************************************************************************
+Todo: Pipeline runs should have tags / names
+****************************************************************************************
+
 ****************************************************************************************
 Todo: either_or: Prevent failed jobs from propagating further
 ****************************************************************************************
@@ -488,11 +533,6 @@ Planned: Ruffus GUI interface.
     Desktop (PyQT or web-based solution?)  I'd love to see an svg pipeline picture that I could actually interact with
 
 
-********************************************************************************************************
-Planned: Non-decorator / Function interface to Ruffus
-********************************************************************************************************
-
-
 
 .. _todo.retry:
 
diff --git a/doc/tutorials/new_syntax_worked_example_code.rst b/doc/tutorials/new_syntax_worked_example_code.rst
index a079615..5dc2c34 100644
--- a/doc/tutorials/new_syntax_worked_example_code.rst
+++ b/doc/tutorials/new_syntax_worked_example_code.rst
@@ -8,9 +8,101 @@ Python Code for: New Object orientated syntax for Ruffus in Version 2.6
 
         * :ref:`new_syntax.worked_example <new_syntax.worked_example>`
 
-    This code is from ``test/test_subpipeline.py`` in the **Ruffus** distribution
+    This code is adapted from ``test/test_subpipeline.py`` in the **Ruffus** distribution
 
 
+==============================================================================
+Output
+==============================================================================
+
+    Let us save the script to ``test_subpipeline_cmdline.py``
+
+    #) Try running the script as is:
+        .. <<bash
+
+        .. code-block:: bash
+
+            # cleanup before and afterwards
+            $ ./test_subpipeline_cmdline.py --cleanup
+        ..
+            bash
+
+    #) If we print out the pipeline, we can see that, by default,
+       the entire pipeline (with all its sub-pipelines) will run.
+
+        .. <<bash
+
+        .. code-block:: bash
+
+            # grep Completed Tasks
+            $ ./test_subpipeline_cmdline.py --cleanup --verbose 1 --just_print
+
+            ________________________________________
+            Tasks which will be run:
+
+            Task = "pipeline1a::mkdir('tempdir/')   before task_originate "
+            Task = "pipeline1a::mkdir('tempdir/testdir',   'tempdir/testdir2') #2   before task_originate "
+            Task = 'pipeline1a::task_originate'
+            Task = 'pipeline1a::add_input'
+            Task = 'pipeline1a::22_to_33'
+            Task = 'pipeline1a::33_to_44'
+            Task = "pipeline1b::mkdir('tempdir/')   before task_originate "
+            Task = "pipeline1b::mkdir('tempdir/testdir',   'tempdir/testdir2') #2   before task_originate "
+            Task = 'pipeline1b::task_originate'
+            Task = 'pipeline1b::add_input'
+            Task = 'pipeline1b::22_to_33'
+            Task = 'pipeline1b::33_to_44'
+            Task = "pipeline1c::mkdir('tempdir/')   before task_originate "
+            Task = "pipeline1c::mkdir('tempdir/testdir',   'tempdir/testdir2') #2   before task_originate "
+            Task = 'pipeline1c::task_originate'
+            Task = 'pipeline1c::add_input'
+            Task = 'pipeline1c::22_to_33'
+            Task = 'pipeline1c::33_to_44'
+            Task = 'pipeline2::44_to_55'
+            Task = 'pipeline2::task_m_to_1'
+
+        ..
+            bash
+
+
+    #) Specifying either the main ``pipeline2`` or the last task in ``pipeline2`` produces the same output. All the ancestral tasks in pipelines1a-c will be run automatically.
+
+        .. <<bash
+
+        .. code-block:: bash
+
+            # grep Completed Tasks
+            $ ./test_subpipeline_cmdline.py --cleanup --verbose 1 --just_print --target_tasks pipeline2
+
+            $ ./test_subpipeline_cmdline.py --cleanup --verbose 1 --just_print --target_tasks pipeline2::task_m_to_1
+        ..
+            bash
+
+    #) Specifying only ``pipeline1a`` or any task in ``pipeline1a`` in ``--target_tasks`` will run only the specified tasks in that sub-pipeline.
+
+        .. <<bash
+
+        .. code-block:: bash
+
+            # grep Completed Tasks
+            $ ./test_subpipeline_cmdline.py --cleanup --verbose 1 --just_print --target_tasks pipeline1a
+            $ ./test_subpipeline_cmdline.py --cleanup --verbose 1 --just_print --forced_tasks pipeline1a::task_originate
+
+            Task = "pipeline1a::mkdir('tempdir/')   before task_originate "
+            Task = "pipeline1a::mkdir('tempdir/testdir',   'tempdir/testdir2') #2   before task_originate "
+            Task = 'pipeline1a::task_originate'
+            Task = 'pipeline1a::add_input'
+            Task = 'pipeline1a::22_to_33'
+            Task = 'pipeline1a::33_to_44'
+
+        ..
+            bash
+
+
+==============================================================================
+Code
+==============================================================================
     .. <<python
 
     .. code-block:: python
@@ -19,7 +111,7 @@ Python Code for: New Object orientated syntax for Ruffus in Version 2.6
         #!/usr/bin/env python
         from __future__ import print_function
         """
-            test_subpipeline.py
+
 
                 Demonstrates the new Ruffus syntax in version 2.6
         """
@@ -163,60 +255,74 @@ Python Code for: New Object orientated syntax for Ruffus in Version 2.6
             return test_pipeline2
 
 
-        def run_pipeline():
 
-            #   First two pipelines are created as separate instances by the make_pipeline1 function
-            pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + ss for ss in ("a.1", "b.1")])
-            pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + ss for ss in ("c.1", "d.1")])
 
-            #   The Third pipeline is a clone of pipeline1b
-            pipeline1c = pipeline1b.clone(new_name = "pipeline1c")
+        #   First two pipelines are created as separate instances by the make_pipeline1 function
+        pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + ss for ss in ("a.1", "b.1")])
+        pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + ss for ss in ("c.1", "d.1")])
+
+        #   The Third pipeline is a clone of pipeline1b
+        pipeline1c = pipeline1b.clone(new_name = "pipeline1c")
+
+        #   Set the "originate" files for pipeline1c to ("e.1" and "f.1")
+        #       Otherwise they would use the original ("c.1", "d.1")
+        pipeline1c.set_output(output = [])
+        pipeline1c.set_output(output = [tempdir + ss for ss in ("e.1", "f.1")])
+
+        #   Join all pipeline1a-c to pipeline2
+        pipeline2 = make_pipeline2()
+        pipeline2.set_input(input = [pipeline1a, pipeline1b, pipeline1c])
+
 
-            #   Set the "originate" files for pipeline1c to ("e.1" and "f.1")
-            #       Otherwise they would use the original ("c.1", "d.1")
-            pipeline1c.set_output(output = [])
-            pipeline1c.set_output(output = [tempdir + ss for ss in ("e.1", "f.1")])
+        import ruffus.cmdline as cmdline
+        parser = cmdline.get_argparse(description='Demonstrates the new Ruffus syntax in version 2.6')
 
-            #   Join all pipeline1a-c to pipeline2
-            pipeline2 = make_pipeline2()
-            pipeline2.set_input(input = [pipeline1a, pipeline1b, pipeline1c])
+        parser.add_argument('--cleanup', "-C",
+                            action="store_true",
+                            help="Cleanup before and after.")
 
 
-            pipeline2.printout_graph("test.svg", "svg", [task_m_to_1])
-            pipeline2.printout(verbose = 0)
-            pipeline2.run(multiprocess = 10, verbose = 0)
+        options = parser.parse_args()
 
 
-        class Test_task(unittest.TestCase):
 
-            def tearDown (self):
-                """
-                """
-                try:
-                    shutil.rmtree(tempdir)
-                except:
-                    pass
+        #  standard python logger which can be synchronised across concurrent Ruffus tasks
+        logger, logger_mutex = cmdline.setup_logging (__name__, options.log_file, options.verbose)
 
 
-            def test_subpipelines (self):
+        # if we are only printing, drawing the flowchart or touching files,
+        #   run and exit without cleaning up
+        if  options.just_print or \
+            options.flowchart or \
+            options.touch_files_only:
+            cmdline.run (options)
+            sys.exit()
 
-                run_pipeline()
+        #
+        #   Cleanup beforehand
+        #
+        if options.cleanup:
+            try:
+                shutil.rmtree(tempdir)
+            except:
+                pass
+
+        #
+        #   Run
+        #
+        cmdline.run (options)
+
+        #
+        #   Cleanup Afterwards
+        #
+        if options.cleanup:
+            try:
+                shutil.rmtree(tempdir)
+            except:
+                pass
 
-                # Check that the output reflecting the pipeline topology is correct.
-                correct_output = 'tempdir/a.1.55=tempdir/a.1.44+tempdir/a.1.33+tempdir/a.1.22+tempdir/a.1=; tempdir/testdir/whatever.txt=; ; ' \
-                                 'tempdir/b.1.55=tempdir/b.1.44+tempdir/b.1.33+tempdir/b.1.22+tempdir/b.1=; tempdir/testdir/whatever.txt=; ; ' \
-                                 'tempdir/c.1.55=tempdir/c.1.44+tempdir/c.1.33+tempdir/c.1.22+tempdir/c.1=; tempdir/testdir/whatever.txt=; ; ' \
-                                 'tempdir/d.1.55=tempdir/d.1.44+tempdir/d.1.33+tempdir/d.1.22+tempdir/d.1=; tempdir/testdir/whatever.txt=; ; ' \
-                                 'tempdir/e.1.55=tempdir/e.1.44+tempdir/e.1.33+tempdir/e.1.22+tempdir/e.1=; tempdir/testdir/whatever.txt=; ; ' \
-                                 'tempdir/f.1.55=tempdir/f.1.44+tempdir/f.1.33+tempdir/f.1.22+tempdir/f.1=; tempdir/testdir/whatever.txt=; ; '
-                with open(tempdir + "final.output") as real_output:
-                    real_output_str = real_output.read()
-                self.assertEqual(correct_output, real_output_str)
 
 
 
-        if __name__ == '__main__':
-            unittest.main()
 
     ..
         python
diff --git a/ruffus/drmaa_wrapper.py b/ruffus/drmaa_wrapper.py
index 1e8d261..83d4df1 100644
--- a/ruffus/drmaa_wrapper.py
+++ b/ruffus/drmaa_wrapper.py
@@ -23,6 +23,14 @@
 #   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 #   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 #   THE SOFTWARE.
+#
+#   Portions of code adapted from:
+#
+#       http://stackoverflow.com/questions/375427/non-blocking-read-on-a-subprocess-pipe-in-python
+#       Courtesy of J.F. Sebastian
+#       Use is licensed under the "Creative Commons Attribution Share Alike license"
+#       See http://stackexchange.com/legal
+#
 #################################################################################
 """
 
@@ -51,6 +59,18 @@ import datetime
 import subprocess
 import time
 
+import sys
+import threading
+
+try:
+    from Queue import Queue, Empty
+except ImportError:
+    from queue import Queue, Empty  # python 3.x
+
+ON_POSIX = 'posix' in sys.builtin_module_names
+
+
 if sys.hexversion >= 0x03000000:
     # everything is unicode in python3
     path_str_type = str
@@ -181,7 +201,7 @@ def write_job_script_to_temp_file( cmd_str, job_script_directory, job_name, job_
         os.makedirs(job_script_directory)
     except:
         pass
-    tmpfile = tempfile.NamedTemporaryFile(mode='w+b', prefix='drmaa_script_' + time_stmp_str + "__", dir = job_script_directory,  delete = False)
+    tmpfile = tempfile.NamedTemporaryFile(mode='w', prefix='drmaa_script_' + time_stmp_str + "__", dir = job_script_directory,  delete = False)
 
     #
     #   hopefully #!/bin/sh is universally portable among unix-like operating systems
@@ -361,33 +381,72 @@ def run_job_using_drmaa (cmd_str, job_name = None, job_other_options = "", job_s
     return stdout, stderr
 
 
+
+def enqueue_output(out, queue, echo):
+    """
+    Drains a stream line by line onto a queue (run in a separate thread),
+    optionally echoing each line to another stream
+    """
+    for line in iter(out.readline, b''):
+        queue.put(line)
+        if echo is not None:
+            echo.write(line)
+            echo.flush()
+    out.close()
+
+
 #_________________________________________________________________________________________
 
 #   run_job_locally
 
 #_________________________________________________________________________________________
-def run_job_locally (cmd_str, logger = None):
+def run_job_locally (cmd_str, logger = None, job_environment = None, working_directory = None, local_echo = False):
     """
     Runs specified command locally instead of drmaa
     """
-    process = subprocess.Popen(  cmd_str,
-                                 cwd = os.getcwd(),
-                                 shell = True,
-                                 stdin = subprocess.PIPE,
-                                 stdout = subprocess.PIPE,
-                                 stderr = subprocess.PIPE )
 
-    # process.stdin.close()
-    stdout, stderr = process.communicate()
+    popen_params = {"args"      : cmd_str,
+                    "cwd"       : working_directory if working_directory is not None else os.getcwd(),
+                    "shell"     : True,
+                    "stdin"     : subprocess.PIPE,
+                    "stdout"    : subprocess.PIPE,
+                    "stderr"    : subprocess.PIPE,
+                    "bufsize"   :1,
+                    "close_fds" : ON_POSIX}
+    if job_environment is not None:
+        popen_params["env"] = job_environment
+
+    process = subprocess.Popen(  **popen_params )
+    stderrQ = Queue()
+    stdoutQ = Queue()
+    stdout_t = threading.Thread(target=enqueue_output, args=(process.stdout, stdoutQ, sys.stdout if local_echo else None))
+    stderr_t = threading.Thread(target=enqueue_output, args=(process.stderr, stderrQ, sys.stderr if local_echo else None))
+    # if daemon = False, sub process cannot be interrupted by Ctrl-C
+    stdout_t.daemon = True
+    stderr_t.daemon = True
+    stderr_t.start()
+    stdout_t.start()
+    process.wait()
+    stdout_t.join()
+    stderr_t.join()
+
+    stdout, stderr = [], []
+    try:
+        while True:
+            stdout.append(stdoutQ.get(False))
+    except Empty:
+        pass
+
+    try:
+        while True:
+            stderr.append(stderrQ.get(False))
+    except Empty:
+        pass
 
     if process.returncode != 0:
         raise error_drmaa_job( "The locally run command was terminated by signal %i:\n"
                                "The original command was:\n%s\n"
                                "The stderr was: \n%s\n\n"
                                "The stdout was: \n%s\n\n" %
-                                 (-process.returncode, cmd_str, "".join( stderr), "".join( stdout)) )
+                                 (-process.returncode, cmd_str, stderr, stdout) )
 
-    return stdout.splitlines(True), stderr.splitlines(True)
+    return stdout, stderr
 
 
 #_________________________________________________________________________________________
@@ -453,7 +512,7 @@ def touch_output_files (cmd_str, output_files, logger = None):
 def run_job(cmd_str, job_name = None, job_other_options = None, job_script_directory = None,
             job_environment = None, working_directory = None, logger = None,
             drmaa_session = None, retain_job_scripts = False,
-            run_locally = False, output_files = None, touch_only = False, verbose = 0):
+            run_locally = False, output_files = None, touch_only = False, verbose = 0, local_echo = False):
     """
     Runs specified command either using drmaa, or locally or only in simulation (touch the output files only)
     """
@@ -463,6 +522,6 @@ def run_job(cmd_str, job_name = None, job_other_options = None, job_script_direc
         return "","",
 
     if run_locally:
-        return run_job_locally (cmd_str, logger)
+        return run_job_locally (cmd_str, logger, job_environment, working_directory, local_echo)
 
     return run_job_using_drmaa (cmd_str, job_name, job_other_options, job_script_directory, job_environment, working_directory, retain_job_scripts, logger, drmaa_session, verbose)
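
The extended ``run_job()`` signature above can be exercised like this (a
sketch; the command, environment and paths are illustrative):

    from ruffus.drmaa_wrapper import run_job

    # run locally instead of submitting via drmaa, with a custom environment,
    # echoing the job's output to the terminal as it is produced
    stdout, stderr = run_job("echo hello; echo world",
                             run_locally       = True,
                             job_environment   = {"PATH": "/usr/bin:/bin"},
                             working_directory = "/tmp",
                             local_echo        = True)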
diff --git a/ruffus/file_name_parameters.py b/ruffus/file_name_parameters.py
index b99e8c5..bb8de27 100644
--- a/ruffus/file_name_parameters.py
+++ b/ruffus/file_name_parameters.py
@@ -130,9 +130,9 @@ def epoch_seconds_to_str (epoch_seconds):
     return (time_str + fraction_of_second_as_str)
 
 
-err_msg_no_regex_match = ("No jobs were run because no files names matched. "
+err_msg_no_regex_match = ("No jobs were run because no file names matched.\n"
                         "Please make sure that the regular expression is correctly specified.")
-err_msg_empty_files_parameter= ("@files() was empty, i.e. no files were specified. "
+err_msg_empty_files_parameter= ("@files() was empty, i.e. no files were specified.\n"
                         "Please make sure this is by design.")
 
 
@@ -175,15 +175,21 @@ class t_suffix_file_names_transform(t_file_names_transform):
         self.output_dir = output_dir
 
     def substitute (self, starting_file_names, pattern):
-        return regex_replace(starting_file_names[0], self.matching_regex_str, self.matching_regex, pattern)
+        if self.output_dir == []:
+            return regex_replace(starting_file_names[0], self.matching_regex_str, self.matching_regex, pattern)
+        else:
+            # change directory of starting file and return substitution
+            starting_file_name = os.path.join(self.output_dir, os.path.split(starting_file_names[0])[1])
+            return regex_replace(starting_file_name, self.matching_regex_str, self.matching_regex, pattern)
 
     def substitute_output_files (self, starting_file_names, pattern):
-        res = regex_replace(starting_file_names[0], self.matching_regex_str, self.matching_regex, pattern, SUFFIX_SUBSTITUTE)
         if self.output_dir == []:
-            return res
-        # N.B. Does not do output directory substitution for extra parameters
+            return regex_replace(starting_file_names[0], self.matching_regex_str, self.matching_regex, pattern, SUFFIX_SUBSTITUTE)
         else:
-            return os.path.join(self.output_dir, os.path.split(res)[1])
+            # change directory of starting file and return substitution
+            starting_file_name = os.path.join(self.output_dir, os.path.split(starting_file_names[0])[1])
+            return regex_replace(starting_file_name, self.matching_regex_str, self.matching_regex, pattern, SUFFIX_SUBSTITUTE)
 
 
 class t_regex_file_names_transform(t_file_names_transform):
@@ -291,6 +297,14 @@ class t_params_tasks_globs_run_time_data(object):
         return t_params_tasks_globs_run_time_data(output_param, self.tasks, output_glob,
                                                     self.runtime_data_names)
 
+    def output_file_names_transformed (self, filenames, file_names_transform):
+        """
+        return clone with the filenames / globs transformed by the supplied transform object
+        """
+        output_glob  = file_names_transform.substitute_output_files(filenames, self.globs)
+        output_param = file_names_transform.substitute_output_files(filenames, self.params)
+        return t_params_tasks_globs_run_time_data(output_param, self.tasks, output_glob,
+                                                    self.runtime_data_names)
     #
     #   deprecated
     #
@@ -1101,7 +1115,6 @@ def yield_io_params_per_job (input_params,
             #
             #       So we do (2) first, ignoring tasks, then (1)
             if extra_input_files_task_globs:
-                # DEBUGGG
                 extra_inputs = extra_input_files_task_globs.file_names_transformed (filenames, file_names_transform)
 
                 #
@@ -1127,7 +1140,7 @@ def yield_io_params_per_job (input_params,
                 #   do regex substitution to complete glob pattern
                 #       before glob matching
                 #
-                output_pattern_transformed = output_pattern.file_names_transformed (filenames, file_names_transform)
+                output_pattern_transformed = output_pattern.output_file_names_transformed (filenames, file_names_transform)
                 output_param          = file_names_from_tasks_globs(output_pattern_transformed, runtime_data)
                 output_param_unglobbed= file_names_from_tasks_globs(output_pattern_transformed.unexpanded_globs(), runtime_data)
                 yield ( (input_param, output_param            ) + extra_params,
@@ -1144,8 +1157,8 @@ def yield_io_params_per_job (input_params,
         except error_input_file_does_not_match:
             if runtime_data != None:
                 if not "MATCH_FAILURE" in runtime_data:
-                    runtime_data["MATCH_FAILURE"] = []
-                runtime_data["MATCH_FAILURE"].append(str(sys.exc_info()[1]).replace("\n", "").strip())
+                    runtime_data["MATCH_FAILURE"] = defaultdict(set)
+                runtime_data["MATCH_FAILURE"][iterator].add(str(sys.exc_info()[1]).strip())
             continue
 
         # all other exceptions including malformed regexes are raised
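
In outline, the new ``substitute_output_files()`` behaviour above moves the
matched input file into ``output_dir`` before applying the suffix
substitution; for example (paths hypothetical):

    import os, re

    input_file = "raw/a.fastq"
    output_dir = "processed"

    # move the file name into output_dir, then substitute the suffix
    moved = os.path.join(output_dir, os.path.split(input_file)[1])
    assert re.sub(r"\.fastq$", ".bam", moved) == "processed/a.bam"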
diff --git a/ruffus/proxy_logger.py b/ruffus/proxy_logger.py
index fbf0032..7879d73 100644
--- a/ruffus/proxy_logger.py
+++ b/ruffus/proxy_logger.py
@@ -351,49 +351,4 @@ def make_shared_logger_and_proxy (logger_factory, logger_name, args):
 
 
 
-import unittest, os,sys
-from .proxy_logger import *
-import traceback
-
-
-class Test_Logging(unittest.TestCase):
-
-
-
-    def test_rotating_log(self):
-        """
-            test rotating via proxy
-        """
-        open("/tmp/lg.log", "w").close()
-        args={}
-        args["file_name"] = "/tmp/lg.log"
-        args["rotating"] = True
-        args["maxBytes"]=20000
-        args["backupCount"]=10
-        #args["level"]= logging.INFO
-        (my_log,
-         logging_mutex) = make_shared_logger_and_proxy (setup_std_shared_logger,
-                                                        "my_logger", args)
-        with logging_mutex:
-            my_log.debug('This is a debug message')
-            my_log.info('This is an info message')
-            my_log.warning('This is a warning message')
-            my_log.error('This is an error message')
-            my_log.critical('This is a critical error message')
-            my_log.log(logging.ERROR, 'This is a debug message')
-        self.assert_(open("/tmp/lg.log") .read() == \
-"""This is a warning message
-This is an error message
-This is a critical error message
-This is a debug message
-""")
-
-
-#
-#   debug code not run if called as a module
-#
-if __name__ == '__main__':
-    if sys.argv.count("--debug"):
-        sys.argv.remove("--debug")
-    unittest.main()
 
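The removed self-test above doubled as usage documentation for
``make_shared_logger_and_proxy()``; the equivalent usage, adapted from it as
a sketch:

    from ruffus.proxy_logger import (make_shared_logger_and_proxy,
                                     setup_std_shared_logger)

    args = {"file_name"   : "/tmp/lg.log",
            "rotating"    : True,
            "maxBytes"    : 20000,
            "backupCount" : 10}
    my_log, logging_mutex = make_shared_logger_and_proxy(
                                setup_std_shared_logger, "my_logger", args)

    # hold the mutex so messages from concurrent jobs do not interleave
    with logging_mutex:
        my_log.warning("This is a warning message")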
diff --git a/ruffus/ruffus_exceptions.py b/ruffus/ruffus_exceptions.py
index 0f42e9a..f39a1ed 100644
--- a/ruffus/ruffus_exceptions.py
+++ b/ruffus/ruffus_exceptions.py
@@ -230,6 +230,10 @@ class error_no_head_tasks(error_task):
     pass
 class error_no_tail_tasks(error_task):
     pass
+class error_executable_str(error_task):
+    pass
+class error_extras_wrong_type(error_task):
+    pass
 
 
 
diff --git a/ruffus/ruffus_utility.py b/ruffus/ruffus_utility.py
index ce00a19..381e3db 100644
--- a/ruffus/ruffus_utility.py
+++ b/ruffus/ruffus_utility.py
@@ -427,7 +427,7 @@ def swap_nesting_order (orig_coll):
                 new_list[jj][ii] = value
             else:
                 new_dict[jj][ii] = value
-    return new_list, new_dict
+    return new_list, dict(new_dict)
 
 #_________________________________________________________________________________________
 #
@@ -634,7 +634,7 @@ def get_all_paths_components(paths, compiled_regexes):
 #   apply_func_to_sequence
 #
 #_________________________________________________________________________________________
-def apply_func_to_sequence(seq, func, tuple_of_conforming_types = (path_str_type,), tuple_of_sequences_types = (list, tuple,set)):
+def apply_func_to_sequence(seq, func, tuple_of_conforming_types = (path_str_type,), tuple_of_sequences_types = (list, tuple, set)):
     """
     Recurses into list/tuple/set sequences to apply func to conforming types
     Non-conforming types are left alone
@@ -734,12 +734,13 @@ class t_formatter_replace(object):
 
         # some contortions because format decodes {0} as an offset into a list and not not a lookup into a dict...
         dl, dd = swap_nesting_order(self.path_regex_components)
+
         try:
             return p.format(*dl, **dd)
         except (KeyError, IndexError):
-            raise error_input_file_does_not_match("Field '%s' in ('%s') using formatter(%s) fails to match Files '%s'."
+            raise error_input_file_does_not_match("Missing key = {%s} in '%s'.\n  filter = formatter(%r),\n  input = %s"
                                                   "."
-                                                  % (   str(sys.exc_info()[1]),
+                                                  % (   str(sys.exc_info()[1])[1:-1],
                                                         p,
                                                         self.display_regex_strings,
                                                         self.filenames))
@@ -786,6 +787,22 @@ class t_nested_formatter_replace(object):
                                                         formatter_str,
                                                         self.filenames))
 
+#_________________________________________________________________________________________
+#
+#   t_nested_string_replace
+#
+#_________________________________________________________________________________________
+class t_nested_string_replace(object):
+    """
+    Replaces prev_str with new_str in nested sequences of strings
+    """
+    def __init__(self, prev_str, new_str):
+        self.prev_str = prev_str
+        self.new_str  = new_str
+
+    def __call__(self, p):
+        return p.replace(self.prev_str, self.new_str)
+
 
 #_________________________________________________________________________________________
 #
@@ -826,6 +843,9 @@ def formatter_replace (filenames, regex_str, compiled_regex, substitution_patter
 def nested_formatter_replace (filenames, regex_strings, compiled_regexes, substitution_patterns):
     return apply_func_to_sequence(substitution_patterns, t_nested_formatter_replace(filenames, regex_strings, compiled_regexes))
 
+def nested_string_replace (prev_str, new_str, substitution_patterns):
+    return apply_func_to_sequence(substitution_patterns, t_nested_string_replace(prev_str, new_str))
+
 
 #_________________________________________________________________________________________
 
@@ -905,6 +925,10 @@ def get_first_string_in_nested_sequence (p):
         return strings[0]
     return None
 
+
+#
+#   TODOOO third object could be a dict or a list
+#
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
 #   Encoders: turn objects and filenames into a more presentable format
@@ -1438,10 +1462,10 @@ def get_parsed_arguments_str_for_errors (task_description, bad_arg_str, unnamed_
     parsed_arg_str = ", ".join(unnamed_result_strs + named_result_strs)
     # make function names clearer in arg list
     parsed_arg_str = re.sub(r"<function (\w+) at 0x[0-9a-f]+>", r"\1", parsed_arg_str)
-    return task_description %  (parsed_arg_str + ", ...\n" +
-                                # mark out problem
-                                (" " * (indent-5 if indent - 5 > 0 else 0)) + "===> " +
-                                bad_arg_str)
+    return "\n" + task_description %  (parsed_arg_str + ", ...\n" +
+                                            # mark out problem
+                                            (" " * (indent-5 if indent - 5 > 0 else 0)) + "===> " +
+                                            bad_arg_str)
 
 
 
@@ -1705,6 +1729,8 @@ def parse_task_arguments ( orig_unnamed_arguments, orig_named_arguments, expecte
     #   extras is mandatory if exists
     #
     if 'extras' in expected_arguments:
+        results['extras' ] = []
+        results['named_extras'] = {}
         if len(unnamed_arguments):
             # move list to results: remember python does shallow copies of lists
             results['extras'] = unnamed_arguments
@@ -1712,11 +1738,23 @@ def parse_task_arguments ( orig_unnamed_arguments, orig_named_arguments, expecte
             unnamed_arguments = []
             #del unnamed_arguments[:]
         elif 'extras'  in named_arguments:
-            results['extras' ] = named_arguments['extras' ]
-            named_result_strs.append("%s=%r" % ("extras", named_arguments['extras' ]))
-            del named_arguments['extras' ]
-        else:
-            results['extras' ] = []
+            # Named extras only
+            if isinstance(named_arguments['extras'], dict):
+                results["named_extras"] = named_arguments['extras']
+            # Unnamed extras only
+            elif isinstance(named_arguments['extras'], list):
+                results["extras"] = named_arguments['extras']
+            # Wrong type: blow up
+            else:
+                err_msg = ("The extras parameter must be either a list of values\nor a dictionary of named parameter values:\n%s" %
+                            get_parsed_arguments_str_for_errors(task_description,
+                                                                "extras=%r" % (named_arguments['extras'],),
+                                                                unnamed_result_strs,
+                                                                named_result_strs))
+                raise error_extras_wrong_type(err_msg)
+
+            named_result_strs.append("%s=%r" % ("extras", named_arguments['extras']))
+            del named_arguments['extras']
 
 
     if len(unnamed_arguments):
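
After the parsing change above, ``extras`` accepts either a list or a
dictionary; anything else raises ``error_extras_wrong_type`` (added in
ruffus_exceptions.py above). A sketch of the two accepted forms in the
object-orientated syntax (task and file names are hypothetical; note that
delivery of named extras to the task function is still marked TODO in
task.py below):

    from ruffus import Pipeline, suffix

    def convert(input_file, output_file, some_value):
        open(output_file, "w").close()

    pipeline = Pipeline("example")

    # unnamed extras: a list, passed as trailing positional arguments
    pipeline.transform(task_func = convert, input = ["a.txt"],
                       filter = suffix(".txt"), output = ".out",
                       extras = ["some_value"])

    # named extras: must be a dictionary, e.g.
    #   extras = {"my_param": 45}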
diff --git a/ruffus/ruffus_version.py b/ruffus/ruffus_version.py
index 7725cbb..54676d9 100755
--- a/ruffus/ruffus_version.py
+++ b/ruffus/ruffus_version.py
@@ -24,4 +24,4 @@
 #   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 #   THE SOFTWARE.
 #################################################################################
-__version='2.6.2'
+__version='2.6.3'
diff --git a/ruffus/task.py b/ruffus/task.py
index 85900df..18d1d98 100644
--- a/ruffus/task.py
+++ b/ruffus/task.py
@@ -219,10 +219,10 @@ class t_stream_logger:
         self.stream.write(message + "\n")
 
     def warning(self, message):
-        sys.stream.write("\n\nWARNING:\n    " + message + "\n\n")
+        self.stream.write("\n\nWARNING:\n    " + message + "\n\n")
 
     def error(self, message):
-        sys.stream.write("\n\nERROR:\n    " + message + "\n\n")
+        self.stream.write("\n\nERROR:\n    " + message + "\n\n")
 
     def debug(self, message):
         self.stream.write(message + "\n")
@@ -558,10 +558,12 @@ def job_wrapper_io_files(params, user_defined_work_func, register_cleanup, touch
     if touch_files_only == 0:
         # @originate only uses output files
         if output_files_only:
+            # TODOOO extra and named extras
             ret_val = user_defined_work_func(*(params[1:]))
         # all other decorators
         else:
             try:
+                # TODOOO extra and named extras
                 ret_val = user_defined_work_func(*params)
                 # EXTRA pipeline_run DEBUGGING
                 if EXTRA_PIPELINERUN_DEBUGGING:
@@ -801,6 +803,40 @@ def run_pooled_job_without_exceptions(process_parameters):
 #   Helper function
 
 # 88888888888888888888888888888888888888888888888888888888888888888888888888888
+def subprocess_checkcall_wrapper(**named_args):
+    """
+    Splits string at semicolons and runs with subprocess.check_call
+    """
+    for cmd in named_args["command_str"].split(";"):
+        cmd = cmd.replace("\n", " ").strip()
+        if not len(cmd):
+            continue
+        cmd = cmd.format(**named_args)
+        subprocess.check_call(cmd, shell = True)
+
+
+def exec_string_as_task_func(input_args, output_args, **named_args):
+    """
+    Ruffus provided function for tasks which are just strings
+        (no Python function provided)
+    The task executor function is given as a parameter which is
+        then called with the arguments.
+    Convoluted, but avoids too much special-casing
+    """
+    if not "__RUFFUS_TASK_CALLBACK__" in named_args or \
+        not callable(named_args["__RUFFUS_TASK_CALLBACK__"]):
+        raise Exception("Missing callback function")
+    if not "command_str" in named_args or \
+        not isinstance(named_args["command_str"], (path_str_type,)):
+        raise Exception("Missing callback command string")
+
+
+    callback = named_args["__RUFFUS_TASK_CALLBACK__"]
+    del named_args["__RUFFUS_TASK_CALLBACK__"]
+
+    named_args["input"] = input_args
+    named_args["output"] = output_args
+    callback(**named_args)
 
 
 # _____________________________________________________________________________
@@ -863,6 +899,9 @@ class Pipeline(dict):
         self.tail_tasks = []
         self.lookup = dict()
 
+        self.command_str_callback = subprocess_checkcall_wrapper
+
+
     # _________________________________________________________________________
 
     #   _create_task
@@ -872,7 +911,33 @@ class Pipeline(dict):
         """
         Create task with a function
         """
-        # add task to main pipeline
+
+        #
+        #   If string, this is a command to be executed later
+        #   Derive task name from command
+        #
+        #
+        if isinstance(task_func, (path_str_type,)):
+            task_str = task_func
+            task_func = exec_string_as_task_func
+            if not task_name:
+                elements = task_str.split()
+                use_n_elements = 1
+                while use_n_elements <= len(elements):
+                    task_name = " ".join(elements[0:use_n_elements])
+                    if task_name not in self.task_names:
+                        break
+                    use_n_elements += 1
+                else:
+                    raise error_duplicate_task_name("The task string '%s' is ambiguous for "
+                                                    "Pipeline '%s'. You must disambiguate "
+                                                    "explicitly with different task names "
+                                                    % (task_str, self.name))
+            return Task(task_func, task_name, self)
+
+
+        #
+        #   Derive task name from Python Task function name
+        #
         if not task_name:
             if task_func.__module__ == "__main__":
                 task_name = task_func.__name__
@@ -881,22 +946,22 @@ class Pipeline(dict):
                     "." + task_func.__name__
 
         if task_name not in self:
-            task = Task(task_func, task_name, self)
+            return Task(task_func, task_name, self)
 
         # task_name already there as the identifying task_name.
         # If the task_func also matches everything is fine
         elif (task_name in self.task_names and
               self[task_name].user_defined_work_func == task_func):
-            task = self[task_name]
+            return self[task_name]
 
         # If the task name is already taken but with a different function,
         #   this will blow up
         # But if the function is being reused and with a previously different
         # task name then OK
         else:
-            task = Task(task_func, task_name, self)
+            return Task(task_func, task_name, self)
+
 
-        return task
 
     # _________________________________________________________________________
 
@@ -940,6 +1005,16 @@ class Pipeline(dict):
 
     # _________________________________________________________________________
 
+    #   command_str_callback
+
+    # _________________________________________________________________________
+    def set_command_str_callback(self, command_str_callback):
+        if not callable(command_str_callback):
+            raise Exception("set_command_str_callback() takes a python function or a callable object.")
+        self.command_str_callback = command_str_callback
+
+    # _________________________________________________________________________
+
     #   get_head_tasks
 
     # _________________________________________________________________________
@@ -1117,13 +1192,43 @@ class Pipeline(dict):
             pipeline.merge
         """
         name = get_name_from_args(named_args)
+
+        #   if task_func is a string, will
+        #       1) set self.task_func = exec_string_as_task_func
+        #       2) set self.name if necessary to the first unambiguous words of the command_str
+        #       3) set self.func_description to the command_str
         task = self._create_task(task_func, name)
+
+
         task.created_via_decorator = False
         task.syntax = syntax
+        if isinstance(task_func, (path_str_type,)):
+            task_func_name = task._name
+        else:
+            task_func_name = task_func.__name__
+
         task.description_with_args_placeholder = "{syntax}(name = {task_display_name!r}, task_func = {task_func_name}, %s)" \
-            .format(syntax = task.syntax,
-               task_display_name = task._get_display_name(),
-               task_func_name = task_func.__name__)
+            .format(syntax = syntax,
+                    task_display_name = task._get_display_name(),
+                    task_func_name = task_func_name,)
+
+        if isinstance(task_func, (path_str_type,)):
+            #
+            #   Make sure extras is  dict
+            #
+            if "extras" in named_args:
+                if not isinstance(named_args["extras"], dict):
+                    raise error_executable_str((task.description_with_args_placeholder % "...") +
+                                               "\n requires a dictionary for named parameters. " +
+                                               "For example:\n" +
+                                               task.description_with_args_placeholder %
+                                               "extras = {my_param = 45, her_param = 'whatever'}")
+            else:
+                named_args["extras"] = dict()
+            named_args["extras"]["command_str"] = task_func
+            #named_args["extras"]["__RUFFUS_TASK_CALLBACK__"] = pipeline.command_str_callback
+
+
         return task
 
     # _________________________________________________________________________
@@ -1433,16 +1538,24 @@ class Pipeline(dict):
 
     # _________________________________________________________________________
     def run(self, *unnamed_args, **named_args):
-        pipeline_run(pipeline=self, *unnamed_args, **named_args)
+        if "pipeline" not in named_args:
+            named_args["pipeline"] = self
+        pipeline_run(*unnamed_args, **named_args)
 
     def printout(self, *unnamed_args, **named_args):
-        pipeline_printout(pipeline=self, *unnamed_args, **named_args)
+        if "pipeline" not in named_args:
+            named_args["pipeline"] = self
+        pipeline_printout(*unnamed_args, **named_args)
 
     def get_task_names(self, *unnamed_args, **named_args):
-        pipeline_get_task_names(pipeline=self, *unnamed_args, **named_args)
+        if "pipeline" not in named_args:
+            named_args["pipeline"] = self
+        pipeline_get_task_names(*unnamed_args, **named_args)
 
     def printout_graph(self, *unnamed_args, **named_args):
-        pipeline_printout_graph(pipeline=self, *unnamed_args, **named_args)
+        if "pipeline" not in named_args:
+            named_args["pipeline"] = self
+        pipeline_printout_graph(*unnamed_args, **named_args)
 
 #
 #   Global default shared pipeline (used for decorators)
@@ -1514,7 +1627,8 @@ def lookup_unique_task_from_func(task_func, default_pipeline_name="main"):
 #   lookup_tasks_from_name
 
 # _____________________________________________________________________________
-def lookup_tasks_from_name(task_name, default_pipeline_name, default_module_name="__main__"):
+def lookup_tasks_from_name(task_name, default_pipeline_name, default_module_name="__main__",
+                           pipeline_names_as_alias_to_all_tasks = False):
     """
 
         Tries:
@@ -1555,11 +1669,14 @@ def lookup_tasks_from_name(task_name, default_pipeline_name, default_module_name
     #      N.B. This is the *only* time multiple tasks might be returned
     #
     if task_name in Pipeline.pipelines:
-        if not len(Pipeline.pipelines[task_name].get_tail_tasks()):
+        if pipeline_names_as_alias_to_all_tasks:
+            return Pipeline.pipelines[task_name].tasks
+        elif len(Pipeline.pipelines[task_name].get_tail_tasks()):
+            return Pipeline.pipelines[task_name].get_tail_tasks()
+        else:
             raise error_no_tail_tasks(
                 "Pipeline %s has no tail tasks defined. Which task do you "
                 "mean when you specify the whole pipeline as a dependency?" % task_name)
-        return Pipeline.pipelines[task_name].get_tail_tasks()
 
     #
     #   (4) Try all other pipelines
@@ -1594,7 +1711,8 @@ def lookup_tasks_from_name(task_name, default_pipeline_name, default_module_name
 # _____________________________________________________________________________
 def lookup_tasks_from_user_specified_names(task_description, task_names,
                                            default_pipeline_name="main",
-                                           default_module_name="__main__"):
+                                           default_module_name="__main__",
+                                           pipeline_names_as_alias_to_all_tasks = False):
     """
     Given a list of task names, look up the corresponding tasks
     Will just pass through if the task_name is already a task
@@ -1616,11 +1734,17 @@ def lookup_tasks_from_user_specified_names(task_description, task_names,
             continue
 
         elif isinstance(task_name, Pipeline):
-            if not len(task_name.get_tail_tasks()):
+            if pipeline_names_as_alias_to_all_tasks:
+                task_list.extend(task_name.tasks)
+                continue
+            # use tail tasks
+            elif len(task_name.get_tail_tasks()):
+                task_list.extend(task_name.get_tail_tasks())
+                continue
+            # no tail task
+            else:
                 raise error_no_tail_tasks("Pipeline %s has no 'tail tasks'. Which task do you mean"
                                           " when you specify the whole pipeline?" % task_name.name)
-            task_list.extend(task_name.get_tail_tasks())
-            continue
 
         if isinstance(task_name, collections.Callable):
             # blows up if ambiguous
@@ -1640,7 +1764,8 @@ def lookup_tasks_from_user_specified_names(task_description, task_names,
 
             # Will throw Exception if ambiguous
             tasks = lookup_tasks_from_name(
-                task_name, default_pipeline_name, default_module_name)
+                task_name, default_pipeline_name, default_module_name,
+                pipeline_names_as_alias_to_all_tasks)
             # not found
             if not tasks:
                 raise error_node_not_task("%s task '%s' is not a pipelined task in Ruffus. Is it "
@@ -1732,7 +1857,7 @@ class Task (node):
     #   __init__
 
     # _________________________________________________________________________
-    def __init__(self, func, task_name=None, pipeline=None):
+    def __init__(self, func, task_name, pipeline = None, command_str = None):
         """
         * Creates a Task object with a specified python function and task name
         * The type of the Task (whether it is a transform or merge or collate
@@ -1743,10 +1868,16 @@ class Task (node):
         if pipeline is None:
             pipeline = main_pipeline
         self.pipeline = pipeline
-        self.func_module_name = str(func.__module__)
-        self.func_name = func.__name__
-        # convert description into one line
-        self.func_description = re.sub("\n\s+", " ", func.__doc__).strip() if func.__doc__ else ""
+        # no function: just string
+        if command_str is not None:
+            self.func_module_name = ""
+            self.func_name = ""
+            self.func_description = command_str
+        else:
+            self.func_module_name = str(func.__module__)
+            self.func_name = func.__name__
+            # convert description into one line
+            self.func_description = re.sub("\n\s+", " ", func.__doc__).strip() if func.__doc__ else ""
 
         if not task_name:
             task_name = self.func_module_name + "." + self.func_name
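So a command-string task carries no module or function name, and its one-line description is the command itself. A runnable sketch of just this branch (the describe() helper is hypothetical):

    import re

    def describe(func=None, command_str=None):
        # mirrors the branch in Task.__init__ above
        if command_str is not None:
            return "", "", command_str
        doc = re.sub(r"\n\s+", " ", func.__doc__).strip() if func.__doc__ else ""
        return str(func.__module__), func.__name__, doc

    def my_task(input, output):
        """Copy input
           to output."""

    print(describe(command_str="cp {input} {output}"))  # ('', '', 'cp {input} {output}')
    print(describe(func=my_task))  # e.g. ('__main__', 'my_task', 'Copy input to output.')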
@@ -1836,6 +1967,8 @@ class Task (node):
         self.pipeline.lookup[task_name] = [self]
         self.pipeline.task_names.add(task_name)
 
+        self.command_str_callback = "PIPELINE"
+
         #
         #   Allow pipeline to lookup task by
         #       1) Func
@@ -1848,6 +1981,8 @@ class Task (node):
         for lookup in (func, self.func_name, self.func_module_name + "." + self.func_name):
             # don't add to lookup if this conflicts with a task_name which is
             # always unique and overriding
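+            # command_str tasks have empty module and function names,
+            #   so the joined lookup would be a bogus "."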
+            if lookup == ".":
+                continue
             if lookup not in self.pipeline.task_names:
                 # non-unique map
                 if lookup in self.pipeline.lookup:
@@ -1859,18 +1994,17 @@ class Task (node):
                     self.pipeline.lookup[lookup] = [self]
                     self.pipeline[lookup] = self
 
-    #
-
     # _________________________________________________________________________
 
     #   _clone
 
     # _________________________________________________________________________
-    def _clone(self, pipeline):
+    def _clone(self, new_pipeline):
         """
         * Clones a Task object from self
         """
-        new_task = Task(self.user_defined_work_func, self._name, pipeline)
+        new_task = Task(self.user_defined_work_func, self._name, new_pipeline)
+        new_task.command_str_callback = self.command_str_callback
         new_task._action_type = self._action_type
         new_task._action_type_desc = self._action_type_desc
         new_task.checksum_level = self.checksum_level
@@ -1889,7 +2023,8 @@ class Task (node):
         new_task._setup_task_func = self._setup_task_func
         new_task.error_type = self.error_type
         new_task.syntax = self.syntax
-        new_task.description_with_args_placeholder = self.description_with_args_placeholder
+        new_task.description_with_args_placeholder = \
+            self.description_with_args_placeholder.replace(self.pipeline.name, new_pipeline.name)
         new_task.has_input_param = self.has_input_param
         new_task.has_pipeline_in_input_param = self.has_pipeline_in_input_param
         new_task.output_filenames = copy.deepcopy(self.output_filenames)
@@ -1899,6 +2034,17 @@ class Task (node):
 
         return new_task
 
+    # _________________________________________________________________________
+
+    #   command_str_callback
+
+    # _________________________________________________________________________
+    def set_command_str_callback(self, command_str_callback):
+        if not callable(command_str_callback):
+            raise Exception("set_command_str_callback() takes a Python function or a callable object.")
+        self.command_str_callback = command_str_callback
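Any callable is accepted; everything else is rejected. The string "PIPELINE" is the sentinel default meaning "defer to the pipeline-level callback" (see _complete_setup below). A tiny runnable sketch of the guard, using a made-up stub class:

    class TaskStub(object):
        command_str_callback = "PIPELINE"          # default sentinel, as in Task.__init__
        def set_command_str_callback(self, command_str_callback):
            if not callable(command_str_callback):
                raise Exception("set_command_str_callback() takes a Python function "
                                "or a callable object.")
            self.command_str_callback = command_str_callback

    t = TaskStub()
    t.set_command_str_callback(lambda command_str: command_str)   # accepted
    try:
        t.set_command_str_callback("echo hello")                  # plain string: rejected
    except Exception as err:
        print(err)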
+
+
 
     # _________________________________________________________________________
 
@@ -2022,8 +2168,11 @@ class Task (node):
             old_action = Task._action_names[self._action_type]
             new_action = Task._action_names[new_action_type]
             actions = " and ".join(list(set((old_action, new_action))))
-            raise error_decorator_args("%s\n      has conflicting task specifications: (%s)\n" %
-                                       (self.description_with_args_placeholder % "...", actions))
+            raise error_decorator_args("Duplicate task for:\n\n%s\n\n"
+                                       "This has already been specified with the same name "
+                                       "or function\n"
+                                       "(%r, %s)\n" %
+                                       (self.description_with_args_placeholder % "...", self._get_display_name(), actions))
         self._action_type = new_action_type
         self._action_type_desc = Task._action_names[new_action_type]
 
@@ -2050,10 +2199,10 @@ class Task (node):
         Returns task name, removing __main__. namespace or main. if present
         """
         if self.pipeline.name != "main":
-            return "{pipeline_name}.{task_name}".format(pipeline_name = self.pipeline.name,
-                                                    task_name = self._name.replace("__main__.", "").replace("main.", ""))
+            return "{pipeline_name}::{task_name}".format(pipeline_name = self.pipeline.name,
+                                                    task_name = self._name.replace("__main__.", "").replace("main::", ""))
         else:
-            return self._name.replace("__main__.", "").replace("main.", "")
+            return self._name.replace("__main__.", "").replace("main::", "")
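Note the separator change: tasks in a non-default pipeline now display as "pipeline::task" rather than "pipeline.task". A sketch of the rule with made-up names:

    def display_name(pipeline_name, task_name):
        # mirrors _get_display_name() above
        task_name = task_name.replace("__main__.", "").replace("main::", "")
        if pipeline_name != "main":
            return "{pipeline_name}::{task_name}".format(pipeline_name=pipeline_name,
                                                         task_name=task_name)
        return task_name

    print(display_name("main", "__main__.compress"))   # -> compress
    print(display_name("mapping", "align"))            # -> mapping::align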
 
     # _________________________________________________________________________
 
@@ -2227,7 +2376,7 @@ class Task (node):
             #        messages.append(indent_str + "Task up-to-date")
 
         else:
-            runtime_data["MATCH_FAILURE"] = []
+            runtime_data["MATCH_FAILURE"] = defaultdict(set)
             #
             #   return messages description per job if verbose > 5 else
             #       whether up to date or not
@@ -2277,14 +2426,20 @@ class Task (node):
                         #    messages.append(indent_str + "  Job up-to-date")
 
             if cnt_jobs == 0:
-                messages.append(indent_str + "!!! No jobs for this task. Are you sure there is "
+                messages.append(indent_str + "!!! No jobs for this task.")
+                messages.append(indent_str + "Are you sure there is "
                                 "not a error in your code / regular expression?")
             # LOGGER
             if verbose >= 4 or (verbose and cnt_jobs == 0):
-                if runtime_data and "MATCH_FAILURE" in runtime_data:
-                    for s in runtime_data["MATCH_FAILURE"]:
-                        messages.append(indent_str + "Job Warning: File match failure: " + s)
-            runtime_data["MATCH_FAILURE"] = []
+                if runtime_data and "MATCH_FAILURE" in runtime_data and\
+                    self.param_generator_func in runtime_data["MATCH_FAILURE"]:
+                    for job_msg in runtime_data["MATCH_FAILURE"][self.param_generator_func]:
+                        messages.append(indent_str + "Job Warning: Input substitution failed:")
+                        messages.extend("  " + indent_str + line for line in job_msg.split("\n"))
+
+            runtime_data["MATCH_FAILURE"][self.param_generator_func] = set()
         messages.append("")
         return messages
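MATCH_FAILURE thus changes from a flat list into a defaultdict(set) keyed by each task's parameter-generator function, so failure messages are de-duplicated and only reported against the task that produced them. A standalone sketch (the generator and messages are invented):

    from collections import defaultdict

    runtime_data = {"MATCH_FAILURE": defaultdict(set)}

    def param_generator_func():                # stands in for a task's generator
        pass

    runtime_data["MATCH_FAILURE"][param_generator_func].add("no match for '*.bam'")
    runtime_data["MATCH_FAILURE"][param_generator_func].add("no match for '*.bam'")  # de-duplicated

    for job_msg in runtime_data["MATCH_FAILURE"][param_generator_func]:
        print("Job Warning: Input substitution failed:", job_msg)   # printed once

    runtime_data["MATCH_FAILURE"][param_generator_func] = set()     # reset after reporting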
 
@@ -2367,6 +2522,7 @@ class Task (node):
                 #
                 #   Percolate warnings from parameter factories
                 #
                 if (verbose >= 1 and "ruffus_WARNING" in runtime_data and
                         self.param_generator_func in runtime_data["ruffus_WARNING"]):
                     for msg in runtime_data["ruffus_WARNING"][self.param_generator_func]:
@@ -2883,7 +3039,7 @@ class Task (node):
         #
         self.parsed_args = parse_task_arguments(unnamed_args, named_args,
                                                 ["input", "filter", "modify_inputs",
-                                                 "output", "extras"],
+                                                 "output", "extras", "output_dir"],
                                                 self.description_with_args_placeholder)
 
     # _________________________________________________________________________
@@ -3786,6 +3942,11 @@ class Task (node):
         self._remove_all_parents()
         ancestral_tasks =  self._deferred_connect_parents()
         ancestral_tasks |= self._setup_task_func(self)
+        if "named_extras" in self.parsed_args:
+            if self.command_str_callback == "PIPELINE":
+                self.parsed_args["named_extras"]["__RUFFUS_TASK_CALLBACK__"] = self.pipeline.command_str_callback
+            else:
+                self.parsed_args["named_extras"]["__RUFFUS_TASK_CALLBACK__"] = self.command_str_callback
         #DEBUGGG
         #print("  task._complete_setup finish %s\n" % (self._get_display_name(), ), file = sys.stderr)
         return ancestral_tasks
@@ -4242,14 +4403,13 @@ def lookup_pipeline(pipeline):
         None                : main_pipeline
         string              : lookup name in pipelines
     """
+    if pipeline is None:
+        return main_pipeline
 
     # Pipeline object pass through unchanged
     if isinstance(pipeline, Pipeline):
         return pipeline
 
-    # default to main_pipeline if None
-    if not pipeline:
-        return main_pipeline
 
     # strings: lookup from name
     if isinstance(pipeline, str) and pipeline in Pipeline.pipelines:
@@ -4257,6 +4417,128 @@ def lookup_pipeline(pipeline):
 
     raise error_not_a_pipeline("%s does not name a pipeline." % pipeline)
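The reordering also tightens the fallback: only None maps to the default pipeline now, so a falsy value such as "" raises error_not_a_pipeline instead of silently meaning "main". A hedged sketch (lookup_pipeline and main_pipeline live in ruffus.task and are internal, not public API):

    from ruffus import Pipeline
    from ruffus.task import lookup_pipeline, main_pipeline

    assert lookup_pipeline(None) is main_pipeline     # None -> default pipeline
    demo = Pipeline("demo_lookup")
    assert lookup_pipeline(demo) is demo              # Pipeline passes through unchanged
    assert lookup_pipeline("demo_lookup") is demo     # known name -> lookup
    # lookup_pipeline("") now raises error_not_a_pipeline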
 
+
+
+
+# _____________________________________________________________________________
+
+#   _pipeline_prepare_to_run
+
+# _____________________________________________________________________________
+def _pipeline_prepare_to_run(checksum_level, history_file, pipeline, runtime_data, target_tasks, forcedtorun_tasks):
+    """
+    Common function to set up the pipeline and check parameters
+        before pipeline_run, pipeline_printout and pipeline_printout_graph
+    """
+
+    if checksum_level is None:
+        checksum_level = get_default_checksum_level()
+
+    update_checksum_level_on_tasks(checksum_level)
+
+    #
+    #   If we aren't using checksums, and history file hasn't been specified,
+    #       we might be a bit surprised to find Ruffus writing to a
+    #       sqlite db anyway.
+    #   Let us just dump to a placeholder memory db that can then be discarded
+    #   Of course, if history_file is specified, we presume you know what
+    #       you are doing
+    #
+    if checksum_level == CHECKSUM_FILE_TIMESTAMPS and history_file is None:
+        history_file = ':memory:'
+    #
+    # load previous job history if it exists, otherwise create an empty history
+    #
+    job_history = open_job_history(history_file)
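':memory:' is the standard sqlite name for a throwaway in-process database, so the discarded history never touches disk. For illustration:

    import sqlite3

    conn = sqlite3.connect(":memory:")                  # exists only for this connection
    conn.execute("CREATE TABLE history (path TEXT)")    # illustrative schema only
    conn.close()                                        # nothing persists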
+
+
+    #
+    # @active_if decorated tasks can change their active state every time
+    #   pipeline_run / pipeline_printout / pipeline_printout_graph is called
+    #
+    update_active_states_for_all_tasks()
+
+    #
+    #   run time data
+    #
+    if runtime_data is None:
+        runtime_data = {}
+    if not isinstance(runtime_data, dict):
+        raise Exception("Parameter runtime_data should be a "
+                        "dictionary of values passed to jobs at run time.")
+
+
+    #
+    #   This is the default namespace for looking for tasks
+    #
+    #   pipeline must be a Pipeline or a string naming a pipeline
+    #
+    #   Keep pipeline
+    #
+    if pipeline is not None:
+        pipeline = lookup_pipeline(pipeline)
+        default_pipeline_name = pipeline.name
+    else:
+        default_pipeline_name = "main"
+
+
+
+
+    #
+    #   Lookup target jobs
+    #
+    if target_tasks is None:
+        target_tasks = []
+    if forcedtorun_tasks is None:
+        forcedtorun_tasks = []
+    # lookup names, prioritise the specified pipeline or "main"
+    target_tasks = lookup_tasks_from_user_specified_names("Target", target_tasks, default_pipeline_name, "__main__", True)
+    forcedtorun_tasks = lookup_tasks_from_user_specified_names("Forced to run", forcedtorun_tasks,
+                                                               default_pipeline_name, "__main__", True)
+
+    #
+    #   Empty target: run all the tasks from the specified pipeline,
+    #       otherwise every task from every pipeline
+    #
+    if not target_tasks:
+        if pipeline:
+            target_tasks.extend(list(pipeline.tasks))
+        if not target_tasks:
+            for pipeline_name in Pipeline.pipelines.keys():
+                target_tasks.extend(list(Pipeline.pipelines[pipeline_name].tasks))
+
+    # make sure pipeline is defined
+    pipeline = lookup_pipeline(pipeline)
+
+
+    # Unique task list
+    target_tasks = list(set(target_tasks))
+
+    #
+    #   Make sure all tasks in dependency list from (forcedtorun_tasks and target_tasks)
+    #       are setup and linked to real functions
+    #
+    processed_tasks = set()
+    completed_pipeline_names = set()
+    incomplete_pipeline_names = set()
+
+    # get list of all involved pipelines
+    for task in forcedtorun_tasks + target_tasks:
+        if task.pipeline.name not in completed_pipeline_names:
+            incomplete_pipeline_names.add(task.pipeline.name)
+
+    # set up each pipeline.
+    # These will in turn look up their antecedents (even in another pipeline) and
+    #   set them up as well.
+    for pipeline_name in incomplete_pipeline_names:
+        if pipeline_name in completed_pipeline_names:
+            continue
+        completed_pipeline_names = completed_pipeline_names.union(
+            pipeline.pipelines[pipeline_name]._complete_task_setup(processed_tasks))
+
+
+
+    return checksum_level, job_history, pipeline, runtime_data, target_tasks, forcedtorun_tasks
 # _____________________________________________________________________________
 
 #   pipeline_printout_in_dot_format
@@ -4327,79 +4609,16 @@ def pipeline_printout_graph(stream,
     global EXTRA_PIPELINERUN_DEBUGGING
     EXTRA_PIPELINERUN_DEBUGGING = False
 
-    if checksum_level is None:
-        checksum_level = get_default_checksum_level()
-
-    #
-    #   pipeline must be a Pipeline or a string naming a pipeline
-    #
-    pipeline = lookup_pipeline(pipeline)
-    #
-    #   Make sure all tasks in dependency list are linked to real functions
-    #
-    processed_tasks = set()
-    completed_pipeline_names = pipeline._complete_task_setup(processed_tasks)
 
-    update_checksum_level_on_tasks(checksum_level)
+    (checksum_level,
+     job_history,
+     pipeline,
+     runtime_data,
+     target_tasks,
+     forcedtorun_tasks ) = _pipeline_prepare_to_run(checksum_level, history_file,
+                                                    pipeline, runtime_data,
+                                                    target_tasks, forcedtorun_tasks)
 
-    #
-    # @active_if decorated tasks can change their active state every time
-    #   pipeline_run / pipeline_printout / pipeline_printout_graph is called
-    #
-    update_active_states_for_all_tasks()
-
-    #
-    #   run time data
-    #
-    if runtime_data is None:
-        runtime_data = {}
-    if not isinstance(runtime_data, dict):
-        raise Exception("pipeline_run parameter runtime_data should be a "
-                        "dictionary of values passes to jobs at run time.")
-
-    #
-    #   If we aren't using checksums, and history file hasn't been specified,
-    #       we might be a bit surprised to find Ruffus writing to a
-    #       sqlite db anyway.
-    #   Let us just dump to a placeholder memory db that can then be discarded
-    #   Of course, if history_file is specified, we presume you know what
-    #       you are doing
-    #
-    if checksum_level == CHECKSUM_FILE_TIMESTAMPS and history_file is None:
-        history_file = ':memory:'
-
-    #
-    # load previous job history if it exists, otherwise create an empty history
-    #
-    job_history = open_job_history(history_file)
-
-    #
-    #   target jobs
-    #
-    if target_tasks is None:
-        target_tasks = []
-    if forcedtorun_tasks is None:
-        forcedtorun_tasks = []
-    target_tasks = lookup_tasks_from_user_specified_names("Target", target_tasks, pipeline.name)
-    if not target_tasks:
-        target_tasks = list(pipeline.tasks)
-    forcedtorun_tasks = lookup_tasks_from_user_specified_names("Forced to run", forcedtorun_tasks,
-                                                               pipeline.name)
-
-    #
-    #   forcedtorun_tasks and target_tasks may include more pipelines
-    #       which have to be setup
-    #
-    incomplete_pipeline_names = set()
-    for task in forcedtorun_tasks + target_tasks:
-        if task.pipeline.name not in completed_pipeline_names:
-            incomplete_pipeline_names.add(task.pipeline.name)
-
-    for pipeline_name in incomplete_pipeline_names:
-        if pipeline_name in completed_pipeline_names:
-            continue
-        completed_pipeline_names = completed_pipeline_names.union(
-            pipeline.pipelines[pipeline_name]._complete_task_setup(processed_tasks))
 
     (topological_sorted, ignore_param1, ignore_param2, ignore_param3) = \
         topologically_sorted_nodes(target_tasks, forcedtorun_tasks,
@@ -4576,74 +4795,19 @@ def pipeline_printout(output_stream=None,
                         "an output file, e.g. sys.stdout and not %s"
                         % str(output_stream))
 
-    if runtime_data is None:
-        runtime_data = {}
-    if not isinstance(runtime_data, dict):
-        raise Exception("pipeline_run parameter runtime_data should be a "
-                        "dictionary of values passes to jobs at run time.")
-
-    if checksum_level is None:
-        checksum_level = get_default_checksum_level()
-
-    #
-    #   pipeline must be a Pipeline or a string naming a pipeline
-    #
-    pipeline = lookup_pipeline(pipeline)
-    #
-    #   Make sure all tasks in dependency list are linked to real functions
-    #
-    processed_tasks = set()
-    completed_pipeline_names = pipeline._complete_task_setup(processed_tasks)
-
-    update_checksum_level_on_tasks(checksum_level)
-
-    #
-    # @active_if decorated tasks can change their active state every time
-    #   pipeline_run / pipeline_printout / pipeline_printout_graph is called
-    #
-    update_active_states_for_all_tasks()
-
-    #
-    #   target jobs
-    #
-    target_tasks = lookup_tasks_from_user_specified_names("Target", target_tasks, pipeline.name)
-    if not target_tasks:
-        target_tasks = list(pipeline.tasks)
-    forcedtorun_tasks = lookup_tasks_from_user_specified_names("Forced to run", forcedtorun_tasks,
-                                                               pipeline.name)
-
-    #
-    #   forcedtorun_tasks and target_tasks may include more pipelines
-    #       which have to be setup
-    #
-    incomplete_pipeline_names = set()
-    for task in forcedtorun_tasks + target_tasks:
-        if task.pipeline.name not in completed_pipeline_names:
-            incomplete_pipeline_names.add(task.pipeline.name)
-
-    for pipeline_name in incomplete_pipeline_names:
-        if pipeline_name in completed_pipeline_names:
-            continue
-        completed_pipeline_names = completed_pipeline_names.union(
-            pipeline.pipelines[pipeline_name]._complete_task_setup(processed_tasks))
-
     logging_strm = t_verbose_logger(verbose, verbose_abbreviated_path,
                                     t_stream_logger(output_stream), runtime_data)
 
-    #
-    #   If we aren't using checksums, and history file hasn't been specified,
-    #       we might be a bit surprised to find Ruffus writing to a
-    #       sqlite db anyway.
-    #   Let us just dump to a placeholder memory db that can then be discarded
-    #   Of course, if history_file is specified, we presume you know what
-    #       you are doing
-    if checksum_level == CHECKSUM_FILE_TIMESTAMPS and history_file is None:
-        history_file = ':memory:'
+    (checksum_level,
+     job_history,
+     pipeline,
+     runtime_data,
+     target_tasks,
+     forcedtorun_tasks ) = _pipeline_prepare_to_run(checksum_level, history_file,
+                                                    pipeline, runtime_data,
+                                                    target_tasks, forcedtorun_tasks)
+
 
-    #
-    # load previous job history if it exists, otherwise create an empty history
-    #
-    job_history = open_job_history(history_file)
 
     (incomplete_tasks,
      self_terminated_nodes,
@@ -4989,7 +5153,7 @@ def make_job_parameter_generator(incomplete_tasks, task_parents, logger,
                     if cnt_jobs_created == 0:
                         incomplete_tasks.remove(t)
                         t._completed()
-                        log_at_level(logger, 2, verbose,
+                        log_at_level(logger, 1, verbose,
                                      "Uptodate Task = %r" % t._get_display_name())
                         # LOGGER: logs All Tasks (including any task function docstrings)
                         log_at_level(logger, 10, verbose, "   No jobs created for %r. Retired "
@@ -4999,13 +5163,22 @@ def make_job_parameter_generator(incomplete_tasks, task_parents, logger,
                         #   Add extra warning if no regular expressions match:
                         #   This is a common class of frustrating errors
                         #
                         if verbose >= 1 and \
                                 "ruffus_WARNING" in runtime_data and \
                                 t.param_generator_func in runtime_data["ruffus_WARNING"]:
-                            for msg in runtime_data["ruffus_WARNING"][
-                                    t.param_generator_func]:
-                                logger.warning("    'In Task %r:' %s "
-                                               % (t._get_display_name(), msg))
+                            indent_str = " " * 8
+                            for msg in runtime_data["ruffus_WARNING"][t.param_generator_func]:
+                                messages = [msg.replace("\n", "\n" + indent_str)]
+                                if verbose >= 4 and runtime_data and \
+                                    "MATCH_FAILURE" in runtime_data and \
+                                    t.param_generator_func in runtime_data["MATCH_FAILURE"]:
+                                    for job_msg in runtime_data["MATCH_FAILURE"][t.param_generator_func]:
+                                        messages.append(indent_str + "Job Warning: Input substitution failed:")
+                                        messages.append(indent_str + "  " + job_msg.replace("\n", "\n" + indent_str + "  "))
+                                logger.warning("    In Task %r:\n%s%s "
+                                               % (t._get_display_name(), indent_str, "\n".join(messages)))
+
 
                 #
                 #   GeneratorExit thrown when generator doesn't complete.
@@ -5360,12 +5533,6 @@ def pipeline_run(target_tasks=[],
 
     syncmanager = multiprocessing.Manager()
 
-    if runtime_data is None:
-        runtime_data = {}
-    if not isinstance(runtime_data, dict):
-        raise Exception("pipeline_run parameter runtime_data should be a "
-                        "dictionary of values passes to jobs at run time.")
-
     #
     #   whether using multiprocessing or multithreading
     #
@@ -5379,8 +5546,26 @@ def pipeline_run(target_tasks=[],
         parallelism = 1
         pool = None
 
-    if checksum_level is None:
-        checksum_level = get_default_checksum_level()
+    if verbose == 0:
+        logger = black_hole_logger
+    elif verbose >= 11:
+        #   debugging aid: See t_stderr_logger
+        #   Each invocation of add_unique_prefix adds a unique prefix to
+        #       all subsequent output So that individual runs of pipeline run
+        #       are tagged
+        if hasattr(logger, "add_unique_prefix"):
+            logger.add_unique_prefix()
+
+
+    (checksum_level,
+     job_history,
+     pipeline,
+     runtime_data,
+     target_tasks,
+     forcedtorun_tasks ) = _pipeline_prepare_to_run(checksum_level, history_file,
+                                                    pipeline, runtime_data,
+                                                    target_tasks, forcedtorun_tasks)
+
 
     #
     #   Supplement mtime with system clock if using CHECKSUM_HISTORY_TIMESTAMPS
@@ -5401,75 +5586,10 @@ def pipeline_run(target_tasks=[],
                      % one_second_per_job)
         runtime_data["ONE_SECOND_PER_JOB"] = one_second_per_job
 
-    if verbose == 0:
-        logger = black_hole_logger
-    elif verbose >= 11:
-        #   debugging aid: See t_stderr_logger
-        #   Each invocation of add_unique_prefix adds a unique prefix to
-        #       all subsequent output So that individual runs of pipeline run
-        #       are tagged
-        if hasattr(logger, "add_unique_prefix"):
-            logger.add_unique_prefix()
-
     if touch_files_only and verbose >= 1:
         logger.info("Touch output files instead of remaking them.")
 
     #
-    #   pipeline must be a Pipeline or a string naming a pipeline
-    #
-    pipeline = lookup_pipeline(pipeline)
-    #
-    #   Make sure all tasks in dependency list are linked to real functions
-    #
-    processed_tasks = set()
-    completed_pipeline_names = pipeline._complete_task_setup(processed_tasks)
-
-    # link_task_names_to_functions ()
-    update_checksum_level_on_tasks(checksum_level)
-
-    #
-    #   If we aren't using checksums, and history file hasn't been specified,
-    #       we might be a bit surprised to find Ruffus writing to a
-    #       sqlite db anyway.
-    #   Let us just dump to a placeholder memory db that can then be discarded
-    #   Of course, if history_file is specified, we presume you know what
-    #       you are doing
-    if checksum_level == CHECKSUM_FILE_TIMESTAMPS and history_file is None:
-        history_file = ':memory:'
-
-    job_history = open_job_history(history_file)
-
-    #
-    # @active_if decorated tasks can change their active state every time
-    #   pipeline_run / pipeline_printout / pipeline_printout_graph is called
-    #
-    update_active_states_for_all_tasks()
-
-    #
-    #   target jobs
-    #
-    target_tasks = lookup_tasks_from_user_specified_names("Target", target_tasks, pipeline.name)
-    if not target_tasks:
-        target_tasks = list(pipeline.tasks)
-    forcedtorun_tasks = lookup_tasks_from_user_specified_names("Forced to run", forcedtorun_tasks,
-                                                               pipeline.name)
-
-    #
-    #   forcedtorun_tasks and target_tasks may include more pipelines
-    #       which have to be setup
-    #
-    incomplete_pipeline_names = set()
-    for task in forcedtorun_tasks + target_tasks:
-        if task.pipeline.name not in completed_pipeline_names:
-            incomplete_pipeline_names.add(task.pipeline.name)
-
-    for pipeline_name in incomplete_pipeline_names:
-        if pipeline_name in completed_pipeline_names:
-            continue
-        completed_pipeline_names = completed_pipeline_names.union(
-            pipeline.pipelines[pipeline_name]._complete_task_setup(processed_tasks))
-
-    #
     #   To update the checksum file, we force all tasks to rerun
     #       but then don't actually call the task function...
     #
@@ -5816,8 +5936,6 @@ def pipeline_run(target_tasks=[],
 
     if len(job_errors):
         raise job_errors
-    # DEBUGGG
-    #print("pipeline_run finish", file = sys.stderr)
 
 
 #   use high resolution timestamps where available
diff --git a/ruffus/test/auto_generated_pipeline_examples/parallel.py b/ruffus/test/auto_generated_pipeline_examples/parallel.py
deleted file mode 100644
index 0bec4f1..0000000
--- a/ruffus/test/auto_generated_pipeline_examples/parallel.py
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/env python2.5
-"""
-
-    test_tasks.py
-
-"""
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   options
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-from optparse import OptionParser
-import sys, os
-import os.path
-import StringIO
-
-# add self to search path for testing
-exe_path = os.path.split(os.path.abspath(sys.argv[0]))[0]
-if __name__ == '__main__':
-    module_name = os.path.split(sys.argv[0])[1]
-    module_name = os.path.splitext(module_name)[0];
-else:
-    module_name = __name__
-
-# graph, task etc are one directory down
-if __name__ == '__main__':
-    sys.path.append("/net/cpp-group/Leo/inprogress/pipeline/installation/src/ruffus")
-
-
-
-parser = OptionParser(version="%prog 1.0")
-parser.add_option("-t", "--target_tasks", dest="target_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Target task(s) of pipeline.")
-parser.add_option("-f", "--forced_tasks", dest="forced_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Pipeline task(s) which will be included even if they are up to date.")
-parser.add_option("-j", "--jobs", dest="jobs",
-                  default=5,
-                  metavar="jobs",
-                  type="int",
-                  help="Specifies  the number of jobs (commands) to run simultaneously.")
-parser.add_option("-v", "--verbose", dest = "verbose",
-                  action="count", default=0,
-                  help="Print more verbose messages for each additional verbose level.")
-parser.add_option("-d", "--dependency", dest="dependency_file",
-                  default="simple.svg",
-                  metavar="FILE",
-                  type="string",
-                  help="Print a dependency graph of the pipeline that would be executed "
-                        "to FILE, but do not execute it.")
-parser.add_option("-F", "--dependency_graph_format", dest="dependency_graph_format",
-                  metavar="FORMAT",
-                  type="string",
-                  default = 'svg',
-                  help="format of dependency graph file. Can be 'ps' (PostScript), "+
-                  "'svg' 'svgz' (Structured Vector Graphics), " +
-                  "'png' 'gif' (bitmap  graphics) etc ")
-parser.add_option("-n", "--just_print", dest="just_print",
-                    action="store_true", default=False,
-                    help="Print a description of the jobs that would be executed, "
-                        "but do not execute them.")
-
-parameters = [
-                ]
-
-
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   imports
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-import StringIO
-import re
-import operator
-import sys
-from collections import defaultdict
-
-from graph import *
-from task import *
-import task
-from print_dependencies import *
-# use simplejson in place of json for python < 2.6
-try:
-    import json
-except ImportError:
-    import simplejson
-    json = simplejson
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Functions
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-def create_custom_file_func(params):
-    """
-    creates function which can be used as input to @files_func
-    """
-    def cust_func ():
-        for job_param in params:
-            yield job_param
-    return cust_func
-
-
-def is_job_uptodate (infiles, outfiles, *extra_params):
-    """
-    assumes first two parameters are files, checks if they are up to date
-    """
-    return task.needs_update_check_modify_time (infiles, outfiles, *extra_params)
-
-
-
-def test_post_task_function ():
-    print "Hooray"
-
-import time
-def test_job_io(infiles, outfiles, extra_params):
-    """
-    cat input files content to output files
-        after writing out job parameters
-    """
-    # dump parameters
-    params = (infiles, outfiles) + extra_params
-    sys.stdout.write('    job = %s\n' % json.dumps(params))
-
-
-
-    if isinstance(infiles, str):
-        infiles = [infiles]
-    elif infiles is None:
-        infiles = []
-    if isinstance(outfiles, str):
-        outfiles = [outfiles]
-    output_text = list()
-    for f in infiles:
-        output_text.append(open(f).read())
-    output_text = "".join(sorted(output_text))
-    output_text += json.dumps(infiles) + " -> " + json.dumps(outfiles) + "\n"
-    for f in outfiles:
-        open(f, "w").write(output_text)
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Main logic
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-
-
-
-# get help string
-f =StringIO.StringIO()
-parser.print_help(f)
-helpstr = f.getvalue()
-(options, remaining_args) = parser.parse_args()
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Tasks
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-parameters = [
-                 ['A', 1, 2], # 1st job
-                 ['B', 3, 4], # 2nd job
-                 ['C', 5, 6], # 3rd job
-             ]
-
-#
-#   first task
-#
-def first_task():
-    print >>sys.stderr, "First task"
-
- at follows(first_task)
- at parallel(parameters)
-def parallel_task(name, param1, param2):
-    sys.stderr.write("    Parallel task %s: " % name)
-    sys.stderr.write("%d + %d = %d\n" % (param1, param2, param1 + param2))
-
-
-    pipeline_run([parallel_task], multiprocess = 2)
-
-pipeline_run([parallel_task])
-
-
-
-
-
-if options.just_print:
-    pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks, long_winded=True)
-
-elif options.dependency_file:
-    graph_printout (     open(options.dependency_file, "w"),
-                         options.dependency_graph_format,
-                         options.target_tasks,
-                         options.forced_tasks)
-else:
-    pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs)
-
diff --git a/ruffus/test/auto_generated_pipeline_examples/simple.py b/ruffus/test/auto_generated_pipeline_examples/simple.py
deleted file mode 100644
index 361969d..0000000
--- a/ruffus/test/auto_generated_pipeline_examples/simple.py
+++ /dev/null
@@ -1,253 +0,0 @@
-#!/usr/bin/env python2.5
-"""
-
-    test_tasks.py
-
-"""
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   options
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-from optparse import OptionParser
-import sys, os
-import os.path
-import StringIO
-
-# add self to search path for testing
-exe_path = os.path.split(os.path.abspath(sys.argv[0]))[0]
-if __name__ == '__main__':
-    module_name = os.path.split(sys.argv[0])[1]
-    module_name = os.path.splitext(module_name)[0];
-else:
-    module_name = __name__
-
-# graph, task etc are one directory down
-if __name__ == '__main__':
-    sys.path.append("/net/cpp-group/Leo/inprogress/pipeline/installation/src/ruffus")
-
-
-
-parser = OptionParser(version="%prog 1.0")
-parser.add_option("-t", "--target_tasks", dest="target_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Target task(s) of pipeline.")
-parser.add_option("-f", "--forced_tasks", dest="forced_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Pipeline task(s) which will be included even if they are up to date.")
-parser.add_option("-j", "--jobs", dest="jobs",
-                  default=5,
-                  metavar="jobs",
-                  type="int",
-                  help="Specifies  the number of jobs (commands) to run simultaneously.")
-parser.add_option("-v", "--verbose", dest = "verbose",
-                  action="count", default=0,
-                  help="Print more verbose messages for each additional verbose level.")
-parser.add_option("-d", "--dependency", dest="dependency_file",
-                  default="simple.svg",
-                  metavar="FILE",
-                  type="string",
-                  help="Print a dependency graph of the pipeline that would be executed "
-                        "to FILE, but do not execute it.")
-parser.add_option("-F", "--dependency_graph_format", dest="dependency_graph_format",
-                  metavar="FORMAT",
-                  type="string",
-                  default = 'svg',
-                  help="format of dependency graph file. Can be 'ps' (PostScript), "+
-                  "'svg' 'svgz' (Structured Vector Graphics), " +
-                  "'png' 'gif' (bitmap  graphics) etc ")
-parser.add_option("-n", "--just_print", dest="just_print",
-                    action="store_true", default=False,
-                    help="Print a description of the jobs that would be executed, "
-                        "but do not execute them.")
-
-parameters = [
-                ]
-
-
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   imports
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-import StringIO
-import re
-import operator
-import sys
-from collections import defaultdict
-
-from graph import *
-from task import *
-import task
-from print_dependencies import *
-# use simplejson in place of json for python < 2.6
-try:
-    import json
-except ImportError:
-    import simplejson
-    json = simplejson
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Functions
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-def create_custom_file_func(params):
-    """
-    creates function which can be used as input to @files_func
-    """
-    def cust_func ():
-        for job_param in params:
-            yield job_param
-    return cust_func
-
-
-def is_job_uptodate (infiles, outfiles, *extra_params):
-    """
-    assumes first two parameters are files, checks if they are up to date
-    """
-    return task.needs_update_check_modify_time (infiles, outfiles, *extra_params)
-
-
-
-def test_post_task_function ():
-    print "Hooray"
-
-import time
-def test_job_io(infiles, outfiles, extra_params):
-    """
-    cat input files content to output files
-        after writing out job parameters
-    """
-    # dump parameters
-    params = (infiles, outfiles) + extra_params
-    sys.stdout.write('    job = %s\n' % json.dumps(params))
-
-
-
-    if isinstance(infiles, str):
-        infiles = [infiles]
-    elif infiles is None:
-        infiles = []
-    if isinstance(outfiles, str):
-        outfiles = [outfiles]
-    output_text = list()
-    for f in infiles:
-        output_text.append(open(f).read())
-    output_text = "".join(sorted(output_text))
-    output_text += json.dumps(infiles) + " -> " + json.dumps(outfiles) + "\n"
-    for f in outfiles:
-        open(f, "w").write(output_text)
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Main logic
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-
-
-
-# get help string
-f =StringIO.StringIO()
-parser.print_help(f)
-helpstr = f.getvalue()
-(options, remaining_args) = parser.parse_args()
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Tasks
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-#
-#    task1
-#
- at files(None, 'a.1')
-def task1(infiles, outfiles, *extra_params):
-    """
-    First task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-#
-#    task2
-#
- at files_re('*.1', '(.*).1', r'\1.2')
- at follows(task1)
-def task2(infiles, outfiles, *extra_params):
-    """
-    Second task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-#
-#    task3
-#
- at files_re('*.1', '(.*).1', r'\1.3')
- at follows(task2)
-def task3(infiles, outfiles, *extra_params):
-    """
-    Third task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-#
-#    task4
-#
- at files_re('*.1', '(.*).1', r'\1.4')
- at follows(task3)
-def task4(infiles, outfiles, *extra_params):
-    """
-    Fourth task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-
-if options.just_print:
-    pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks, long_winded=True)
-
-elif options.dependency_file:
-    graph_printout (     open(options.dependency_file, "w"),
-                         options.dependency_graph_format,
-                         options.target_tasks,
-                         options.forced_tasks)
-else:
-    pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs)
-
diff --git a/ruffus/test/auto_generated_pipeline_examples/simpler.py b/ruffus/test/auto_generated_pipeline_examples/simpler.py
deleted file mode 100644
index 29ee6dc..0000000
--- a/ruffus/test/auto_generated_pipeline_examples/simpler.py
+++ /dev/null
@@ -1,269 +0,0 @@
-#!/usr/bin/env python2.5
-"""
-
-    test_tasks.py
-
-"""
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   options
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-from optparse import OptionParser
-import sys, os
-import os.path
-import StringIO
-
-# add self to search path for testing
-exe_path = os.path.split(os.path.abspath(sys.argv[0]))[0]
-if __name__ == '__main__':
-    module_name = os.path.split(sys.argv[0])[1]
-    module_name = os.path.splitext(module_name)[0];
-else:
-    module_name = __name__
-
-# graph, task etc are one directory down
-if __name__ == '__main__':
-    sys.path.append("/net/cpp-group/Leo/inprogress/pipeline/installation/src/ruffus")
-
-
-
-parser = OptionParser(version="%prog 1.0")
-parser.add_option("-t", "--target_tasks", dest="target_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Target task(s) of pipeline.")
-parser.add_option("-f", "--forced_tasks", dest="forced_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Pipeline task(s) which will be included even if they are up to date.")
-parser.add_option("-j", "--jobs", dest="jobs",
-                  default=5,
-                  metavar="jobs",
-                  type="int",
-                  help="Specifies  the number of jobs (commands) to run simultaneously.")
-parser.add_option("-v", "--verbose", dest = "verbose",
-                  action="count", default=0,
-                  help="Print more verbose messages for each additional verbose level.")
-parser.add_option("-d", "--dependency", dest="dependency_file",
-                  default="simple.svg",
-                  metavar="FILE",
-                  type="string",
-                  help="Print a dependency graph of the pipeline that would be executed "
-                        "to FILE, but do not execute it.")
-parser.add_option("-F", "--dependency_graph_format", dest="dependency_graph_format",
-                  metavar="FORMAT",
-                  type="string",
-                  default = 'svg',
-                  help="format of dependency graph file. Can be 'ps' (PostScript), "+
-                  "'svg' 'svgz' (Structured Vector Graphics), " +
-                  "'png' 'gif' (bitmap  graphics) etc ")
-parser.add_option("-n", "--just_print", dest="just_print",
-                    action="store_true", default=False,
-                    help="Print a description of the jobs that would be executed, "
-                        "but do not execute them.")
-parser.add_option("-M", "--minimal_rebuild_mode", dest="minimal_rebuild_mode",
-                    action="store_true", default=False,
-                    help="Rebuild a minimum of tasks necessary for the target. "
-                    "Ignore upstream out of date tasks if intervening tasks are fine.")
-parser.add_option("-K", "--no_key_legend_in_graph", dest="no_key_legend_in_graph",
-                    action="store_true", default=False,
-                    help="Do not print out legend and key for dependency graph.")
-parser.add_option("-H", "--draw_graph_horizontally", dest="draw_horizontally",
-                    action="store_true", default=False,
-                    help="Draw horizontal dependency graph.")
-
-parameters = [
-                ]
-
-
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   imports
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-import StringIO
-import re
-import operator
-import sys
-from collections import defaultdict
-
-from graph import *
-from task import *
-import task
-from print_dependencies import *
-# use simplejson in place of json for python < 2.6
-try:
-    import json
-except ImportError:
-    import simplejson
-    json = simplejson
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Functions
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-def create_custom_file_func(params):
-    """
-    creates function which can be used as input to @files_func
-    """
-    def cust_func ():
-        for job_param in params:
-            yield job_param
-    return cust_func
-
-
-def is_job_uptodate (infiles, outfiles, *extra_params):
-    """
-    assumes first two parameters are files, checks if they are up to date
-    """
-    return task.needs_update_check_modify_time (infiles, outfiles, *extra_params)
-
-
-
-def test_post_task_function ():
-    print "Hooray"
-
-import time
-def test_job_io(infiles, outfiles, extra_params):
-    """
-    cat input files content to output files
-        after writing out job parameters
-    """
-    # dump parameters
-    params = (infiles, outfiles) + extra_params
-    sys.stdout.write('    job = %s\n' % json.dumps(params))
-
-
-
-    if isinstance(infiles, str):
-        infiles = [infiles]
-    elif infiles is None:
-        infiles = []
-    if isinstance(outfiles, str):
-        outfiles = [outfiles]
-    output_text = list()
-    for f in infiles:
-        output_text.append(open(f).read())
-    output_text = "".join(sorted(output_text))
-    output_text += json.dumps(infiles) + " -> " + json.dumps(outfiles) + "\n"
-    for f in outfiles:
-        open(f, "w").write(output_text)
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Main logic
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-
-
-
-# get help string
-f =StringIO.StringIO()
-parser.print_help(f)
-helpstr = f.getvalue()
-(options, remaining_args) = parser.parse_args()
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Tasks
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-#
-#    task1
-#
- at files(None, 'a.1')
-def task1(infiles, outfiles, *extra_params):
-    """
-    First task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-#
-#    task2
-#
- at files_re('*.1', '(.*).1', r'\1.1', r'\1.2')
- at follows(task1)
-def task2(infiles, outfiles, *extra_params):
-    """
-    Second task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-#
-#    task3
-#
- at files_re('*.1', '(.*).1', r'\1.2', r'\1.3')
- at follows(task2)
-def task3(infiles, outfiles, *extra_params):
-    """
-    Third task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-#
-#    task4
-#
- at files_re('*.1', '(.*).1', r'\1.3', r'\1.4')
- at follows(task3)
-def task4(infiles, outfiles, *extra_params):
-    """
-    Fourth task
-    """
-    test_job_io(infiles, outfiles, extra_params)
-
-
-
-
-if options.just_print:
-    pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
-                        long_winded=True,
-                        gnu_make_maximal_rebuild_mode = not options.minimal_rebuild_mode)
-
-elif options.dependency_file:
-    pipeline_printout_graph (     open(options.dependency_file, "w"),
-                         options.dependency_graph_format,
-                         options.target_tasks,
-                         options.forced_tasks,
-                         draw_vertically = not options.draw_horizontally,
-                         gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode,
-                         no_key_legend  = options.no_key_legend_in_graph)
-else:
-    pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
-                    gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode)
-
diff --git a/ruffus/test/complicated_example.py b/ruffus/test/complicated_example.py
index 15bf56e..b98035b 100755
--- a/ruffus/test/complicated_example.py
+++ b/ruffus/test/complicated_example.py
@@ -517,7 +517,7 @@ def summarise_all( summary_files, total_summary_file_name):
 if __name__ == '__main__':
     try:
         if options.just_print:
-            pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks, verbose=1)
+            pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks, verbose=1, pipeline= "main")
 
         elif options.dependency_file:
             graph_printout (     open(options.dependency_file, "w"),
@@ -525,7 +525,7 @@ if __name__ == '__main__':
                                  options.target_tasks,
                                  options.forced_tasks)
         else:
-            pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs)
+            pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs, pipeline= "main")
     except Exception as e:
         print(e.args)
-            
+
diff --git a/ruffus/test/create_test_script_from_dependency_tree.py b/ruffus/test/create_test_script_from_dependency_tree.py
index 2a521bc..b8b2d3b 100755
--- a/ruffus/test/create_test_script_from_dependency_tree.py
+++ b/ruffus/test/create_test_script_from_dependency_tree.py
@@ -286,7 +286,8 @@ if __name__ == '__main__':
                                  no_key_legend  = options.no_key_legend_in_graph)
         else:
             pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
-                            gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode)
+                            gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode,
+                            pipeline= "main")
 except Exception, e:
     print e.args
     \n""")
diff --git a/ruffus/test/manual_test_ctrl_c_exceptions.py b/ruffus/test/manual_test_ctrl_c_exceptions.py
index fb419ee..8f0ee23 100755
--- a/ruffus/test/manual_test_ctrl_c_exceptions.py
+++ b/ruffus/test/manual_test_ctrl_c_exceptions.py
@@ -86,7 +86,7 @@ def do_main ():
     print("Start....", file=sys.stdout)
     sys.stdout.flush()
     pipeline_run(verbose = 11,
-                 multiprocess = 5)
+                 multiprocess = 5, pipeline= "main")
     print("too late!!", file=sys.stdout)
     sys.stdout.flush()
     cleanup_tmpdir()
diff --git a/ruffus/test/play_with_colours.py b/ruffus/test/play_with_colours.py
index ca51ea0..c96e5b9 100755
--- a/ruffus/test/play_with_colours.py
+++ b/ruffus/test/play_with_colours.py
@@ -271,7 +271,8 @@ if __name__ == '__main__':
                                     minimal_key_legend             = False,
 
                                     user_colour_scheme = custom_flow_chart_colour_scheme,
-                                    pipeline_name = "Colour schemes")
+                                    pipeline_name = "Colour schemes",
+                                    pipeline= "main")
 
 
 
diff --git a/ruffus/test/run_all_unit_tests.cmd b/ruffus/test/run_all_unit_tests.cmd
new file mode 100755
index 0000000..149df22
--- /dev/null
+++ b/ruffus/test/run_all_unit_tests.cmd
@@ -0,0 +1,93 @@
+echo Running test_file_name_parameters.py                                           && \
+python  -m unittest test_file_name_parameters                                       && \
+echo Running test_with_logger.py                                                    && \
+python  -m unittest test_with_logger                                                && \
+echo Running script test_with_logger.py                                             && \
+python  test_with_logger.py                                                         && \
+echo Running test_proxy_logger.py                                                   && \
+python  -m unittest test_proxy_logger                                               && \
+echo Running test_exceptions.py                                                     && \
+python  -m unittest test_exceptions                                                 && \
+echo Running test_task_file_dependencies.py                                         && \
+python  -m unittest test_task_file_dependencies                                     && \
+echo Running test_task_misc.py                                                      && \
+python  -m unittest test_task_misc                                                  && \
+echo Running test_inputs_with_multiple_args_raising_exception.py                    && \
+python  -m unittest test_inputs_with_multiple_args_raising_exception                && \
+echo Running test_collate.py                                                        && \
+python  -m unittest test_collate                                                    && \
+echo Running test_empty_files_decorator.py                                          && \
+python  -m unittest test_empty_files_decorator                                      && \
+echo Running test_transform_with_no_re_matches.py                                   && \
+python  -m unittest test_transform_with_no_re_matches                               && \
+echo Running test_transform_inputs.py                                               && \
+python  -m unittest test_transform_inputs                                           && \
+echo Running test_files_decorator.py                                                && \
+python  -m unittest test_files_decorator                                            && \
+echo Running test_verbosity.py                                                      && \
+python  -m unittest test_verbosity                                                  && \
+echo Running test_transform_add_inputs.py                                           && \
+python  -m unittest test_transform_add_inputs                                       && \
+echo Running test_split_regex_and_collate.py                                        && \
+python  -m unittest test_split_regex_and_collate                                    && \
+echo Running test_tutorial7.py                                                      && \
+python  -m unittest test_tutorial7                                                  && \
+echo Running test_ruffus_utility.py                                                 && \
+python  -m unittest test_ruffus_utility                                             && \
+echo Running test_filesre_combine.py                                                && \
+python  -m unittest test_filesre_combine                                            && \
+echo Running test_filesre_split_and_combine.py                                      && \
+python  -m unittest test_filesre_split_and_combine                                  && \
+echo Running test_branching_dependencies.py                                         && \
+python  -m unittest test_branching_dependencies                                     && \
+echo Running test_split_and_combine.py                                              && \
+python  -m unittest test_split_and_combine                                          && \
+echo Running test_runtime_data.py                                                   && \
+python  -m unittest test_runtime_data                                               && \
+echo Running test_pausing.py                                                        && \
+python  -m unittest test_pausing                                                    && \
+echo Running test_active_if.py                                                      && \
+python  -m unittest test_active_if                                                  && \
+echo Running test_softlink_uptodate.py                                              && \
+python  -m unittest test_softlink_uptodate                                          && \
+echo Running test_newstyle_proxy.py                                                 && \
+python  -m unittest test_newstyle_proxy                                             && \
+echo Running test_job_history_with_exceptions.py                                    && \
+python  -m unittest test_job_history_with_exceptions                                && \
+echo Running test_mkdir.py                                                          && \
+python  -m unittest test_mkdir                                                      && \
+echo Running test_posttask_merge.py                                                 && \
+python  -m unittest test_posttask_merge                                             && \
+echo Running test_cmdline.py                                                        && \
+python  -m unittest test_cmdline                                                    && \
+echo Running test_graphviz.py                                                       && \
+python  -m unittest test_graphviz                                                   && \
+echo Running test_ruffus_utility_parse_task_arguments.py                            && \
+python  -m unittest test_ruffus_utility_parse_task_arguments                        && \
+echo Running test_split_subdivide_checkpointing.py                                  && \
+python  -m unittest test_split_subdivide_checkpointing                              && \
+echo Running test_pipeline_printout_graph.py                                        && \
+python  -m unittest test_pipeline_printout_graph                                    && \
+echo Running test_follows_mkdir.py                                                  && \
+python  -m unittest test_follows_mkdir                                              && \
+echo Running test_N_x_M_and_collate.py                                              && \
+python  -m unittest test_N_x_M_and_collate                                          && \
+echo Running test_unicode_filenames.py                                              && \
+python  -m unittest test_unicode_filenames                                          && \
+echo Running test_subpipeline.py                                                    && \
+python  -m unittest test_subpipeline                                                && \
+# fragile tests involving error messages
+echo Running test_regex_error_messages.py                                           && \
+python  -m unittest test_regex_error_messages                                       && \
+echo Running test_newstyle_regex_error_messages.py                                  && \
+python  -m unittest test_newstyle_regex_error_messages                              && \
+echo Running test_combinatorics.py                                                  && \
+python  -m unittest test_combinatorics                                              && \
+echo Running test_newstyle_combinatorics.py                                         && \
+python  -m unittest test_newstyle_combinatorics                                     && \
+echo Running test_job_completion_checksums.py                                       && \
+python  -m unittest test_job_completion_checksums                                   && \
+echo Running test_transform_formatter.py                                            && \
+python  -m unittest test_transform_formatter                                        && \
+echo DONE!!!                                                                        
+
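The new run_all_unit_tests.cmd chains every test with &&, so the suite is fail-fast: the first module whose tests exit non-zero stops the whole run, and "DONE!!!" only prints on a clean sweep. A rough Python equivalent of that fail-fast loop (module list abbreviated):

    import subprocess
    import sys

    for module in ["test_file_name_parameters", "test_with_logger"]:  # abbreviated
        print("Running %s.py" % module)
        # mirror the && chain: bail out at the first failing module
        if subprocess.call([sys.executable, "-m", "unittest", module]) != 0:
            sys.exit(1)
    print("DONE!!!")
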
diff --git a/ruffus/test/run_all_unit_tests3.cmd b/ruffus/test/run_all_unit_tests3.cmd
new file mode 100755
index 0000000..be7e655
--- /dev/null
+++ b/ruffus/test/run_all_unit_tests3.cmd
@@ -0,0 +1,93 @@
+echo Running test_file_name_parameters.py                                           && \
+python3 -m unittest test_file_name_parameters                                       && \
+echo Running test_with_logger.py                                                    && \
+python3 -m unittest test_with_logger                                                && \
+echo Running script test_with_logger.py                                             && \
+python3 test_with_logger.py                                                         && \
+echo Running test_proxy_logger.py                                                   && \
+python3 -m unittest test_proxy_logger                                               && \
+echo Running test_exceptions.py                                                     && \
+python3 -m unittest test_exceptions                                                 && \
+echo Running test_task_file_dependencies.py                                         && \
+python3 -m unittest test_task_file_dependencies                                     && \
+echo Running test_task_misc.py                                                      && \
+python3 -m unittest test_task_misc                                                  && \
+echo Running test_inputs_with_multiple_args_raising_exception.py                    && \
+python3 -m unittest test_inputs_with_multiple_args_raising_exception                && \
+echo Running test_collate.py                                                        && \
+python3 -m unittest test_collate                                                    && \
+echo Running test_empty_files_decorator.py                                          && \
+python3 -m unittest test_empty_files_decorator                                      && \
+echo Running test_transform_with_no_re_matches.py                                   && \
+python3 -m unittest test_transform_with_no_re_matches                               && \
+echo Running test_transform_inputs.py                                               && \
+python3 -m unittest test_transform_inputs                                           && \
+echo Running test_files_decorator.py                                                && \
+python3 -m unittest test_files_decorator                                            && \
+echo Running test_verbosity.py                                                      && \
+python3 -m unittest test_verbosity                                                  && \
+echo Running test_transform_add_inputs.py                                           && \
+python3 -m unittest test_transform_add_inputs                                       && \
+echo Running test_split_regex_and_collate.py                                        && \
+python3 -m unittest test_split_regex_and_collate                                    && \
+echo Running test_tutorial7.py                                                      && \
+python3 -m unittest test_tutorial7                                                  && \
+echo Running test_ruffus_utility.py                                                 && \
+python3 -m unittest test_ruffus_utility                                             && \
+echo Running test_filesre_combine.py                                                && \
+python3 -m unittest test_filesre_combine                                            && \
+echo Running test_filesre_split_and_combine.py                                      && \
+python3 -m unittest test_filesre_split_and_combine                                  && \
+echo Running test_branching_dependencies.py                                         && \
+python3 -m unittest test_branching_dependencies                                     && \
+echo Running test_split_and_combine.py                                              && \
+python3 -m unittest test_split_and_combine                                          && \
+echo Running test_runtime_data.py                                                   && \
+python3 -m unittest test_runtime_data                                               && \
+echo Running test_pausing.py                                                        && \
+python3 -m unittest test_pausing                                                    && \
+echo Running test_active_if.py                                                      && \
+python3 -m unittest test_active_if                                                  && \
+echo Running test_softlink_uptodate.py                                              && \
+python3 -m unittest test_softlink_uptodate                                          && \
+echo Running test_newstyle_proxy.py                                                 && \
+python3 -m unittest test_newstyle_proxy                                             && \
+echo Running test_job_history_with_exceptions.py                                    && \
+python3 -m unittest test_job_history_with_exceptions                                && \
+echo Running test_mkdir.py                                                          && \
+python3 -m unittest test_mkdir                                                      && \
+echo Running test_posttask_merge.py                                                 && \
+python3 -m unittest test_posttask_merge                                             && \
+echo Running test_cmdline.py                                                        && \
+python3 -m unittest test_cmdline                                                    && \
+echo Running test_graphviz.py                                                       && \
+python3 -m unittest test_graphviz                                                   && \
+echo Running test_ruffus_utility_parse_task_arguments.py                            && \
+python3 -m unittest test_ruffus_utility_parse_task_arguments                        && \
+echo Running test_split_subdivide_checkpointing.py                                  && \
+python3 -m unittest test_split_subdivide_checkpointing                              && \
+echo Running test_pipeline_printout_graph.py                                        && \
+python3 -m unittest test_pipeline_printout_graph                                    && \
+echo Running test_follows_mkdir.py                                                  && \
+python3 -m unittest test_follows_mkdir                                              && \
+echo Running test_N_x_M_and_collate.py                                              && \
+python3 -m unittest test_N_x_M_and_collate                                          && \
+echo Running test_unicode_filenames.py                                              && \
+python3 -m unittest test_unicode_filenames                                          && \
+echo Running test_subpipeline.py                                                    && \
+python3 -m unittest test_subpipeline                                                && \
+# fragile tests involving error messages
+echo Running test_regex_error_messages.py                                           && \
+python3 -m unittest test_regex_error_messages                                       && \
+echo Running test_newstyle_regex_error_messages.py                                  && \
+python3 -m unittest test_newstyle_regex_error_messages                              && \
+echo Running test_combinatorics.py                                                  && \
+python3 -m unittest test_combinatorics                                              && \
+echo Running test_newstyle_combinatorics.py                                         && \
+python3 -m unittest test_newstyle_combinatorics                                     && \
+echo Running test_job_completion_checksums.py                                       && \
+python3 -m unittest test_job_completion_checksums                                   && \
+echo Running test_transform_formatter.py                                            && \
+python3 -m unittest test_transform_formatter                                        && \
+echo DONE!!!                                                                        
+
diff --git a/ruffus/test/simpler.py b/ruffus/test/simpler.py
deleted file mode 100644
index 16cee2e..0000000
--- a/ruffus/test/simpler.py
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/usr/bin/env python
-from __future__ import print_function
-"""
-
-    test_tasks.py
-
-"""
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   options
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-from optparse import OptionParser
-import sys, os
-import os.path
-try:
-    import StringIO as io
-except:
-    import io as io
-
-# add self to search path for testing
-exe_path = os.path.split(os.path.abspath(sys.argv[0]))[0]
-sys.path.insert(0,os.path.abspath(os.path.join(exe_path,"..", "..")))
-if __name__ == '__main__':
-    module_name = os.path.split(sys.argv[0])[1]
-    module_name = os.path.splitext(module_name)[0];
-else:
-    module_name = __name__
-
-
-
-
-parser = OptionParser(version="%prog 1.0")
-parser.add_option("-t", "--target_tasks", dest="target_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Target task(s) of pipeline.")
-parser.add_option("-f", "--forced_tasks", dest="forced_tasks",
-                  action="append",
-                  default = list(),
-                  metavar="JOBNAME",
-                  type="string",
-                  help="Pipeline task(s) which will be included even if they are up to date.")
-parser.add_option("-j", "--jobs", dest="jobs",
-                  default=1,
-                  metavar="jobs",
-                  type="int",
-                  help="Specifies  the number of jobs (commands) to run simultaneously.")
-parser.add_option("-v", "--verbose", dest = "verbose",
-                  action="count", default=0,
-                  help="Print more verbose messages for each additional verbose level.")
-parser.add_option("-d", "--dependency", dest="dependency_file",
-                  metavar="FILE",
-                  type="string",
-                  help="Print a dependency graph of the pipeline that would be executed "
-                        "to FILE, but do not execute it.")
-parser.add_option("-F", "--dependency_graph_format", dest="dependency_graph_format",
-                  metavar="FORMAT",
-                  type="string",
-                  default = 'svg',
-                  help="format of dependency graph file. Can be 'ps' (PostScript), "+
-                  "'svg' 'svgz' (Structured Vector Graphics), " +
-                  "'png' 'gif' (bitmap  graphics) etc ")
-parser.add_option("-n", "--just_print", dest="just_print",
-                    action="store_true", default=False,
-                    help="Print a description of the jobs that would be executed, "
-                        "but do not execute them.")
-parser.add_option("-M", "--minimal_rebuild_mode", dest="minimal_rebuild_mode",
-                    action="store_true", default=False,
-                    help="Rebuild a minimum of tasks necessary for the target. "
-                    "Ignore upstream out of date tasks if intervening tasks are fine.")
-parser.add_option("-K", "--no_key_legend_in_graph", dest="no_key_legend_in_graph",
-                    action="store_true", default=False,
-                    help="Do not print out legend and key for dependency graph.")
-parser.add_option("-H", "--draw_graph_horizontally", dest="draw_horizontally",
-                    action="store_true", default=False,
-                    help="Draw horizontal dependency graph.")
-
-parameters = [
-                ]
-
-
-
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   imports
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-import re
-import operator
-import sys,os
-from collections import defaultdict
-
-sys.path.append(os.path.abspath(os.path.join(exe_path,"..", "..")))
-from ruffus import *
-
-# use simplejson in place of json for python < 2.6
-try:
-    import json
-except ImportError:
-    import simplejson
-    json = simplejson
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Functions
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Main logic
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-
-
-
-# get help string
-f =io.StringIO()
-parser.print_help(f)
-helpstr = f.getvalue()
-(options, remaining_args) = parser.parse_args()
-
-import time
-def sleep_a_while ():
-    time.sleep(0.1)
-
-
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-#   Tasks
-
-
-#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-
-
-#
-#    task1
-#
-@files(None, 'a.1')
-def task1(infile, outfile):
-    """
-    First task
-    """
-    output_text  = ""
-    output_text += json.dumps(infile) + " -> " + json.dumps(outfile) + "\n"
-    open(outfile, "w").write(output_text)
-
-
-
-#
-#    task2
-#
-@transform(task1, suffix(".1"), ".2")
-def task2(infile, outfile):
-    """
-    Second task
-    """
-    output_text  = open(infile).read() if infile else ""
-    output_text += json.dumps(infile) + " -> " + json.dumps(outfile) + "\n"
-    open(outfile, "w").write(output_text)
-
-
-
-#
-#    task3
-#
-@transform(task2, suffix(".2"), ".3")
-def task3(infile, outfile):
-    """
-    Third task
-    """
-    output_text  = open(infile).read() if infile else ""
-    output_text += json.dumps(infile) + " -> " + json.dumps(outfile) + "\n"
-    open(outfile, "w").write(output_text)
-
-
-
-#
-#    task4
-#
-@transform(task3, suffix(".3"), ".4")
-def task4(infile, outfile):
-    """
-    Fourth task
-    """
-    output_text  = open(infile).read() if infile else ""
-    output_text += json.dumps(infile) + " -> " + json.dumps(outfile) + "\n"
-    open(outfile, "w").write(output_text)
-
-#
-#   Necessary to protect the "entry point" of the program under windows.
-#       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
-#
-if __name__ == '__main__':
-    try:
-        if options.just_print:
-            pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
-                                gnu_make_maximal_rebuild_mode = not options.minimal_rebuild_mode,
-                            verbose = options.verbose)
-
-        elif options.dependency_file:
-            pipeline_printout_graph (     open(options.dependency_file, "w"),
-                                 options.dependency_graph_format,
-                                 options.target_tasks,
-                                 options.forced_tasks,
-                                 draw_vertically = not options.draw_horizontally,
-                                 gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode,
-                                 no_key_legend  = options.no_key_legend_in_graph)
-        else:
-            pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
-                            gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode,
-                            verbose = options.verbose)
-    except Exception as e:
-        print(e.args)
diff --git a/ruffus/test/simpler_with_shared_logging.py b/ruffus/test/simpler_with_shared_logging.py
index f35ccc0..1773763 100755
--- a/ruffus/test/simpler_with_shared_logging.py
+++ b/ruffus/test/simpler_with_shared_logging.py
@@ -303,7 +303,8 @@ if __name__ == '__main__':
         if options.just_print:
             pipeline_printout(sys.stdout, options.target_tasks, options.forced_tasks,
                                 verbose = options.verbose,
-                                gnu_make_maximal_rebuild_mode = not options.minimal_rebuild_mode)
+                                gnu_make_maximal_rebuild_mode = not options.minimal_rebuild_mode,
+                                pipeline= "main")
 
         elif options.dependency_file:
             pipeline_printout_graph (     open(options.dependency_file, "w"),
@@ -312,11 +313,13 @@ if __name__ == '__main__':
                                  options.forced_tasks,
                                  draw_vertically = not options.draw_horizontally,
                                  gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode,
-                                 no_key_legend  = options.no_key_legend_in_graph)
+                                 no_key_legend  = options.no_key_legend_in_graph,
+                                 pipeline= "main")
         else:
             pipeline_run(options.target_tasks, options.forced_tasks, multiprocess = options.jobs,
                             gnu_make_maximal_rebuild_mode  = not options.minimal_rebuild_mode,
                             verbose = options.verbose,
-                            logger = logger_proxy)
+                            logger = logger_proxy,
+                            pipeline= "main")
     except Exception as e:
         print(e.args)
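Here pipeline_run is handed a multiprocessing-safe logger proxy so that jobs running in worker processes can log through one shared logger. Any logger-like object can be passed via the logger parameter; a minimal sketch with an ordinary (non-shared, single-process) logger, name hypothetical:

    import logging
    import sys
    from ruffus import pipeline_run

    logger = logging.getLogger("pipeline_log")
    logger.addHandler(logging.StreamHandler(sys.stderr))
    logger.setLevel(logging.DEBUG)

    # fine for a single process; the shared-logging test needs the proxy instead
    pipeline_run(logger=logger, pipeline="main", verbose=1)
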
diff --git a/ruffus/test/slow_process_for_testing.py b/ruffus/test/slow_process_for_testing.py
new file mode 100644
index 0000000..93c190c
--- /dev/null
+++ b/ruffus/test/slow_process_for_testing.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+from __future__ import print_function
+import sys, os, time
+print ("    ", os.getcwd(), file = sys.stderr)
+print ("    ", os.environ, file = sys.stderr)
+loop_variable = 0
+loop_limit = 4
+while True:
+    if loop_variable >= loop_limit:
+        break
+    try:
+        sys.stderr.write("    Stderr %d\n" % loop_variable)
+        sys.stderr.flush()
+        sys.stdout.write("    Stdout %d\n" % loop_variable)
+        sys.stdout.flush()
+        loop_variable += 1
+        time.sleep(0.5)
+    except:
+        sys.stderr.write("    Ignore Exception. Now you have made me angry: I won't stop till 100\n")
+        loop_limit = 100
+        pass
+
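This helper script just ticks to stdout and stderr on a half-second timer; it exists so that test_drmaa_wrapper_run_job_locally.py (added below) has a slow, chatty subprocess to drive. A sketch of how that test invokes it, using only arguments that appear in the test itself:

    import ruffus.drmaa_wrapper

    # run the helper as a plain local subprocess; no DRMAA session is needed
    stdout, stderr = ruffus.drmaa_wrapper.run_job(
                        cmd_str = "python slow_process_for_testing.py",
                        run_locally = True,
                        verbose = 1,
                        local_echo = False)
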
diff --git a/ruffus/test/test_N_x_M_and_collate.py b/ruffus/test/test_N_x_M_and_collate.py
index 0262904..e1a480f 100755
--- a/ruffus/test/test_N_x_M_and_collate.py
+++ b/ruffus/test/test_N_x_M_and_collate.py
@@ -57,25 +57,16 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, follows, merge, posttask, split, collate, mkdir, regex, files
 
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
 
 import random
 
-script_path = os.path.dirname(os.path.abspath(__file__))
-gene_data_dir ="%s/temp_gene_data_for_intermediate_example" % script_path
-simulation_data_dir =  "%s/temp_simulation_data_for_intermediate_example" % script_path
-working_dir =  "%s/working_dir_for_intermediate_example" % script_path
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
+gene_data_dir ="%s/temp_gene_data_for_intermediate_example" % tempdir
+simulation_data_dir =  "%s/temp_simulation_data_for_intermediate_example" % tempdir
+working_dir =  "%s/working_dir_for_intermediate_example" % tempdir
 
 
 
@@ -240,6 +231,10 @@ def cleanup_simulation_data ():
         os.unlink(f)
     try_rmdir(working_dir)
 
+    try_rmdir(tempdir)
+
+
+
 
 #_________________________________________________________________________________________
 #
@@ -336,7 +331,7 @@ class Test_ruffus(unittest.TestCase):
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
         for oo in "000.mean", "001.mean":
             results_file_name = os.path.join(working_dir, oo)
             if not os.path.exists(results_file_name):
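Two clean-ups recur across the test suite from here on: the old "funky code to import by file name" gives way to a plain import ruffus plus explicit from ruffus import ... names, and each script derives its scratch directory from its own filename so every test writes to a unique, predictable location. The shared idiom, exactly as the diffs use it:

    import os

    # for ".../ruffus/test/test_N_x_M_and_collate.py" this yields the path
    # "test_N_x_M_and_collate" relative to the current working directory
    tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
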
diff --git a/ruffus/test/test_active_if.py b/ruffus/test/test_active_if.py
index 08b289f..9b422b4 100755
--- a/ruffus/test/test_active_if.py
+++ b/ruffus/test/test_active_if.py
@@ -7,10 +7,8 @@ from __future__ import print_function
 """
 
 
-tempdir = "test_active_if"
-
-
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -21,19 +19,8 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
+import ruffus
+from ruffus import transform, follows, originate, pipeline_run, Pipeline, suffix, regex, mkdir, active_if, collate, merge
 
 
 
@@ -212,7 +199,7 @@ class Test_ruffus(unittest.TestCase):
     def test_active_if_true (self):
         global pipeline_active_if
         pipeline_active_if = True
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
 
         with open("test_active_if/summary.5") as ii:
             active_text = ii.read()
@@ -222,7 +209,7 @@ class Test_ruffus(unittest.TestCase):
     def test_active_if_false (self):
         global pipeline_active_if
         pipeline_active_if = False
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
         with open("test_active_if/summary.5") as ii:
             inactive_text = ii.read()
         if inactive_text != expected_inactive_text:
diff --git a/ruffus/test/test_branching_dependencies.py b/ruffus/test/test_branching_dependencies.py
index 0d96f70..68a90e9 100755
--- a/ruffus/test/test_branching_dependencies.py
+++ b/ruffus/test/test_branching_dependencies.py
@@ -10,6 +10,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -22,16 +23,8 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, transform, follows, posttask, merge, mkdir, suffix, originate, regex, inputs, jobs_limit, files
 
 
 
@@ -124,7 +117,6 @@ def test_job_io(infiles, outfiles, extra_params):
 #       ->  4           ->
 #                   5   ->    6
 #
-tempdir = "temp_branching_dir/"
 
 def do_write(file_name, what):
     with open(file_name, "a") as oo:
@@ -375,7 +367,7 @@ class Test_ruffus(unittest.TestCase):
 
     def test_ruffus (self):
         print("\n\n     Run pipeline normally...")
-        pipeline_run(multiprocess = 10, verbose=0)
+        pipeline_run(multiprocess = 10, verbose=0, pipeline= "main")
         check_final_output_correct()
         check_job_order_correct(tempdir + "jobs.start")
         check_job_order_correct(tempdir + "jobs.finish")
@@ -387,11 +379,11 @@ class Test_ruffus(unittest.TestCase):
         print("       First delete b.1 for task2...")
         os.unlink(os.path.join(tempdir, "b.1"))
         print("       Then run with touch_file_only...")
-        pipeline_run([task2], multiprocess = 10, touch_files_only=True, verbose = 0)
+        pipeline_run([task2], multiprocess = 10, touch_files_only=True, verbose = 0, pipeline= "main")
 
         # check touching has made task2 up to date
         s = StringIO()
-        pipeline_printout(s, [task2], verbose=4, wrap_width = 10000)
+        pipeline_printout(s, [task2], verbose=4, wrap_width = 10000, pipeline= "main")
         output_str = s.getvalue()
         #print (">>>\n", output_str, "<<<\n", file=sys.stderr)
         if "b.1" in output_str:
@@ -401,7 +393,7 @@ class Test_ruffus(unittest.TestCase):
         print("     Touching has made task2 up to date...\n")
 
         print("     Then run normally again...")
-        pipeline_run(multiprocess = 10, verbose=0)
+        pipeline_run(multiprocess = 10, verbose=0, pipeline= "main")
         check_final_output_correct(True)
         check_job_order_correct(tempdir + "jobs.start")
         check_job_order_correct(tempdir + "jobs.finish")
diff --git a/ruffus/test/test_cmdline.py b/ruffus/test/test_cmdline.py
index 9573ae7..19cbc97 100755
--- a/ruffus/test/test_cmdline.py
+++ b/ruffus/test/test_cmdline.py
@@ -21,19 +21,11 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
 
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
-handle_verbose =  ruffus.cmdline.handle_verbose
-cmdline=  ruffus.cmdline
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline
+from ruffus.cmdline import handle_verbose
+cmdline =  ruffus.cmdline
 
 import unittest
 import re
@@ -65,7 +57,7 @@ class Test_cmdline(unittest.TestCase):
     #def test_something(self):
     #    s = StringIO()
     #    cleanup_tmpdir()
-    #    pipeline_printout(s, [test_regex_task], verbose=5, wrap_width = 10000)
+    #    pipeline_printout(s, [test_regex_task], verbose=5, wrap_width = 10000, pipeline= "main")
     #    self.assertTrue(re.search('Missing files.*\[tmp_test_regex_error_messages/a_name.tmp1, tmp_test_regex_error_messages/a_name.tmp2', s.getvalue(), re.DOTALL))
     #    self.assertIn("Warning: File match failure: File 'tmp_test_regex_error_messages/a_name.tmp1' does not match regex", s.getvalue())
     #    self.assertRaisesRegex(fatal_error_input_file_does_not_match,
@@ -291,5 +283,5 @@ class Test_cmdline(unittest.TestCase):
 #       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
 #
 if __name__ == '__main__':
-    #pipeline_printout(sys.stdout, [test_product_task], verbose = 3)
+    #pipeline_printout(sys.stdout, [test_product_task], verbose = 3, pipeline= "main")
     unittest.main()
diff --git a/ruffus/test/test_collate.py b/ruffus/test/test_collate.py
index 851f188..e3c4c70 100755
--- a/ruffus/test/test_collate.py
+++ b/ruffus/test/test_collate.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -19,18 +20,9 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, follows, posttask, split, collate, mkdir, regex
 
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
 
 
 
@@ -82,7 +74,6 @@ species_list["reptiles"].append("crocodile" )
 species_list["fish"   ].append("pufferfish")
 
 
-tempdir = "temp_filesre_combine/"
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@@ -179,7 +170,7 @@ class Test_ruffus(unittest.TestCase):
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 10, verbose = 0)
+        pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
         check_species_correct()
 
     def test_newstyle_ruffus (self):
diff --git a/ruffus/test/test_combinatorics.py b/ruffus/test/test_combinatorics.py
index a975b2e..1645327 100755
--- a/ruffus/test/test_combinatorics.py
+++ b/ruffus/test/test_combinatorics.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
 import sys
 
 # add grandparent to search path for testing
@@ -21,21 +22,12 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-try:
-    attrlist = ruffus.combinatorics.__all__
-except AttributeError:
-    attrlist = dir (ruffus.combinatorics)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus.combinatorics, attr)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, formatter, originate, follows, merge
 
-for attr in "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "follows", "Pipeline", "formatter", "output_from", "originate":
-    globals()[attr] = getattr (ruffus, attr)
-RethrownJobError = ruffus.ruffus_exceptions.RethrownJobError
-RUFFUS_HISTORY_FILE      = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS
+from ruffus.combinatorics import *
 
 
 
@@ -48,7 +40,6 @@ except:
 import re
 
 
-workdir = 'tmp_test_combinatorics'
 #sub-1s resolution in system?
 one_second_per_job = None
 
@@ -62,7 +53,7 @@ def touch (filename):
 #
 #   generate_initial_files1
 #___________________________________________________________________________
-@originate(output = [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
+@originate(output = [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
 def generate_initial_files1(out_name):
     with open(out_name, 'w') as outfile:
         pass
@@ -71,7 +62,7 @@ def generate_initial_files1(out_name):
 #
 #   generate_initial_files1
 #___________________________________________________________________________
- at originate([workdir +  "/e_name.tmp1", workdir +  "/f_name.tmp1"])
+ at originate([tempdir +  "/e_name.tmp1", tempdir +  "/f_name.tmp1"])
 def generate_initial_files2(out_name):
     with open(out_name, 'w') as outfile:
         pass
@@ -80,7 +71,7 @@ def generate_initial_files2(out_name):
 #
 #   generate_initial_files1
 #___________________________________________________________________________
- at originate([workdir +  "/g_name.tmp1", workdir +  "/h_name.tmp1"])
+ at originate([tempdir +  "/g_name.tmp1", tempdir +  "/h_name.tmp1"])
 def generate_initial_files3(out_name):
     with open(out_name, 'w') as outfile:
         pass
@@ -91,7 +82,7 @@ def generate_initial_files3(out_name):
 #___________________________________________________________________________
 @follows(generate_initial_files1)
 @product(
-        [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"],
+        [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"],
         formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
         generate_initial_files2,
         formatter(),
@@ -113,7 +104,7 @@ def test_product_task( infiles, outfile,
 #
 #   test_product_merged_task
 #___________________________________________________________________________
-@merge(test_product_task, workdir +  "/merged.results")
+@merge(test_product_task, tempdir +  "/merged.results")
 def test_product_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -188,7 +179,7 @@ def test_combinations2_task( infiles, outfile,
         outf.write(prefices + ",")
 
 
-@merge(test_combinations2_task, workdir +  "/merged.results")
+@merge(test_combinations2_task, tempdir +  "/merged.results")
 def test_combinations2_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -217,7 +208,7 @@ def test_combinations3_task( infiles, outfile,
     with open(outfile, "w") as outf:
         outf.write(prefices + ",")
 
-@merge(test_combinations3_task, workdir +  "/merged.results")
+@merge(test_combinations3_task, tempdir +  "/merged.results")
 def test_combinations3_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -247,7 +238,7 @@ def test_permutations2_task( infiles, outfile,
     with open(outfile, "w") as outf:
         outf.write(prefices + ",")
 
-@merge(test_permutations2_task, workdir +  "/merged.results")
+@merge(test_permutations2_task, tempdir +  "/merged.results")
 def test_permutations2_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -277,7 +268,7 @@ def test_permutations3_task( infiles, outfile,
     with open(outfile, "w") as outf:
         outf.write(prefices + ",")
 
-@merge(test_permutations3_task, workdir +  "/merged.results")
+@merge(test_permutations3_task, tempdir +  "/merged.results")
 def test_permutations3_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -308,7 +299,7 @@ def test_combinations_with_replacement2_task( infiles, outfile,
     with open(outfile, "w") as outf:
         outf.write(prefices + ",")
 
-@merge(test_combinations_with_replacement2_task, workdir +  "/merged.results")
+@merge(test_combinations_with_replacement2_task, tempdir +  "/merged.results")
 def test_combinations_with_replacement2_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -338,7 +329,7 @@ def test_combinations_with_replacement3_task( infiles, outfile,
     with open(outfile, "w") as outf:
         outf.write(prefices + ",")
 
-@merge(test_combinations_with_replacement3_task, workdir +  "/merged.results")
+@merge(test_combinations_with_replacement3_task, tempdir +  "/merged.results")
 def test_combinations_with_replacement3_merged_task( infiles, outfile):
     with open(outfile, "w") as p:
         for infile in sorted(infiles):
@@ -348,13 +339,13 @@ def test_combinations_with_replacement3_merged_task( infiles, outfile):
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), RUFFUS_HISTORY_FILE))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
 
 
 class TestCombinatorics(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
 
@@ -366,19 +357,19 @@ class TestCombinatorics(unittest.TestCase):
         """Input file exists, output doesn't exist"""
         cleanup_tmpdir()
         s = StringIO()
-        pipeline_printout(s, [test_product_merged_task], verbose=5, wrap_width = 10000)
+        pipeline_printout(s, [test_product_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
         self.assertTrue(re.search('Job needs update:.*Missing files.*'
-                      '\[.*tmp_test_combinatorics/a_name.tmp1, '
-                      '.*tmp_test_combinatorics/e_name.tmp1, '
-                      '.*tmp_test_combinatorics/h_name.tmp1, '
-                      '.*tmp_test_combinatorics/a_name.e_name.h_name.tmp2\]', s.getvalue(), re.DOTALL))
+                      '\[.*{tempdir}/a_name.tmp1, '
+                      '.*{tempdir}/e_name.tmp1, '
+                      '.*{tempdir}/h_name.tmp1, '
+                      '.*{tempdir}/a_name.e_name.h_name.tmp2\]'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
     def test_product_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_product_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_product_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "aeg,aeh,afg,afh,beg,beh,bfg,bfh,ceg,ceh,cfg,cfh,deg,deh,dfg,dfh,")
 
@@ -395,8 +386,9 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_product_misspelt_capture_error_task], verbose=3, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: Unmatched field 'FILEPART'", s.getvalue())
+        pipeline_printout(s, [test_product_misspelt_capture_error_task], verbose=3, wrap_width = 10000, pipeline= "main")
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("Unmatched field 'FILEPART'", s.getvalue())
 
 
     def test_product_out_of_range_formatter_ref_error(self):
@@ -407,8 +399,10 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_product_out_of_range_formatter_ref_error_task], verbose=3, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: Unmatched field 2", s.getvalue())
+        pipeline_printout(s, [test_product_out_of_range_formatter_ref_error_task], verbose=3, wrap_width = 10000, pipeline= "main")
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("Unmatched field 2", s.getvalue())
+
 
     def test_product_formatter_ref_index_error(self):
         """
@@ -418,8 +412,10 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_product_formatter_ref_index_error_task], verbose=3, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: Unmatched field string index out of range", s.getvalue())
+        pipeline_printout(s, [test_product_formatter_ref_index_error_task], verbose=3, wrap_width = 10000, pipeline= "main")
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("Unmatched field string index out of range", s.getvalue())
+
         #print s.getvalue()
 
 
@@ -432,19 +428,19 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_combinations2_merged_task], verbose=5, wrap_width = 10000)
+        pipeline_printout(s, [test_combinations2_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
         self.assertTrue(re.search('Job needs update:.*Missing files.*'
-                      '\[.*tmp_test_combinatorics/a_name.tmp1, '
-                        '.*tmp_test_combinatorics/b_name.tmp1, '
-                        '.*tmp_test_combinatorics/a_name.b_name.tmp2\]', s.getvalue(), re.DOTALL))
+                      '\[.*{tempdir}/a_name.tmp1, '
+                        '.*{tempdir}/b_name.tmp1, '
+                        '.*{tempdir}/a_name.b_name.tmp2\]'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
 
     def test_combinations2_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_combinations2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_combinations2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                               'ab,ac,ad,bc,bd,cd,')
 
@@ -457,19 +453,19 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_combinations3_merged_task], verbose=5, wrap_width = 10000)
+        pipeline_printout(s, [test_combinations3_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
         self.assertTrue(re.search(
-                       '\[.*tmp_test_combinatorics/a_name.tmp1, '
-                       '.*tmp_test_combinatorics/b_name.tmp1, '
-                       '.*tmp_test_combinatorics/c_name.tmp1, '
-                       '.*tmp_test_combinatorics/a_name.b_name.c_name.tmp2\]', s.getvalue()))
+                       '\[.*{tempdir}/a_name.tmp1, '
+                       '.*{tempdir}/b_name.tmp1, '
+                       '.*{tempdir}/c_name.tmp1, '
+                       '.*{tempdir}/a_name.b_name.c_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_combinations3_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_combinations3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_combinations3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "abc,abd,acd,bcd,")
 
@@ -483,17 +479,17 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_permutations2_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                      '.*tmp_test_combinatorics/b_name.tmp1, '
-                      '.*tmp_test_combinatorics/a_name.b_name.tmp2\]', s.getvalue()))
+        pipeline_printout(s, [test_permutations2_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                      '.*{tempdir}/b_name.tmp1, '
+                      '.*{tempdir}/a_name.b_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_permutations2_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_permutations2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_permutations2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "ab,ac,ad,ba,bc,bd,ca,cb,cd,da,db,dc,")
 
@@ -506,18 +502,18 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_permutations3_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                       '.*tmp_test_combinatorics/b_name.tmp1, '
-                       '.*tmp_test_combinatorics/c_name.tmp1, '
-                       '.*tmp_test_combinatorics/a_name.b_name.c_name.tmp2\]', s.getvalue()))
+        pipeline_printout(s, [test_permutations3_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                       '.*{tempdir}/b_name.tmp1, '
+                       '.*{tempdir}/c_name.tmp1, '
+                       '.*{tempdir}/a_name.b_name.c_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_permutations3_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_permutations3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_permutations3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          'abc,abd,acb,acd,adb,adc,bac,bad,bca,bcd,bda,bdc,cab,cad,cba,cbd,cda,cdb,dab,dac,dba,dbc,dca,dcb,')
 
@@ -531,17 +527,17 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_combinations_with_replacement2_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                      '.*tmp_test_combinatorics/b_name.tmp1, '
-                      '.*tmp_test_combinatorics/a_name.b_name.tmp2\]', s.getvalue()))
+        pipeline_printout(s, [test_combinations_with_replacement2_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                      '.*{tempdir}/b_name.tmp1, '
+                      '.*{tempdir}/a_name.b_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_combinations_with_replacement2_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_combinations_with_replacement2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_combinations_with_replacement2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "aa,ab,ac,ad,bb,bc,bd,cc,cd,dd,")
 
@@ -554,18 +550,18 @@ class TestCombinatorics(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_combinations_with_replacement3_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                       '.*tmp_test_combinatorics/b_name.tmp1, '
-                       '.*tmp_test_combinatorics/c_name.tmp1, '
-                       '.*tmp_test_combinatorics/a_name.b_name.c_name.tmp2\]', s.getvalue()))
+        pipeline_printout(s, [test_combinations_with_replacement3_merged_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                       '.*{tempdir}/b_name.tmp1, '
+                       '.*{tempdir}/c_name.tmp1, '
+                       '.*{tempdir}/a_name.b_name.c_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_combinations_with_replacement3_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_combinations_with_replacement3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        pipeline_run([test_combinations_with_replacement3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job, pipeline= "main")
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          'aaa,aab,aac,aad,abb,abc,abd,acc,acd,add,bbb,bbc,bbd,bcc,bcd,bdd,ccc,ccd,cdd,ddd,')
 
@@ -575,7 +571,7 @@ class TestCombinatorics(unittest.TestCase):
     #   cleanup
     #___________________________________________________________________________
     def tearDown(self):
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
 
 
 
@@ -584,5 +580,5 @@ class TestCombinatorics(unittest.TestCase):
 #       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
 #
 if __name__ == '__main__':
-    #pipeline_printout(sys.stdout, [test_product_task], verbose = 5)
+    #pipeline_printout(sys.stdout, [test_product_task], verbose = 5, pipeline= "main")
     unittest.main()
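
The change threaded through this file is the new explicit pipeline= "main" keyword: under the 2.6.3 syntax, every pipeline_run() / pipeline_printout() call can name the pipeline it operates on, with "main" denoting the default pipeline built implicitly from module-level decorators. A minimal sketch of the idiom (task and file names are illustrative):

    from ruffus import originate, pipeline_run

    # decorated at module level, so the task joins the default "main" pipeline
    @originate(["a.start", "b.start"])
    def make_start(output_file):
        open(output_file, "w").close()

    # naming the pipeline explicitly is equivalent to the old implicit call
    pipeline_run([make_start], verbose=0, pipeline="main")
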
diff --git a/ruffus/test/test_drmaa_wrapper_run_job_locally.py b/ruffus/test/test_drmaa_wrapper_run_job_locally.py
new file mode 100755
index 0000000..c40adb3
--- /dev/null
+++ b/ruffus/test/test_drmaa_wrapper_run_job_locally.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+from __future__ import print_function
+"""
+
+    test_drmaa_wrapper_run_job_locally.py
+
+"""
+
+import os
+script_dir = os.path.abspath(os.path.dirname(__file__))
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
+import sys
+
+# add grandparent to search path for testing
+grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+sys.path.insert(0, grandparent_dir)
+
+# module name = script name without extension
+module_name = os.path.splitext(os.path.basename(__file__))[0]
+
+
+import ruffus
+import ruffus.drmaa_wrapper
+from ruffus.drmaa_wrapper import write_job_script_to_temp_file
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   imports
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+import unittest
+import shutil
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   Main logic
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+class Test_ruffus(unittest.TestCase):
+    #___________________________________________________________________________
+    #
+    #   setup and cleanup
+    #___________________________________________________________________________
+    def setUp(self):
+        try:
+            os.mkdir(tempdir)
+        except OSError:
+            pass
+    def tearDown(self):
+        shutil.rmtree(tempdir)
+
+
+    def test_run_job (self):
+        environ = {"RUFFUS_HEEHEE":"what?"}
+        home_dir = os.path.expanduser("~")
+        sys.stderr.write("    Run silently...\n")
+        stdout, stderr = ruffus.drmaa_wrapper.run_job(cmd_str = "python %s/slow_process_for_testing.py" % script_dir,
+                                                         job_environment = environ,
+                                                         working_directory = home_dir,
+                                                         run_locally = True,
+                                                         verbose = 1,
+                                                         local_echo = False)
+        sys.stderr.write("    Run echoing to screen...\n")
+        stdout, stderr = ruffus.drmaa_wrapper.run_job(cmd_str = "python %s/slow_process_for_testing.py" % script_dir,
+                                                         job_environment = environ,
+                                                         working_directory = home_dir,
+                                                         run_locally = True,
+                                                         verbose = 1,
+                                                         local_echo = True)
+
+        self.assertEqual(stdout, ['    Stdout 0\n', '    Stdout 1\n', '    Stdout 2\n', '    Stdout 3\n'])
+        self.assertEqual(stderr, ['     %s\n' % home_dir, "     {'PWD': '%s', 'RUFFUS_HEEHEE': 'what?'}\n" % home_dir, '    Stderr 0\n', '    Stderr 1\n', '    Stderr 2\n', '    Stderr 3\n'])
+
+
+    def test_write_job_script_to_temp_file(self):
+        sys.stderr.write("    Write to temp_file...\n")
+        job_script_path, stdout_path, stderr_path = write_job_script_to_temp_file( "ls", None, "job_name", "", None, None)
+        os.unlink(job_script_path)
+        job_script_path, stdout_path, stderr_path = write_job_script_to_temp_file( "ls", tempdir, "job_name", "", None, None)
+
+if __name__ == '__main__':
+    unittest.main()
+
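
The new test above exercises drmaa_wrapper.run_job() in its run_locally mode, which executes the command in a local subprocess instead of submitting it through a drmaa session, so no cluster is required. A minimal sketch using the same keyword arguments as the test (the command string is illustrative):

    import os
    import ruffus.drmaa_wrapper

    # run_locally=True runs the command in a local subprocess;
    # stdout and stderr come back as lists of lines
    stdout, stderr = ruffus.drmaa_wrapper.run_job(
        cmd_str           = "echo hello",
        job_environment   = {"MY_VAR": "value"},
        working_directory = os.getcwd(),
        run_locally       = True,
        verbose           = 1,
        local_echo        = False)
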
diff --git a/ruffus/test/test_empty_files_decorator.py b/ruffus/test/test_empty_files_decorator.py
index a4e8788..2e2bfe3 100755
--- a/ruffus/test/test_empty_files_decorator.py
+++ b/ruffus/test/test_empty_files_decorator.py
@@ -21,16 +21,9 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
 
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, files
 
 
 
@@ -117,7 +110,8 @@ class Test_task(unittest.TestCase):
         save_to_str_logger = t_save_to_str_logger()
         pipeline_run(multiprocess = 10,
                             logger = save_to_str_logger,
-                            verbose = 1)
+                            verbose = 1,
+                     pipeline= "main")
         self.assertTrue("@files() was empty" in save_to_str_logger.warning_str)
         print("\n    Warning printed out correctly", file=sys.stderr)
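
The logger handed to pipeline_run() here only needs to be duck-typed: any object with debug/info/warning/error methods will do, which is what t_save_to_str_logger relies on. A minimal stand-in modelled on that helper (the class name is illustrative):

    class SaveToStrLogger(object):
        """Accumulates warnings so a test can assert on them later."""
        def __init__(self):
            self.warning_str = ""
        def debug(self, message):
            pass
        def info(self, message):
            pass
        def warning(self, message):
            self.warning_str += message
        def error(self, message):
            pass
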
 
diff --git a/ruffus/test/test_exceptions.py b/ruffus/test/test_exceptions.py
index 6a6ff84..a073bc0 100755
--- a/ruffus/test/test_exceptions.py
+++ b/ruffus/test/test_exceptions.py
@@ -20,10 +20,9 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "parallel", "pipeline_run", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, parallel
+
 
 
 
@@ -51,7 +50,7 @@ except:
 class Test_ruffus(unittest.TestCase):
     def test_ruffus (self):
         try:
-            pipeline_run(multiprocess = 50, verbose = 0)
+            pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
         except ruffus.ruffus_exceptions.RethrownJobError:
             return
         raise Exception("Missing exception")
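
When any job raises, pipeline_run() collects the per-job failures and rethrows them as a single ruffus.ruffus_exceptions.RethrownJobError, which is exactly what this test waits for. A minimal sketch of a pipeline expected to fail (task name and message are illustrative):

    import ruffus
    from ruffus import parallel, pipeline_run

    @parallel([["A", 1], ["B", 2]])
    def deliberately_fail(name, value):
        raise Exception("job %s failed on purpose" % name)

    try:
        pipeline_run(multiprocess=2, verbose=0, pipeline="main")
    except ruffus.ruffus_exceptions.RethrownJobError:
        # one aggregated exception stands in for all failed jobs
        pass
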
diff --git a/ruffus/test/test_file_name_parameters.py b/ruffus/test/test_file_name_parameters.py
index 2e92528..3ff9536 100755
--- a/ruffus/test/test_file_name_parameters.py
+++ b/ruffus/test/test_file_name_parameters.py
@@ -9,6 +9,8 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.abspath(os.path.splitext(__file__)[0])
+exe_path = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
 import sys
 
 # add grandparent to search path for testing
@@ -19,19 +21,12 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
+
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = list(map(__import__, [ruffus_name]))[0]
-task = ruffus.task
-combine = ruffus.combine
-non_str_sequence = ruffus.file_name_parameters.non_str_sequence
-needs_update_check_modify_time  = ruffus.file_name_parameters.needs_update_check_modify_time
-check_input_files_exist         = ruffus.file_name_parameters.check_input_files_exist
-args_param_factory              = ruffus.file_name_parameters.args_param_factory
-open_job_history                = ruffus.file_name_parameters.open_job_history
-
-#from ruffus.file_name_parameters import *
+from ruffus import task, combine
+from ruffus.file_name_parameters import non_str_sequence
+from ruffus.file_name_parameters import needs_update_check_modify_time, check_input_files_exist, args_param_factory, open_job_history
 
 
 
@@ -58,8 +53,6 @@ except ImportError:
 
 dumps = json.dumps
 
-exe_path = os.path.join(os.path.split(os.path.abspath(sys.argv[0]))[0], "..")
-test_path = os.path.normpath(os.path.join(exe_path, "test", "file_name_parameters"))
 
 
 
@@ -191,24 +184,24 @@ t5 = task.Task(list_generator_factory, "module.func5"); t5.param_generator_func
 next_task_id = 1
 class Test_files_re_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/f%d.output" % (test_path, 0))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/f%d.output" % (tempdir, 0))
         for i in range(3):
-            touch("%s/f%d.test" % (test_path, i))
+            touch("%s/f%d.test" % (tempdir, i))
         time.sleep(0.1)
-        touch("%s/f%d.output" % (test_path, 1))
-        touch("%s/f%d.output" % (test_path, 2))
+        touch("%s/f%d.output" % (tempdir, 1))
+        touch("%s/f%d.output" % (tempdir, 2))
         self.tasks = [t1, t2, t3, t4, t5]
 
 
 
     def tearDown(self):
         for i in range(3):
-            os.unlink("%s/f%d.test" % (test_path, i))
+            os.unlink("%s/f%d.test" % (tempdir, i))
         for i in range(3):
-            os.unlink("%s/f%d.output" % (test_path, i))
-        os.removedirs(test_path)
+            os.unlink("%s/f%d.output" % (tempdir, i))
+        os.removedirs(tempdir)
         pass
 
 
@@ -264,16 +257,16 @@ class Test_files_re_param_factory(unittest.TestCase):
         """
         test combining operator
         """
-        paths = self.files_re(test_path + "/*", r"(.*).test$", combine(r"\1.input"), r"\1.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.files_re(tempdir + "/*", r"(.*).test$", combine(r"\1.input"), r"\1.output")
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                             [
                              (('DIR/f0.input',), 'DIR/f0.output'),
                              (('DIR/f1.input',), 'DIR/f1.output'),
                              (('DIR/f2.input',), 'DIR/f2.output'),
                              ]
             )
-        paths = self.files_re(test_path + "/*", "(.*).test$", combine(r"\1.input"), r"combined.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.files_re(tempdir + "/*", "(.*).test$", combine(r"\1.input"), r"combined.output")
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                             [(('DIR/f0.input',
                                'DIR/f1.input',
                                'DIR/f2.input'), 'combined.output')])
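
The combine() wrapper groups inputs by their substituted output name: when the output pattern still references a regex capture, each input keeps its own job, but a fixed name such as "combined.output" collapses every matching input into a single job, as the two assertions above show. A minimal sketch with the legacy files_re decorator (file names are illustrative):

    from ruffus import files_re, combine

    # all *.test files whose substituted output is the same single name
    # arrive together as one job's combined input tuple
    @files_re("*.test", r"(.*).test$", combine(r"\1.input"), "combined.output")
    def combine_all(input_files, output_file):
        open(output_file, "w").close()
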
@@ -286,20 +279,20 @@ class Test_files_re_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        paths = self.files_re(test_path + "/*", "(.*).test$", r"\1.input", r"\1.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.files_re(tempdir + "/*", "(.*).test$", r"\1.input", r"\1.output")
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                         [('DIR/f0.input', 'DIR/f0.output'),
                          ('DIR/f1.input', 'DIR/f1.output'),
                          ('DIR/f2.input', 'DIR/f2.output')])
-        self.assertTrue(self.check_input_files_exist(test_path + "/*", "(.*).test$",
+        self.assertTrue(self.check_input_files_exist(tempdir + "/*", "(.*).test$",
                                                         r"\1.test", r"\1.output"))
 
 
         #
         # nested forms
         #
-        paths = self.files_re(test_path + "/*", "(.*).test$", [r"\1.input",2,["something", r"\1"]], r"\1.output", r"\1.extra", 5)
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.files_re(tempdir + "/*", "(.*).test$", [r"\1.input",2,["something", r"\1"]], r"\1.output", r"\1.extra", 5)
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                         [(['DIR/f0.input', 2, ['something', 'DIR/f0']], 'DIR/f0.output', 'DIR/f0.extra', 5),
                          (['DIR/f1.input', 2, ['something', 'DIR/f1']], 'DIR/f1.output', 'DIR/f1.extra', 5),
                          (['DIR/f2.input', 2, ['something', 'DIR/f2']], 'DIR/f2.output', 'DIR/f2.extra', 5)])
@@ -307,8 +300,8 @@ class Test_files_re_param_factory(unittest.TestCase):
         #
         # only output
         #
-        paths = self.files_re(test_path + "/*", ".*/(.*).test$", r"\1.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.files_re(tempdir + "/*", ".*/(.*).test$", r"\1.output")
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                             [('DIR/f0.test', 'f0.output'),
                              ('DIR/f1.test', 'f1.output'),
                              ('DIR/f2.test', 'f2.output')])
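
The needs_update_check_modify_time checks that follow encode the usual make-style rule: a job is out of date when its output is missing or older than its input, which is why only f0 (whose .output was touched before its .test) reports True. Roughly, and not ruffus's actual implementation:

    import os

    def out_of_date(input_path, output_path):
        # missing output, or output older than input => needs rerunning
        if not os.path.exists(output_path):
            return True
        return os.path.getmtime(output_path) < os.path.getmtime(input_path)
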
@@ -320,12 +313,12 @@ class Test_files_re_param_factory(unittest.TestCase):
         #
         # check simple is up to date
         #
-        self.assertEqual([res[0] for res in self.needs_update_check_modify_time(test_path + "/*",
+        self.assertEqual([res[0] for res in self.needs_update_check_modify_time(tempdir + "/*",
                             "(.*).test$", r"\1.output")], [True, False, False])
         #
         # check complex is up to date
         #
-        self.assertEqual([res[0] for res in self.needs_update_check_modify_time(test_path + "/*",
+        self.assertEqual([res[0] for res in self.needs_update_check_modify_time(tempdir + "/*",
                             "(.*).test$", [1,2,[[r"\1.output",
                                                  r"\1.output"]]])], [True, False, False])
 
@@ -338,7 +331,7 @@ class Test_files_re_param_factory(unittest.TestCase):
         # simple 1 input, 1 output
         #
         paths = self.files_re(file_list, r"(.*).test$", r"\1.input", r"\1.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                         [('DIR/f0.input', 'DIR/f0.output'),
                          ('DIR/f1.input', 'DIR/f1.output'),
                          ('DIR/f2.input', 'DIR/f2.output')])
@@ -347,7 +340,7 @@ class Test_files_re_param_factory(unittest.TestCase):
         # nested forms
         #
         paths = self.files_re(file_list, "(.*).test$", [r"\1.input",2,["something", r"\1"]], r"\1.output", r"\1.extra", 5)
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                         [(['DIR/f0.input', 2, ['something', 'DIR/f0']], 'DIR/f0.output', 'DIR/f0.extra', 5),
                          (['DIR/f1.input', 2, ['something', 'DIR/f1']], 'DIR/f1.output', 'DIR/f1.extra', 5),
                          (['DIR/f2.input', 2, ['something', 'DIR/f2']], 'DIR/f2.output', 'DIR/f2.extra', 5)])
@@ -356,7 +349,7 @@ class Test_files_re_param_factory(unittest.TestCase):
         # only output
         #
         paths = self.files_re(file_list, ".*/(.*).test$", r"\1.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                             [('DIR/f0.test', 'f0.output'),
                              ('DIR/f1.test', 'f1.output'),
                              ('DIR/f2.test', 'f2.output')])
@@ -409,24 +402,29 @@ class Test_files_re_param_factory(unittest.TestCase):
 
 class Test_split_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/f%d.output" % (test_path, 0))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/f%d.output" % (tempdir, 0))
         for i in range(3):
-            touch("%s/f%d.test" % (test_path, i))
+            touch("%s/f%d.test" % (tempdir, i))
         time.sleep(0.1)
-        touch("%s/f%d.output" % (test_path, 1))
-        touch("%s/f%d.output" % (test_path, 2))
+        touch("%s/f%d.output" % (tempdir, 1))
+        touch("%s/f%d.output" % (tempdir, 2))
+        for ii in range(3):
+            touch("%s/%d.test_match1" % (tempdir, ii))
+            touch("%s/%d.test_match2" % (tempdir, ii))
 
         self.tasks = [t1, t2, t3, t4, t5]
 
 
     def tearDown(self):
         for i in range(3):
-            os.unlink("%s/f%d.test" % (test_path, i))
+            os.unlink("%s/f%d.test" % (tempdir, i))
+            os.unlink("%s/%d.test_match1" % (tempdir, i))
+            os.unlink("%s/%d.test_match2" % (tempdir, i))
         for i in range(3):
-            os.unlink("%s/f%d.output" % (test_path, i))
-        os.removedirs(test_path)
+            os.unlink("%s/f%d.output" % (tempdir, i))
+        os.removedirs(tempdir)
         pass
 
 
@@ -466,20 +464,31 @@ class Test_split_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        paths = self.do_task_split(test_path + "/*", [exe_path + "/a*.py", exe_path + "/r*.py"])
-        self.assertEqual(recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR"),
-                        (   ['DIR/f0.output',
-                             'DIR/f0.test',
-                             'DIR/f1.output',
-                             'DIR/f1.test',
-                             'DIR/f2.output',
-                             'DIR/f2.test',
-                             ],
-                            ['DIR/adjacent_pairs_iterate.py',
-                             'DIR/ruffus_exceptions.py',
-                             'DIR/ruffus_utility.py',
-                             'DIR/ruffus_version.py'
-                             ]              ))
+        paths = self.do_task_split(tempdir + "/*", [tempdir + "/*.test_match1", tempdir + "/*.test_match2"])
+        self.assertEqual(recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR"),
+                        (   ['DIR/0.test_match1',
+                            'DIR/0.test_match2',
+                            'DIR/1.test_match1',
+                            'DIR/1.test_match2',
+                            'DIR/2.test_match1',
+                            'DIR/2.test_match2',
+                            'DIR/f0.output',
+                            'DIR/f0.test',
+                            'DIR/f1.output',
+                            'DIR/f1.test',
+                            'DIR/f2.output',
+                            'DIR/f2.test'],
+                            ['DIR/0.test_match1',
+                            'DIR/1.test_match1',
+                            'DIR/2.test_match1',
+                            'DIR/0.test_match2',
+                            'DIR/1.test_match2',
+                            'DIR/2.test_match2']              ))
+
+
+
+
+
     def test_tasks(self):
         """
         test whether tasks can be used to specify dependencies
@@ -490,52 +499,59 @@ class Test_split_param_factory(unittest.TestCase):
                                                   "module.func3",
                                                   "module.func4",
                                                   "module.func5"),
-                                 test_path + "/*"],
-                                [exe_path + "/a*.py",                   # output params
-                                 exe_path + "/r*.py",
+                                 tempdir + "/*"],
+                                [tempdir + "/*.test_match1",                    # output params
+                                 tempdir + "/*.test_match2",
                                  "extra.file"],
                                 6)                                      # extra params
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR")
         self.assertEqual(paths,
-                        ([
-                         5,
-                         ['output4.test', 'output.ignored'],
-                         'output1.test',
-                         'output2.test',
-                         'output3.test',
-                         'output.ignored',
-                         (2, 'output5.test'),
-                         'DIR/f0.output',
-                         'DIR/f0.test',
-                         'DIR/f1.output',
-                         'DIR/f1.test',
-                         'DIR/f2.output',
-                         'DIR/f2.test'],
-                        ['DIR/adjacent_pairs_iterate.py',
-                         'DIR/ruffus_exceptions.py',
-                         'DIR/ruffus_utility.py',
-                         'DIR/ruffus_version.py',
-                         'extra.file'],
-                        6))
+                        ([  5,
+                            ['output4.test', 'output.ignored'],
+                            'output1.test',
+                            'output2.test',
+                            'output3.test',
+                            'output.ignored',
+                            (2, 'output5.test'),
+                            'DIR/0.test_match1',
+                            'DIR/0.test_match2',
+                            'DIR/1.test_match1',
+                            'DIR/1.test_match2',
+                            'DIR/2.test_match1',
+                            'DIR/2.test_match2',
+                            'DIR/f0.output',
+                            'DIR/f0.test',
+                            'DIR/f1.output',
+                            'DIR/f1.test',
+                            'DIR/f2.output',
+                            'DIR/f2.test'],
+                            ['DIR/0.test_match1',
+                            'DIR/1.test_match1',
+                            'DIR/2.test_match1',
+                            'DIR/0.test_match2',
+                            'DIR/1.test_match2',
+                            'DIR/2.test_match2',
+                            'extra.file'],
+                            6))
 
 
         # single job output consisting of a single file
-        paths = self.do_task_split(task.output_from("module.func2"), exe_path + "/a*.py")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
-        self.assertEqual(paths, ('output.ignored', ['DIR_E/adjacent_pairs_iterate.py']))
+        paths = self.do_task_split(task.output_from("module.func2"), tempdir + "/*.test_match1")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
+        self.assertEqual(paths,  ('output.ignored', ['DIR/0.test_match1', 'DIR/1.test_match1', 'DIR/2.test_match1']))
 
-        paths = self.do_task_split([task.output_from("module.func2")], exe_path + "/a*.py")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
-        self.assertEqual(paths, (['output.ignored'], ['DIR_E/adjacent_pairs_iterate.py']))
+        paths = self.do_task_split([task.output_from("module.func2")], tempdir + "/*.test_match1")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
+        self.assertEqual(paths,  (['output.ignored'], ['DIR/0.test_match1', 'DIR/1.test_match1', 'DIR/2.test_match1']))
 
         # single job output consisting of a list
-        paths = self.do_task_split(task.output_from("module.func4"), exe_path + "/a*.py")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
-        self.assertEqual(paths, ((2, 'output5.test'), ['DIR_E/adjacent_pairs_iterate.py']) )
+        paths = self.do_task_split(task.output_from("module.func4"), tempdir + "/*.test_match1")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
+        self.assertEqual(paths,  ((2, 'output5.test'), ['DIR/0.test_match1', 'DIR/1.test_match1', 'DIR/2.test_match1']) )
 
-        paths = self.do_task_split([task.output_from("module.func4")], exe_path + "/a*.py")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
-        self.assertEqual(paths, ([(2, 'output5.test')], ['DIR_E/adjacent_pairs_iterate.py']))
+        paths = self.do_task_split([task.output_from("module.func4")], tempdir + "/*.test_match1")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
+        self.assertEqual(paths, ([(2, 'output5.test')], ['DIR/0.test_match1', 'DIR/1.test_match1', 'DIR/2.test_match1']))
 
 #=========================================================================================
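
Test_split_param_factory above exercises the parameter factory behind @split, where one input (or glob) fans out into an indeterminate number of outputs described by globs; note how glob patterns in both positions are expanded against whatever files currently exist. A minimal sketch of the decorator this factory serves (names are illustrative):

    from ruffus import split

    @split("all_data.input", "chunk.*.output")
    def split_data(input_file, output_files):
        # output_files is whatever matched the glob beforehand;
        # the job itself decides how many chunks it writes
        for i in range(3):
            open("chunk.%d.output" % i, "w").close()
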
 
@@ -545,24 +561,24 @@ class Test_split_param_factory(unittest.TestCase):
 
 class Test_merge_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/f%d.output" % (test_path, 0))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/f%d.output" % (tempdir, 0))
         for i in range(3):
-            touch("%s/f%d.test" % (test_path, i))
+            touch("%s/f%d.test" % (tempdir, i))
         time.sleep(0.1)
-        touch("%s/f%d.output" % (test_path, 1))
-        touch("%s/f%d.output" % (test_path, 2))
+        touch("%s/f%d.output" % (tempdir, 1))
+        touch("%s/f%d.output" % (tempdir, 2))
 
         self.tasks = [t1, t2, t3, t4, t5]
 
 
     def tearDown(self):
         for i in range(3):
-            os.unlink("%s/f%d.test" % (test_path, i))
+            os.unlink("%s/f%d.test" % (tempdir, i))
         for i in range(3):
-            os.unlink("%s/f%d.output" % (test_path, i))
-        os.removedirs(test_path)
+            os.unlink("%s/f%d.output" % (tempdir, i))
+        os.removedirs(tempdir)
         pass
 
 
@@ -611,21 +627,21 @@ class Test_merge_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        paths = self.do_task_merge(test_path + "/*",
+        paths = self.do_task_merge(tempdir + "/*",
                                 ["test1",                               # output params
                                  "test2",
                                  "extra.file"])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_result)
 
         #
         #   named parameters
         #
-        paths = self.do_task_merge(input = test_path + "/*",
+        paths = self.do_task_merge(input = tempdir + "/*",
                                 output = ["test1",                               # output params
                                          "test2",
                                          "extra.file"])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_result)
 
 
@@ -639,7 +655,7 @@ class Test_merge_param_factory(unittest.TestCase):
                                                   "module.func3",
                                                   "module.func4",
                                                   "module.func5"),
-                                 test_path + "/*"],
+                                 tempdir + "/*"],
                                 ["test1",                               # output params
                                  "test2",
                                  "extra.file"],
@@ -666,14 +682,14 @@ class Test_merge_param_factory(unittest.TestCase):
 
         # unnamed arguments
         paths = self.do_task_merge(*unnamed_args)
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         # NAMED ARGUMENTS
         paths = self.do_task_merge(input = unnamed_args[0],
                                    output = unnamed_args[1],
                                    extras = unnamed_args[2:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         paths = self.do_task_merge(task.output_from("module.func2"), "output", "extra")
@@ -681,20 +697,20 @@ class Test_merge_param_factory(unittest.TestCase):
 
         # single job output consisting of a single file
         paths = self.do_task_merge(task.output_from("module.func2"), "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, ('output.ignored', 'output'))
 
         paths = self.do_task_merge([task.output_from("module.func2")], "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, (['output.ignored'], 'output'))
 
         # single job output consisting of a list
         paths = self.do_task_merge(task.output_from("module.func4"), "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, ((2, 'output5.test'), 'output'))
 
         paths = self.do_task_merge([task.output_from("module.func4")], "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, ([(2, 'output5.test')], 'output'))
 
 #=========================================================================================
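
Conversely, Test_merge_param_factory drives the factory behind @merge, which gathers many inputs into a single job with one output; tasks, globs and plain file names can all appear on the input side, as the tests above demonstrate. A minimal sketch (names are illustrative):

    from ruffus import merge

    @merge("chunk.*.output", "merged.results")
    def merge_chunks(input_files, output_file):
        with open(output_file, "w") as oo:
            for name in sorted(input_files):
                oo.write(name + "\n")
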
@@ -705,14 +721,14 @@ class Test_merge_param_factory(unittest.TestCase):
 
 class Test_transform_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/f%d.output" % (test_path, 0))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/f%d.output" % (tempdir, 0))
         for i in range(3):
-            touch("%s/f%d.test" % (test_path, i))
+            touch("%s/f%d.test" % (tempdir, i))
         time.sleep(0.1)
-        touch("%s/f%d.output" % (test_path, 1))
-        touch("%s/f%d.output" % (test_path, 2))
+        touch("%s/f%d.output" % (tempdir, 1))
+        touch("%s/f%d.output" % (tempdir, 2))
 
         self.tasks = [t1, t2, t3, t4, t5]
         self.maxDiff = None
@@ -720,10 +736,10 @@ class Test_transform_param_factory(unittest.TestCase):
 
     def tearDown(self):
         for i in range(3):
-            os.unlink("%s/f%d.test" % (test_path, i))
+            os.unlink("%s/f%d.test" % (tempdir, i))
         for i in range(3):
-            os.unlink("%s/f%d.output" % (test_path, i))
-        os.removedirs(test_path)
+            os.unlink("%s/f%d.output" % (tempdir, i))
+        os.removedirs(tempdir)
         pass
 
 
@@ -774,10 +790,10 @@ class Test_transform_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        paths = self.do_task_transform(test_path + "/*.test", task.suffix(".test"),
+        paths = self.do_task_transform(tempdir + "/*.test", task.suffix(".test"),
                                             [".output1", ".output2"], ".output3")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             ('DIR/f0.test', ['DIR/f0.output1', 'DIR/f0.output2'], ".output3"),
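
With suffix(), only the output parameters are rewritten (the matched suffix is swapped for each replacement); trailing extras such as ".output3" above pass through verbatim. A minimal sketch of the corresponding decorator (names are illustrative):

    from ruffus import transform, suffix

    @transform("*.test", suffix(".test"), [".output1", ".output2"], ".output3")
    def suffix_transform(input_file, output_files, extra):
        # extra arrives as the literal string ".output3"
        for output_file in output_files:
            open(output_file, "w").close()
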
@@ -791,7 +807,7 @@ class Test_transform_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        unnamed_args = [test_path + "/*.test",
+        unnamed_args = [tempdir + "/*.test",
                                             task.formatter("/(?P<name>\w+).test$"),
                                             ["{path[0]}/{name[0]}.output1{ext[0]}", "{path[0]}/{name[0]}.output2"], "{path[0]}/{name[0]}.output3"]
         expected_results = [
@@ -800,12 +816,12 @@ class Test_transform_param_factory(unittest.TestCase):
                             ('DIR/f2.test', ['DIR/f2.output1.test', 'DIR/f2.output2'], "DIR/f2.output3"),]
         # unnamed args
         paths = self.do_task_transform(*unnamed_args)
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_transform(input = unnamed_args[0], filter = unnamed_args[1], output = unnamed_args[2], extras=unnamed_args[3:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
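
formatter() is the most flexible of the three filters: named regex groups and path components (path, basename, ext, ...) are exposed for substitution in outputs and extras alike, indexed per input file. A minimal sketch mirroring the patterns above (the directory name is illustrative):

    from ruffus import transform, formatter

    @transform("data_dir/*.test", formatter(r"/(?P<name>\w+)\.test$"),
               ["{path[0]}/{name[0]}.output1{ext[0]}",
                "{path[0]}/{name[0]}.output2"],
               "{path[0]}/{name[0]}.output3")
    def formatter_transform(input_file, output_files, extra):
        pass
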
 
     def test_regex(self):
@@ -815,7 +831,7 @@ class Test_transform_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        unnamed_args = [test_path + "/*.test",
+        unnamed_args = [tempdir + "/*.test",
                         task.regex(r"(.*)\.test"),
                                             [r"\1.output1", r"\1.output2"], r"\1.output3"]
         expected_results = [
@@ -826,12 +842,12 @@ class Test_transform_param_factory(unittest.TestCase):
 
         # unnamed args
         paths = self.do_task_transform(*unnamed_args)
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_transform(input = unnamed_args[0], filter = unnamed_args[1], output = unnamed_args[2], extras=unnamed_args[3:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
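
regex() sits between the two: substitution uses back-references into a full regular-expression match rather than a plain suffix, and unlike suffix() the extras are substituted as well, which is why \1.output3 above resolves to DIR/f0.output3 and so on. A minimal sketch (the directory name is illustrative):

    from ruffus import transform, regex

    @transform("data_dir/*.test", regex(r"(.*)\.test"),
               [r"\1.output1", r"\1.output2"], r"\1.output3")
    def regex_transform(input_file, output_files, extra):
        # extra is substituted too, e.g. "data_dir/f0.output3"
        pass
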
 
 
@@ -843,22 +859,22 @@ class Test_transform_param_factory(unittest.TestCase):
         # simple 1 input, 1 output
         #
         #
-        paths = self.do_task_transform(test_path + "/*.test", task.regex(r"(.*)\.test"),
+        paths = self.do_task_transform(tempdir + "/*.test", task.regex(r"(.*)\.test"),
                                             task.inputs(r"\1.testwhat"),
                                             [r"\1.output1", r"\1.output2"])
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             ('DIR/f0.testwhat', ['DIR/f0.output1', 'DIR/f0.output2']),
                             ('DIR/f1.testwhat', ['DIR/f1.output1', 'DIR/f1.output2']),
                             ('DIR/f2.testwhat', ['DIR/f2.output1', 'DIR/f2.output2']),
                                            ])
-        paths = self.do_task_transform(test_path + "/*.test", task.suffix(".test"),
+        paths = self.do_task_transform(tempdir + "/*.test", task.suffix(".test"),
                                             task.inputs(r"a.testwhat"),
                                             [".output1", ".output2"], ".output3")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             ('a.testwhat', ['DIR/f0.output1', 'DIR/f0.output2'], '.output3'),
@@ -868,7 +884,7 @@ class Test_transform_param_factory(unittest.TestCase):
         # add inputs
         #
         #
-        unnamed_args = [test_path + "/*.test", task.regex(r"(.*)\.test"),
+        unnamed_args = [tempdir + "/*.test", task.regex(r"(.*)\.test"),
                                             task.add_inputs(r"\1.testwhat"),
                                             [r"\1.output1", r"\1.output2"]]
         expected_results = [
@@ -879,27 +895,27 @@ class Test_transform_param_factory(unittest.TestCase):
 
         # unnamed args
         paths = self.do_task_transform(*unnamed_args)
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_transform(input = unnamed_args[0], filter = unnamed_args[1], add_inputs = unnamed_args[2], output = unnamed_args[3], extras=unnamed_args[4:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_transform(*unnamed_args[0:2], add_inputs = unnamed_args[2].args, output = unnamed_args[3], extras=unnamed_args[4:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
 
 
 
-        paths = self.do_task_transform(test_path + "/*.test", task.suffix(".test"),
+        paths = self.do_task_transform(tempdir + "/*.test", task.suffix(".test"),
                                             task.add_inputs(r"a.testwhat"),
                                             [".output1", ".output2"], ".output3")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             (('DIR/f0.test','a.testwhat'), ['DIR/f0.output1', 'DIR/f0.output2'], '.output3'),
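
The distinction exercised here: inputs(...) discards the matched input and substitutes its own, while add_inputs(...) keeps the matched file and appends the extra dependencies, which yields the ('DIR/f0.test', 'a.testwhat') tuples above. A minimal sketch of both (names are illustrative):

    from ruffus import transform, suffix, inputs, add_inputs

    # inputs(...) replaces the matched file outright
    @transform("*.test", suffix(".test"), inputs("a.testwhat"), ".output1")
    def replace_the_input(input_file, output_file):
        pass

    # add_inputs(...) keeps the match and appends to it
    @transform("*.test", suffix(".test"), add_inputs("a.testwhat"), ".output2")
    def extend_the_inputs(input_files, output_file):
        pass
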
@@ -916,10 +932,10 @@ class Test_transform_param_factory(unittest.TestCase):
                                                           "module.func3",
                                                           "module.func4",
                                                           "module.func5"),
-                                        test_path + "/*.test"],
+                                        tempdir + "/*.test"],
                                         task.regex(r"(.*)\.test"),
                                         [r"\1.output1", r"\1.output2"], r"\1.output3")
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                                 [
                                         ((2, 'output5.test'), ['output5.output1', 'output5.output2'], 'output5.output3'),
@@ -935,24 +951,24 @@ class Test_transform_param_factory(unittest.TestCase):
 
         # single job output consisting of a single file
         paths = self.do_task_transform(task.output_from("module.func2"), task.regex(r"(.*)\..*"),  r"\1.output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [('output.ignored', 'output.output')])
 
 
 
         # Same output if task specified as part of a list of tasks
         paths = self.do_task_transform([task.output_from("module.func2")], task.regex(r"(.*)\..*"),  "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [('output.ignored', 'output')])
 
         # single job output consisting of a list
         paths = self.do_task_transform(task.output_from("module.func4"), task.regex(r"(.*)\..*"),  "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [((2, 'output5.test'), 'output')]  )
 
         # Same output if task specified as part of a list of tasks
         paths = self.do_task_transform([task.output_from("module.func4")], task.regex(r"(.*)\..*"),  "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [((2, 'output5.test'), 'output')]  )
 
 #
@@ -965,31 +981,31 @@ class Test_transform_param_factory(unittest.TestCase):
 
 class Test_collate_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/f%d.output" % (test_path, 0))
-        touch("%s/e%d.output" % (test_path, 0))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/f%d.output" % (tempdir, 0))
+        touch("%s/e%d.output" % (tempdir, 0))
         for i in range(3):
-            touch("%s/f%d.test" % (test_path, i))
+            touch("%s/f%d.test" % (tempdir, i))
         for i in range(3):
-            touch("%s/e%d.test" % (test_path, i))
+            touch("%s/e%d.test" % (tempdir, i))
         time.sleep(0.1)
-        touch("%s/f%d.output" % (test_path, 1))
-        touch("%s/f%d.output" % (test_path, 2))
-        touch("%s/e%d.output" % (test_path, 1))
-        touch("%s/e%d.output" % (test_path, 2))
+        touch("%s/f%d.output" % (tempdir, 1))
+        touch("%s/f%d.output" % (tempdir, 2))
+        touch("%s/e%d.output" % (tempdir, 1))
+        touch("%s/e%d.output" % (tempdir, 2))
 
         self.tasks = [t1, t2, t3, t4, t5]
 
 
     def tearDown(self):
         for i in range(3):
-            os.unlink("%s/f%d.test" % (test_path, i))
-            os.unlink("%s/e%d.test" % (test_path, i))
+            os.unlink("%s/f%d.test" % (tempdir, i))
+            os.unlink("%s/e%d.test" % (tempdir, i))
         for i in range(3):
-            os.unlink("%s/f%d.output" % (test_path, i))
-            os.unlink("%s/e%d.output" % (test_path, i))
-        os.removedirs(test_path)
+            os.unlink("%s/f%d.output" % (tempdir, i))
+            os.unlink("%s/e%d.output" % (tempdir, i))
+        os.removedirs(tempdir)
         pass
 
 
@@ -1024,8 +1040,8 @@ class Test_collate_param_factory(unittest.TestCase):
         """
         test regex collate with globs
         """
-        paths = self.do_task_collate(test_path + "/*", task.regex(r"(.*).test$"), r"\1.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.do_task_collate(tempdir + "/*", task.regex(r"(.*).test$"), r"\1.output")
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                             [
                                 (('DIR/e0.test',), 'DIR/e0.output'),
                                 (('DIR/e1.test',), 'DIR/e1.output'),
@@ -1035,8 +1051,8 @@ class Test_collate_param_factory(unittest.TestCase):
                                 (('DIR/f2.test',), 'DIR/f2.output'),
                              ]
             )
-        paths = self.do_task_collate(test_path + "/*", task.regex("(.*).test$"), task.inputs(r"\1.input2"), r"combined.output")
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"),
+        paths = self.do_task_collate(tempdir + "/*", task.regex("(.*).test$"), task.inputs(r"\1.input2"), r"combined.output")
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"),
                             [((
                                'DIR/e0.input2',
                                'DIR/e1.input2',
@@ -1049,10 +1065,10 @@ class Test_collate_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        paths = self.do_task_collate(test_path + "/*.test", task.regex(r"(.*/[ef]).*\.test"),
+        paths = self.do_task_collate(tempdir + "/*.test", task.regex(r"(.*/[ef]).*\.test"),
                                             [r"\1.output1", r"\1.output2"], r"\1.extra")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             (
@@ -1073,10 +1089,10 @@ class Test_collate_param_factory(unittest.TestCase):
         #
         # collating using inputs
         #
-        paths = self.do_task_collate(test_path + "/*.test", task.regex(r"(.*/[ef])(.).*\.test"),
+        paths = self.do_task_collate(tempdir + "/*.test", task.regex(r"(.*/[ef])(.).*\.test"),
                                             task.inputs(r"\1\2.whoopee"),  [r"\1.output1", r"\1.output2"], r"\1.extra")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             (
@@ -1093,10 +1109,10 @@ class Test_collate_param_factory(unittest.TestCase):
         #
         # collating using inputs where some files do not match regex
         #
-        paths = self.do_task_collate(test_path + "/*.test", task.regex(r"(.*/f)[a-z0-9]+\.test"),
+        paths = self.do_task_collate(tempdir + "/*.test", task.regex(r"(.*/f)[a-z0-9]+\.test"),
                                             task.inputs(r"\1.whoopee"),  [r"\1.output1", r"\1.output2"], r"\1.extra")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [(('DIR/f.whoopee',), ['DIR/f.output1', 'DIR/f.output2'], 'DIR/f.extra')])
 
@@ -1104,10 +1120,10 @@ class Test_collate_param_factory(unittest.TestCase):
         #
         # collating using inputs where multiple copies of the same input names are removed
         #
-        paths = self.do_task_collate(test_path + "/*.test", task.regex(r"(.*/[ef])[a-z0-9]+\.test"),
+        paths = self.do_task_collate(tempdir + "/*.test", task.regex(r"(.*/[ef])[a-z0-9]+\.test"),
                                             task.inputs(r"\1.whoopee"),  [r"\1.output1", r"\1.output2"], r"\1.extra")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             (
@@ -1126,10 +1142,10 @@ class Test_collate_param_factory(unittest.TestCase):
         #   test python set object. Note that set is constructed with the results of the substitution
         #
 
-        unnamed_args = [test_path + "/*.test",
+        unnamed_args = [tempdir + "/*.test",
                         task.regex(r"(.*/[ef])[a-z0-9]+\.test"),
                         task.inputs(r"\1.whoopee"),
-                        set([r"\1.output1", r"\1.output2", test_path + "/e.output2"]),
+                        set([r"\1.output1", r"\1.output2", tempdir + "/e.output2"]),
                         r"\1.extra"]
         expected_results = [
                             (
@@ -1146,22 +1162,22 @@ class Test_collate_param_factory(unittest.TestCase):
 
         # unnamed args
         paths = self.do_task_collate(*unnamed_args)
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_collate(input = unnamed_args[0], filter = unnamed_args[1], replace_inputs = unnamed_args[2], output = unnamed_args[3], extras=unnamed_args[4:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_collate(*unnamed_args[0:2], replace_inputs = unnamed_args[2], output = unnamed_args[3], extras=unnamed_args[4:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
         #named args
         paths = self.do_task_collate(*unnamed_args[0:2], replace_inputs = unnamed_args[2].args[0], output = unnamed_args[3], extras=unnamed_args[4:])
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths, expected_results)
 
 
@@ -1175,10 +1191,10 @@ class Test_collate_param_factory(unittest.TestCase):
                                                           "module.func3",
                                                           "module.func4",
                                                           "module.func5"),
-                                        test_path + "/*.test"],
+                                        tempdir + "/*.test"],
                                         task.regex(r"(.*[oef])[a-z0-9]+\.test"),
                                         [r"\1.output1", r"\1.output2"], r"\1.extra")
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             (('DIR/e0.test', 'DIR/e1.test', 'DIR/e2.test'), ['DIR/e.output1', 'DIR/e.output2'], 'DIR/e.extra'),
@@ -1191,11 +1207,11 @@ class Test_collate_param_factory(unittest.TestCase):
                                                           "module.func3",
                                                           "module.func4",
                                                           "module.func5"),
-                                        test_path + "/*.test"],
+                                        tempdir + "/*.test"],
                                         task.regex(r"(.*[oef])[a-z0-9]+\.test"),
                                         task.inputs(r"\1.whoopee"),
                                         [r"\1.output1", r"\1.output2"], r"\1.extra")
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                         [
                             (
@@ -1218,27 +1234,27 @@ class Test_collate_param_factory(unittest.TestCase):
 
         # single job output consisting of a single file
         paths = self.do_task_collate(task.output_from("module.func2"), task.regex(r"(.*)\..*"),  r"\1.output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         #print dumps(paths, indent = 4)
         self.assertEqual(paths, [(('output.ignored',), 'output.output')])
 
 
         # Same output if task specified as part of a list of tasks
         paths = self.do_task_collate([task.output_from("module.func2")], task.regex(r"(.*)\..*"),  "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [(('output.ignored',), 'output')])
 
         #
         # single job output consisting of a list
         #
         paths = self.do_task_collate(task.output_from("module.func4"), task.regex(r"(.*)\..*"),  "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [(((2, 'output5.test'),), 'output')])
 
 
         # Same output if task specified as part of a list of tasks
         paths = self.do_task_collate([task.output_from("module.func4")], task.regex(r"(.*)\..*"),  "output")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths, [(((2, 'output5.test'),), 'output')] )
 
 #=========================================================================================
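
Throughout these collate tests the grouping rule is the same: inputs whose regex substitution produces identical output (and extra) strings are bundled into one job, so e0/e1/e2.test collapse onto a single e.output job. A minimal sketch of the decorator backed by this factory (the directory name is illustrative):

    from ruffus import collate, regex

    # e0/e1/e2.test all substitute to the same "\1.output" string,
    # so they arrive together as one job's input tuple
    @collate("data_dir/*.test", regex(r"(.*/[ef])[0-9]\.test$"), r"\1.output")
    def collate_by_prefix(input_files, output_file):
        open(output_file, "w").close()
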
@@ -1249,24 +1265,24 @@ class Test_collate_param_factory(unittest.TestCase):
 
 class Test_files_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/f%d.output" % (test_path, 0))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/f%d.output" % (tempdir, 0))
         for i in range(3):
-            touch("%s/f%d.test" % (test_path, i))
+            touch("%s/f%d.test" % (tempdir, i))
         time.sleep(0.1)
-        touch("%s/f%d.output" % (test_path, 1))
-        touch("%s/f%d.output" % (test_path, 2))
+        touch("%s/f%d.output" % (tempdir, 1))
+        touch("%s/f%d.output" % (tempdir, 2))
 
         self.tasks = [t1, t2, t3, t4, t5]
 
 
     def tearDown(self):
         for i in range(3):
-            os.unlink("%s/f%d.test" % (test_path, i))
+            os.unlink("%s/f%d.test" % (tempdir, i))
         for i in range(3):
-            os.unlink("%s/f%d.output" % (test_path, i))
-        os.removedirs(test_path)
+            os.unlink("%s/f%d.output" % (tempdir, i))
+        os.removedirs(tempdir)
         pass
 
 
@@ -1304,8 +1320,8 @@ class Test_files_param_factory(unittest.TestCase):
         #
         # Replacement of globs in first parameter
         #
-        paths = self.files(test_path + "/*", "a.test", "b.test")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = self.files(tempdir + "/*", "a.test", "b.test")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths,
                                 [
                                     (
@@ -1324,8 +1340,8 @@ class Test_files_param_factory(unittest.TestCase):
         #
         # Replacement of globs in first parameter in-place
         #
-        paths = self.files([test_path + "/*", "robbie.test"], "a.test", "b.test")
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = self.files([tempdir + "/*", "robbie.test"], "a.test", "b.test")
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths,
                             [
                                     (
@@ -1345,8 +1361,8 @@ class Test_files_param_factory(unittest.TestCase):
         #
         # No Replacement of globs in other parameter of multi-job task
         #
-        paths = self.files([[[test_path + "/*", "robbie.test"], "a.test", "b.test"], ["a.test", ["b.test", 2], "a.*"]])
-        paths = recursive_replace(recursive_replace(paths, test_path, "DIR"), exe_path, "DIR_E")
+        paths = self.files([[[tempdir + "/*", "robbie.test"], "a.test", "b.test"], ["a.test", ["b.test", 2], "a.*"]])
+        paths = recursive_replace(recursive_replace(paths, tempdir, "DIR"), exe_path, "DIR_E")
         self.assertEqual(paths,
                                 [
                                     (
@@ -1449,42 +1465,42 @@ class Test_files_param_factory(unittest.TestCase):
 
 class Test_product_param_factory(unittest.TestCase):
     def setUp(self):
-        if not os.path.exists(test_path):
-            os.makedirs(test_path)
-        touch("%s/a.test1" % (test_path))
-        touch("%s/b.test1" % (test_path))
-        touch("%s/c.test2" % (test_path))
-        touch("%s/d.test2" % (test_path))
-        touch("%s/a.testwhat1" % (test_path))
-        touch("%s/b.testwhat1" % (test_path))
-        touch("%s/c.testwhat2" % (test_path))
-        touch("%s/d.testwhat2" % (test_path))
+        if not os.path.exists(tempdir):
+            os.makedirs(tempdir)
+        touch("%s/a.test1" % (tempdir))
+        touch("%s/b.test1" % (tempdir))
+        touch("%s/c.test2" % (tempdir))
+        touch("%s/d.test2" % (tempdir))
+        touch("%s/a.testwhat1" % (tempdir))
+        touch("%s/b.testwhat1" % (tempdir))
+        touch("%s/c.testwhat2" % (tempdir))
+        touch("%s/d.testwhat2" % (tempdir))
         time.sleep(0.1)
-        touch("%s/a.b.output" % (test_path))
-        touch("%s/a.c.output" % (test_path))
-        touch("%s/b.c.output" % (test_path))
-        touch("%s/b.d.output" % (test_path))
+        touch("%s/a.b.output" % (tempdir))
+        touch("%s/a.c.output" % (tempdir))
+        touch("%s/b.c.output" % (tempdir))
+        touch("%s/b.d.output" % (tempdir))
 
         self.tasks = [t1, t2, t3, t4, t5]
         self.maxDiff = None
 
 
     def tearDown(self):
-        os.unlink("%s/a.test1" % (test_path))
-        os.unlink("%s/b.test1" % (test_path))
-        os.unlink("%s/c.test2" % (test_path))
-        os.unlink("%s/d.test2" % (test_path))
-        os.unlink("%s/a.testwhat1" % (test_path))
-        os.unlink("%s/b.testwhat1" % (test_path))
-        os.unlink("%s/c.testwhat2" % (test_path))
-        os.unlink("%s/d.testwhat2" % (test_path))
-        os.unlink("%s/a.b.output" % (test_path))
-        os.unlink("%s/a.c.output" % (test_path))
-        os.unlink("%s/b.c.output" % (test_path))
-        os.unlink("%s/b.d.output" % (test_path))
-
-
-        os.removedirs(test_path)
+        os.unlink("%s/a.test1" % (tempdir))
+        os.unlink("%s/b.test1" % (tempdir))
+        os.unlink("%s/c.test2" % (tempdir))
+        os.unlink("%s/d.test2" % (tempdir))
+        os.unlink("%s/a.testwhat1" % (tempdir))
+        os.unlink("%s/b.testwhat1" % (tempdir))
+        os.unlink("%s/c.testwhat2" % (tempdir))
+        os.unlink("%s/d.testwhat2" % (tempdir))
+        os.unlink("%s/a.b.output" % (tempdir))
+        os.unlink("%s/a.c.output" % (tempdir))
+        os.unlink("%s/b.c.output" % (tempdir))
+        os.unlink("%s/b.d.output" % (tempdir))
+
+
+        os.removedirs(tempdir)
         pass
 
 
@@ -1522,9 +1538,9 @@ class Test_product_param_factory(unittest.TestCase):
         #
         # simple 1 input, 1 output
         #
-        args = [[test_path + "/a.test1", test_path + "/b.test1"],
+        args = [[tempdir + "/a.test1", tempdir + "/b.test1"],
                  task.formatter("(?:.+/)?(?P<ID>\w+)\.(.+)"),
-                [test_path + "/c.test2", test_path + "/d.test2", test_path + "/e.ignore"],
+                [tempdir + "/c.test2", tempdir + "/d.test2", tempdir + "/e.ignore"],
                 task.formatter("(?:.+/)?(?P<ID>\w+)\.(test2)"),
                 r"{path[0][0]}/{ID[0][0]}.{1[1][0]}.output"]
         expected_result =  [
@@ -1534,23 +1550,23 @@ class Test_product_param_factory(unittest.TestCase):
                             (('DIR/b.test1','DIR/d.test2'),'DIR/b.d.output')
                          ]
         paths = self.do_task_product(*args)
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"), expected_result)
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"), expected_result)
 
         # named parameters
         paths = self.do_task_product(input = args[0], filter = args[1],
                                      input2 = args[2], filter2 = args[3],
                                      output = args [4])
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"), expected_result)
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"), expected_result)
 
         # named parameters
         paths = self.do_task_product(*args[0:2],
                                      input2 = args[2], filter2 = args[3],
                                      output = args [4])
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"), expected_result)
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"), expected_result)
 
         paths = self.do_task_product(*args[0:4],
                                      output = args [4])
-        self.assertEqual(recursive_replace(paths, test_path, "DIR"), expected_result)
+        self.assertEqual(recursive_replace(paths, tempdir, "DIR"), expected_result)
 
 
     def test_inputs(self):
@@ -1561,11 +1577,11 @@ class Test_product_param_factory(unittest.TestCase):
         # (replace) inputs
         #
         #
-        paths = self.do_task_product([test_path + "/a.test1", test_path + "/b.test1"],                          task.formatter("(?:.+/)?(?P<ID>\w+)\.(.+)"),
-                                     [test_path + "/c.test2", test_path + "/d.test2", test_path + "/e.ignore"], task.formatter("(?:.+/)?(?P<ID>\w+)\.(test2)"),
+        paths = self.do_task_product([tempdir + "/a.test1", tempdir + "/b.test1"],                          task.formatter("(?:.+/)?(?P<ID>\w+)\.(.+)"),
+                                     [tempdir + "/c.test2", tempdir + "/d.test2", tempdir + "/e.ignore"], task.formatter("(?:.+/)?(?P<ID>\w+)\.(test2)"),
                                      task.inputs(("{path[0][0]}/{basename[0][0]}.testwhat1", "{path[1][0]}/{basename[1][0]}.testwhat2") ),
                                      r"{path[0][0]}/{ID[0][0]}.{1[1][0]}.output")
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                          [
                             (('DIR/a.testwhat1','DIR/c.testwhat2'),'DIR/a.c.output'),
@@ -1578,12 +1594,12 @@ class Test_product_param_factory(unittest.TestCase):
         # add inputs
         #
         #
-        paths = self.do_task_product([test_path + "/a.test1", test_path + "/b.test1"],                          task.formatter("(?:.+/)?(?P<ID>\w+)\.(.+)"),
-                                     [test_path + "/c.test2", test_path + "/d.test2", test_path + "/e.ignore"], task.formatter("(?:.+/)?(?P<ID>\w+)\.(test2)"),
+        paths = self.do_task_product([tempdir + "/a.test1", tempdir + "/b.test1"],                          task.formatter("(?:.+/)?(?P<ID>\w+)\.(.+)"),
+                                     [tempdir + "/c.test2", tempdir + "/d.test2", tempdir + "/e.ignore"], task.formatter("(?:.+/)?(?P<ID>\w+)\.(test2)"),
                                      task.add_inputs("{path[0][0]}/{basename[0][0]}.testwhat1", "{path[1][0]}/{basename[1][0]}.testwhat2", ),
                                      r"{path[0][0]}/{ID[0][0]}.{1[1][0]}.output")
 
-        paths = recursive_replace(paths, test_path, "DIR")
+        paths = recursive_replace(paths, tempdir, "DIR")
         self.assertEqual(paths,
                          [
                             ((('DIR/a.test1','DIR/c.test2'), 'DIR/a.testwhat1','DIR/c.testwhat2'),'DIR/a.c.output'),
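
The assertions in test_file_name_parameters.py compare each parameter factory's output only after normalising every path with recursive_replace(paths, tempdir, "DIR"), so the expected values stay stable whatever directory the suite runs from. A minimal sketch of such a helper (not the actual ruffus_utility implementation):

    # Sketch only: walk nested lists/tuples of path strings and rewrite a
    # directory prefix so that test expectations are location-independent.
    def recursive_replace(obj, search, replace):
        if isinstance(obj, str):
            return obj.replace(search, replace)
        if isinstance(obj, (list, tuple)):
            return type(obj)(recursive_replace(x, search, replace) for x in obj)
        return obj

    paths = ((("tmp/a.test1", "tmp/c.test2"), "tmp/a.c.output"),)
    assert recursive_replace(paths, "tmp", "DIR") == \
           ((("DIR/a.test1", "DIR/c.test2"), "DIR/a.c.output"),)
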
diff --git a/ruffus/test/test_files_decorator.py b/ruffus/test/test_files_decorator.py
index 2edbee1..b85bcec 100755
--- a/ruffus/test/test_files_decorator.py
+++ b/ruffus/test/test_files_decorator.py
@@ -10,6 +10,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -19,27 +20,10 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
+import ruffus
+from ruffus import transform, follows, pipeline_run, Pipeline, regex, mkdir, files
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-try:
-    attrlist = ruffus.file_name_parameters.__all__
-except AttributeError:
-    attrlist = dir (ruffus.file_name_parameters)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus.file_name_parameters, attr)
-
+#from ruffus.file_name_parameters import
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@@ -102,7 +86,7 @@ def test_job_io(infiles, outfiles, extra_params):
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-tempdir = "tempdir/"
+
 @follows(mkdir(tempdir))
 #
 #    task1
@@ -176,7 +160,7 @@ class Test_task(unittest.TestCase):
 
 
     def test_task (self):
-        pipeline_run(multiprocess = 10, verbose = 0)
+        pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
 
     def test_newstyle_task (self):
         """
diff --git a/ruffus/test/test_filesre_combine.py b/ruffus/test/test_filesre_combine.py
index 2d938fc..1183df3 100755
--- a/ruffus/test/test_filesre_combine.py
+++ b/ruffus/test/test_filesre_combine.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -19,19 +20,12 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, follows, merge, posttask, split, collate, mkdir, regex, files_re, combine
 
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.ruffus_utility import CHECKSUM_FILE_TIMESTAMPS
+from ruffus.combinatorics import *
 
 
 
@@ -75,7 +69,6 @@ species_list["reptiles"].append("crocodile" )
 species_list["fish"   ].append("pufferfish")
 
 
-tempdir = "temp_filesre_combine/"
 def do_write(file_name, what):
     with open(file_name, "a") as oo:
         oo.write(what)
@@ -166,7 +159,7 @@ class Test_ruffus(unittest.TestCase):
 
     def test_ruffus (self):
         ""
-        pipeline_run(multiprocess = 10, verbose = 0)
+        pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
         check_species_correct()
 
 
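The deleted "funky code" block, repeated across these files, imported the package by its directory name and splatted its public attributes into the module namespace; a reconstruction of what it did:

    # Reconstructed sketch of the removed boilerplate: import the parent
    # directory as a package and copy its public names into globals().
    import os
    parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    ruffus = __import__(os.path.basename(parent_dir))
    for attr in getattr(ruffus, "__all__", dir(ruffus)):
        if not attr.startswith("__"):
            globals()[attr] = getattr(ruffus, attr)

The explicit "from ruffus import ..." form that replaces it behaves identically for an installed package, and it lets linters and readers see exactly which names each test uses.
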
diff --git a/ruffus/test/test_filesre_split_and_combine.py b/ruffus/test/test_filesre_split_and_combine.py
index 6c074b2..08c11d2 100755
--- a/ruffus/test/test_filesre_split_and_combine.py
+++ b/ruffus/test/test_filesre_split_and_combine.py
@@ -20,6 +20,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -30,17 +31,12 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, follows, merge, posttask, split, collate, mkdir, regex, files, files_re, combine
+
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.ruffus_utility import CHECKSUM_FILE_TIMESTAMPS
+from ruffus.combinatorics import *
 
 
 
@@ -52,7 +48,6 @@ for attr in attrlist:
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
 JOBS_PER_TASK = 50
-tempdir = "temp_filesre_split_and_combine/"
 verbose_output = sys.stderr
 
 
@@ -183,7 +178,7 @@ class Test_ruffus(unittest.TestCase):
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 100, verbose = 0)
+        pipeline_run(multiprocess = 100, verbose = 0, pipeline= "main")
 
 
 if __name__ == '__main__':
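
The other change repeated throughout is the pipeline= "main" argument on every bare pipeline_run(), pipeline_printout() and pipeline_printout_graph() call. Tasks defined with decorators are registered on an implicit pipeline named "main", while these tests now also build new-style Pipeline("test") objects in the same module; naming the pipeline explicitly keeps the two sets of tasks apart. A small sketch:

    from ruffus import originate, pipeline_run, Pipeline

    @originate(["a.txt"])               # registered on the implicit "main" pipeline
    def make_file(output_file):
        open(output_file, "w").close()

    test_pipeline = Pipeline("test")    # separate, new-style pipeline, same module

    # run only the decorator-defined tasks
    pipeline_run(verbose=0, pipeline="main")
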
diff --git a/ruffus/test/test_follows_mkdir.py b/ruffus/test/test_follows_mkdir.py
index 98be13c..3c9a58a 100755
--- a/ruffus/test/test_follows_mkdir.py
+++ b/ruffus/test/test_follows_mkdir.py
@@ -6,6 +6,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -15,20 +16,8 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
-
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
+import ruffus
+from ruffus import follows, pipeline_run, Pipeline, mkdir
 
 
 
@@ -39,8 +28,8 @@ for attr in attrlist:
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-directories = [os.path.abspath('a'), 'b']
-@follows(mkdir(directories), mkdir('c'), mkdir('d', 'e'), mkdir('e'))
+directories = [os.path.abspath(tempdir +'a'), tempdir + 'b']
+@follows(mkdir(tempdir), mkdir(directories), mkdir(tempdir +'c'), mkdir(tempdir +'d', tempdir +'e'), mkdir(tempdir +'e'))
 def task_which_makes_directories ():
     pass
 
@@ -59,24 +48,25 @@ class Test_task_mkdir(unittest.TestCase):
         delete directories
         """
         for d in 'abcde':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             os.rmdir(fullpath)
+        os.rmdir(tempdir)
 
 
     def test_mkdir (self):
-        pipeline_run(multiprocess = 10, verbose = 0)
+        pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
 
         for d in 'abcde':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             self.assertTrue(os.path.exists(fullpath))
 
     def test_newstyle_mkdir (self):
         test_pipeline = Pipeline("test")
-        test_pipeline.follows(task_which_makes_directories, mkdir(directories), mkdir('c'), mkdir('d', 'e'), mkdir('e'))
+        test_pipeline.follows(task_which_makes_directories, mkdir(directories), mkdir(tempdir + 'c'), mkdir(tempdir + 'd', tempdir + 'e'), mkdir(tempdir + 'e'))
         test_pipeline.run(multiprocess = 10, verbose = 0)
 
         for d in 'abcde':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             self.assertTrue(os.path.exists(fullpath))
 
 
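test_follows_mkdir.py now nests all of its directories under tempdir, adding mkdir(tempdir) first so the parent exists before 'a' to 'e' are created inside it. The decorator shape, with a literal path standing in for the __file__-derived one:

    from ruffus import follows, mkdir, pipeline_run

    tempdir = "test_follows_mkdir/"     # stand-in for the __file__-derived path

    # mkdir() accepts single names, several names, and lists; repeating a
    # directory (the test deliberately repeats 'e') is harmless
    @follows(mkdir(tempdir),
             mkdir([tempdir + "a", tempdir + "b"]),
             mkdir(tempdir + "d", tempdir + "e"),
             mkdir(tempdir + "e"))
    def task_which_makes_directories():
        pass

    pipeline_run(multiprocess=10, verbose=0, pipeline="main")
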
diff --git a/ruffus/test/test_graphviz.py b/ruffus/test/test_graphviz.py
index 036495b..f296bd8 100755
--- a/ruffus/test/test_graphviz.py
+++ b/ruffus/test/test_graphviz.py
@@ -7,6 +7,7 @@ from __future__ import print_function
 import unittest
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -17,19 +18,10 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-JobSignalledBreak = ruffus.ruffus_exceptions.JobSignalledBreak
+import ruffus
+from ruffus import transform, graphviz, check_if_uptodate, follows, Pipeline, pipeline_run, pipeline_printout, pipeline_printout_graph
+from ruffus.ruffus_exceptions import JobSignalledBreak
 
 
 
@@ -182,7 +174,7 @@ class Test_graphviz(unittest.TestCase):
                                         # use flowchart file name extension to decide flowchart format
                                         #   e.g. svg, jpg etc.
                                         "dot",
-                                        [Final_target, Up_to_date_final_target])
+                                        [Final_target, Up_to_date_final_target], pipeline= "main")
         self.assertTrue('[URL="http://cnn.com", color="#FF0000", fillcolor="#FFCCCC", fontcolor="#4B6000", height=1.5, label=<What is this?<BR/> What <FONT COLOR="red">is</FONT>this???>, pencolor="#FF0000", peripheries=5, shape=component, style=dashed]' in s.getvalue().decode())
 
 
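The graphviz test captures the generated dot source in a BytesIO and asserts that the custom node attributes survive. The call shape, with a hypothetical one-task pipeline:

    from io import BytesIO
    from ruffus import originate, pipeline_printout_graph

    @originate(["final.txt"])           # hypothetical stand-in target task
    def final_target(output_file):
        open(output_file, "w").close()

    s = BytesIO()
    # "dot" fixes the flowchart format explicitly; a file name ending in
    # .svg, .png etc. would otherwise be used to infer it
    pipeline_printout_graph(s, "dot", [final_target], pipeline="main")
    print(s.getvalue().decode())
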
diff --git a/ruffus/test/test_inputs_with_multiple_args_raising_exception.py b/ruffus/test/test_inputs_with_multiple_args_raising_exception.py
index 35214ac..ed1de0d 100755
--- a/ruffus/test/test_inputs_with_multiple_args_raising_exception.py
+++ b/ruffus/test/test_inputs_with_multiple_args_raising_exception.py
@@ -10,6 +10,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -22,18 +23,8 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
-
+import ruffus
+from ruffus import transform, Pipeline, pipeline_run, regex, inputs, mkdir
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@@ -46,14 +37,6 @@ for attr in attrlist:
 import unittest
 
 import json
-## use simplejson in place of json for python < 2.6
-#try:
-#    import json
-#except ImportError:
-#    import simplejson
-#    json = simplejson
-
-
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@@ -63,7 +46,7 @@ import json
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 try:
-    @transform(None, regex("b"), inputs("a", "b"), "task_1.output")
+    @transform(None, regex(tempdir + "b"), inputs(tempdir + "a", tempdir + "b"), "task_1.output")
     def task_1 (i, o):
         for f in o:
             open(f, 'w')
@@ -92,7 +75,7 @@ class Test_task_mkdir(unittest.TestCase):
 
     def test_no_re_match (self):
         try:
-            pipeline_run(multiprocess = 10, verbose = 0)
+            pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
         except:
             return
         raise Exception("Inputs(...) with multiple arguments should have thrown an exception")
@@ -102,8 +85,8 @@ class Test_task_mkdir(unittest.TestCase):
             test_pipeline = Pipeline("test")
             test_pipeline.transform(task_func = task_2,
                                     input = None,
-                                    filter = regex("b"),
-                                    replace_inputs = inputs("a", "b"),
+                                    filter = regex(tempdir + "b"),
+                                    replace_inputs = inputs(tempdir + "a", tempdir + "b"),
                                     output = "task_1.output")
             test_pipeline.run(multiprocess = 10, verbose = 0)
         except ruffus.ruffus_exceptions.error_task_transform_inputs_multiple_args:
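
Both versions of this test exist to pin down one rule: inputs(...) replaces the matched input parameter wholesale and therefore takes exactly one argument (which may itself be a tuple), so the two-argument calls above must raise error_task_transform_inputs_multiple_args. add_inputs(...) is the variadic form that appends rather than replaces. A sketch of the correct usage of each, with hypothetical file names:

    from ruffus import transform, regex, inputs, add_inputs

    # replace the matched input with a single (possibly compound) parameter
    @transform(["a.b"], regex(r"(.+)\.b"), inputs((r"\1.c", "extra.txt")), r"\1.out")
    def replace_input(infiles, outfile):
        pass

    # append further inputs instead of replacing: variadic is fine here
    @transform(["a.b"], regex(r"(.+)\.b"), add_inputs(r"\1.c", "extra.txt"), r"\1.out")
    def append_inputs(infiles, outfile):
        pass
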
diff --git a/ruffus/test/test_job_completion_checksums.py b/ruffus/test/test_job_completion_checksums.py
index ca96b84..85b7fd7 100755
--- a/ruffus/test/test_job_completion_checksums.py
+++ b/ruffus/test/test_job_completion_checksums.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -21,17 +22,12 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
-get_default_history_file_name =  ruffus.task.get_default_history_file_name
-RUFFUS_HISTORY_FILE           = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS      = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
-CHECKSUM_HISTORY_TIMESTAMPS   = ruffus.ruffus_utility.CHECKSUM_HISTORY_TIMESTAMPS
-CHECKSUM_FUNCTIONS            = ruffus.ruffus_utility.CHECKSUM_FUNCTIONS
-CHECKSUM_FUNCTIONS_AND_PARAMS = ruffus.ruffus_utility.CHECKSUM_FUNCTIONS_AND_PARAMS
-RethrownJobError = ruffus.ruffus_exceptions.RethrownJobError
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, suffix, transform, split, merge, dbdict, Pipeline
+
+from ruffus.task import get_default_history_file_name
+from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS, CHECKSUM_HISTORY_TIMESTAMPS, CHECKSUM_FUNCTIONS, CHECKSUM_FUNCTIONS_AND_PARAMS
+from ruffus.ruffus_exceptions import RethrownJobError
 
 
 
@@ -54,12 +50,12 @@ import re
 
 
 possible_chksms = list(range(CHECKSUM_FUNCTIONS_AND_PARAMS + 1))
-workdir = 'tmp_test_job_completion/'
-input_file = os.path.join(workdir, 'input.txt')
+tempdir = 'tmp_test_job_completion/'
+input_file = os.path.join(tempdir, 'input.txt')
 transform1_out = input_file.replace('.txt', '.output')
-split1_outputs = [ os.path.join(workdir, 'split.out1.txt'),
-                   os.path.join(workdir, 'split.out2.txt')]
-merge2_output =  os.path.join(workdir, 'merged.out')
+split1_outputs = [ os.path.join(tempdir, 'split.out1.txt'),
+                   os.path.join(tempdir, 'split.out2.txt')]
+merge2_output =  os.path.join(tempdir, 'merged.out')
 
 runtime_data = []
 
@@ -100,14 +96,14 @@ def merge2(in_names, out_name):
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), get_default_history_file_name()))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), get_default_history_file_name()))
 
 count_pipelines = 0
 
 class TestJobCompletion(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
 
@@ -152,7 +148,7 @@ class TestJobCompletion(unittest.TestCase):
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             self.assertTrue(re.search(r'Job needs update:.*Missing file.*\[tmp_test_job_completion/input.output\]'
                                       , s.getvalue(), re.DOTALL))
 
@@ -170,7 +166,7 @@ class TestJobCompletion(unittest.TestCase):
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             self.assertIn('Job needs update:', s.getvalue())
             if chksm == CHECKSUM_FILE_TIMESTAMPS:
                 self.assertIn('Input files:', s.getvalue())
@@ -190,7 +186,7 @@ class TestJobCompletion(unittest.TestCase):
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm == CHECKSUM_FILE_TIMESTAMPS:
                 #self.assertIn('Job up-to-date', s.getvalue())
                 pass
@@ -205,11 +201,11 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             #self.assertIn('Job up-to-date', s.getvalue())
             pass
 
@@ -219,7 +215,7 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         if sys.hexversion >= 0x03000000:
             transform1.__code__ = split1.__code__  # simulate source change
         else:
@@ -227,7 +223,7 @@ class TestJobCompletion(unittest.TestCase):
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_FUNCTIONS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('Pipeline function has changed',
@@ -242,7 +238,7 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         # simulate source change
         if sys.hexversion >= 0x03000000:
             split1.__code__, transform1.__code__ = transform1.__code__, split1.__code__
@@ -251,7 +247,7 @@ class TestJobCompletion(unittest.TestCase):
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_FUNCTIONS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('Pipeline function has changed',
@@ -272,12 +268,12 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([transform1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         runtime_data.append('different')  # simulate change to config file
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform1], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_FUNCTIONS_AND_PARAMS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('Pipeline parameters have changed',
@@ -292,14 +288,14 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([split1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([split1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         time.sleep(.5)
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [split1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [split1], verbose=6, checksum_level=chksm, pipeline= "main")
             self.assertIn('Job needs update:', s.getvalue())
 
         # all outputs incorrectly generated
@@ -312,7 +308,7 @@ class TestJobCompletion(unittest.TestCase):
                 outfile.write('testme')
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [split1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [split1], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_HISTORY_TIMESTAMPS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('left over from a failed run?',
@@ -325,13 +321,13 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([split1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([split1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         job_history = dbdict.open(get_default_history_file_name(), picklevalues=True)
         del job_history[os.path.relpath(split1_outputs[0])]
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [split1], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [split1], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_HISTORY_TIMESTAMPS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('left over from a failed run?',
@@ -346,13 +342,13 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([split1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([split1], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         job_history = dbdict.open(get_default_history_file_name(), picklevalues=True)
         del job_history[os.path.relpath(split1_outputs[0])]
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [merge2], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [merge2], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_HISTORY_TIMESTAMPS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('left over from a failed run?', s.getvalue())
@@ -364,10 +360,10 @@ class TestJobCompletion(unittest.TestCase):
         cleanup_tmpdir()
         with open(input_file, 'w') as outfile:
             outfile.write('testme')
-        pipeline_run([merge2], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
+        pipeline_run([merge2], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [merge2], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [merge2], verbose=6, checksum_level=chksm, pipeline= "main")
             #self.assertIn('Job up-to-date', s.getvalue())
             self.assertNotIn('Job needs update:', s.getvalue())
             self.assertNotIn('left over from a failed run?', s.getvalue())
@@ -532,11 +528,11 @@ class TestJobCompletion(unittest.TestCase):
         time.sleep(.5)
         del runtime_data[:]
         with self.assertRaises(RethrownJobError):  # poo. Shouldn't this be RuntimeError?
-            pipeline_run([transform_raise_error], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS) # generates output then fails
+            pipeline_run([transform_raise_error], verbose=0, checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main") # generates output then fails
 
         for chksm in possible_chksms:
             s = StringIO()
-            pipeline_printout(s, [transform_raise_error], verbose=6, checksum_level=chksm)
+            pipeline_printout(s, [transform_raise_error], verbose=6, checksum_level=chksm, pipeline= "main")
             if chksm >= CHECKSUM_HISTORY_TIMESTAMPS:
                 self.assertIn('Job needs update:', s.getvalue())
                 self.assertIn('left over from a failed run?',
@@ -657,20 +653,20 @@ class TestJobCompletion(unittest.TestCase):
             self.assertNotIn('left over from a failed run?', s.getvalue())
 
     def tearDown(self):
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
         pass
 
 if __name__ == '__main__':
     unittest.main()
 
 #        try:
-#            os.mkdir(workdir)
+#            os.mkdir(tempdir)
 #        except OSError:
 #            pass
-#        #os.system('rm %s/*' % workdir)
+#        #os.system('rm %s/*' % tempdir)
 #        #open(input_file, 'w').close()
 #        s = StringIO()
-#        pipeline_run([transform1], checksum_level=CHECKSUM_HISTORY_TIMESTAMPS)
-#        pipeline_printout(s, [transform1], verbose=6, checksum_level=0)
+#        pipeline_run([transform1], checksum_level=CHECKSUM_HISTORY_TIMESTAMPS, pipeline= "main")
+#        pipeline_printout(s, [transform1], verbose=6, checksum_level=0, pipeline= "main")
 #        print s.getvalue()
 #        #open(transform1_out)  # raise an exception if test fails
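
possible_chksms steps checksum_level through every defined value, and the assertions rely on the constants being ordered integers (hence the chksm >= CHECKSUM_FUNCTIONS comparisons). Their meanings, as this test exercises them:

    from ruffus.ruffus_utility import (
        CHECKSUM_FILE_TIMESTAMPS,       # 0: out-of-dateness from file timestamps only
        CHECKSUM_HISTORY_TIMESTAMPS,    # 1: timestamps from the job history (default)
        CHECKSUM_FUNCTIONS,             # 2: ...plus a checksum of the task function body
        CHECKSUM_FUNCTIONS_AND_PARAMS,  # 3: ...plus a checksum of the task parameters
    )

    possible_chksms = list(range(CHECKSUM_FUNCTIONS_AND_PARAMS + 1))    # [0, 1, 2, 3]
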
diff --git a/ruffus/test/test_job_history_with_exceptions.py b/ruffus/test/test_job_history_with_exceptions.py
index f7b358f..055fa98 100755
--- a/ruffus/test/test_job_history_with_exceptions.py
+++ b/ruffus/test/test_job_history_with_exceptions.py
@@ -8,12 +8,12 @@ from __future__ import print_function
 
 """
 
-workdir = 'tmp_test_job_history_with_exceptions'
 #sub-1s resolution in system?
 one_second_per_job = None
 throw_exception = False
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -24,16 +24,11 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "follows", "originate", "collate", "formatter", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
-RethrownJobError =  ruffus.ruffus_exceptions.RethrownJobError
-RUFFUS_HISTORY_FILE           = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS      = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
-get_default_history_file_name = ruffus.ruffus_utility.get_default_history_file_name
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, collate, mkdir, regex, suffix, formatter, originate, transform
+
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.ruffus_utility import CHECKSUM_FILE_TIMESTAMPS, RUFFUS_HISTORY_FILE, get_default_history_file_name
 
 
 
@@ -58,7 +53,7 @@ import re
 #
 #   generate_initial_files1
 #___________________________________________________________________________
-@originate([workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
+@originate([tempdir +  prefix + "_name.tmp1" for prefix in "abcd"])
 def generate_initial_files1(on):
     with open(on, 'w') as outfile:
         pass
@@ -67,7 +62,7 @@ def generate_initial_files1(on):
 #
 #   generate_initial_files2
 #___________________________________________________________________________
-@originate([workdir +  "/e_name.tmp1", workdir +  "/f_name.tmp1"])
+@originate([tempdir +  "e_name.tmp1", tempdir +  "f_name.tmp1"])
 def generate_initial_files2(on):
     with open(on, 'w') as outfile:
         pass
@@ -76,7 +71,7 @@ def generate_initial_files2(on):
 #
 #   generate_initial_files3
 #___________________________________________________________________________
-@originate([workdir +  "/g_name.tmp1", workdir +  "/h_name.tmp1"])
+@originate([tempdir +  "/g_name.tmp1", tempdir +  "/h_name.tmp1"])
 def generate_initial_files3(on):
     with open(on, 'w') as outfile:
         pass
@@ -85,7 +80,7 @@ def generate_initial_files3(on):
 #
 #   generate_initial_files1
 #___________________________________________________________________________
-@originate(workdir +  "/i_name.tmp1")
+@originate(tempdir +  "i_name.tmp1")
 def generate_initial_files4(on):
     with open(on, 'w') as outfile:
         pass
@@ -138,7 +133,7 @@ def test_task4( infile, outfile):
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), RUFFUS_HISTORY_FILE))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
 
 
 VERBOSITY = 5
@@ -148,7 +143,7 @@ cnt_pipelines = 0
 class Test_job_history_with_exceptions(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
 
@@ -159,7 +154,7 @@ class Test_job_history_with_exceptions(unittest.TestCase):
     def test_job_history_with_exceptions(self):
         cleanup_tmpdir()
         s = StringIO()
-        pipeline_printout(s, [test_task4], verbose=VERBOSITY, wrap_width = 10000)
+        pipeline_printout(s, [test_task4], verbose=VERBOSITY, wrap_width = 10000, pipeline= "main")
         #print s.getvalue()
 
 
@@ -170,16 +165,16 @@ class Test_job_history_with_exceptions(unittest.TestCase):
         test_pipeline = Pipeline("test %d" % cnt_pipelines)
 
         test_pipeline.originate(task_func   = generate_initial_files1,
-                                output      = [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
+                                output      = [tempdir + prefix + "_name.tmp1" for prefix in "abcd"])
 
         test_pipeline.originate(task_func   = generate_initial_files2,
-                                output      = [workdir +  "/e_name.tmp1", workdir +  "/f_name.tmp1"])
+                                output      = [tempdir +  "e_name.tmp1", tempdir +  "f_name.tmp1"])
 
         test_pipeline.originate(task_func   = generate_initial_files3,
-                                output      = [workdir +  "/g_name.tmp1", workdir +  "/h_name.tmp1"])
+                                output      = [tempdir +  "g_name.tmp1", tempdir +  "h_name.tmp1"])
 
         test_pipeline.originate(task_func   = generate_initial_files4,
-                                output      = workdir +  "/i_name.tmp1")
+                                output      = tempdir +  "i_name.tmp1")
 
         test_pipeline.collate(  task_func   = test_task2,
                                 input       = [generate_initial_files1,
@@ -208,11 +203,11 @@ class Test_job_history_with_exceptions(unittest.TestCase):
             try:
                 pipeline_run([test_task4], verbose = 0,
                              #multithread = 2,
-                             one_second_per_job = one_second_per_job)
+                             one_second_per_job = one_second_per_job, pipeline= "main")
             except:
                 pass
             s = StringIO()
-            pipeline_printout(s, [test_task4], verbose=VERBOSITY, wrap_width = 10000)
+            pipeline_printout(s, [test_task4], verbose=VERBOSITY, wrap_width = 10000, pipeline= "main")
             #
            # task 2 should be up to date because exception was thrown in task 3
             #
@@ -314,19 +309,19 @@ class Test_job_history_with_exceptions(unittest.TestCase):
         pipeline_run([test_task4], verbose = 0,
                      checksum_level = CHECKSUM_FILE_TIMESTAMPS,
                      multithread = 10,
-                     one_second_per_job = one_second_per_job)
+                     one_second_per_job = one_second_per_job, pipeline= "main")
 
         #
         #   print "printout without sqlite"
         #
         s = StringIO()
-        pipeline_printout(s, [test_task4], checksum_level = CHECKSUM_FILE_TIMESTAMPS)
+        pipeline_printout(s, [test_task4], checksum_level = CHECKSUM_FILE_TIMESTAMPS, pipeline= "main")
         self.assertTrue(not re.search('Tasks which will be run:.*\n(.*\n)*Task = ', s.getvalue()))
         #
         # print "printout expecting sqlite file"
         #
         s = StringIO()
-        pipeline_printout(s, [test_task4])
+        pipeline_printout(s, [test_task4], pipeline= "main")
         self.assertTrue(re.search('Tasks which will be run:.*\n(.*\n)*Task = ', s.getvalue()))
         #
         #   print "Regenerate sqlite file"
@@ -337,12 +332,12 @@ class Test_job_history_with_exceptions(unittest.TestCase):
                      multithread = 1,
                      verbose = 0,
                      touch_files_only = 2,
-                     one_second_per_job = one_second_per_job)
+                     one_second_per_job = one_second_per_job, pipeline= "main")
         #
         # print "printout expecting sqlite file"
         #
         s = StringIO()
-        pipeline_printout(s, [test_task4], verbose = VERBOSITY)
+        pipeline_printout(s, [test_task4], verbose = VERBOSITY, pipeline= "main")
         succeed = not re.search('Tasks which will be run:.*\n(.*\n)*Task = ', s.getvalue())
         if not succeed:
             print(s.getvalue(), file=sys.stderr)
@@ -355,7 +350,7 @@ class Test_job_history_with_exceptions(unittest.TestCase):
     #   cleanup
     #___________________________________________________________________________
     def tearDown(self):
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
         pass
 
 
@@ -365,5 +360,5 @@ class Test_job_history_with_exceptions(unittest.TestCase):
 #       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
 #
 if __name__ == '__main__':
-    #pipeline_printout(sys.stdout, [test_product_task], verbose = VERBOSITY)
+    #pipeline_printout(sys.stdout, [test_product_task], verbose = VERBOSITY, pipeline= "main")
     unittest.main()
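
Several of these checks poke the job history database directly: test_job_completion_checksums.py deletes single entries, and the test above regenerates the whole file with touch_files_only. The history is a sqlite-backed dict of pickled values keyed by output path, opened through ruffus's dbdict module:

    import os
    from ruffus import dbdict
    from ruffus.task import get_default_history_file_name

    # keys are (relative) output file paths; removing one makes the next
    # printout report that the file is "left over from a failed run?"
    job_history = dbdict.open(get_default_history_file_name(), picklevalues=True)
    # del job_history[os.path.relpath("some/output.txt")]   # hypothetical path
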
diff --git a/ruffus/test/test_mkdir.py b/ruffus/test/test_mkdir.py
index 204bdbb..293ee46 100755
--- a/ruffus/test/test_mkdir.py
+++ b/ruffus/test/test_mkdir.py
@@ -7,7 +7,8 @@ from __future__ import print_function
 
 """
 
-workdir = 'tmp_test_mkdir'
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
 
 
 import os
@@ -21,16 +22,8 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-for attr in "pipeline_run", "pipeline_printout", "transform", "split", "mkdir", "formatter", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
-RethrownJobError = ruffus.ruffus_exceptions.RethrownJobError
-RUFFUS_HISTORY_FILE      = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
+from ruffus import pipeline_run, pipeline_printout, transform, split, mkdir, formatter, Pipeline
+from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 #
@@ -52,7 +45,7 @@ import time
 #
 #   generate_initial_files1
 #___________________________________________________________________________
-@split(1, [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
+@split(1, [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
 def generate_initial_files1(in_name, out_names):
     for on in out_names:
         with open(on, 'w') as outfile:
@@ -62,8 +55,8 @@ def generate_initial_files1(in_name, out_names):
 #
 #   test_product_task
 #___________________________________________________________________________
-@mkdir(workdir + "/test1")
-@mkdir(workdir + "/test2")
+@mkdir(tempdir + "/test1")
+@mkdir(tempdir + "/test2")
 @mkdir(generate_initial_files1, formatter(),
             ["{path[0]}/{basename[0]}.dir", 3, "{path[0]}/{basename[0]}.dir2"])
 @transform( generate_initial_files1,
@@ -73,7 +66,7 @@ def test_transform( infiles, outfile):
     with open(outfile, "w") as p: pass
 
 
-@mkdir(workdir + "/test3")
+@mkdir(tempdir + "/test3")
 @mkdir(generate_initial_files1, formatter(),
             "{path[0]}/{basename[0]}.dir2")
 def test_transform2():
@@ -82,13 +75,13 @@ def test_transform2():
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), RUFFUS_HISTORY_FILE))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
 
 
 class Testmkdir(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
 
@@ -101,7 +94,7 @@ class Testmkdir(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_transform, test_transform2], verbose=5, wrap_width = 10000)
+        pipeline_printout(s, [test_transform, test_transform2], verbose=5, wrap_width = 10000, pipeline= "main")
         #self.assertIn('Job needs update: Missing files '
         #              '[tmp_test_mkdir/a_name.tmp1, '
         #              'tmp_test_mkdir/e_name.tmp1, '
@@ -112,7 +105,7 @@ class Testmkdir(unittest.TestCase):
         """Run mkdir"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_transform, test_transform2], verbose=0, multiprocess = 2)
+        pipeline_run([test_transform, test_transform2], verbose=0, multiprocess = 2, pipeline= "main")
 
 
     def test_newstyle_mkdir_run(self):
@@ -120,22 +113,22 @@ class Testmkdir(unittest.TestCase):
 
         test_pipeline.split(task_func = generate_initial_files1,
                             input = 1,
-                            output = [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
+                            output = [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
 
         test_pipeline.transform( task_func = test_transform,
                                  input     = generate_initial_files1,
                                  filter    = formatter(),
                                  output    = "{path[0]}/{basename[0]}.dir/{basename[0]}.tmp2")\
-            .mkdir(workdir + "/test1")\
-            .mkdir(workdir + "/test2")\
+            .mkdir(tempdir + "/test1")\
+            .mkdir(tempdir + "/test2")\
             .mkdir(generate_initial_files1, formatter(),
                         ["{path[0]}/{basename[0]}.dir", 3, "{path[0]}/{basename[0]}.dir2"])
 
-        test_pipeline.mkdir(test_transform2, workdir + "/test3")\
+        test_pipeline.mkdir(test_transform2, tempdir + "/test3")\
             .mkdir(generate_initial_files1, formatter(),
                     "{path[0]}/{basename[0]}.dir2")
         cleanup_tmpdir()
-        pipeline_run([test_transform, test_transform2], verbose=0, multiprocess = 2)
+        pipeline_run([test_transform, test_transform2], verbose=0, multiprocess = 2, pipeline= "main")
 
 
 
@@ -145,7 +138,7 @@ class Testmkdir(unittest.TestCase):
     #   cleanup
     #___________________________________________________________________________
     def tearDown(self):
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
 
 
 
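test_mkdir.py drives mkdir() both with literal paths and with formatter() substitution over an upstream task's outputs; the stray 3 in the output list above presumably checks that non-string entries are skipped rather than treated as paths. The core pattern in decorator form, with a simplified, hypothetical scratch layout:

    import os
    from ruffus import split, mkdir, transform, formatter, pipeline_run

    tempdir = "test_mkdir_sketch"       # hypothetical scratch directory
    os.makedirs(tempdir, exist_ok=True)

    @split(1, [tempdir + "/" + prefix + "_name.tmp1" for prefix in "abcd"])
    def generate_initial_files1(in_name, out_names):
        for on in out_names:
            open(on, "w").close()

    @mkdir(tempdir + "/test1")                        # literal path
    @mkdir(generate_initial_files1, formatter(),      # one directory per upstream
           "{path[0]}/{basename[0]}.dir")             # output, made before the jobs run
    @transform(generate_initial_files1, formatter(),
               "{path[0]}/{basename[0]}.dir/{basename[0]}.tmp2")
    def test_transform(infile, outfile):
        open(outfile, "w").close()

    pipeline_run([test_transform], verbose=0, multiprocess=2, pipeline="main")
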
diff --git a/ruffus/test/test_newstyle_combinatorics.py b/ruffus/test/test_newstyle_combinatorics.py
index c09ab31..018c1a9 100755
--- a/ruffus/test/test_newstyle_combinatorics.py
+++ b/ruffus/test/test_newstyle_combinatorics.py
@@ -8,8 +8,8 @@ from __future__ import print_function
 
 """
 
-
-workdir = 'tmp_test_combinatorics'
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
 #sub-1s resolution in system?
 one_second_per_job = None
 
@@ -24,23 +24,9 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-try:
-    attrlist = ruffus.combinatorics.__all__
-except AttributeError:
-    attrlist = dir (ruffus.combinatorics)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus.combinatorics, attr)
-
-for attr in "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "follows", "Pipeline", "formatter", "output_from":
-    globals()[attr] = getattr (ruffus, attr)
-RethrownJobError = ruffus.ruffus_exceptions.RethrownJobError
-RUFFUS_HISTORY_FILE      = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, formatter, output_from
+from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE
 
 
 import unittest
@@ -261,20 +247,20 @@ def test_combinations_with_replacement3_merged_task( infiles, outfile):
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), RUFFUS_HISTORY_FILE))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
 
 
 test_pipeline1 = Pipeline("test1")
 test_pipeline2 = Pipeline("test2")
 gen_task1 = test_pipeline1.originate(task_func  = generate_initial_files1,
                                      name       = "WOWWWEEE",
-                                     output     = [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
+                                     output     = [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"])
 test_pipeline1.originate(            task_func  = generate_initial_files2,
-                                     output     = [workdir +  "/e_name.tmp1", workdir +  "/f_name.tmp1"])
+                                     output     = [tempdir +  "/e_name.tmp1", tempdir +  "/f_name.tmp1"])
 test_pipeline1.originate(            task_func  = generate_initial_files3,
-                                     output     = [workdir +  "/g_name.tmp1", workdir +  "/h_name.tmp1"])
+                                     output     = [tempdir +  "/g_name.tmp1", tempdir +  "/h_name.tmp1"])
 test_pipeline1.product(              task_func  = test_product_task,
-                                     input      = [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"],
+                                     input      = [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcd"],
                                      filter     = formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
                                      input2     = generate_initial_files2,
                                      filter2    = formatter(),
@@ -286,7 +272,7 @@ test_pipeline1.product(              task_func  = test_product_task,
                                                     "{subdir[0][0][0]}"]).follows("WOWWWEEE").follows(gen_task1).follows(generate_initial_files1).follows("generate_initial_files1")
 test_pipeline1.merge(               task_func   = test_product_merged_task,
                                     input       = test_product_task,
-                                    output      = workdir +  "/merged.results")
+                                    output      = tempdir +  "/merged.results")
 test_pipeline1.product(             task_func   = test_product_misspelt_capture_error_task,
                                     input       = gen_task1,
                                     filter      = formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
@@ -311,7 +297,7 @@ test_pipeline1.combinations(task_func   = test_combinations2_task,
                                            "{subdir[0][0][0]}"])
 test_pipeline1.merge(task_func  = test_combinations2_merged_task,
                      input      = test_combinations2_task,
-                     output     = workdir +  "/merged.results")
+                     output     = tempdir +  "/merged.results")
 test_pipeline1.combinations(    task_func   = test_combinations3_task,
                                 input       = output_from("WOWWWEEE"),
                                 filter      = formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
@@ -321,7 +307,7 @@ test_pipeline1.combinations(    task_func   = test_combinations3_task,
                                                "{subpath[0][0][0]}",      # extra: path for 2nd input, 1st file
                                                "{subdir[0][0][0]}"])
 
-test_pipeline1.merge(test_combinations3_merged_task, test_combinations3_task, workdir +  "/merged.results")
+test_pipeline1.merge(test_combinations3_merged_task, test_combinations3_task, tempdir +  "/merged.results")
 test_pipeline1.permutations(task_func   = test_permutations2_task,
                             input       = output_from("WOWWWEEE"),
                             filter      = formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
@@ -331,7 +317,7 @@ test_pipeline1.permutations(task_func   = test_permutations2_task,
                                           "{subpath[0][0][0]}",      # extra: path for 2nd input, 1st file
                                           "{subdir[0][0][0]}"])
 
-test_pipeline2.merge(test_permutations2_merged_task, test_permutations2_task, workdir +  "/merged.results")
+test_pipeline2.merge(test_permutations2_merged_task, test_permutations2_task, tempdir +  "/merged.results")
 
 
 
@@ -343,7 +329,7 @@ test_pipeline2.permutations(task_func   = test_permutations3_task,
                             extras      = ["{basename[0][0][0]}{basename[1][0][0]}{basename[2][0][0]}",       # extra: prefices
                                           "{subpath[0][0][0]}",      # extra: path for 2nd input, 1st file
                                           "{subdir[0][0][0]}"])
-test_pipeline2.merge(test_permutations3_merged_task, test_permutations3_task, workdir +  "/merged.results")
+test_pipeline2.merge(test_permutations3_merged_task, test_permutations3_task, tempdir +  "/merged.results")
 test_pipeline2.combinations_with_replacement(test_combinations_with_replacement2_task,
                                             input = output_from("WOWWWEEE"),
                                             filter = formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
@@ -353,7 +339,7 @@ test_pipeline2.combinations_with_replacement(test_combinations_with_replacement2
                                             "{subpath[0][0][0]}",      # extra: path for 2nd input, 1st file
                                             "{subdir[0][0][0]}"])
 test_pipeline2.merge(test_combinations_with_replacement2_merged_task,
-    test_combinations_with_replacement2_task, workdir +  "/merged.results")
+    test_combinations_with_replacement2_task, tempdir +  "/merged.results")
 test_pipeline2.combinations_with_replacement(   task_func   = test_combinations_with_replacement3_task,
                                                 input       = output_from("WOWWWEEE"),
                                                 filter      = formatter(".*/(?P<FILE_PART>.+).tmp1$" ),
@@ -363,13 +349,13 @@ test_pipeline2.combinations_with_replacement(   task_func   = test_combinations_
                                                               "{subpath[0][0][0]}",      # extra: path for 2nd input, 1st file
                                                               "{subdir[0][0][0]}"])
 test_pipeline2.merge(test_combinations_with_replacement3_merged_task,
-    test_combinations_with_replacement3_task, workdir +  "/merged.results")
+    test_combinations_with_replacement3_task, tempdir +  "/merged.results")
 
 
 class TestCombinatorics(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
 
@@ -383,17 +369,17 @@ class TestCombinatorics(unittest.TestCase):
         s = StringIO()
         test_pipeline2.printout(s, [test_product_merged_task], verbose=5, wrap_width = 10000)
         self.assertTrue(re.search('Job needs update:.*Missing files.*'
-                      '\[.*tmp_test_combinatorics/a_name.tmp1, '
-                      '.*tmp_test_combinatorics/e_name.tmp1, '
-                      '.*tmp_test_combinatorics/h_name.tmp1, '
-                      '.*tmp_test_combinatorics/a_name.e_name.h_name.tmp2\]', s.getvalue(), re.DOTALL))
+                      '\[.*{tempdir}/a_name.tmp1, '
+                      '.*{tempdir}/e_name.tmp1, '
+                      '.*{tempdir}/h_name.tmp1, '
+                      '.*{tempdir}/a_name.e_name.h_name.tmp2\]'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
     def test_product_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_product_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "aeg,aeh,afg,afh,beg,beh,bfg,bfh,ceg,ceh,cfg,cfh,deg,deh,dfg,dfh,")
 
@@ -411,7 +397,8 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_product_misspelt_capture_error_task], verbose=3, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: Unmatched field 'FILEPART'", s.getvalue())
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("Unmatched field 'FILEPART'", s.getvalue())
 
 
     def test_product_out_of_range_formatter_ref_error(self):
@@ -423,7 +410,8 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_product_out_of_range_formatter_ref_error_task], verbose=3, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: Unmatched field 2", s.getvalue())
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("Unmatched field 2", s.getvalue())
 
     def test_product_formatter_ref_index_error(self):
         """
@@ -434,7 +422,8 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_product_formatter_ref_index_error_task], verbose=3, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: Unmatched field string index out of range", s.getvalue())
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("Unmatched field string index out of range", s.getvalue())
         #print s.getvalue()
 
 
@@ -449,9 +438,9 @@ class TestCombinatorics(unittest.TestCase):
         s = StringIO()
         test_pipeline1.printout(s, [test_combinations2_merged_task], verbose=5, wrap_width = 10000)
         self.assertTrue(re.search('Job needs update:.*Missing files.*'
-                      '\[.*tmp_test_combinatorics/a_name.tmp1, '
-                        '.*tmp_test_combinatorics/b_name.tmp1, '
-                        '.*tmp_test_combinatorics/a_name.b_name.tmp2\]', s.getvalue(), re.DOTALL))
+                      '\[.*{tempdir}/a_name.tmp1, '
+                        '.*{tempdir}/b_name.tmp1, '
+                        '.*{tempdir}/a_name.b_name.tmp2\]'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
 
     def test_combinations2_run(self):
@@ -459,7 +448,7 @@ class TestCombinatorics(unittest.TestCase):
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_combinations2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                               'ab,ac,ad,bc,bd,cd,')
 
@@ -474,17 +463,17 @@ class TestCombinatorics(unittest.TestCase):
         s = StringIO()
         test_pipeline2.printout(s, [test_combinations3_merged_task], verbose=5, wrap_width = 10000)
         self.assertTrue(re.search(
-                       '\[.*tmp_test_combinatorics/a_name.tmp1, '
-                       '.*tmp_test_combinatorics/b_name.tmp1, '
-                       '.*tmp_test_combinatorics/c_name.tmp1, '
-                       '.*tmp_test_combinatorics/a_name.b_name.c_name.tmp2\]', s.getvalue()))
+                       '\[.*{tempdir}/a_name.tmp1, '
+                       '.*{tempdir}/b_name.tmp1, '
+                       '.*{tempdir}/c_name.tmp1, '
+                       '.*{tempdir}/a_name.b_name.c_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_combinations3_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_combinations3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "abc,abd,acd,bcd,")
 
@@ -499,16 +488,16 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_permutations2_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                      '.*tmp_test_combinatorics/b_name.tmp1, '
-                      '.*tmp_test_combinatorics/a_name.b_name.tmp2\]', s.getvalue()))
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                      '.*{tempdir}/b_name.tmp1, '
+                      '.*{tempdir}/a_name.b_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_permutations2_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_permutations2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "ab,ac,ad,ba,bc,bd,ca,cb,cd,da,db,dc,")
 
@@ -522,17 +511,17 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_permutations3_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                       '.*tmp_test_combinatorics/b_name.tmp1, '
-                       '.*tmp_test_combinatorics/c_name.tmp1, '
-                       '.*tmp_test_combinatorics/a_name.b_name.c_name.tmp2\]', s.getvalue()))
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                       '.*{tempdir}/b_name.tmp1, '
+                       '.*{tempdir}/c_name.tmp1, '
+                       '.*{tempdir}/a_name.b_name.c_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_permutations3_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_permutations3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          'abc,abd,acb,acd,adb,adc,bac,bad,bca,bcd,bda,bdc,cab,cad,cba,cbd,cda,cdb,dab,dac,dba,dbc,dca,dcb,')
 
@@ -547,16 +536,16 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_combinations_with_replacement2_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                      '.*tmp_test_combinatorics/b_name.tmp1, '
-                      '.*tmp_test_combinatorics/a_name.b_name.tmp2\]', s.getvalue()))
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                      '.*{tempdir}/b_name.tmp1, '
+                      '.*{tempdir}/a_name.b_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_combinations_with_replacement2_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_combinations_with_replacement2_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          "aa,ab,ac,ad,bb,bc,bd,cc,cd,dd,")
 
@@ -570,17 +559,17 @@ class TestCombinatorics(unittest.TestCase):
 
         s = StringIO()
         test_pipeline2.printout(s, [test_combinations_with_replacement3_merged_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('\[.*tmp_test_combinatorics/a_name.tmp1, '
-                       '.*tmp_test_combinatorics/b_name.tmp1, '
-                       '.*tmp_test_combinatorics/c_name.tmp1, '
-                       '.*tmp_test_combinatorics/a_name.b_name.c_name.tmp2\]', s.getvalue()))
+        self.assertTrue(re.search('\[.*{tempdir}/a_name.tmp1, '
+                       '.*{tempdir}/b_name.tmp1, '
+                       '.*{tempdir}/c_name.tmp1, '
+                       '.*{tempdir}/a_name.b_name.c_name.tmp2\]'.format(tempdir=tempdir), s.getvalue()))
 
     def test_combinations_with_replacement3_run(self):
         """Run product"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
         test_pipeline2.run([test_combinations_with_replacement3_merged_task], verbose=0, multiprocess = 100, one_second_per_job = one_second_per_job)
-        with open(workdir +  "/merged.results") as oo:
+        with open(tempdir +  "/merged.results") as oo:
             self.assertEqual(oo.read(),
                          'aaa,aab,aac,aad,abb,abc,abd,acc,acd,add,bbb,bbc,bbd,bcc,bcd,bdd,ccc,ccd,cdd,ddd,')
 
@@ -590,7 +579,7 @@ class TestCombinatorics(unittest.TestCase):
     #   cleanup
     #___________________________________________________________________________
     def tearDown(self):
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
 
 
 
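The workdir -> tempdir renames above follow a convention applied throughout this commit: each test derives a private scratch directory from its own script path instead of hard-coding a name, so the directory always matches the test that owns it. A minimal sketch of the idiom (some files append a trailing slash):

    import os

    # for ruffus/test/test_combinatorics.py run from the repository root,
    # this evaluates to the relative path "ruffus/test/test_combinatorics"
    tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))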
diff --git a/ruffus/test/test_newstyle_proxy.py b/ruffus/test/test_newstyle_proxy.py
index e47a383..63e6a38 100755
--- a/ruffus/test/test_newstyle_proxy.py
+++ b/ruffus/test/test_newstyle_proxy.py
@@ -9,6 +9,8 @@ from __future__ import print_function
 import os
 import sys
 
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
+
 # add grandparent to search path for testing
 grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
 sys.path.insert(0, grandparent_dir)
@@ -16,19 +18,7 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
-
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
+from ruffus import Pipeline, suffix, pipeline_run
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@@ -70,7 +60,7 @@ def same_file_name_task(input_file_name, output_file_name, executed_tasks_proxy,
 #   Links file names, is always as up to date if links are not missing
 #
 def linked_file_name_task(input_file_name, output_file_name, executed_tasks_proxy, mutex_proxy):
-    os.symlink(input_file_name, output_file_name)
+    os.symlink(os.path.abspath(input_file_name), os.path.abspath(output_file_name))
     with mutex_proxy:
         executed_tasks_proxy["linked_file_name_task"] = executed_tasks_proxy.get("linked_file_name_task", 0) + 1
 
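The os.path.abspath() wrapping above matters because os.symlink() stores the target string verbatim: a relative target is resolved against the directory containing the link, not the current working directory. A sketch of the failure mode this avoids, using an illustrative tmp/ directory:

    import os

    try:
        os.mkdir("tmp")
    except OSError:
        pass
    open("tmp/a.1", "w").close()

    # a relative target is resolved against the link's own directory, so
    # os.symlink("tmp/a.1", "tmp/a.linked.1") would create a link pointing
    # at the non-existent tmp/tmp/a.1; an absolute target resolves anywhere
    os.symlink(os.path.abspath("tmp/a.1"), "tmp/a.linked.1")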
@@ -111,8 +101,9 @@ class Test_ruffus(unittest.TestCase):
 
         pipeline = Pipeline.pipelines["main"]
         pipeline.originate(task_func = start_task,
-                            output = ["a.1", "b.1"],
-                            extras = [executed_tasks_proxy, mutex_proxy])
+                            output = [tempdir + "a.1", tempdir + "b.1"],
+                            extras = [executed_tasks_proxy, mutex_proxy])\
+                .mkdir(tempdir)
         pipeline.transform(task_func = same_file_name_task,
                             input = start_task,
                             filter = suffix(".1"),
@@ -132,10 +123,14 @@ class Test_ruffus(unittest.TestCase):
 
     def cleanUp(self, check_expected = False):
         for f in ["a.1", "b.1", "a.linked.1", "b.linked.1", "a.3", "b.3", "a.linked.3", "b.linked.3"]:
-            if os.path.lexists(f):
-                os.unlink(f)
+            if os.path.lexists(tempdir + f):
+                os.unlink(tempdir + f)
             elif check_expected:
-                    raise Exception("Expected %s missing" % f)
+                raise Exception("Expected %s missing" % (tempdir + f))
+        if os.path.lexists(tempdir):
+            os.rmdir(tempdir)
+        elif check_expected:
+            raise Exception("Expected %s missing" % (tempdir))
 
     def tearDown(self):
         self.cleanUp(True)
@@ -145,14 +140,14 @@ class Test_ruffus(unittest.TestCase):
         #   Run task 1 only
         #
         print("    Run start_task only", file=sys.stderr)
-        pipeline_run(log_exceptions = True, verbose = 0)
+        pipeline_run(log_exceptions = True, verbose = 0, pipeline= "main")
 
 
         #
         #   Run task 3 only
         #
         print("    Run final_task: linked_file_name_task should run as well", file=sys.stderr)
-        pipeline_run(log_exceptions = True, verbose = 0)
+        pipeline_run(log_exceptions = True, verbose = 0, pipeline= "main")
 
 
         #
@@ -161,7 +156,7 @@ class Test_ruffus(unittest.TestCase):
         #       All jobs should be up to date
         #
         print("    Run final_task again: All jobs should be up to date", file=sys.stderr)
-        pipeline_run(log_exceptions = True, verbose = 0)
+        pipeline_run(log_exceptions = True, verbose = 0, pipeline= "main")
 
         #
         #   Make sure right number of jobs / tasks ran
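The executed_tasks_proxy / mutex_proxy pair threaded through the tasks above is created outside these hunks; presumably they are multiprocessing.Manager proxies, which is what lets the execution counts survive across pipeline worker processes. A minimal sketch of that pattern (names illustrative):

    from multiprocessing import Manager

    manager = Manager()
    executed_tasks_proxy = manager.dict()  # shared between worker processes
    mutex_proxy = manager.Lock()

    def record_run(task_name):
        # read-modify-write on a proxy dict is not atomic, hence the lock
        with mutex_proxy:
            executed_tasks_proxy[task_name] = executed_tasks_proxy.get(task_name, 0) + 1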
diff --git a/ruffus/test/test_newstyle_regex_error_messages.py b/ruffus/test/test_newstyle_regex_error_messages.py
index ce72ab0..a4aa475 100755
--- a/ruffus/test/test_newstyle_regex_error_messages.py
+++ b/ruffus/test/test_newstyle_regex_error_messages.py
@@ -28,7 +28,8 @@ from __future__ import print_function
 
 """
 
-workdir = 'tmp_test_regex_error_messages'
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
 #sub-1s resolution in system?
 one_second_per_job = None
 parallelism = 2
@@ -45,15 +46,11 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "follows", "originate", "Pipeline", "regex":
-    globals()[attr] = getattr (ruffus, attr)
-RUFFUS_HISTORY_FILE = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-fatal_error_input_file_does_not_match = ruffus.ruffus_exceptions.fatal_error_input_file_does_not_match
-RethrownJobError                      = ruffus.ruffus_exceptions.RethrownJobError
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, suffix, transform, split, merge, dbdict, follows, originate, Pipeline, regex
+
+from ruffus.ruffus_exceptions import RethrownJobError, fatal_error_input_file_does_not_match
+from ruffus.ruffus_utility import CHECKSUM_FILE_TIMESTAMPS, RUFFUS_HISTORY_FILE, get_default_history_file_name
 
 
 
@@ -168,7 +165,7 @@ def test_regex_out_of_range_regex_reference_error_task(infiles, outfile,
 test_pipeline = Pipeline("test")
 
 test_pipeline.originate(task_func = generate_initial_files1,
-                        output    = [workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcdefghi"])
+                        output    = [tempdir +  "/" + prefix + "_name.tmp1" for prefix in "abcdefghi"])
 
 test_pipeline.transform(task_func = test_regex_task,
                         input     = generate_initial_files1,
@@ -229,7 +226,7 @@ test_pipeline.transform(task_func = test_regex_out_of_range_regex_reference_erro
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), RUFFUS_HISTORY_FILE))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
 
 class _AssertRaisesContext_27(object):
     """A context manager used to implement TestCase.assertRaises* methods.
@@ -272,7 +269,7 @@ class _AssertRaisesContext_27(object):
 class Test_regex_error_messages(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
         if sys.hexversion < 0x03000000:
@@ -315,7 +312,7 @@ class Test_regex_error_messages(unittest.TestCase):
 
         s = StringIO()
         test_pipeline.printout(s, [test_regex_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('Missing files.*\[tmp_test_regex_error_messages/a_name.tmp1, tmp_test_regex_error_messages/a_name.tmp2', s.getvalue(), re.DOTALL))
+        self.assertTrue(re.search('Missing files.*\[{tempdir}/a_name.tmp1, {tempdir}/a_name.tmp2'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
 
     def test_regex_run(self):
@@ -333,7 +330,8 @@ class Test_regex_error_messages(unittest.TestCase):
         cleanup_tmpdir()
         s = StringIO()
         test_pipeline.printout(s, [test_regex_unmatched_task], verbose=5, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: File 'tmp_test_regex_error_messages/a_name.tmp1' does not match regex", s.getvalue())
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("File '{tempdir}/a_name.tmp1' does not match regex".format(tempdir=tempdir), s.getvalue())
 
     def test_regex_unmatched_run(self):
         """Run transform(...,regex()...)"""
@@ -351,7 +349,7 @@ class Test_regex_error_messages(unittest.TestCase):
 
         s = StringIO()
         test_pipeline.printout(s, [test_suffix_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('Missing files.*\[tmp_test_regex_error_messages/a_name.tmp1, tmp_test_regex_error_messages/a_name.tmp2', s.getvalue(), re.DOTALL))
+        self.assertTrue(re.search('Missing files.*\[{tempdir}/a_name.tmp1, {tempdir}/a_name.tmp2'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
     def test_suffix_run(self):
         """Run transform(...,suffix()...)"""
@@ -386,7 +384,8 @@ class Test_regex_error_messages(unittest.TestCase):
         cleanup_tmpdir()
         s = StringIO()
         test_pipeline.printout(s, [test_suffix_unmatched_task2], verbose=5, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: File 'tmp_test_regex_error_messages/a_name.tmp1' does not match suffix", s.getvalue())
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("File '{tempdir}/a_name.tmp1' does not match suffix".format(tempdir=tempdir), s.getvalue())
 
     def test_suffix_unmatched_run2(self):
         """Run transform(...,suffix()...)"""
@@ -455,7 +454,7 @@ class Test_regex_error_messages(unittest.TestCase):
     #___________________________________________________________________________
     def tearDown(self):
         pass
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
 
 
 
@@ -464,7 +463,7 @@ class Test_regex_error_messages(unittest.TestCase):
 #       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
 #
 if __name__ == '__main__':
-    #pipeline_printout(sys.stdout, [test_product_task], verbose = 3)
+    #pipeline_printout(sys.stdout, [test_product_task], verbose = 3, pipeline= "main")
     parallelism = 1
     suite = unittest.TestLoader().loadTestsFromTestCase(Test_regex_error_messages)
     unittest.TextTestRunner(verbosity=1).run(suite)
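The assertion rewrites above track a rewording in 2.6.3: the old one-line "Warning: File match failure: ..." message became a "Warning: Input substitution failed:" header with the detail on a following line. Checking the two fragments independently keeps the tests robust to whatever lies between them; schematically (sample text illustrative):

    output = ("Warning: Input substitution failed:\n"
              "    File 'test_dir/a_name.tmp1' does not match regex\n")
    assert "Warning: Input substitution failed:" in output
    assert "does not match regex" in output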
diff --git a/ruffus/test/test_pausing.py b/ruffus/test/test_pausing.py
index 7930452..3701d2d 100755
--- a/ruffus/test/test_pausing.py
+++ b/ruffus/test/test_pausing.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -19,19 +20,7 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
+from ruffus import  *
 
 
 
@@ -107,7 +96,6 @@ def test_job_io(infiles, outfiles, extra_params):
 #                   5   ->    6
 #
 
-tempdir = "test_pausing_dir/"
 def do_write(file_name, what):
     with open(file_name, "a") as oo:
         oo.write(what)
@@ -305,7 +293,7 @@ class Test_ruffus(unittest.TestCase):
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
         check_final_output_correct()
         check_job_order_correct(tempdir + "jobs.start")
         check_job_order_correct(tempdir + "jobs.finish")
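The recurring pipeline= "main" additions pin each call to the default pipeline, which Ruffus registers under the name "main"; presumably this avoids ambiguity now that the same test modules also build named Pipeline objects alongside the decorator-style tasks:

    from ruffus import pipeline_run

    # run only the tasks attached to the default ("main") pipeline
    pipeline_run(multiprocess=50, verbose=0, pipeline="main")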
diff --git a/ruffus/test/test_pipeline_printout_graph.py b/ruffus/test/test_pipeline_printout_graph.py
index 2f6b201..492e904 100755
--- a/ruffus/test/test_pipeline_printout_graph.py
+++ b/ruffus/test/test_pipeline_printout_graph.py
@@ -6,9 +6,8 @@ from __future__ import print_function
 
 
 """
-tempdir = "testing_dir/"
-
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -21,10 +20,7 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "pipeline_run", "pipeline_printout_graph", "originate", "split", "transform", "subdivide", "formatter", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
+from ruffus import pipeline_run, pipeline_printout_graph, originate, split, transform, subdivide, formatter, Pipeline
 
 
 
@@ -147,25 +143,26 @@ class Test_ruffus(unittest.TestCase):
 
         print("     Run pipeline normally...")
         if self.graph_viz_present:
-            pipeline_printout_graph(tempdir + "flowchart.dot")
+            pipeline_printout_graph(tempdir + "flowchart.dot", pipeline= "main")
             pipeline_printout_graph(tempdir + "flowchart.jpg",
                                         target_tasks =[subdivide_start],
                                         forcedtorun_tasks = [split_start],
                                         no_key_legend = True)
-            pipeline_printout_graph(tempdir + "flowchart.svg", no_key_legend = False)
+            pipeline_printout_graph(tempdir + "flowchart.svg", no_key_legend = False, pipeline= "main")
             # Unknown format
             try:
-                pipeline_printout_graph(tempdir + "flowchart.unknown", no_key_legend = False)
+                pipeline_printout_graph(tempdir + "flowchart.unknown", no_key_legend = False, pipeline= "main")
                 raise Exception("Failed to throw exception for pipeline_printout_graph unknown extension ")
             except CalledProcessError as err:
                 pass
-            pipeline_printout_graph(tempdir + "flowchart.unknown", "svg", no_key_legend = False)
+            pipeline_printout_graph(tempdir + "flowchart.unknown", "svg", no_key_legend = False, pipeline= "main")
 
         else:
             pipeline_printout_graph(tempdir + "flowchart.dot",
                                         target_tasks =[subdivide_start],
                                         forcedtorun_tasks = [split_start],
-                                        no_key_legend = True)
+                                        no_key_legend = True,
+                                        pipeline= "main")
 
     def test_newstyle_ruffus (self):
 
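pipeline_printout_graph() infers the output format from the file extension, which is why the ".unknown" call above is expected to fail, and it accepts an explicit format as the second positional argument to override the guess (sketch assumes Graphviz dot is installed and a pipeline is defined):

    from ruffus import pipeline_printout_graph

    # format inferred from the ".svg" extension
    pipeline_printout_graph("flowchart.svg", no_key_legend=False, pipeline="main")

    # extension gives no hint, so name the format explicitly
    pipeline_printout_graph("flowchart.unknown", "svg", no_key_legend=False,
                            pipeline="main")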
diff --git a/ruffus/test/test_posttask_merge.py b/ruffus/test/test_posttask_merge.py
index ed5ccef..fa31ace 100755
--- a/ruffus/test/test_posttask_merge.py
+++ b/ruffus/test/test_posttask_merge.py
@@ -7,7 +7,8 @@ from __future__ import print_function
         bug where @files follows merge and extra parenthesis inserted
 
 """
-tempdir = "temp_filesre_split_and_combine/"
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 test_file = tempdir  + "test_output"
 jobs_per_task = 50
 
@@ -21,21 +22,7 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
-
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
-
+from ruffus import  *
 
 
 
@@ -187,7 +174,7 @@ Results recombined
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
         with open(test_file) as ii:
             post_task_text =  ii.read()
         self.assertEqual(post_task_text, self.expected_text)
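This file exercises @posttask, which runs a callable once after all jobs of the decorated task have finished, rather than once per job. A minimal sketch of the pattern, with illustrative file names:

    from ruffus import originate, merge, posttask, pipeline_run

    def write_complete_flag():
        with open("test_output", "a") as oo:
            oo.write("Results recombined\n")

    @originate(["piece.1", "piece.2"])
    def make_pieces(output_file):
        open(output_file, "w").close()

    @posttask(write_complete_flag)
    @merge(make_pieces, "all.combined")
    def combine(input_files, output_file):
        open(output_file, "w").close()

    pipeline_run(verbose=0, pipeline="main")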
diff --git a/ruffus/test/test_proxy_logger.py b/ruffus/test/test_proxy_logger.py
new file mode 100755
index 0000000..274fc14
--- /dev/null
+++ b/ruffus/test/test_proxy_logger.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+from __future__ import print_function
+"""
+
+    test_proxy_logger.py
+
+
+
+"""
+
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
+import sys
+
+# add grandparent to search path for testing
+grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+sys.path.insert(0, grandparent_dir)
+
+# module name = script name without extension
+module_name = os.path.splitext(os.path.basename(__file__))[0]
+
+
+# import ruffus directly (grandparent dir is already on sys.path)
+import ruffus
+from ruffus import *
+from ruffus.cmdline import handle_verbose
+from ruffus.proxy_logger import make_shared_logger_and_proxy, setup_std_shared_logger
+
+import logging
+import unittest
+import re
+import shutil
+
+
+
+#import traceback
+
+
+class Test_Logging(unittest.TestCase):
+
+
+
+    def test_rotating_log(self):
+        """
+            test rotating via proxy
+        """
+        open("/tmp/lg.log", "w").close()
+        args={}
+        args["file_name"] = "/tmp/lg.log"
+        args["rotating"] = True
+        args["maxBytes"]=20000
+        args["backupCount"]=10
+        #args["level"]= logging.INFO
+        (my_log,
+         logging_mutex) = make_shared_logger_and_proxy (setup_std_shared_logger,
+                                                        "my_logger", args)
+        with logging_mutex:
+            my_log.debug('This is a debug message')
+            my_log.info('This is an info message')
+            my_log.warning('This is a warning message')
+            my_log.error('This is an error message')
+            my_log.critical('This is a critical error message')
+            my_log.log(logging.ERROR, 'This is a debug message')
+        with open("/tmp/lg.log") as ii:
+            self.assertTrue(ii.read() == \
+"""This is a warning message
+This is an error message
+This is a critical error message
+This is a debug message
+""")
+
+
+#
+#   debug code not run if called as a module
+#
+if __name__ == '__main__':
+    unittest.main()
+
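The assertion in the new test relies on the shared logger's default threshold, which appears to be WARNING (note the commented-out args["level"] line): the debug and info calls never reach the file, while my_log.log(logging.ERROR, ...) does. A minimal non-rotating variant of the same setup:

    from ruffus.proxy_logger import (make_shared_logger_and_proxy,
                                     setup_std_shared_logger)

    args = {"file_name": "/tmp/plain.log"}
    (my_log,
     logging_mutex) = make_shared_logger_and_proxy(setup_std_shared_logger,
                                                   "plain_logger", args)
    with logging_mutex:
        my_log.info("filtered: below the default threshold")
        my_log.warning("this line reaches /tmp/plain.log")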
diff --git a/ruffus/test/test_regex_error_messages.py b/ruffus/test/test_regex_error_messages.py
index 5db1c4b..a2cf8f3 100755
--- a/ruffus/test/test_regex_error_messages.py
+++ b/ruffus/test/test_regex_error_messages.py
@@ -27,7 +27,8 @@ from __future__ import print_function
         SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 
 """
-workdir = 'tmp_test_regex_error_messages'
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 #sub-1s resolution in system?
 one_second_per_job = None
 parallelism = 2
@@ -67,7 +68,7 @@ except:
 #
 #   generate_initial_files1
 #___________________________________________________________________________
-@originate([workdir +  "/" + prefix + "_name.tmp1" for prefix in "abcdefghi"])
+@originate([tempdir +  prefix + "_name.tmp1" for prefix in "abcdefghi"])
 def generate_initial_files1(out_name):
     with open(out_name, 'w') as outfile:
         pass
@@ -210,7 +211,7 @@ def test_regex_out_of_range_regex_reference_error_task(infiles, outfile,
 
 
 def cleanup_tmpdir():
-    os.system('rm -f %s %s' % (os.path.join(workdir, '*'), RUFFUS_HISTORY_FILE))
+    os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))
 
 class _AssertRaisesContext_27(object):
     """A context manager used to implement TestCase.assertRaises* methods.
@@ -253,7 +254,7 @@ class _AssertRaisesContext_27(object):
 class Test_regex_error_messages(unittest.TestCase):
     def setUp(self):
         try:
-            os.mkdir(workdir)
+            os.mkdir(tempdir)
         except OSError:
             pass
         if sys.hexversion < 0x03000000:
@@ -295,15 +296,15 @@ class Test_regex_error_messages(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_regex_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('Missing files.*\[tmp_test_regex_error_messages/a_name.tmp1, tmp_test_regex_error_messages/a_name.tmp2', s.getvalue(), re.DOTALL))
+        pipeline_printout(s, [test_regex_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertTrue(re.search('Missing files.*\[{tempdir}a_name.tmp1, {tempdir}a_name.tmp2'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
 
     def test_regex_run(self):
         """Run transform(...,regex()...)"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_regex_task], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job)
+        pipeline_run([test_regex_task], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job, pipeline= "main")
 
 
     #___________________________________________________________________________
@@ -313,14 +314,15 @@ class Test_regex_error_messages(unittest.TestCase):
     def test_regex_unmatched_printout(self):
         cleanup_tmpdir()
         s = StringIO()
-        pipeline_printout(s, [test_regex_unmatched_task], verbose=5, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: File 'tmp_test_regex_error_messages/a_name.tmp1' does not match regex", s.getvalue())
+        pipeline_printout(s, [test_regex_unmatched_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("File '{tempdir}a_name.tmp1' does not match regex".format(tempdir=tempdir), s.getvalue())
 
     def test_regex_unmatched_run(self):
         """Run transform(...,regex()...)"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_regex_unmatched_task], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job)
+        pipeline_run([test_regex_unmatched_task], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job, pipeline= "main")
 
 
     #___________________________________________________________________________
@@ -331,14 +333,14 @@ class Test_regex_error_messages(unittest.TestCase):
         cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [test_suffix_task], verbose=5, wrap_width = 10000)
-        self.assertTrue(re.search('Missing files.*\[tmp_test_regex_error_messages/a_name.tmp1, tmp_test_regex_error_messages/a_name.tmp2', s.getvalue(), re.DOTALL))
+        pipeline_printout(s, [test_suffix_task], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertTrue(re.search('Missing files.*\[{tempdir}a_name.tmp1, {tempdir}a_name.tmp2'.format(tempdir=tempdir), s.getvalue(), re.DOTALL))
 
     def test_suffix_run(self):
         """Run transform(...,suffix()...)"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_suffix_task], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job)
+        pipeline_run([test_suffix_task], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job, pipeline= "main")
 
 
     #___________________________________________________________________________
@@ -366,14 +368,15 @@ class Test_regex_error_messages(unittest.TestCase):
     def test_suffix_unmatched_printout2(self):
         cleanup_tmpdir()
         s = StringIO()
-        pipeline_printout(s, [test_suffix_unmatched_task2], verbose=5, wrap_width = 10000)
-        self.assertIn("Warning: File match failure: File 'tmp_test_regex_error_messages/a_name.tmp1' does not match suffix", s.getvalue())
+        pipeline_printout(s, [test_suffix_unmatched_task2], verbose=5, wrap_width = 10000, pipeline= "main")
+        self.assertIn("Warning: Input substitution failed:", s.getvalue())
+        self.assertIn("File '{tempdir}a_name.tmp1' does not match suffix".format(tempdir=tempdir), s.getvalue())
 
     def test_suffix_unmatched_run2(self):
         """Run transform(...,suffix()...)"""
         # output is up to date, but function body changed (e.g., source different)
         cleanup_tmpdir()
-        pipeline_run([test_suffix_unmatched_task2], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job)
+        pipeline_run([test_suffix_unmatched_task2], verbose=0, multiprocess = parallelism, one_second_per_job = one_second_per_job, pipeline= "main")
 
 
 
@@ -436,7 +439,7 @@ class Test_regex_error_messages(unittest.TestCase):
     #___________________________________________________________________________
     def tearDown(self):
         pass
-        shutil.rmtree(workdir)
+        shutil.rmtree(tempdir)
 
 
 
@@ -445,7 +448,7 @@ class Test_regex_error_messages(unittest.TestCase):
 #       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
 #
 if __name__ == '__main__':
-    #pipeline_printout(sys.stdout, [test_product_task], verbose = 3)
+    #pipeline_printout(sys.stdout, [test_product_task], verbose = 3, pipeline= "main")
     parallelism = 1
     suite = unittest.TestLoader().loadTestsFromTestCase(Test_regex_error_messages)
     unittest.TextTestRunner(verbosity=1).run(suite)
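For reference, cleanup_tmpdir() above shells out to rm -f; a pure-Python equivalent using this module's own tempdir and RUFFUS_HISTORY_FILE names would be:

    import glob
    import os

    def cleanup_tmpdir():
        for path in glob.glob(os.path.join(tempdir, "*")) + [RUFFUS_HISTORY_FILE]:
            try:
                os.unlink(path)
            except OSError:
                pass  # already absent, matching rm -f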
diff --git a/ruffus/test/test_ruffus_utility.py b/ruffus/test/test_ruffus_utility.py
index 7349c17..70bfbc6 100755
--- a/ruffus/test/test_ruffus_utility.py
+++ b/ruffus/test/test_ruffus_utility.py
@@ -21,24 +21,8 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-try:
-    attrlist = ruffus.ruffus_utility.__all__
-except AttributeError:
-    attrlist = dir (ruffus.ruffus_utility)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus.ruffus_utility, attr)
+from ruffus import  *
+from ruffus.ruffus_utility import *
 
 
 import unittest
diff --git a/ruffus/test/test_ruffus_utility_parse_task_arguments.py b/ruffus/test/test_ruffus_utility_parse_task_arguments.py
index 8e83918..6ad0c77 100755
--- a/ruffus/test/test_ruffus_utility_parse_task_arguments.py
+++ b/ruffus/test/test_ruffus_utility_parse_task_arguments.py
@@ -22,25 +22,8 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-try:
-    attrlist = ruffus.ruffus_utility.__all__
-except AttributeError:
-    attrlist = dir (ruffus.ruffus_utility)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus.ruffus_utility, attr)
-
+from ruffus import  *
+from ruffus.ruffus_utility import  *
 import unittest
 #_________________________________________________________________________________________
 
@@ -60,18 +43,21 @@ class Test_parse_transform_args (unittest.TestCase):
                              'filter'   : orig_unnamed_arguments[1],
                              'output'   : orig_unnamed_arguments[2],
                              'extras'   : orig_unnamed_arguments[3:],
+                             'named_extras'   : {},
                              'modify_inputs_mode': 2,
                              'modify_inputs': None}
         add_inputs_expected_results = { 'input'    : orig_unnamed_arguments[0],
                                          'filter'   : orig_unnamed_arguments[1],
                                          'output'   : orig_unnamed_arguments[2],
                                          'extras'   : orig_unnamed_arguments[3:],
+                                         'named_extras'   : {},
                                          'modify_inputs_mode': 0,
                                          'modify_inputs': ("a.test", "b.test")}
         replace_inputs_expected_results = { 'input'    : orig_unnamed_arguments[0],
                                          'filter'   : orig_unnamed_arguments[1],
                                          'output'   : orig_unnamed_arguments[2],
                                          'extras'   : orig_unnamed_arguments[3:],
+                                         'named_extras'   : {},
                                          'modify_inputs_mode': 1,
                                          'modify_inputs': ("a.test", "b.test")}
 
@@ -103,7 +89,7 @@ class Test_parse_transform_args (unittest.TestCase):
                                         {'input'    : orig_unnamed_arguments[0],
                                          'filter'   : orig_unnamed_arguments[1],
                                          'output'   : orig_unnamed_arguments[2],
-                                         'extras'   : orig_unnamed_arguments[3:]
+                                         'extras'   : orig_unnamed_arguments[3:],
                                          },
                                         expected_arguments, task_description)
         self.assertEqual(results, expected_results)
@@ -114,7 +100,7 @@ class Test_parse_transform_args (unittest.TestCase):
                                             {'input'    : orig_unnamed_arguments[0],
                                              'filter'   : "a",
                                              'output'   : orig_unnamed_arguments[2],
-                                             'extras'   : orig_unnamed_arguments[3:]
+                                             'extras'   : orig_unnamed_arguments[3:],
                                              },
                                             expected_arguments, task_description)
 
@@ -132,7 +118,8 @@ class Test_parse_transform_args (unittest.TestCase):
         with self.assertRaises(error_too_many_args):
             results = parse_task_arguments (orig_unnamed_arguments,
                                         {'input'    : orig_unnamed_arguments[0],
-                                         'extras'   : orig_unnamed_arguments[3:]
+                                         'extras'   : orig_unnamed_arguments[3:],
+                                         'named_extras'   : {},
                                          },
                                         expected_arguments, task_description)
 
@@ -232,18 +219,21 @@ class Test_parse_product_args (unittest.TestCase):
                              'filter'   : [orig_unnamed_arguments[1], orig_unnamed_arguments[3], orig_unnamed_arguments[5]],
                              'output'   : orig_unnamed_arguments[6],
                              'extras'   : orig_unnamed_arguments[7:],
+                             'named_extras'   : {},
                              'modify_inputs_mode': 2,
                              'modify_inputs': None}
         add_inputs_expected_results = { 'input'    : [orig_unnamed_arguments[0], orig_unnamed_arguments[2], orig_unnamed_arguments[4]],
                                          'filter'   : [orig_unnamed_arguments[1], orig_unnamed_arguments[3], orig_unnamed_arguments[5]],
                                          'output'   : orig_unnamed_arguments[6],
                                          'extras'   : orig_unnamed_arguments[7:],
+                                         'named_extras'   : {},
                                          'modify_inputs_mode': 0,
                                          'modify_inputs': ("a.test", "b.test")}
         replace_inputs_expected_results = { 'input'    : [orig_unnamed_arguments[0], orig_unnamed_arguments[2], orig_unnamed_arguments[4]],
                                             'filter'   : [orig_unnamed_arguments[1], orig_unnamed_arguments[3], orig_unnamed_arguments[5]],
                                             'output'   : orig_unnamed_arguments[6],
                                             'extras'   : orig_unnamed_arguments[7:],
+                                            'named_extras'   : {},
                                             'modify_inputs_mode': 1,
                                             'modify_inputs': ("a.test", "b.test")}
 
@@ -279,7 +269,7 @@ class Test_parse_product_args (unittest.TestCase):
                                          'input3'   : orig_unnamed_arguments[4],
                                          'filter3'  : orig_unnamed_arguments[5],
                                          'output'   : orig_unnamed_arguments[6],
-                                         'extras'   : orig_unnamed_arguments[7:]
+                                         'extras'   : orig_unnamed_arguments[7:],
                                          },
                                         expected_arguments, task_description)
         self.assertEqual(results, expected_results)
@@ -303,7 +293,7 @@ class Test_parse_product_args (unittest.TestCase):
         with self.assertRaises(error_too_many_args):
             results = parse_task_arguments (orig_unnamed_arguments,
                                         {'input'    : orig_unnamed_arguments[0],
-                                         'extras'   : orig_unnamed_arguments[7:]
+                                         'extras'   : orig_unnamed_arguments[7:],
                                          },
                                         expected_arguments, task_description)
 
@@ -400,6 +390,7 @@ class Test_parse_combinatorics_args (unittest.TestCase):
                              'tuple_size': orig_unnamed_arguments[2],
                              'output'   : orig_unnamed_arguments[3],
                              'extras'   : orig_unnamed_arguments[4:],
+                             'named_extras'   : {},
                              'modify_inputs_mode': 2,
                              'modify_inputs': None}
         add_inputs_expected_results = { 'input'    : orig_unnamed_arguments[0],
@@ -407,6 +398,7 @@ class Test_parse_combinatorics_args (unittest.TestCase):
                                          'tuple_size': orig_unnamed_arguments[2],
                                          'output'   : orig_unnamed_arguments[3],
                                          'extras'   : orig_unnamed_arguments[4:],
+                                         'named_extras'   : {},
                                          'modify_inputs_mode': 0,
                                          'modify_inputs': ("a.test", "b.test")}
         replace_inputs_expected_results = { 'input'    : orig_unnamed_arguments[0],
@@ -414,6 +406,7 @@ class Test_parse_combinatorics_args (unittest.TestCase):
                                          'tuple_size': orig_unnamed_arguments[2],
                                          'output'   : orig_unnamed_arguments[3],
                                          'extras'   : orig_unnamed_arguments[4:],
+                                         'named_extras'   : {},
                                          'modify_inputs_mode': 1,
                                          'modify_inputs': ("a.test", "b.test")}
 
@@ -457,7 +450,7 @@ class Test_parse_combinatorics_args (unittest.TestCase):
                                          'filter'   : orig_unnamed_arguments[1],
                                          'tuple_size': orig_unnamed_arguments[2],
                                          'output'   : orig_unnamed_arguments[3],
-                                         'extras'   : orig_unnamed_arguments[4:]
+                                         'extras'   : orig_unnamed_arguments[4:],
                                          },
                                         expected_arguments, task_description)
         self.assertEqual(results, expected_results)
@@ -471,7 +464,7 @@ class Test_parse_combinatorics_args (unittest.TestCase):
                                              'filter'   : orig_unnamed_arguments[1],
                                              'tuple_size': "a",
                                              'output'   : orig_unnamed_arguments[3],
-                                             'extras'   : orig_unnamed_arguments[4:]
+                                             'extras'   : orig_unnamed_arguments[4:],
                                              },
                                             expected_arguments, task_description)
 
@@ -491,7 +484,7 @@ class Test_parse_combinatorics_args (unittest.TestCase):
         with self.assertRaises(error_too_many_args):
             results = parse_task_arguments (orig_unnamed_arguments,
                                         {'input'    : orig_unnamed_arguments[0],
-                                         'extras'   : orig_unnamed_arguments[3:]
+                                         'extras'   : orig_unnamed_arguments[3:],
                                          },
                                         expected_arguments, task_description)
 
@@ -582,7 +575,9 @@ class Test_parse_originate_args (unittest.TestCase):
         orig_unnamed_arguments   = [["a.1","b.1"], 1,2,3,4]
         task_description = "@originate(%s)\ndef myfunc(...)\n"
         expected_results = { 'output'   : orig_unnamed_arguments[0],
-                             'extras'   : orig_unnamed_arguments[1:]}
+                             'extras'   : orig_unnamed_arguments[1:],
+                             'named_extras'   : {},
+                             }
 
         # Error: empty list
         with self.assertRaises(error_missing_args):
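The mechanical edits above reflect a change in parse_task_arguments(): the parsed result now always carries a named_extras entry, an empty dict when a task supplies none, alongside the positional extras. Schematically (values illustrative):

    before = {"input": "a.tmp1", "filter": "suffix_obj", "output": "a.tmp2",
              "extras": [1, 2, 3]}
    after = dict(before, named_extras={})  # new slot in 2.6.3, empty when unused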
diff --git a/ruffus/test/test_runtime_data.py b/ruffus/test/test_runtime_data.py
index e958dc9..06bc925 100755
--- a/ruffus/test/test_runtime_data.py
+++ b/ruffus/test/test_runtime_data.py
@@ -2,14 +2,20 @@
 from __future__ import print_function
 """
 
-    test_tasks.py
+    test_runtime_data.py
+
+        Tests inputs that depend on a run-time parameter
+        (named element in the dict runtime_data passed to pipeline_run)
 
 """
 
-runtime_files = ["a.3"]
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
+runtime_files = [tempdir + "a.3"]
 import sys
+import shutil
+
 
 # add grandparent to search path for testing
 grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
@@ -21,16 +27,9 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, originate, transform, suffix, follows, runtime_parameter, mkdir
 
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
 
 
 
@@ -70,7 +69,7 @@ import json
 #
 #    task1
 #
-@originate(['a.1'] + runtime_files)
+@originate([tempdir + 'a.1'] + runtime_files)
 def task1(outfile):
     """
     First task
@@ -148,19 +147,27 @@ def task4(infile, outfile):
 class Test_ruffus(unittest.TestCase):
     def setUp(self):
         for f in ["a.1", "a.2","a.3","a.4"]:
+            f = os.path.join(tempdir, f)
             if os.path.exists(f):
                 os.unlink(f)
+        try:
+            os.makedirs(tempdir)
+        except:
+            pass
+
 
     def tearDown(self):
         for f in ["a.1", "a.2","a.3","a.4"]:
+            f = os.path.join(tempdir, f)
             if os.path.exists(f):
                 os.unlink(f)
             else:
                 raise Exception("%s is missing" % f)
 
+        shutil.rmtree(tempdir)
 
     def test_ruffus (self):
-        pipeline_run(verbose = 0, runtime_data = {"a": runtime_files})
+        pipeline_run(verbose = 0, runtime_data = {"a": runtime_files}, pipeline= "main")
 
 
     def test_newstyle_ruffus (self):
@@ -168,7 +175,7 @@ class Test_ruffus(unittest.TestCase):
 
         test_pipeline = Pipeline("test")
         test_pipeline.originate(task_func = task1,
-                                output = ['a.1'] + runtime_files)
+                                output = [tempdir + 'a.1'] + runtime_files)
         test_pipeline.transform(task2, task1, suffix(".1"), ".2")
         test_pipeline.transform(task_func = task3,
                                    input = task2,
diff --git a/ruffus/test/test_softlink_uptodate.py b/ruffus/test/test_softlink_uptodate.py
index 5071966..95df7a0 100755
--- a/ruffus/test/test_softlink_uptodate.py
+++ b/ruffus/test/test_softlink_uptodate.py
@@ -6,6 +6,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -18,16 +19,9 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
+import ruffus
+from ruffus import originate, transform, Pipeline, pipeline_run, suffix
 
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
 
 
 
@@ -58,7 +52,11 @@ def same_file_name_task(input_file_name, output_file_name):
 #
 @transform(start_task, suffix(".1"), ".linked.1")
 def linked_file_name_task(input_file_name, output_file_name):
-    os.symlink(input_file_name, output_file_name)
+    try:
+        os.symlink(input_file_name, output_file_name)
+    except:
+        print (input_file_name, output_file_name)
+        raise
 
 
 #
@@ -87,8 +85,10 @@ class Test_ruffus(unittest.TestCase):
     def setUp(self):
         for f in ["a.1", "b.1", "a.linked.1", "b.linked.1", "a.3", "b.3", "a.linked.3", "b.linked.3"]:
             try:
-                os.unlink(f)
+                if os.path.exists(f):
+                    os.unlink(f)
             except:
+                print ("    !!!!OOPs. Can't unlink %s" % f, file = sys.stderr)
                 pass
 
     def tearDown(self):
@@ -99,7 +99,7 @@ class Test_ruffus(unittest.TestCase):
                 raise Exception("Expected %s missing" % f)
 
     def test_ruffus (self):
-        pipeline_run(log_exceptions = True, verbose = 0)
+        pipeline_run(log_exceptions = True, verbose = 0, pipeline= "main")
 
     def test_newstyle_ruffus (self):
         test_pipeline = Pipeline("test")
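A subtlety behind the cleanup code in these symlink tests: os.path.exists() follows links, so a dangling symlink reports False and would be skipped, whereas os.path.lexists() (used in test_newstyle_proxy.py above) detects the link itself:

    import os

    open("target", "w").close()
    os.symlink("target", "link")
    os.unlink("target")             # "link" now dangles

    print(os.path.exists("link"))   # False: follows the broken link
    print(os.path.lexists("link"))  # True:  the link itself is still there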
diff --git a/ruffus/test/test_split_and_combine.py b/ruffus/test/test_split_and_combine.py
index 56eec56..e595257 100755
--- a/ruffus/test/test_split_and_combine.py
+++ b/ruffus/test/test_split_and_combine.py
@@ -10,11 +10,10 @@ from __future__ import print_function
 
 
 import sys
-tempdir = "temp_filesre_split_and_combine/"
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 verbose_output = sys.stderr
 
-import os
-import sys
 
 # add grandparent to search path for testing
 grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
@@ -26,11 +25,7 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-for attr in "posttask", "split", "merge", "transform", "pipeline_printout", "pipeline_run", "Pipeline", "suffix":
-    globals()[attr] = getattr (ruffus, attr)
+from ruffus import  posttask, split, merge, transform, pipeline_printout, pipeline_run, Pipeline, suffix
 
 
 
@@ -150,7 +145,7 @@ class Test_ruffus(unittest.TestCase):
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
         if not os.path.exists(tempdir + "all.combine_results"):
             raise Exception("Missing %s" % (tempdir + "all.combine_results"))
 
diff --git a/ruffus/test/test_split_regex_and_collate.py b/ruffus/test/test_split_regex_and_collate.py
index 3504e89..f430eb4 100755
--- a/ruffus/test/test_split_regex_and_collate.py
+++ b/ruffus/test/test_split_regex_and_collate.py
@@ -8,6 +8,7 @@ from __future__ import print_function
 JOBS_PER_TASK = 5
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 import re
 
@@ -21,23 +22,14 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, suffix, regex, formatter, originate, follows, merge, mkdir, posttask, subdivide, transform, collate, split
 
-try:
-    attrlist = ruffus.combinatorics.__all__
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS
+from ruffus.combinatorics import *
 
-except AttributeError:
-    attrlist = dir (ruffus.combinatorics)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus.combinatorics, attr)
 
-for attr in "collate", "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "follows", "mkdir", "originate", "posttask", "subdivide", "regex", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
-RethrownJobError = ruffus.ruffus_exceptions.RethrownJobError
-RUFFUS_HISTORY_FILE = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
@@ -81,7 +73,6 @@ except ImportError:
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-tempdir = "temp_filesre_split_and_combine"
 
 #
 #   Three starting files
@@ -209,11 +200,11 @@ class Test_ruffus(unittest.TestCase):
         self.cleanup_tmpdir()
 
         s = StringIO()
-        pipeline_printout(s, [combine_results], verbose=5, wrap_width = 10000)
+        pipeline_printout(s, [combine_results], verbose=5, wrap_width = 10000, pipeline= "main")
         self.assertTrue(re.search('Job needs update:.*Missing files.*', s.getvalue(), re.DOTALL) is not None)
         #print s.getvalue()
 
-        pipeline_run([combine_results], verbose=0)
+        pipeline_run([combine_results], verbose=0, pipeline= "main")
 
 
     def test_newstyle_collate (self):
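
pipeline_printout() writes its dry-run report to any file-like object, which
is why these tests can capture and grep it. Roughly, assuming the module's own
combine_results task and imports:

    from io import StringIO
    import re

    s = StringIO()
    pipeline_printout(s, [combine_results], verbose=5, wrap_width=10000,
                      pipeline="main")
    # an out-of-date job is reported as "Job needs update: ... Missing files"
    assert re.search("Job needs update:.*Missing files.*", s.getvalue(), re.DOTALL)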
diff --git a/ruffus/test/test_split_subdivide_checkpointing.py b/ruffus/test/test_split_subdivide_checkpointing.py
index 95c7e4d..4eba908 100755
--- a/ruffus/test/test_split_subdivide_checkpointing.py
+++ b/ruffus/test/test_split_subdivide_checkpointing.py
@@ -6,10 +6,8 @@ from __future__ import print_function
 
 
 """
-tempdir = "testing_dir/"
-
-
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -22,10 +20,10 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "pipeline_run", "pipeline_printout", "originate", "split", "transform", "subdivide", "formatter", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
+
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, formatter, originate, subdivide, split
+
 
 
 
@@ -197,36 +195,36 @@ class Test_ruffus(unittest.TestCase):
         expected_files_after_4_runs = ["3.split", "0_3.subdivided", "1_2.subdivided", "2_1.subdivided", "3_0.subdivided"]
 
         print("     Run pipeline normally...")
-        pipeline_run(multiprocess = 10, verbose=0)
+        pipeline_run(multiprocess = 10, verbose=0, pipeline= "main")
         self.check_file_exists_or_not_as_expected(expected_files_after_1_runs,
                                                  expected_files_after_2_runs)
 
         print("     Check that running again does nothing. (All up to date).")
-        pipeline_run(multiprocess = 10, verbose=0)
+        pipeline_run(multiprocess = 10, verbose=0, pipeline= "main")
         self.check_file_exists_or_not_as_expected(expected_files_after_1_runs,
                                                  expected_files_after_2_runs)
 
         print("     Running again with forced tasks to generate more files...")
-        pipeline_run(forcedtorun_tasks = [make_start], multiprocess = 10, verbose=0)
+        pipeline_run(forcedtorun_tasks = [make_start], multiprocess = 10, verbose=0, pipeline= "main")
         self.check_file_exists_or_not_as_expected(expected_files_after_1_runs
                                                  + expected_files_after_2_runs,
                                                  expected_files_after_3_runs)
 
         print("     Check that running again does nothing. (All up to date).")
-        pipeline_run(multiprocess = 10, verbose=0)
+        pipeline_run(multiprocess = 10, verbose=0, pipeline= "main")
         self.check_file_exists_or_not_as_expected(expected_files_after_1_runs
                                                  + expected_files_after_2_runs,
                                                  expected_files_after_3_runs)
 
 
         print("     Running again with forced tasks to generate even more files...")
-        pipeline_run(forcedtorun_tasks = [make_start], multiprocess = 10, verbose=0)
+        pipeline_run(forcedtorun_tasks = [make_start], multiprocess = 10, verbose=0, pipeline= "main")
         self.check_file_exists_or_not_as_expected(expected_files_after_1_runs
                                                  + expected_files_after_2_runs
                                                  + expected_files_after_3_runs,
                                                  expected_files_after_4_runs)
         print("     Check that running again does nothing. (All up to date).")
-        pipeline_run(multiprocess = 10, verbose=0)
+        pipeline_run(multiprocess = 10, verbose=0, pipeline= "main")
         self.check_file_exists_or_not_as_expected(expected_files_after_1_runs
                                                  + expected_files_after_2_runs
                                                  + expected_files_after_3_runs,
diff --git a/ruffus/test/test_subpipeline.py b/ruffus/test/test_subpipeline.py
index 01a86d7..fb50563 100755
--- a/ruffus/test/test_subpipeline.py
+++ b/ruffus/test/test_subpipeline.py
@@ -1,12 +1,13 @@
 #!/usr/bin/env python
 from __future__ import print_function
 """
-    test_subpipeline.py
+
 
         Demonstrates the new Ruffus syntax in version 2.6
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
 import sys
 
 # add grandparent to search path for testing
@@ -15,7 +16,6 @@ sys.path.insert(0, grandparent_dir)
 
 import ruffus
 from ruffus import add_inputs, suffix, mkdir, regex, Pipeline, output_from, touch_file
-print("\tRuffus Version = ", ruffus.__version__)
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
@@ -37,7 +37,6 @@ def touch (outfile):
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-tempdir = "tempdir/"
 def task_originate(o):
     """
     Makes new files
@@ -79,15 +78,15 @@ def make_pipeline1(pipeline_name,   # Pipelines need to have a unique name
     #   But it can be more convenient to just pass this to the function making the pipeline
     #
     test_pipeline.originate(task_originate, starting_file_names)\
-        .follows(mkdir(tempdir), mkdir(tempdir + "testdir", tempdir + "testdir2"))\
-        .posttask(touch_file(tempdir + "testdir/whatever.txt"))
+        .follows(mkdir(tempdir), mkdir(tempdir + "/testdir", tempdir + "/testdir2"))\
+        .posttask(touch_file(tempdir + "/testdir/whatever.txt"))
     test_pipeline.transform(task_func   = task_m_to_1,
                             name        = "add_input",
                             # Lookup Task from function name task_originate()
                             #   So long as this is unique in the pipeline
                             input       = task_originate,
                             filter      = regex(r"(.*)"),
-                            add_inputs  = add_inputs(tempdir + "testdir/whatever.txt"),
+                            add_inputs  = add_inputs(tempdir + "/testdir/whatever.txt"),
                             output      = r"\1.22")
     test_pipeline.transform(task_func   = task_1_to_1,
                             name        = "22_to_33",
@@ -135,7 +134,7 @@ def make_pipeline2( pipeline_name = "pipeline2"):
                             output      = ".55")
     test_pipeline2.merge(   task_func   = task_m_to_1,
                             input       = test_pipeline2["44_to_55"],
-                            output      = tempdir + "final.output",)
+                            output      = tempdir + "/final.output",)
 
     # Set head and tail
     test_pipeline2.set_tail_tasks([test_pipeline2[task_m_to_1]])
@@ -148,8 +147,8 @@ def make_pipeline2( pipeline_name = "pipeline2"):
 def run_pipeline():
 
     #   First two pipelines are created as separate instances by the make_pipeline1 function
-    pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + ss for ss in ("a.1", "b.1")])
-    pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + ss for ss in ("c.1", "d.1")])
+    pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + "/" + ss for ss in ("a.1", "b.1")])
+    pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + "/" + ss for ss in ("c.1", "d.1")])
 
     #   The Third pipeline is a clone of pipeline1b
     pipeline1c = pipeline1b.clone(new_name = "pipeline1c")
@@ -157,7 +156,7 @@ def run_pipeline():
     #   Set the "originate" files for pipeline1c to ("e.1" and "f.1")
     #       Otherwise they would use the original ("c.1", "d.1")
     pipeline1c.set_output(output = [])
-    pipeline1c.set_output(output = [tempdir + ss for ss in ("e.1", "f.1")])
+    pipeline1c.set_output(output = [tempdir + "/" + ss for ss in ("e.1", "f.1")])
 
     #   Join all pipeline1a-c to pipeline2
     pipeline2 = make_pipeline2()
@@ -185,13 +184,13 @@ class Test_task(unittest.TestCase):
         run_pipeline()
 
         # Check that the output reflecting the pipeline topology is correct.
-        correct_output = 'tempdir/a.1.55=tempdir/a.1.44+tempdir/a.1.33+tempdir/a.1.22+tempdir/a.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/b.1.55=tempdir/b.1.44+tempdir/b.1.33+tempdir/b.1.22+tempdir/b.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/c.1.55=tempdir/c.1.44+tempdir/c.1.33+tempdir/c.1.22+tempdir/c.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/d.1.55=tempdir/d.1.44+tempdir/d.1.33+tempdir/d.1.22+tempdir/d.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/e.1.55=tempdir/e.1.44+tempdir/e.1.33+tempdir/e.1.22+tempdir/e.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/f.1.55=tempdir/f.1.44+tempdir/f.1.33+tempdir/f.1.22+tempdir/f.1=; tempdir/testdir/whatever.txt=; ; '
-        with open(tempdir + "final.output") as real_output:
+        correct_output = '{tempdir}/a.1.55={tempdir}/a.1.44+{tempdir}/a.1.33+{tempdir}/a.1.22+{tempdir}/a.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                         '{tempdir}/b.1.55={tempdir}/b.1.44+{tempdir}/b.1.33+{tempdir}/b.1.22+{tempdir}/b.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                         '{tempdir}/c.1.55={tempdir}/c.1.44+{tempdir}/c.1.33+{tempdir}/c.1.22+{tempdir}/c.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                         '{tempdir}/d.1.55={tempdir}/d.1.44+{tempdir}/d.1.33+{tempdir}/d.1.22+{tempdir}/d.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                         '{tempdir}/e.1.55={tempdir}/e.1.44+{tempdir}/e.1.33+{tempdir}/e.1.22+{tempdir}/e.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                         '{tempdir}/f.1.55={tempdir}/f.1.44+{tempdir}/f.1.33+{tempdir}/f.1.22+{tempdir}/f.1=; {tempdir}/testdir/whatever.txt=; ; '.format(tempdir = tempdir)
+        with open(tempdir + "/final.output") as real_output:
             real_output_str = real_output.read()
         self.assertEqual(correct_output, real_output_str)
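
Condensed, the topology this test builds with the module's own
make_pipeline1() and make_pipeline2() helpers is:

    # two independent instances of the same sub-pipeline...
    pipeline1a = make_pipeline1("pipeline1a", [tempdir + "/a.1", tempdir + "/b.1"])
    pipeline1b = make_pipeline1("pipeline1b", [tempdir + "/c.1", tempdir + "/d.1"])

    # ...a clone of the second, re-pointed at its own starting files...
    pipeline1c = pipeline1b.clone(new_name="pipeline1c")
    pipeline1c.set_output(output=[tempdir + "/e.1", tempdir + "/f.1"])

    # ...all three joined as inputs of one downstream pipeline
    pipeline2 = make_pipeline2()
    pipeline2.set_input(input=[pipeline1a, pipeline1b, pipeline1c])
    pipeline2.run(multiprocess=10, verbose=0)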
 
diff --git a/ruffus/test/test_subpipeline.py b/ruffus/test/test_subpipeline_cmdline.py
similarity index 57%
copy from ruffus/test/test_subpipeline.py
copy to ruffus/test/test_subpipeline_cmdline.py
index 01a86d7..22d5afe 100755
--- a/ruffus/test/test_subpipeline.py
+++ b/ruffus/test/test_subpipeline_cmdline.py
@@ -1,12 +1,13 @@
 #!/usr/bin/env python
 from __future__ import print_function
 """
-    test_subpipeline.py
+
 
         Demonstrates the new Ruffus syntax in version 2.6
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
 import sys
 
 # add grandparent to search path for testing
@@ -15,7 +16,6 @@ sys.path.insert(0, grandparent_dir)
 
 import ruffus
 from ruffus import add_inputs, suffix, mkdir, regex, Pipeline, output_from, touch_file
-print("\tRuffus Version = ", ruffus.__version__)
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
@@ -37,7 +37,6 @@ def touch (outfile):
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-tempdir = "tempdir/"
 def task_originate(o):
     """
     Makes new files
@@ -79,15 +78,15 @@ def make_pipeline1(pipeline_name,   # Pipelines need to have a unique name
     #   But it can be more convenient to just pass this to the function making the pipeline
     #
     test_pipeline.originate(task_originate, starting_file_names)\
-        .follows(mkdir(tempdir), mkdir(tempdir + "testdir", tempdir + "testdir2"))\
-        .posttask(touch_file(tempdir + "testdir/whatever.txt"))
+        .follows(mkdir(tempdir), mkdir(tempdir + "/testdir", tempdir + "/testdir2"))\
+        .posttask(touch_file(tempdir + "/testdir/whatever.txt"))
     test_pipeline.transform(task_func   = task_m_to_1,
                             name        = "add_input",
                             # Lookup Task from function name task_originate()
                             #   So long as this is unique in the pipeline
                             input       = task_originate,
                             filter      = regex(r"(.*)"),
-                            add_inputs  = add_inputs(tempdir + "testdir/whatever.txt"),
+                            add_inputs  = add_inputs(tempdir + "/testdir/whatever.txt"),
                             output      = r"\1.22")
     test_pipeline.transform(task_func   = task_1_to_1,
                             name        = "22_to_33",
@@ -135,7 +134,7 @@ def make_pipeline2( pipeline_name = "pipeline2"):
                             output      = ".55")
     test_pipeline2.merge(   task_func   = task_m_to_1,
                             input       = test_pipeline2["44_to_55"],
-                            output      = tempdir + "final.output",)
+                            output      = tempdir + "/final.output",)
 
     # Set head and tail
     test_pipeline2.set_tail_tasks([test_pipeline2[task_m_to_1]])
@@ -145,57 +144,110 @@ def make_pipeline2( pipeline_name = "pipeline2"):
     return test_pipeline2
 
 
-def run_pipeline():
 
-    #   First two pipelines are created as separate instances by the make_pipeline1 function
-    pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + ss for ss in ("a.1", "b.1")])
-    pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + ss for ss in ("c.1", "d.1")])
 
-    #   The Third pipeline is a clone of pipeline1b
-    pipeline1c = pipeline1b.clone(new_name = "pipeline1c")
 
-    #   Set the "originate" files for pipeline1c to ("e.1" and "f.1")
-    #       Otherwise they would use the original ("c.1", "d.1")
-    pipeline1c.set_output(output = [])
-    pipeline1c.set_output(output = [tempdir + ss for ss in ("e.1", "f.1")])
 
-    #   Join all pipeline1a-c to pipeline2
-    pipeline2 = make_pipeline2()
-    pipeline2.set_input(input = [pipeline1a, pipeline1b, pipeline1c])
 
+#   First two pipelines are created as separate instances by the make_pipeline1 function
+pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + "/" + ss for ss in ("a.1", "b.1")])
+pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + "/" + ss for ss in ("c.1", "d.1")])
+
+#   The Third pipeline is a clone of pipeline1b
+pipeline1c = pipeline1b.clone(new_name = "pipeline1c")
+
+#   Set the "originate" files for pipeline1c to ("e.1" and "f.1")
+#       Otherwise they would use the original ("c.1", "d.1")
+pipeline1c.set_output(output = [])
+pipeline1c.set_output(output = [tempdir + "/" + ss for ss in ("e.1", "f.1")])
+
+#   Join all pipeline1a-c to pipeline2
+pipeline2 = make_pipeline2()
+pipeline2.set_input(input = [pipeline1a, pipeline1b, pipeline1c])
+
+
+import ruffus.cmdline as cmdline
+parser = cmdline.get_argparse(description='Demonstrates the new Ruffus syntax in version 2.6')
+
+parser.add_argument('--cleanup', "-C",
+                    action="store_true",
+                    help="Cleanup before and after.")
 
-    #pipeline2.printout_graph("test.svg", "svg", [task_m_to_1])
-    #pipeline2.printout(verbose = 0)
-    pipeline2.run(multiprocess = 10, verbose = 0)
 
+options = parser.parse_args()
+
+
+
+#  standard python logger which can be synchronised across concurrent Ruffus tasks
+logger, logger_mutex = cmdline.setup_logging (__file__, options.log_file, options.verbose)
+
+logger.debug("\tRuffus Version = " + ruffus.__version__)
+
+if options.cleanup:
+    try:
+        shutil.rmtree(tempdir)
+    except:
+        pass
 
-class Test_task(unittest.TestCase):
+correct = False
+# if we are not printing but running
+if  not options.just_print and \
+    not options.flowchart and \
+    not options.touch_files_only:
 
-    def tearDown (self):
-        """
-        """
+    #
+    #   Cleanup
+    #
+    if options.cleanup:
         try:
             shutil.rmtree(tempdir)
         except:
             pass
 
+    #
+    #   Run
+    #
+    cmdline.run (options)
+
+    # Check that the output reflecting the pipeline topology is correct.
+    correct_output = '{tempdir}/a.1.55={tempdir}/a.1.44+{tempdir}/a.1.33+{tempdir}/a.1.22+{tempdir}/a.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                     '{tempdir}/b.1.55={tempdir}/b.1.44+{tempdir}/b.1.33+{tempdir}/b.1.22+{tempdir}/b.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                     '{tempdir}/c.1.55={tempdir}/c.1.44+{tempdir}/c.1.33+{tempdir}/c.1.22+{tempdir}/c.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                     '{tempdir}/d.1.55={tempdir}/d.1.44+{tempdir}/d.1.33+{tempdir}/d.1.22+{tempdir}/d.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                     '{tempdir}/e.1.55={tempdir}/e.1.44+{tempdir}/e.1.33+{tempdir}/e.1.22+{tempdir}/e.1=; {tempdir}/testdir/whatever.txt=; ; ' \
+                     '{tempdir}/f.1.55={tempdir}/f.1.44+{tempdir}/f.1.33+{tempdir}/f.1.22+{tempdir}/f.1=; {tempdir}/testdir/whatever.txt=; ; '.format(tempdir = tempdir)
+    try:
+        with open(tempdir + "/final.output") as real_output:
+            real_output_str = real_output.read()
+    except Exception as e:
+        real_output_str = str(e) + "\n"
+    if (correct_output != real_output_str):
+        print ("_" * 80 + "\n" +
+               "      " * 25 + "Warning\n" +
+               "_" * 80 + "\n" +
+               "If you had run the whole of pipeline 2, "
+               "you should have obtained:<\n\t%s>\n\n Rather than:<\n\t%s>\n\n"
+                   % (correct_output.replace("; ", ";\n\t"),
+                      real_output_str.replace("; ", ";\n\t")) +
+               "_" * 80, "\n",)
+    else:
+        logger.debug("\tAll Correct.\n")
+        correct = True
 
-    def test_subpipelines (self):
+    #
+    #   Cleanup
+    #
+    if options.cleanup:
+        try:
+            shutil.rmtree(tempdir)
+        except:
+            pass
 
-        run_pipeline()
 
-        # Check that the output reflecting the pipeline topology is correct.
-        correct_output = 'tempdir/a.1.55=tempdir/a.1.44+tempdir/a.1.33+tempdir/a.1.22+tempdir/a.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/b.1.55=tempdir/b.1.44+tempdir/b.1.33+tempdir/b.1.22+tempdir/b.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/c.1.55=tempdir/c.1.44+tempdir/c.1.33+tempdir/c.1.22+tempdir/c.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/d.1.55=tempdir/d.1.44+tempdir/d.1.33+tempdir/d.1.22+tempdir/d.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/e.1.55=tempdir/e.1.44+tempdir/e.1.33+tempdir/e.1.22+tempdir/e.1=; tempdir/testdir/whatever.txt=; ; ' \
-                         'tempdir/f.1.55=tempdir/f.1.44+tempdir/f.1.33+tempdir/f.1.22+tempdir/f.1=; tempdir/testdir/whatever.txt=; ; '
-        with open(tempdir + "final.output") as real_output:
-            real_output_str = real_output.read()
-        self.assertEqual(correct_output, real_output_str)
+else:
+    cmdline.run (options)
+    correct = True
 
 
 
-if __name__ == '__main__':
-    unittest.main()
+sys.exit(0 if correct else 1)
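
The conversion above is the standard Ruffus.cmdline recipe: get_argparse()
supplies the shared command-line options (--verbose, --log_file, --just_print,
--flowchart, --touch_files_only, ...), setup_logging() returns a logger plus a
mutex that is safe to share across concurrent jobs, and cmdline.run(options)
dispatches to the right pipeline_run()/pipeline_printout() call. Stripped of
the test scaffolding:

    import ruffus.cmdline as cmdline

    parser = cmdline.get_argparse(description="Demonstrates the new Ruffus "
                                              "syntax in version 2.6")
    parser.add_argument("--cleanup", "-C", action="store_true",
                        help="Cleanup before and after.")
    options = parser.parse_args()

    # logger + mutex that can be shared across concurrent Ruffus tasks
    logger, logger_mutex = cmdline.setup_logging(__file__, options.log_file,
                                                 options.verbose)

    # run, print, or draw the flowchart, according to the command line
    cmdline.run(options)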
diff --git a/ruffus/test/test_suffix_output_dir.py b/ruffus/test/test_suffix_output_dir.py
index d20b449..a211abe 100755
--- a/ruffus/test/test_suffix_output_dir.py
+++ b/ruffus/test/test_suffix_output_dir.py
@@ -7,6 +7,9 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
+data_dir  = tempdir + "/data"
+work_dir  = tempdir + "/work"
 import sys
 
 # add grandparent to search path for testing
@@ -16,22 +19,8 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
-
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
-
-
+import ruffus
+from ruffus import transform, subdivide, merge, suffix, mkdir, pipeline_run, Pipeline, originate
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@@ -54,27 +43,54 @@ import json
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
-def helper (infiles, outfiles):
-    if not isinstance(infiles, (tuple, list)):
-        infiles = [infiles]
+def helper(infiles, outfiles):
+    """
+    Helper function: writes the input and output file names, plus the contents of the input files, to the output file
+    """
+    # None = []
+    if outfiles is None:
+        outfiles = []
+    # str = [str]
     if not isinstance(outfiles, list):
         outfiles = [outfiles]
 
-    output_text = ""
-    preamble_len = 0
-    for infile in infiles:
-        if infile:
-            with open(infile) as ii:
-                for line in ii:
-                    output_text  += line
-                    preamble_len = max(preamble_len, len(line) - len(line.lstrip()))
-
-    preamble = " " * (preamble_len + 4) if len(output_text) else ""
 
     for outfile in outfiles:
-        file_output_text = preamble + json.dumps(infiles) + " -> " + json.dumps(outfiles) + "\n"
         with open(outfile, "w") as oo:
-            oo.write(output_text + file_output_text)
+            # save file name strings before we turn infiles into a list
+            fn_str = "%s -> %s" % (infiles, outfile)
+
+            # None = []
+            if infiles is None:
+                infiles = []
+            # str = [str]
+            if not isinstance(infiles, list):
+                infiles = [infiles]
+
+
+            # write header
+            #if len(infiles):
+            #    for infile in infiles:
+            #        with open(infile) as ii:
+            #            for line in ii:
+            #                line = line.rstrip()
+            #                if line[-1:] == ":":
+            #                    oo.write(outfile + ":" + line + "\n")
+            #else:
+            #    oo.write(outfile + ":\n")
+
+            max_white_space = -2
+            # write content of infiles indented
+            for infile in infiles:
+                with open(infile) as ii:
+                    for line in ii:
+                        line = line.rstrip()
+                        if line[-1:] != ":":
+                            oo.write(line + "\n")
+                        max_white_space = max([max_white_space, len(line) - len(line.lstrip())])
+
+            # add extra spaces before filenames
+            oo.write(" " * (max_white_space + 2) + fn_str + "\n")
 
 
 
@@ -88,14 +104,11 @@ def helper (infiles, outfiles):
 #
 #    task1
 #
-root_dir  = "test_suffix_output_dir"
-data_dir  = "test_suffix_output_dir/data"
-work_dir  = "test_suffix_output_dir/work"
 @mkdir(data_dir, work_dir)
 @originate([os.path.join(data_dir, "%s.1" % aa) for aa in "abcd"])
 def task1(outfile):
     """
-    First task
+    originate
     """
     # N.B. originate works with an extra parameter
     helper (None, outfile)
@@ -109,24 +122,25 @@ def task1(outfile):
         suffix(".1"),
         ".dir",
         output_dir = work_dir)
- at transform(task1, suffix(".1"), ".1", "extra.tst", 4, r"orig_dir=\1", output_dir = work_dir)
+ at transform(task1, suffix(".1"), [".1", ".bak"], "extra.tst", 4, r"orig_dir=\1", output_dir  = work_dir)
 def task2(infile, outfile, extra_str, extra_num, extra_dir):
     """
-    Second task
+    transform switch directory
     """
-    if (extra_str, extra_num) != ("extra.tst", 4) and \
-       extra_dir[:len("orig_dir=" + data_dir)] != "orig_dir=" + data_dir:
-        raise Exception("transform with output_dir has changed extras")
+    if (extra_str, extra_num) != ("extra.tst", 4) or \
+       extra_dir[:len("orig_dir=" + work_dir)] != "orig_dir=" + work_dir:
+        raise Exception("transform with output_dir has changed extras: %s, %s %s"
+                        %(extra_str, extra_num, extra_dir))
     helper (infile, outfile)
 
 
 #
 #    task3
 #
-@subdivide(task2, suffix(".1"), r"\1.*.2", [r"\1.a.2", r"\1.b.2"])
+@subdivide(task2, suffix(".1"), r"\1.*.2", [r"\1.a.2", r"\1.b.2"], output_dir = data_dir)
 def task3(infile, ignore_outfiles, outfiles):
     """
-    Third task
+    subdivide
     """
     helper (infile, outfiles)
 
@@ -138,7 +152,7 @@ def task3(infile, ignore_outfiles, outfiles):
 @transform(task3, suffix(".2"), ".3", output_dir = work_dir)
 def task4(infile, outfile):
     """
-    Fourth task
+    transform
     """
     helper (infile, outfile)
 
@@ -148,46 +162,62 @@ def task4(infile, outfile):
 @merge(task4, os.path.join(data_dir, "summary.5"))
 def task5(infiles, outfile):
     """
-    Fifth task
+    merge
     """
     helper (infiles, outfile)
 
 
-expected_active_text = """[null] -> ["test_suffix_output_dir/data/a.1"]
-    ["test_suffix_output_dir/data/a.1"] -> ["test_suffix_output_dir/work/a.1"]
-        ["test_suffix_output_dir/work/a.1"] -> ["test_suffix_output_dir/work/a.a.2", "test_suffix_output_dir/work/a.b.2"]
-            ["test_suffix_output_dir/work/a.a.2"] -> ["test_suffix_output_dir/work/a.a.3"]
-[null] -> ["test_suffix_output_dir/data/a.1"]
-    ["test_suffix_output_dir/data/a.1"] -> ["test_suffix_output_dir/work/a.1"]
-        ["test_suffix_output_dir/work/a.1"] -> ["test_suffix_output_dir/work/a.a.2", "test_suffix_output_dir/work/a.b.2"]
-            ["test_suffix_output_dir/work/a.b.2"] -> ["test_suffix_output_dir/work/a.b.3"]
-[null] -> ["test_suffix_output_dir/data/b.1"]
-    ["test_suffix_output_dir/data/b.1"] -> ["test_suffix_output_dir/work/b.1"]
-        ["test_suffix_output_dir/work/b.1"] -> ["test_suffix_output_dir/work/b.a.2", "test_suffix_output_dir/work/b.b.2"]
-            ["test_suffix_output_dir/work/b.a.2"] -> ["test_suffix_output_dir/work/b.a.3"]
-[null] -> ["test_suffix_output_dir/data/b.1"]
-    ["test_suffix_output_dir/data/b.1"] -> ["test_suffix_output_dir/work/b.1"]
-        ["test_suffix_output_dir/work/b.1"] -> ["test_suffix_output_dir/work/b.a.2", "test_suffix_output_dir/work/b.b.2"]
-            ["test_suffix_output_dir/work/b.b.2"] -> ["test_suffix_output_dir/work/b.b.3"]
-[null] -> ["test_suffix_output_dir/data/c.1"]
-    ["test_suffix_output_dir/data/c.1"] -> ["test_suffix_output_dir/work/c.1"]
-        ["test_suffix_output_dir/work/c.1"] -> ["test_suffix_output_dir/work/c.a.2", "test_suffix_output_dir/work/c.b.2"]
-            ["test_suffix_output_dir/work/c.a.2"] -> ["test_suffix_output_dir/work/c.a.3"]
-[null] -> ["test_suffix_output_dir/data/c.1"]
-    ["test_suffix_output_dir/data/c.1"] -> ["test_suffix_output_dir/work/c.1"]
-        ["test_suffix_output_dir/work/c.1"] -> ["test_suffix_output_dir/work/c.a.2", "test_suffix_output_dir/work/c.b.2"]
-            ["test_suffix_output_dir/work/c.b.2"] -> ["test_suffix_output_dir/work/c.b.3"]
-[null] -> ["test_suffix_output_dir/data/d.1"]
-    ["test_suffix_output_dir/data/d.1"] -> ["test_suffix_output_dir/work/d.1"]
-        ["test_suffix_output_dir/work/d.1"] -> ["test_suffix_output_dir/work/d.a.2", "test_suffix_output_dir/work/d.b.2"]
-            ["test_suffix_output_dir/work/d.a.2"] -> ["test_suffix_output_dir/work/d.a.3"]
-[null] -> ["test_suffix_output_dir/data/d.1"]
-    ["test_suffix_output_dir/data/d.1"] -> ["test_suffix_output_dir/work/d.1"]
-        ["test_suffix_output_dir/work/d.1"] -> ["test_suffix_output_dir/work/d.a.2", "test_suffix_output_dir/work/d.b.2"]
-            ["test_suffix_output_dir/work/d.b.2"] -> ["test_suffix_output_dir/work/d.b.3"]
-                ["test_suffix_output_dir/work/a.a.3", "test_suffix_output_dir/work/a.b.3", "test_suffix_output_dir/work/b.a.3", "test_suffix_output_dir/work/b.b.3", "test_suffix_output_dir/work/c.a.3", "test_suffix_output_dir/work/c.b.3", "test_suffix_output_dir/work/d.a.3", "test_suffix_output_dir/work/d.b.3"] -> ["test_suffix_output_dir/data/summary.5"]
-"""
-
+expected_active_text = \
+"""None -> {tempdir}/data/a.1
+  {tempdir}/data/a.1 -> {tempdir}/work/a.1
+None -> {tempdir}/data/a.1
+  ['{tempdir}/data/a.1'] -> {tempdir}/work/a.bak
+    ['{tempdir}/work/a.1', '{tempdir}/work/a.bak'] -> {tempdir}/data/a.a.2
+      {tempdir}/data/a.a.2 -> {tempdir}/work/a.a.3
+None -> {tempdir}/data/a.1
+  {tempdir}/data/a.1 -> {tempdir}/work/a.1
+None -> {tempdir}/data/a.1
+  ['{tempdir}/data/a.1'] -> {tempdir}/work/a.bak
+    ['{tempdir}/work/a.1', '{tempdir}/work/a.bak'] -> {tempdir}/data/a.b.2
+      {tempdir}/data/a.b.2 -> {tempdir}/work/a.b.3
+None -> {tempdir}/data/b.1
+  {tempdir}/data/b.1 -> {tempdir}/work/b.1
+None -> {tempdir}/data/b.1
+  ['{tempdir}/data/b.1'] -> {tempdir}/work/b.bak
+    ['{tempdir}/work/b.1', '{tempdir}/work/b.bak'] -> {tempdir}/data/b.a.2
+      {tempdir}/data/b.a.2 -> {tempdir}/work/b.a.3
+None -> {tempdir}/data/b.1
+  {tempdir}/data/b.1 -> {tempdir}/work/b.1
+None -> {tempdir}/data/b.1
+  ['{tempdir}/data/b.1'] -> {tempdir}/work/b.bak
+    ['{tempdir}/work/b.1', '{tempdir}/work/b.bak'] -> {tempdir}/data/b.b.2
+      {tempdir}/data/b.b.2 -> {tempdir}/work/b.b.3
+None -> {tempdir}/data/c.1
+  {tempdir}/data/c.1 -> {tempdir}/work/c.1
+None -> {tempdir}/data/c.1
+  ['{tempdir}/data/c.1'] -> {tempdir}/work/c.bak
+    ['{tempdir}/work/c.1', '{tempdir}/work/c.bak'] -> {tempdir}/data/c.a.2
+      {tempdir}/data/c.a.2 -> {tempdir}/work/c.a.3
+None -> {tempdir}/data/c.1
+  {tempdir}/data/c.1 -> {tempdir}/work/c.1
+None -> {tempdir}/data/c.1
+  ['{tempdir}/data/c.1'] -> {tempdir}/work/c.bak
+    ['{tempdir}/work/c.1', '{tempdir}/work/c.bak'] -> {tempdir}/data/c.b.2
+      {tempdir}/data/c.b.2 -> {tempdir}/work/c.b.3
+None -> {tempdir}/data/d.1
+  {tempdir}/data/d.1 -> {tempdir}/work/d.1
+None -> {tempdir}/data/d.1
+  ['{tempdir}/data/d.1'] -> {tempdir}/work/d.bak
+    ['{tempdir}/work/d.1', '{tempdir}/work/d.bak'] -> {tempdir}/data/d.a.2
+      {tempdir}/data/d.a.2 -> {tempdir}/work/d.a.3
+None -> {tempdir}/data/d.1
+  {tempdir}/data/d.1 -> {tempdir}/work/d.1
+None -> {tempdir}/data/d.1
+  ['{tempdir}/data/d.1'] -> {tempdir}/work/d.bak
+    ['{tempdir}/work/d.1', '{tempdir}/work/d.bak'] -> {tempdir}/data/d.b.2
+      {tempdir}/data/d.b.2 -> {tempdir}/work/d.b.3
+        ['{tempdir}/work/a.a.3', '{tempdir}/work/a.b.3', '{tempdir}/work/b.a.3', '{tempdir}/work/b.b.3', '{tempdir}/work/c.a.3', '{tempdir}/work/c.b.3', '{tempdir}/work/d.a.3', '{tempdir}/work/d.b.3'] -> {tempdir}/data/summary.5
+""".format(tempdir = tempdir)
 
 
 
@@ -196,12 +226,12 @@ expected_active_text = """[null] -> ["test_suffix_output_dir/data/a.1"]
 class Test_ruffus(unittest.TestCase):
     def setUp(self):
         try:
-            shutil.rmtree(root_dir)
+            shutil.rmtree(tempdir)
         except:
             pass
-        for tempdir in root_dir, work_dir, data_dir:
+        for dd in tempdir, work_dir, data_dir:
             try:
-                os.makedirs(tempdir)
+                os.makedirs(dd)
             except:
                 pass
 
@@ -209,13 +239,13 @@ class Test_ruffus(unittest.TestCase):
 
     def tearDown(self):
         try:
-            shutil.rmtree(root_dir)
+            shutil.rmtree(tempdir)
         except:
-            sys.stderr.write("Can't remove %s" % root_dir)
+            sys.stderr.write("Can't remove %s" % tempdir)
             pass
 
     def test_ruffus (self):
-        pipeline_run(multiprocess = 50, verbose = 0)
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
 
         with open(os.path.join(data_dir, "summary.5")) as ii:
             active_text = ii.read()
@@ -238,11 +268,11 @@ class Test_ruffus(unittest.TestCase):
         test_pipeline.transform(task_func   = task2,
                                 input       = task1,
                                 filter      = suffix(".1"),
-                                output      = ".1",
+                                output      = [".1", ".bak"],
                                 extras      = ["extra.tst", 4, r"orig_dir=\1"],
                                 output_dir  = work_dir)
 
-        test_pipeline.subdivide(task3, task2, suffix(".1"), r"\1.*.2", [r"\1.a.2", r"\1.b.2"])
+        test_pipeline.subdivide(task3, task2, suffix(".1"), r"\1.*.2", [r"\1.a.2", r"\1.b.2"], output_dir = data_dir)
         test_pipeline.transform(task4, task3, suffix(".2"), ".3", output_dir = work_dir)
         test_pipeline.merge(task5, task4, os.path.join(data_dir, "summary.5"))
         test_pipeline.run(multiprocess = 50, verbose = 0)
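
The output_dir parameter exercised above lets suffix() rename files while the
results land in a different directory; sketched from task2, which also shows
that a suffix() output may be a list:

    # data_dir/a.1 -> [work_dir/a.1, work_dir/a.bak]
    @transform(task1, suffix(".1"), [".1", ".bak"],
               "extra.tst", 4, r"orig_dir=\1",
               output_dir=work_dir)
    def task2(infile, outfile, extra_str, extra_num, extra_dir):
        helper(infile, outfile)

Note that the revised assertion in task2 expects the r"orig_dir=\1" extra to
expand against work_dir, not data_dir: regex substitutions in the extras
follow the outputs into the new directory.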
diff --git a/ruffus/test/test_task_file_dependencies.py b/ruffus/test/test_task_file_dependencies.py
index df90f13..976a4dd 100755
--- a/ruffus/test/test_task_file_dependencies.py
+++ b/ruffus/test/test_task_file_dependencies.py
@@ -1,10 +1,5 @@
 #!/usr/bin/env python
 from __future__ import print_function
-################################################################################
-#
-#   test_task_file_dependencies.py
-#
-#################################################################################
 """
     test_task_file_dependencies.py
 """
@@ -24,13 +19,10 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
 # funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "parallel", "pipeline_run", "Pipeline", "task":
-    globals()[attr] = getattr (ruffus, attr)
-open_job_history                = ruffus.file_name_parameters.open_job_history
-CHECKSUM_HISTORY_TIMESTAMPS     = ruffus.ruffus_utility.CHECKSUM_HISTORY_TIMESTAMPS
+import ruffus
+from ruffus import parallel, pipeline_run, Pipeline, task
+from ruffus.file_name_parameters import open_job_history
+from ruffus.ruffus_utility import CHECKSUM_HISTORY_TIMESTAMPS
 
 
 import unittest
diff --git a/ruffus/test/test_task_misc.py b/ruffus/test/test_task_misc.py
index 2a7f360..cbbc5bc 100755
--- a/ruffus/test/test_task_misc.py
+++ b/ruffus/test/test_task_misc.py
@@ -17,11 +17,8 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-for attr in "task",:
-    globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import task
 
 
 
diff --git a/ruffus/test/test_transform_add_inputs.py b/ruffus/test/test_transform_add_inputs.py
index e7b9988..7fc1ad1 100755
--- a/ruffus/test/test_transform_add_inputs.py
+++ b/ruffus/test/test_transform_add_inputs.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -54,7 +55,6 @@ def touch (outfile):
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-tempdir = "tempdir/"
 @follows(mkdir(tempdir))
 @ruffus.files([[None, tempdir+ "a.1"], [None, tempdir+ "b.1"]])
 def task1(i, o):
@@ -107,9 +107,9 @@ class Test_task(unittest.TestCase):
 
 
     def test_task (self):
-        pipeline_run(multiprocess = 10, verbose = 0)
+        pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
 
-        correct_output = "tempdir/a.1.output:tempdir/a.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;tempdir/b.1.output:tempdir/b.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;"
+        correct_output = "{tempdir}a.1.output:{tempdir}a.1,{tempdir}c.1,{tempdir}d.1,test_transform_inputs.py;{tempdir}b.1.output:{tempdir}b.1,{tempdir}c.1,{tempdir}d.1,test_transform_inputs.py;".format(tempdir = tempdir)
         with open(tempdir + "final.output") as real_output:
             real_output_str = real_output.read()
         self.assertEqual(correct_output, real_output_str)
@@ -132,7 +132,7 @@ class Test_task(unittest.TestCase):
                                 output      = tempdir + "final.output")
         test_pipeline.run(multiprocess = 10, verbose = 0)
 
-        correct_output = "tempdir/a.1.output:tempdir/a.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;tempdir/b.1.output:tempdir/b.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;"
+        correct_output = "{tempdir}a.1.output:{tempdir}a.1,{tempdir}c.1,{tempdir}d.1,test_transform_inputs.py;{tempdir}b.1.output:{tempdir}b.1,{tempdir}c.1,{tempdir}d.1,test_transform_inputs.py;".format(tempdir = tempdir)
         with open(tempdir + "final.output") as real_output:
             real_output_str = real_output.read()
         self.assertEqual(correct_output, real_output_str)
diff --git a/ruffus/test/test_transform_formatter.py b/ruffus/test/test_transform_formatter.py
new file mode 100755
index 0000000..9d0a578
--- /dev/null
+++ b/ruffus/test/test_transform_formatter.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+from __future__ import print_function
+"""
+
+    test_transform_formatter.py
+
+"""
+JOBS_PER_TASK = 5
+
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0]))
+import sys
+import re
+
+# add grandparent to search path for testing
+grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+sys.path.insert(0, grandparent_dir)
+
+# module name = script name without extension
+module_name = os.path.splitext(os.path.basename(__file__))[0]
+
+
+# funky code to import by file name
+parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+import ruffus
+from ruffus import pipeline_run, pipeline_printout, Pipeline, formatter, transform, mkdir,originate
+
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.task import t_stream_logger
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   options
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+import shutil
+import unittest
+
+
+try:
+    from StringIO import StringIO
+except:
+    from io import StringIO
+
+
+
+
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   imports
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   Main logic
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   Tasks
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+@mkdir(tempdir)
+@originate([os.path.join(tempdir, ff + ".tmp") for ff in "abcd"])
+def generate_initial_files(out_name):
+    with open(out_name, 'w') as outfile:
+        pass
+
+
+@transform(input = generate_initial_files, filter=formatter(), output = "{path[0]}/{basename[0]}.task1.{whatever}",
+                        extras=['echo {dynamic_message} > {some_file}'])
+def transform_with_missing_formatter_args(input_file, output_files, output1):
+    print ("input = %r, output = %r, extras = %r" % (input_file, output_files, output1))
+
+
+class Test_ruffus(unittest.TestCase):
+    #___________________________________________________________________________
+    #
+    #   setup and cleanup
+    #___________________________________________________________________________
+    def setUp(self):
+        import os
+        try:
+            shutil.rmtree(tempdir)
+        except:
+            pass
+        os.makedirs(tempdir)
+
+    def tearDown(self):
+        shutil.rmtree(tempdir)
+
+    #___________________________________________________________________________
+    #
+    #   test product() pipeline_printout and pipeline_run
+    #___________________________________________________________________________
+    def test_transform_with_missing_formatter_args(self):
+        s = StringIO()
+        pipeline_printout(s, [transform_with_missing_formatter_args], verbose=4, wrap_width = 10000, pipeline= "main")
+        self.assertIn("Missing key = {dynamic_message}", s.getvalue())
+        pipeline_run([transform_with_missing_formatter_args], verbose=0, pipeline= "main")
+
+
+    def test_transform_with_missing_formatter_args_b(self):
+        test_pipeline = Pipeline("test")
+
+
+        test_pipeline.originate(task_func   = generate_initial_files,
+                                output      = [os.path.join(tempdir, ff + ".tmp") for ff in "abcd"])\
+            .mkdir(tempdir)
+
+
+        test_pipeline.transform(task_func   = transform_with_missing_formatter_args,
+                                input       = generate_initial_files,
+                                filter      = formatter(),
+                                output      = "{path[0]}/{basename[0]}.task1",
+                                extras      =['echo {dynamic_message} > {some_file}'])
+        s = StringIO()
+        test_pipeline.printout(s, [transform_with_missing_formatter_args], verbose=4, wrap_width = 10000, pipeline= "test")
+        self.assertIn("Missing key = {dynamic_message}", s.getvalue())
+
+        #log to stream
+        s = StringIO()
+        logger = t_stream_logger(s)
+        test_pipeline.run([transform_with_missing_formatter_args], verbose=5, pipeline= "test", logger=logger)
+        self.assertIn("Missing key = {dynamic_message}", s.getvalue())
+
+
+#
+#   Necessary to protect the "entry point" of the program under windows.
+#       see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
+#
+if __name__ == '__main__':
+    unittest.main()
+
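
The new test pins down what happens when a formatter() substitution cannot be
filled in: the dry run reports the offending key rather than crashing, and
running the pipeline surfaces the same message through the logger. The
assertion pattern, roughly:

    from io import StringIO

    s = StringIO()
    pipeline_printout(s, [transform_with_missing_formatter_args],
                      verbose=4, wrap_width=10000, pipeline="main")
    assert "Missing key = {dynamic_message}" in s.getvalue()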
diff --git a/ruffus/test/test_transform_inputs.py b/ruffus/test/test_transform_inputs.py
index 611873b..b59e097 100755
--- a/ruffus/test/test_transform_inputs.py
+++ b/ruffus/test/test_transform_inputs.py
@@ -9,6 +9,7 @@ from __future__ import print_function
 """
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -19,14 +20,9 @@ sys.path.insert(0, grandparent_dir)
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 
-# funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-
-ruffus = __import__ (ruffus_name)
-
-for attr in "follows", "mkdir", "transform", "regex", "merge", "Pipeline", "pipeline_run":
-    globals()[attr] = getattr (ruffus, attr)
+import ruffus
+from ruffus import follows, mkdir, transform, regex, merge, Pipeline, pipeline_run
 
 
 
@@ -51,7 +47,6 @@ def touch (outfile):
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-tempdir = "tempdir/"
 @follows(mkdir(tempdir))
 @ruffus.files([[None, tempdir+ "a.1"], [None, tempdir+ "b.1"]])
 def task1(i, o):
@@ -92,9 +87,9 @@ class Test_task(unittest.TestCase):
 
 
     def test_task (self):
-        pipeline_run([task4], multiprocess = 10, verbose = 0)
+        pipeline_run([task4], multiprocess = 10, verbose = 0, pipeline= "main")
 
-        correct_output = "tempdir/a.1.output:tempdir/a.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;tempdir/b.1.output:tempdir/b.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;"
+        correct_output = "{tempdir}a.1.output:test_transform_inputs.py,{tempdir}a.1,{tempdir}c.1,{tempdir}d.1;{tempdir}b.1.output:test_transform_inputs.py,{tempdir}b.1,{tempdir}c.1,{tempdir}d.1;".format(tempdir = tempdir)
         with open(tempdir + "final.output") as ff:
             real_output = ff.read()
         self.assertEqual(correct_output, real_output)
@@ -119,12 +114,14 @@ class Test_task(unittest.TestCase):
 
         test_pipeline.run([task4], multiprocess = 10, verbose = 0)
 
-        correct_output = "tempdir/a.1.output:tempdir/a.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;tempdir/b.1.output:tempdir/b.1,tempdir/c.1,tempdir/d.1,test_transform_inputs.py;"
+        correct_output = "{tempdir}a.1.output:test_transform_inputs.py,{tempdir}a.1,{tempdir}c.1,{tempdir}d.1;{tempdir}b.1.output:test_transform_inputs.py,{tempdir}b.1,{tempdir}c.1,{tempdir}d.1;".format(tempdir = tempdir)
         with open(tempdir + "final.output") as ff:
             real_output = ff.read()
         self.assertEqual(correct_output, real_output)
 
 
+
+
 if __name__ == '__main__':
         unittest.main()
 
diff --git a/ruffus/test/test_transform_with_no_re_matches.py b/ruffus/test/test_transform_with_no_re_matches.py
index ba9ad17..d1004e4 100755
--- a/ruffus/test/test_transform_with_no_re_matches.py
+++ b/ruffus/test/test_transform_with_no_re_matches.py
@@ -18,6 +18,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -27,14 +28,8 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
-
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "files", "transform", "regex", "pipeline_run", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
-
+from ruffus import transform, regex, pipeline_run, Pipeline, originate, mkdir
+import ruffus
 print("    Ruffus Version = ", ruffus.__version__)
 
 
@@ -57,11 +52,10 @@ print("    Ruffus Version = ", ruffus.__version__)
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
- at files(None, "a")
-def task_1 (i, o):
-    for f in o:
-        with open(f, 'w') as oo:
-            pass
+@mkdir(tempdir)
+@originate(tempdir + "a")
+def task_1 (o):
+    open(o, 'w').close()
 
 @transform(task_1, regex("b"), "task_2.output")
 def task_2 (i, o):
@@ -98,28 +92,31 @@ class Test_task_mkdir(unittest.TestCase):
         """
         """
         for d in ['a']:
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir + d)
             os.unlink(fullpath)
+        os.rmdir(tempdir)
 
 
     def test_no_re_match (self):
 
         save_to_str_logger = t_save_to_str_logger()
-        pipeline_run(multiprocess = 10, logger = save_to_str_logger, verbose = 1)
+        pipeline_run(multiprocess = 10, logger = save_to_str_logger, verbose = 1, pipeline= "main")
 
-        self.assertTrue("no files names matched" in save_to_str_logger.warning_str)
+        print(save_to_str_logger.warning_str)
+        self.assertTrue("no file names matched" in save_to_str_logger.warning_str)
         print("\n    Warning printed out correctly", file=sys.stderr)
 
     def test_newstyle_no_re_match (self):
 
         test_pipeline = Pipeline("test")
-        test_pipeline.files(task_1, None, "a")
+        test_pipeline.originate(task_1, tempdir + "a").mkdir(tempdir)
         test_pipeline.transform(task_2, task_1, regex("b"), "task_2.output")
 
 
         save_to_str_logger = t_save_to_str_logger()
         test_pipeline.run(multiprocess = 10, logger = save_to_str_logger, verbose = 1)
-        self.assertTrue("no files names matched" in save_to_str_logger.warning_str)
+        print(save_to_str_logger.warning_str)
+        self.assertTrue("no file names matched" in save_to_str_logger.warning_str)
         print("\n    Warning printed out correctly", file=sys.stderr)
 
 
diff --git a/ruffus/test/test_tutorial7.py b/ruffus/test/test_tutorial7.py
index 02d660b..e1d5c7d 100755
--- a/ruffus/test/test_tutorial7.py
+++ b/ruffus/test/test_tutorial7.py
@@ -4,12 +4,12 @@ from __future__ import print_function
 
 NUMBER_OF_RANDOMS = 10000
 CHUNK_SIZE = 1000
-working_dir = "temp_tutorial7/"
 
 
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -19,13 +19,7 @@ sys.path.insert(0, grandparent_dir)
 # module name = script name without extension
 module_name = os.path.splitext(os.path.basename(__file__))[0]
 
-
-# funky code to import by file name
-parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-for attr in "follows", "split", "mkdir", "files", "transform", "suffix", "posttask", "touch_file", "merge", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
+from ruffus import follows, split, mkdir, files, transform, suffix, posttask, touch_file, merge, Pipeline
 
 import random
 import glob
@@ -35,8 +29,8 @@ import glob
 #
 #   Create random numbers
 #
-@follows(mkdir(working_dir))
-@files(None, working_dir + "random_numbers.list")
+@follows(mkdir(tempdir))
+@files(None, tempdir + "random_numbers.list")
 def create_random_numbers(input_file_name, output_file_name):
     f = open(output_file_name, "w")
     for i in range(NUMBER_OF_RANDOMS):
@@ -48,7 +42,7 @@ def create_random_numbers(input_file_name, output_file_name):
 #   Split initial file
 #
 @follows(create_random_numbers)
-@split(working_dir + "random_numbers.list", working_dir + "*.chunks")
+@split(tempdir + "random_numbers.list", tempdir + "*.chunks")
 def step_4_split_numbers_into_chunks (input_file_name, output_files):
     """
         Splits random numbers file into XXX files of CHUNK_SIZE each
@@ -70,7 +64,7 @@ def step_4_split_numbers_into_chunks (input_file_name, output_files):
                 cnt_files += 1
                 if output_file:
                     output_file.close()
-                output_file = open(working_dir + "%d.chunks" % cnt_files, "w")
+                output_file = open(tempdir + "%d.chunks" % cnt_files, "w")
             output_file.write(line)
     if output_file:
         output_file.close()
@@ -105,8 +99,8 @@ def print_whoppee_again():
 #   Calculate sum and sum of squares for each chunk
 #
 @posttask(lambda: sys.stdout.write("     hooray\n"))
-@posttask(print_hooray_again, print_whoppee_again, touch_file(os.path.join(working_dir, "done")))
-@merge(step_5_calculate_sum_of_squares, os.path.join(working_dir, "variance.result"))
+@posttask(print_hooray_again, print_whoppee_again, touch_file(os.path.join(tempdir, "done")))
+@merge(step_5_calculate_sum_of_squares, os.path.join(tempdir, "variance.result"))
 def step_6_calculate_variance (input_file_names, output_file_name):
     """
     Calculate variance naively
@@ -145,20 +139,20 @@ except:
 class Test_ruffus(unittest.TestCase):
     def setUp(self):
         try:
-            shutil.rmtree(working_dir)
+            shutil.rmtree(tempdir)
         except:
             pass
 
     def tearDown(self):
         try:
-            shutil.rmtree(working_dir)
+            shutil.rmtree(tempdir)
             pass
         except:
             pass
 
     def atest_ruffus (self):
-        pipeline_run(multiprocess = 50, verbose = 0)
-        output_file = os.path.join(working_dir, "variance.result")
+        pipeline_run(multiprocess = 50, verbose = 0, pipeline= "main")
+        output_file = os.path.join(tempdir, "variance.result")
         if not os.path.exists (output_file):
             raise Exception("Missing %s" % output_file)
 
@@ -166,13 +160,13 @@ class Test_ruffus(unittest.TestCase):
     def test_newstyle_ruffus (self):
         test_pipeline = Pipeline("test")
 
-        test_pipeline.files(create_random_numbers, None, working_dir + "random_numbers.list")\
-            .follows(mkdir(working_dir))
+        test_pipeline.files(create_random_numbers, None, tempdir + "random_numbers.list")\
+            .follows(mkdir(tempdir))
 
 
         test_pipeline.split(task_func = step_4_split_numbers_into_chunks,
-                       input = working_dir + "random_numbers.list",
-                       output = working_dir + "*.chunks")\
+                       input = tempdir + "random_numbers.list",
+                       output = tempdir + "*.chunks")\
             .follows(create_random_numbers)
 
         test_pipeline.transform(task_func = step_5_calculate_sum_of_squares,
@@ -180,12 +174,12 @@ class Test_ruffus(unittest.TestCase):
                            filter = suffix(".chunks"),
                            output = ".sums")
 
-        test_pipeline.merge(task_func = step_6_calculate_variance, input = step_5_calculate_sum_of_squares, output = os.path.join(working_dir, "variance.result"))\
+        test_pipeline.merge(task_func = step_6_calculate_variance, input = step_5_calculate_sum_of_squares, output = os.path.join(tempdir, "variance.result"))\
             .posttask(lambda: sys.stdout.write("     hooray\n"))\
-            .posttask(print_hooray_again, print_whoppee_again, touch_file(os.path.join(working_dir, "done")))
+            .posttask(print_hooray_again, print_whoppee_again, touch_file(os.path.join(tempdir, "done")))
 
         test_pipeline.run(multiprocess = 50, verbose = 0)
-        output_file = os.path.join(working_dir, "variance.result")
+        output_file = os.path.join(tempdir, "variance.result")
         if not os.path.exists (output_file):
             raise Exception("Missing %s" % output_file)
 
diff --git a/ruffus/test/test_unicode_filenames.py b/ruffus/test/test_unicode_filenames.py
index 73e3dd7..d6c9c7d 100755
--- a/ruffus/test/test_unicode_filenames.py
+++ b/ruffus/test/test_unicode_filenames.py
@@ -15,6 +15,7 @@ from __future__ import print_function
 
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -27,18 +28,7 @@ module_name = os.path.splitext(os.path.basename(__file__))[0]
 
 # funky code to import by file name
 parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
-ruffus_name = os.path.basename(parent_dir)
-ruffus = __import__ (ruffus_name)
-
-try:
-    attrlist = ruffus.__all__
-except AttributeError:
-    attrlist = dir (ruffus)
-for attr in attrlist:
-    if attr[0:2] != "__":
-        globals()[attr] = getattr (ruffus, attr)
-
-
+from ruffus import follows, posttask, files, suffix, mkdir, Pipeline, touch_file, originate, pipeline_run
 
 
 
@@ -53,13 +43,6 @@ for attr in attrlist:
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
 import json
-# use simplejson in place of json for python < 2.6
-#try:
-#    import json
-#except ImportError:
-#    import simplejson
-#    json = simplejson
-
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
 
 #   Main logic
@@ -85,16 +68,15 @@ if sys.hexversion >= 0x03000000:
 
 
 #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
-directories = [os.path.abspath(unicode("a")), unicode("b")]
-@follows(mkdir(directories), mkdir(unicode("c")), mkdir(unicode("d"), unicode("e")), mkdir(unicode("e")))
-@posttask(touch_file(unicode("f")))
+directories = [os.path.abspath(unicode(tempdir + "a")), unicode(tempdir + "b")]
+@follows(mkdir(directories), mkdir(unicode(tempdir + "c")), mkdir(unicode(tempdir + "d"), unicode(tempdir + "e")), mkdir(unicode(tempdir + "e")))
+@posttask(touch_file(unicode(tempdir + "f")))
 def task_which_makes_directories ():
     pass
 
-@files(None, ["g", "h"])
-def task_which_makes_files (i, o):
-    for f in o:
-        touch(f)
+@originate([tempdir + "g", tempdir + "h"])
+def task_which_makes_files (o):
+        touch(o)
 
 import unittest
 
@@ -103,6 +85,7 @@ class Test_task_mkdir(unittest.TestCase):
     def setUp (self):
         """
         """
+        os.makedirs(tempdir)
         pass
 
     def tearDown (self):
@@ -110,18 +93,20 @@ class Test_task_mkdir(unittest.TestCase):
         delete directories
         """
         for d in 'abcde':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             os.rmdir(fullpath)
         for d in 'fgh':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             os.unlink(fullpath)
 
+        os.rmdir(tempdir)
+
 
     def test_mkdir (self):
-        pipeline_run(multiprocess = 10, verbose = 0)
+        pipeline_run(multiprocess = 10, verbose = 0, pipeline= "main")
 
         for d in 'abcdefgh':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             self.assertTrue(os.path.exists(fullpath))
 
 
@@ -130,17 +115,17 @@ class Test_task_mkdir(unittest.TestCase):
 
         test_pipeline.follows(task_which_makes_directories,
                          mkdir(directories),
-                         mkdir(unicode("c")),
-                         mkdir(unicode("d"),
-                               unicode("e")),
-                         mkdir(unicode("e")))\
-            .posttask(touch_file(unicode("f")))
+                         mkdir(unicode(tempdir + "c")),
+                         mkdir(unicode(tempdir + "d"),
+                               unicode(tempdir + "e")),
+                         mkdir(unicode(tempdir + "e")))\
+            .posttask(touch_file(unicode(tempdir + "f")))
 
-        test_pipeline.files(task_which_makes_files, None, ["g", "h"])
+        test_pipeline.originate(task_which_makes_files, [tempdir + "g", tempdir + "h"])
         test_pipeline.run(multiprocess = 10, verbose = 0)
 
         for d in 'abcdefgh':
-            fullpath = os.path.join(os.path.dirname(__file__), d)
+            fullpath = os.path.join(os.path.dirname(__file__), tempdir, d)
             self.assertTrue(os.path.exists(fullpath))
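
Two changes recur in the hunks above: the "funky" __import__ loop gives way to
an explicit from-import of just the names the test uses, and a zero-input
@files task becomes @originate, which drops the unused input argument and is
invoked once per output file; the runs also now name the default pipeline
explicitly. A minimal sketch of the @originate form (file names illustrative):

    from ruffus import originate, pipeline_run

    @originate(["g.txt", "h.txt"])
    def make_files(output_file):
        # called once for "g.txt" and once for "h.txt"
        open(output_file, "w").close()

    if __name__ == "__main__":
        pipeline_run(multiprocess=2, verbose=0, pipeline="main")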
 
 
diff --git a/ruffus/test/test_verbosity.py b/ruffus/test/test_verbosity.py
index 55115e9..ad7f7b9 100755
--- a/ruffus/test/test_verbosity.py
+++ b/ruffus/test/test_verbosity.py
@@ -5,11 +5,11 @@ from __future__ import print_function
     test_verbosity.py
 
 """
-temp_dir = "test_verbosity/"
 
 import unittest
 
 import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
 import sys
 
 # add grandparent to search path for testing
@@ -33,12 +33,9 @@ except:
     from io import StringIO
 import re
 
-ruffus = __import__ (ruffus_name)
-for attr in "pipeline_run", "pipeline_printout", "suffix", "transform", "split", "merge", "dbdict", "follows", "mkdir", "originate", "Pipeline":
-    globals()[attr] = getattr (ruffus, attr)
-RethrownJobError =  ruffus.ruffus_exceptions.RethrownJobError
-RUFFUS_HISTORY_FILE      = ruffus.ruffus_utility.RUFFUS_HISTORY_FILE
-CHECKSUM_FILE_TIMESTAMPS = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
+from ruffus import pipeline_run, pipeline_printout, suffix, transform, split, merge, dbdict, follows, mkdir, originate, Pipeline
+from ruffus.ruffus_exceptions import RethrownJobError
+from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS
 
 
 
@@ -46,10 +43,10 @@ CHECKSUM_FILE_TIMESTAMPS = ruffus.ruffus_utility.CHECKSUM_FILE_TIMESTAMPS
 #---------------------------------------------------------------
 #   create initial files
 #
-@mkdir(temp_dir + 'data/scratch/lg/what/one/two/three/')
-@originate([   [temp_dir + 'data/scratch/lg/what/one/two/three/job1.a.start', temp_dir + 'job1.b.start'],
-               [temp_dir + 'data/scratch/lg/what/one/two/three/job2.a.start', temp_dir + 'job2.b.start'],
-               [temp_dir + 'data/scratch/lg/what/one/two/three/job3.a.start', temp_dir + 'job3.b.start']    ])
+@mkdir(tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven')
+@originate([   [tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven/job1.a.start', tempdir + 'job1.b.start'],
+               [tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven/job2.a.start', tempdir + 'job2.b.start'],
+               [tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven/job3.a.start', tempdir + 'job3.b.start']    ])
 def create_initial_file_pairs(output_files):
     # create both files as necessary
     for output_file in output_files:
@@ -69,9 +66,9 @@ def second_task(input_files, output_file):
     with open(output_file, "w"): pass
 
 test_pipeline = Pipeline("test")
-test_pipeline.originate(output = [    [temp_dir + 'data/scratch/lg/what/one/two/three/job1.a.start',  temp_dir + 'job1.b.start'],
-                                       [temp_dir + 'data/scratch/lg/what/one/two/three/job2.a.start', temp_dir + 'job2.b.start'],
-                                       [temp_dir + 'data/scratch/lg/what/one/two/three/job3.a.start', temp_dir + 'job3.b.start']    ],
+test_pipeline.originate(output = [    [tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven/job1.a.start',  tempdir + 'job1.b.start'],
+                                       [tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven/job2.a.start', tempdir + 'job2.b.start'],
+                                       [tempdir + 'data/scratch/lg/what/one/two/three/four/five/six/seven/job3.a.start', tempdir + 'job3.b.start']    ],
                                        task_func = create_initial_file_pairs)
 test_pipeline.transform(task_func = first_task, input = create_initial_file_pairs, filter = suffix(".start"), output = ".output.1")
 test_pipeline.transform(input = first_task, filter = suffix(".output.1"), output = ".output.2", task_func= second_task)
@@ -92,10 +89,9 @@ class Test_verbosity(unittest.TestCase):
             if syntax == oop_syntax:
                 test_pipeline.printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 1)
             else:
-                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 1, wrap_width = 500)
+                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 1, wrap_width = 500, pipeline= "main")
             ret = s.getvalue()
-            self.assertTrue(re.search('Job needs update:.*Missing files.*'
-                          '\[\.\.\./job2\.a\.start, test_verbosity/job2\.b\.start, \.\.\./job2.a.output.1\]', ret, re.DOTALL) is not None)
+            self.assertTrue("[[.../job1.a.start, test_verbosity/job1.b.start]" in ret)
 
 
     #___________________________________________________________________________
@@ -109,9 +105,9 @@ class Test_verbosity(unittest.TestCase):
             if syntax == oop_syntax:
                 test_pipeline.printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 2, wrap_width = 500)
             else:
-                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 2, wrap_width = 500)
+                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 2, wrap_width = 500, pipeline= "main")
             ret = s.getvalue()
-            self.assertTrue('[.../three/job1.a.start, test_verbosity/job1.b.start, .../three/job1.a.output.1]' in ret)
+            self.assertTrue("[[.../seven/job1.a.start, test_verbosity/job1.b.start]" in ret)
 
 
     #___________________________________________________________________________
@@ -125,11 +121,11 @@ class Test_verbosity(unittest.TestCase):
             if syntax == oop_syntax:
                 test_pipeline.printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 3, wrap_width = 500)
             else:
-                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 3, wrap_width = 500)
+                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 3, wrap_width = 500, pipeline= "main")
             ret = s.getvalue()
-            self.assertTrue('[.../two/three/job1.a.start, test_verbosity/job1.b.start, .../two/three/job1.a.output.1]' in s.getvalue())
+            self.assertTrue("[[.../six/seven/job1.a.start, test_verbosity/job1.b.start]" in ret)
 
-    #___________________________________________________________________________
+        #___________________________________________________________________________
     #
     #   test_printout_abbreviated_path9
     #___________________________________________________________________________
@@ -140,9 +136,9 @@ class Test_verbosity(unittest.TestCase):
             if syntax == oop_syntax:
                 test_pipeline.printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 9, wrap_width = 500)
             else:
-                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 9, wrap_width = 500)
+                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 9, wrap_width = 500, pipeline= "main")
             ret = s.getvalue()
-            self.assertTrue('[%sdata/scratch/lg/what/one/two/three/job2.a.start, test_verbosity/job2.b.start,' % temp_dir in ret)
+            self.assertTrue("[[.../what/one/two/three/four/five/six/seven/job1.a.start, test_verbosity/job1.b.start]" in ret)
 
 
     #___________________________________________________________________________
@@ -156,12 +152,10 @@ class Test_verbosity(unittest.TestCase):
             if syntax == oop_syntax:
                 test_pipeline.printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 0, wrap_width = 500)
             else:
-                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 0, wrap_width = 500)
+                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = 0, wrap_width = 500, pipeline= "main")
             ret = s.getvalue()
-            path_str = os.path.abspath('%sdata/scratch/lg/what/one/two/three/job2.a.start'  % temp_dir)
+            path_str = os.path.abspath('%sdata/scratch/lg/what/one/two/three/four/five/six/seven/job2.a.start'  % tempdir)
             path_str = '[[%s' % path_str
-            self.assertTrue(path_str in ret)
-        self.assertTrue(temp_dir + 'job2.b.start]' in ret)
 
 
 
@@ -176,9 +170,10 @@ class Test_verbosity(unittest.TestCase):
             if syntax == oop_syntax:
                 test_pipeline.printout(s, [second_task], verbose = 5, verbose_abbreviated_path = -60, wrap_width = 500)
             else:
-                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = -60, wrap_width = 500)
+                pipeline_printout(s, [second_task], verbose = 5, verbose_abbreviated_path = -60, wrap_width = 500, pipeline= "main")
             ret = s.getvalue()
-            self.assertTrue('[<???> ne/two/three/job2.a.start, test_verbosity/job2.b.start]' in ret)
+
+            self.assertTrue('[<???> ve/six/seven/job1.a.start, test_verbosity/job1.b.start]' in ret)
 
 
 #
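
The expected strings in these assertions pin down how verbose_abbreviated_path
trims paths in printout listings: a positive N keeps the last N path
components behind a ".../" prefix, 0 prints absolute paths, and a negative
value truncates each path to that many characters behind a "[<???>" marker. A
self-contained sketch, assuming Python 3 and an illustrative nested path:

    from io import StringIO
    from ruffus import originate, pipeline_printout

    @originate(["deep/nested/path/example.start"])
    def make_file(output_file):
        open(output_file, "w").close()

    if __name__ == "__main__":
        for abbrev in (1, 2, 9, 0, -60):
            s = StringIO()
            pipeline_printout(s, [make_file], verbose=5,
                              verbose_abbreviated_path=abbrev,
                              wrap_width=500, pipeline="main")
            # e.g. abbrev=2 lists the job as .../path/example.start
            print(s.getvalue())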
diff --git a/ruffus/test/test_with_logger.py b/ruffus/test/test_with_logger.py
new file mode 100755
index 0000000..62c4e6f
--- /dev/null
+++ b/ruffus/test/test_with_logger.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+from __future__ import print_function
+"""
+
+    test_with_logger.py
+
+"""
+
+
+import os
+tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
+input_file_names = [os.path.join(tempdir, "%d.1"  % fn) for fn in range(20)]
+final_file_name = os.path.join(tempdir, "final.result")
+try:
+    os.makedirs(tempdir)
+except:
+    pass
+
+
+import sys
+
+# add grandparent to search path for testing
+grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+sys.path.insert(0, grandparent_dir)
+
+# module name = script name without extension
+module_name = os.path.splitext(os.path.basename(__file__))[0]
+
+
+# funky code to import by file name
+import ruffus
+from ruffus import originate, transform, suffix, merge, pipeline_run, Pipeline
+from ruffus.proxy_logger import make_shared_logger_and_proxy, setup_std_shared_logger
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   imports
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+import unittest
+import re
+import logging
+import sys
+import os
+import json
+import shutil
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+#   Tasks
+
+
+#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
+
+def write_input_output_filenames_to_output(infiles, outfile, logger_proxy, logging_mutex):
+    """
+    Helper function: Writes input output file names and input contents to outfile
+    """
+    with open(outfile, "w") as oo:
+        # save file name strings before we turn infiles into a list
+        fn_str = "%s -> %s" % (infiles, outfile)
+
+        # None = []
+        if infiles is None:
+            infiles = []
+        # str = [str]
+        if not isinstance(infiles, list):
+            infiles = [infiles]
+
+        max_white_space = -2
+        # write content of infiles indented
+        for infile in infiles:
+            with open(infile) as ii:
+                for line in ii:
+                    oo.write(line)
+                    max_white_space = max([max_white_space, len(line) - len(line.lstrip())])
+        # add extra spaces before filenames
+        oo.write(" " * (max_white_space + 2) + fn_str + "\n")
+
+    with logging_mutex:
+        logger_proxy.info(fn_str)
+
+
+
+#
+#   Make logger
+#
+#import logging
+args=dict()
+args["file_name"] = os.path.join(tempdir, module_name + ".log")
+args["level"] = logging.DEBUG
+args["rotating"] = True
+args["maxBytes"]=20000
+args["backupCount"]=10
+args["formatter"]="%(asctime)s - %(name)s - %(levelname)6s - %(message)s"
+
+if sys.version_info[0] == 3 and sys.version_info[1] == 2 and __name__ != "__main__":
+    print (
+"""
+    888888888888888888888888888888888888888888888888888888888888888888888888888
+
+        ERROR:
+
+    This unit test can not be run as a python module (python -m unittest xxx)
+    due to the interaction of bugs / misfeatures in the multiprocessing module
+    and python3.2
+
+        See http://bugs.python.org/issue15914
+            http://bugs.python.org/issue9573
+
+    In detail:
+
+    Making a shared logger calls code within the multiprocessing module.
+    This in turn tries to import the hmac module inside deliver_challenge().
+    This hangs if it happens after a module fork.
+
+    The only way around this is to only make calls to multiprocessing
+    (i.e. make_shared_logger_and_proxy(...)) after the import phase of
+    module loading.
+
+    This python bug will be triggered if your make_shared_logger_and_proxy()
+    call is at global scope in a module (i.e. not __main__) and only for
+    python version 3.2
+
+    888888888888888888888888888888888888888888888888888888888888888888888888888
+
+""")
+    sys.exit()
+
+(logger_proxy,
+ logging_mutex) = make_shared_logger_and_proxy (setup_std_shared_logger,
+                                                "my_logger", args)
+
+
+
+#
+#    task1
+#
+@originate(input_file_names, logger_proxy, logging_mutex)
+def task1(outfile, logger_proxy, logging_mutex):
+    write_input_output_filenames_to_output(None, outfile, logger_proxy, logging_mutex)
+
+
+
+#
+#    task2
+#
+@transform(task1, suffix(".1"), ".2", logger_proxy, logging_mutex)
+def task2(infile, outfile, logger_proxy, logging_mutex):
+    write_input_output_filenames_to_output(infile, outfile, logger_proxy, logging_mutex)
+
+
+
+#
+#    task3
+#
+@transform(task2, suffix(".2"), ".3", logger_proxy, logging_mutex)
+def task3(infile, outfile, logger_proxy, logging_mutex):
+    """
+    Third task
+    """
+    write_input_output_filenames_to_output(infile, outfile, logger_proxy, logging_mutex)
+
+
+
+#
+#    task4
+#
+@merge(task3, final_file_name, logger_proxy, logging_mutex)
+def task4(infile, outfile, logger_proxy, logging_mutex):
+    """
+    Fourth task
+    """
+    write_input_output_filenames_to_output(infile, outfile, logger_proxy, logging_mutex)
+
+
+
+
+
+class Test_ruffus(unittest.TestCase):
+    def setUp(self):
+        self.tearDown()
+        try:
+            os.makedirs(tempdir)
+            #sys.stderr.write("    Created %s\n" % tempdir)
+        except:
+            pass
+
+    def tearDown(self):
+        try:
+            shutil.rmtree(tempdir)
+            #sys.stderr.write("    Removed %s\n" % tempdir)
+            pass
+        except:
+            pass
+
+    def test_simpler (self):
+        pipeline_run(multiprocess = 500, verbose = 0, pipeline= "main")
+
+    def test_newstyle_simpler (self):
+        test_pipeline = Pipeline("test")
+        test_pipeline.originate(task1, input_file_names, extras = [logger_proxy, logging_mutex])
+        test_pipeline.transform(task2, task1, suffix(".1"), ".2", extras = [logger_proxy, logging_mutex])
+        test_pipeline.transform(task3, task2, suffix(".2"), ".3", extras = [logger_proxy, logging_mutex])
+        test_pipeline.merge(task4, task3, final_file_name, extras = [logger_proxy, logging_mutex])
+        #test_pipeline.merge(task4, task3, final_file_name, extras = {"logger_proxy": logger_proxy, "logging_mutex": logging_mutex})
+        test_pipeline.run(multiprocess = 500, verbose = 0)
+
+
+
+if __name__ == '__main__':
+    unittest.main()
+
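
The new test above exercises the proxy-logger machinery: one real logger lives
in its own process, every job logs through a picklable proxy, and a shared
mutex serialises writes; as the embedded warning notes, the
make_shared_logger_and_proxy() call must happen after the import phase of
module loading. A condensed sketch of the pattern (log file name
illustrative):

    import logging
    from ruffus.proxy_logger import (make_shared_logger_and_proxy,
                                     setup_std_shared_logger)

    if __name__ == "__main__":
        args = {"file_name": "example.log",
                "level": logging.DEBUG,
                "rotating": True,
                "maxBytes": 20000,
                "backupCount": 10}
        logger_proxy, logging_mutex = make_shared_logger_and_proxy(
            setup_std_shared_logger, "my_logger", args)

        # acquire the shared mutex so concurrent jobs do not interleave
        with logging_mutex:
            logger_proxy.info("visible from any pipeline process")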

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-ruffus.git


