[med-svn] [snakemake] 01/04: Imported Upstream version 3.4.2
Kevin Murray
daube-guest at moszumanska.debian.org
Tue Sep 15 13:58:10 UTC 2015
This is an automated email from the git hooks/post-receive script.
daube-guest pushed a commit to branch master
in repository snakemake.
commit d977f15fd9f03f56e40a8df53b36cffc56768801
Author: Kevin Murray <spam at kdmurray.id.au>
Date: Tue Sep 15 23:48:25 2015 +1000
Imported Upstream version 3.4.2
---
.gitignore | 3 +-
conda/build.sh | 12 --
conda/snakemake/bld.bat | 8 -
conda/snakemake/build.sh | 9 -
conda/snakemake/meta.yaml | 22 ---
docker/Dockerfile | 7 -
setup.py | 24 ++-
snakemake/__init__.py | 213 ++++++++++++---------
snakemake/exceptions.py | 13 +-
snakemake/executors.py | 211 +++++++++++---------
snakemake/io.py | 43 ++++-
snakemake/jobs.py | 2 +-
snakemake/logging.py | 5 +
snakemake/persistence.py | 7 +-
snakemake/report.css | 25 ++-
snakemake/report.py | 17 +-
snakemake/scheduler.py | 4 +-
snakemake/shell.py | 2 +
snakemake/utils.py | 24 +++
snakemake/version.py | 2 +-
snakemake/workflow.py | 15 +-
tests/test_benchmark/Snakefile | 4 +-
.../{test.benchmark.json => test.benchmark.txt} | 0
tests/test_report/Snakefile | 11 +-
tests/test_symlink_temp/Snakefile | 22 +++
.../test.benchmark.json => test_symlink_temp/a} | 0
.../test.benchmark.json => test_symlink_temp/b} | 0
.../expected-results/.gitignore} | 0
tests/test_update_config/Snakefile | 14 ++
tests/test_update_config/cp.rule | 16 ++
tests/test_update_config/echo.rule | 7 +
tests/test_update_config/echo.yaml | 2 +
.../expected-results/config.yaml | 4 +
tests/test_update_config/test.yaml | 4 +
tests/tests.py | 16 +-
35 files changed, 480 insertions(+), 288 deletions(-)
diff --git a/.gitignore b/.gitignore
index 7e505fc..95876b8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,4 +5,5 @@ build/
dist/
*.egg-info/
*.egg
-.snakemake*
\ No newline at end of file
+.eggs/
+.snakemake*
diff --git a/conda/build.sh b/conda/build.sh
deleted file mode 100644
index 2ff397f..0000000
--- a/conda/build.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-
-conda build snakemake
-conda convert ~/miniconda3/conda-bld/linux-64/snakemake-*.tar.bz2 -p all
-echo Due to a bug in conda convert, the subdir in info/index.json is not updated (to e.g. win-64).
-echo This has to be done manually in the tarball.
-exit 0
-# TODO reactivate this once conda convert has been fixed.
-for p in */snakemake-*.tar.bz2
-do
- binstar upload $p
-done
diff --git a/conda/snakemake/bld.bat b/conda/snakemake/bld.bat
deleted file mode 100644
index 87b1481..0000000
--- a/conda/snakemake/bld.bat
+++ /dev/null
@@ -1,8 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
-
-:: Add more build steps here, if they are necessary.
-
-:: See
-:: http://docs.continuum.io/conda/build.html
-:: for a list of environment variables that are set during the build process.
diff --git a/conda/snakemake/build.sh b/conda/snakemake/build.sh
deleted file mode 100644
index 4d7fc03..0000000
--- a/conda/snakemake/build.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
-
-# Add more build steps here, if they are necessary.
-
-# See
-# http://docs.continuum.io/conda/build.html
-# for a list of environment variables that are set during the build process.
diff --git a/conda/snakemake/meta.yaml b/conda/snakemake/meta.yaml
deleted file mode 100644
index 9e75120..0000000
--- a/conda/snakemake/meta.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-package:
- name: snakemake
- version: "3.4"
-source:
- fn: snakemake-3.3.tar.gz
- url: https://pypi.python.org/packages/source/s/snakemake/snakemake-3.3.tar.gz
- md5: 92b9166e43cb1ee26bedfec0013b57de
-build:
- entry_points:
- - snakemake = snakemake:main
- - snakemake-bash-completion = snakemake:bash_completion
-requirements:
- build:
- - python >=3.2
- - setuptools
- run:
- - python >=3.2
- - docutils
-about:
- home: https://bitbucket.org/johanneskoester/snakemake
- license: MIT License
- summary: 'Build systems like GNU Make are frequently used to create complicated workflows, e.g. in bioinformatics. This project aims to reduce the complexity of creating workflows by providing a clean and modern domain specific language (DSL) in python style, together with a fast and comfortable execution environment.'
diff --git a/docker/Dockerfile b/docker/Dockerfile
deleted file mode 100644
index 9792c7a..0000000
--- a/docker/Dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# a docker image based on Ubuntu with snakemake installed
-FROM ubuntu:14.04
-MAINTAINER Johannes Köster <johannes.koester at tu-dortmund.de>
-RUN apt-get -qq update
-RUN apt-get install -qqy python3-setuptools python3-docutils python3-flask
-RUN easy_install3 snakemake
-ENTRYPOINT ["snakemake"]
diff --git a/setup.py b/setup.py
index 900c0fc..dfea1dd 100644
--- a/setup.py
+++ b/setup.py
@@ -3,12 +3,16 @@ __copyright__ = "Copyright 2015, Johannes Köster"
__email__ = "koester at jimmy.harvard.edu"
__license__ = "MIT"
+
+from setuptools.command.test import test as TestCommand
import sys
-if sys.version_info < (3, 2):
- print("At least Python 3.2 is required.\n", file=sys.stderr)
+
+if sys.version_info < (3, 3):
+ print("At least Python 3.3 is required.\n", file=sys.stderr)
exit(1)
+
try:
from setuptools import setup
except ImportError:
@@ -16,9 +20,23 @@ except ImportError:
file=sys.stderr)
exit(1)
+
# load version info
exec(open("snakemake/version.py").read())
+
+class NoseTestCommand(TestCommand):
+ def finalize_options(self):
+ TestCommand.finalize_options(self)
+ self.test_args = []
+ self.test_suite = True
+
+ def run_tests(self):
+ # Run nose ensuring that argv simulates running nosetests directly
+ import nose
+ nose.run_exit(argv=['nosetests'])
+
+
setup(
name='snakemake',
version=__version__,
@@ -40,6 +58,8 @@ setup(
"snakemake-bash-completion = snakemake:bash_completion"]
},
package_data={'': ['*.css', '*.sh', '*.html']},
+ tests_require=['nose>=1.3'],
+ cmdclass={'test': NoseTestCommand},
classifiers=
["Development Status :: 5 - Production/Stable", "Environment :: Console",
"Intended Audience :: Science/Research",
diff --git a/snakemake/__init__.py b/snakemake/__init__.py
index c068e14..b7225f4 100644
--- a/snakemake/__init__.py
+++ b/snakemake/__init__.py
@@ -30,6 +30,7 @@ def snakemake(snakefile,
list_target_rules=False,
cores=1,
nodes=1,
+ local_cores=1,
resources=dict(),
config=dict(),
configfile=None,
@@ -84,6 +85,7 @@ def snakemake(snakefile,
jobscript=None,
timestamp=False,
greediness=None,
+ no_hooks=False,
overwrite_shellcmd=None,
updated_files=None,
log_handler=None,
@@ -99,6 +101,7 @@ def snakemake(snakefile,
list_target_rules (bool): list target rules (default False)
cores (int): the number of provided cores (ignored when using cluster support) (default 1)
nodes (int): the number of provided cluster nodes (ignored without cluster support) (default 1)
+ local_cores (int): the number of provided local cores if in cluster mode (ignored without cluster support) (default 1)
resources (dict): provided resources, a dictionary assigning integers to resource names, e.g. {gpu=1, io=5} (default {})
config (dict): override values for workflow config
workdir (str): path to working directory (default None)
@@ -236,12 +239,14 @@ def snakemake(snakefile,
return False
snakefile = os.path.abspath(snakefile)
- cluster_mode = (cluster is not None) + (cluster_sync is not None) + (drmaa is not None)
+ cluster_mode = (cluster is not None) + (cluster_sync is not
+ None) + (drmaa is not None)
if cluster_mode > 1:
logger.error("Error: cluster and drmaa args are mutually exclusive")
return False
if debug and (cores > 1 or cluster_mode):
- logger.error("Error: debug mode cannot be used with more than one core or cluster execution.")
+ logger.error(
+ "Error: debug mode cannot be used with more than one core or cluster execution.")
return False
overwrite_config = dict()
@@ -296,6 +301,7 @@ def snakemake(snakefile,
subsnakemake = partial(snakemake,
cores=cores,
nodes=nodes,
+ local_cores=local_cores,
resources=resources,
dryrun=dryrun,
touch=touch,
@@ -326,6 +332,7 @@ def snakemake(snakefile,
jobscript=jobscript,
timestamp=timestamp,
greediness=greediness,
+ no_hooks=no_hooks,
overwrite_shellcmd=overwrite_shellcmd,
config=config,
config_args=config_args,
@@ -336,6 +343,7 @@ def snakemake(snakefile,
touch=touch,
cores=cores,
nodes=nodes,
+ local_cores=local_cores,
forcetargets=forcetargets,
forceall=forceall,
forcerun=forcerun,
@@ -376,13 +384,13 @@ def snakemake(snakefile,
subsnakemake=subsnakemake,
updated_files=updated_files,
allowed_rules=allowed_rules,
- greediness=greediness)
+ greediness=greediness,
+ no_hooks=no_hooks)
- # BrokenPipeError is not present in Python 3.2, so lets wait until everbody uses > 3.2
- #except BrokenPipeError:
- # ignore this exception and stop. It occurs if snakemake output is piped into less and less quits before reading the whole output.
- # in such a case, snakemake shall stop scheduling and quit with error 1
- # success = False
+ except BrokenPipeError:
+ # ignore this exception and stop. It occurs if snakemake output is piped into less and less quits before reading the whole output.
+ # in such a case, snakemake shall stop scheduling and quit with error 1
+ success = False
except (Exception, BaseException) as ex:
print_exception(ex, workflow.linemaps)
success = False
@@ -470,7 +478,6 @@ def get_argument_parser():
parser.add_argument(
"--cores", "--jobs", "-j",
action="store",
- default=1,
const=multiprocessing.cpu_count(),
nargs="?",
metavar="N",
@@ -479,6 +486,16 @@ def get_argument_parser():
"If N is omitted, the limit is set to the number of "
"available cores."))
parser.add_argument(
+ "--local-cores",
+ action="store",
+ default=multiprocessing.cpu_count(),
+ metavar="N",
+ type=int,
+ help=
+ ("In cluster mode, use at most N cores of the host machine in parallel "
+ " (default: number of CPU cores of the host). The cores are used to execute "
+ "local rules. This option is ignored when not in cluster mode."))
+ parser.add_argument(
"--resources", "--res",
nargs="*",
metavar="NAME=INT",
@@ -625,9 +642,10 @@ def get_argument_parser():
cluster_group.add_argument(
"--cluster-sync",
metavar="CMD",
- help=("cluster submission command will block, returning the remote exit"
- "status upon remote termination (for example, this should be used"
- "if the cluster command is 'qsub -sync y' (SGE)")),
+ help=
+ ("cluster submission command will block, returning the remote exit"
+ "status upon remote termination (for example, this should be used"
+ "if the cluster command is 'qsub -sync y' (SGE)")),
cluster_group.add_argument(
"--drmaa",
nargs="?",
@@ -655,14 +673,13 @@ def get_argument_parser():
parser.add_argument(
"--immediate-submit", "--is",
action="store_true",
- help=
- "Immediately submit all jobs to the cluster instead of waiting "
- "for present input files. This will fail, unless you make "
- "the cluster aware of job dependencies, e.g. via:\n"
- "$ snakemake --cluster 'sbatch --dependency {dependencies}.\n"
- "Assuming that your submit script (here sbatch) outputs the "
- "generated job id to the first stdout line, {dependencies} will "
- "be filled with space separated job ids this job depends on.")
+ help="Immediately submit all jobs to the cluster instead of waiting "
+ "for present input files. This will fail, unless you make "
+ "the cluster aware of job dependencies, e.g. via:\n"
+ "$ snakemake --cluster 'sbatch --dependency {dependencies}.\n"
+ "Assuming that your submit script (here sbatch) outputs the "
+ "generated job id to the first stdout line, {dependencies} will "
+ "be filled with space separated job ids this job depends on.")
parser.add_argument(
"--jobscript", "--js",
metavar="SCRIPT",
@@ -673,8 +690,7 @@ def get_argument_parser():
"--jobname", "--jn",
default="snakejob.{rulename}.{jobid}.sh",
metavar="NAME",
- help=
- "Provide a custom name for the jobscript that is submitted to the "
+ help="Provide a custom name for the jobscript that is submitted to the "
"cluster (see --cluster). NAME is \"snakejob.{rulename}.{jobid}.sh\" "
"per default. The wildcard {jobid} has to be present in the name.")
parser.add_argument("--reason", "-r",
@@ -783,19 +799,21 @@ def get_argument_parser():
"--greediness",
type=float,
default=None,
- help=
- "Set the greediness of scheduling. This value between 0 and 1 "
+ help="Set the greediness of scheduling. This value between 0 and 1 "
"determines how careful jobs are selected for execution. The default "
"value (1.0) provides the best speed and still acceptable scheduling "
"quality.")
parser.add_argument(
+ "--no-hooks",
+ action="store_true",
+ help="Do not invoke onsuccess or onerror hooks after execution.")
+ parser.add_argument(
"--print-compilation",
action="store_true",
help="Print the python representation of the workflow.")
parser.add_argument(
"--overwrite-shellcmd",
- help=
- "Provide a shell command that shall be executed instead of those "
+ help="Provide a shell command that shall be executed instead of those "
"given in the workflow. "
"This is for debugging purposes only.")
parser.add_argument("--verbose",
@@ -803,8 +821,7 @@ def get_argument_parser():
help="Print debugging output.")
parser.add_argument("--debug",
action="store_true",
- help=
- "Allow to debug rules with e.g. PDB. This flag "
+ help="Allow to debug rules with e.g. PDB. This flag "
"allows to set breakpoints in run blocks.")
parser.add_argument(
"--profile",
@@ -815,8 +832,7 @@ def get_argument_parser():
parser.add_argument(
"--bash-completion",
action="store_true",
- help=
- "Output code to register bash completion for snakemake. Put the "
+ help="Output code to register bash completion for snakemake. Put the "
"following in your .bashrc (including the accents): "
"`snakemake --bash-completion` or issue it in an open terminal "
"session.")
@@ -845,13 +861,23 @@ def main():
parser.print_help()
sys.exit(1)
+ if (args.cluster or args.cluster_sync or args.drmaa):
+ if args.cores is None:
+ if args.dryrun:
+ args.cores = 1
+ else:
+ print(
+ "Error: you need to specify the maximum number of jobs to "
+ "be queued or executed at the same time with --jobs.",
+ file=sys.stderr)
+ sys.exit(1)
+ elif args.cores is None:
+ args.cores = 1
+
if args.profile:
import yappi
yappi.start()
- _snakemake = partial(snakemake, args.snakefile,
- snakemakepath=snakemakepath)
-
if args.gui is not None:
try:
import snakemake.gui as gui
@@ -862,6 +888,9 @@ def main():
sys.exit(1)
_logging.getLogger("werkzeug").setLevel(_logging.ERROR)
+
+ _snakemake = partial(snakemake, os.path.abspath(args.snakefile),
+ snakemakepath=snakemakepath)
gui.register(_snakemake, args)
url = "http://127.0.0.1:{}".format(args.gui)
print("Listening on {}.".format(url), file=sys.stderr)
@@ -882,64 +911,66 @@ def main():
# silently close
pass
else:
- success = _snakemake(listrules=args.list,
- list_target_rules=args.list_target_rules,
- cores=args.cores,
- nodes=args.cores,
- resources=resources,
- config=config,
- configfile=args.configfile,
- config_args=args.config,
- workdir=args.directory,
- targets=args.target,
- dryrun=args.dryrun,
- printshellcmds=args.printshellcmds,
- printreason=args.reason,
- printdag=args.dag,
- printrulegraph=args.rulegraph,
- printd3dag=args.d3dag,
- touch=args.touch,
- forcetargets=args.force,
- forceall=args.forceall,
- forcerun=args.forcerun,
- prioritytargets=args.prioritize,
- stats=args.stats,
- nocolor=args.nocolor,
- quiet=args.quiet,
- keepgoing=args.keep_going,
- cluster=args.cluster,
- cluster_config=args.cluster_config,
- cluster_sync=args.cluster_sync,
- drmaa=args.drmaa,
- jobname=args.jobname,
- immediate_submit=args.immediate_submit,
- standalone=True,
- ignore_ambiguity=args.allow_ambiguity,
- snakemakepath=snakemakepath,
- lock=not args.nolock,
- unlock=args.unlock,
- cleanup_metadata=args.cleanup_metadata,
- force_incomplete=args.rerun_incomplete,
- ignore_incomplete=args.ignore_incomplete,
- list_version_changes=args.list_version_changes,
- list_code_changes=args.list_code_changes,
- list_input_changes=args.list_input_changes,
- list_params_changes=args.list_params_changes,
- summary=args.summary,
- detailed_summary=args.detailed_summary,
- print_compilation=args.print_compilation,
- verbose=args.verbose,
- debug=args.debug,
- jobscript=args.jobscript,
- notemp=args.notemp,
- timestamp=args.timestamp,
- greediness=args.greediness,
- overwrite_shellcmd=args.overwrite_shellcmd,
- latency_wait=args.latency_wait,
- benchmark_repeats=args.benchmark_repeats,
- wait_for_files=args.wait_for_files,
- keep_target_files=args.keep_target_files,
- allowed_rules=args.allowed_rules)
+ success = snakemake(args.snakefile,
+ listrules=args.list,
+ list_target_rules=args.list_target_rules,
+ cores=args.cores,
+ nodes=args.cores,
+ resources=resources,
+ config=config,
+ configfile=args.configfile,
+ config_args=args.config,
+ workdir=args.directory,
+ targets=args.target,
+ dryrun=args.dryrun,
+ printshellcmds=args.printshellcmds,
+ printreason=args.reason,
+ printdag=args.dag,
+ printrulegraph=args.rulegraph,
+ printd3dag=args.d3dag,
+ touch=args.touch,
+ forcetargets=args.force,
+ forceall=args.forceall,
+ forcerun=args.forcerun,
+ prioritytargets=args.prioritize,
+ stats=args.stats,
+ nocolor=args.nocolor,
+ quiet=args.quiet,
+ keepgoing=args.keep_going,
+ cluster=args.cluster,
+ cluster_config=args.cluster_config,
+ cluster_sync=args.cluster_sync,
+ drmaa=args.drmaa,
+ jobname=args.jobname,
+ immediate_submit=args.immediate_submit,
+ standalone=True,
+ ignore_ambiguity=args.allow_ambiguity,
+ snakemakepath=snakemakepath,
+ lock=not args.nolock,
+ unlock=args.unlock,
+ cleanup_metadata=args.cleanup_metadata,
+ force_incomplete=args.rerun_incomplete,
+ ignore_incomplete=args.ignore_incomplete,
+ list_version_changes=args.list_version_changes,
+ list_code_changes=args.list_code_changes,
+ list_input_changes=args.list_input_changes,
+ list_params_changes=args.list_params_changes,
+ summary=args.summary,
+ detailed_summary=args.detailed_summary,
+ print_compilation=args.print_compilation,
+ verbose=args.verbose,
+ debug=args.debug,
+ jobscript=args.jobscript,
+ notemp=args.notemp,
+ timestamp=args.timestamp,
+ greediness=args.greediness,
+ no_hooks=args.no_hooks,
+ overwrite_shellcmd=args.overwrite_shellcmd,
+ latency_wait=args.latency_wait,
+ benchmark_repeats=args.benchmark_repeats,
+ wait_for_files=args.wait_for_files,
+ keep_target_files=args.keep_target_files,
+ allowed_rules=args.allowed_rules)
if args.profile:
with open(args.profile, "w") as out:
diff --git a/snakemake/exceptions.py b/snakemake/exceptions.py
index d606c99..0c547b3 100644
--- a/snakemake/exceptions.py
+++ b/snakemake/exceptions.py
@@ -55,7 +55,7 @@ def format_traceback(tb, linemaps):
yield ' File "{}", line {}, in {}'.format(file, lineno, function)
-def print_exception(ex, linemaps, print_traceback=True):
+def print_exception(ex, linemaps):
"""
Print an error message for a given exception.
@@ -64,12 +64,13 @@ def print_exception(ex, linemaps, print_traceback=True):
linemaps -- a dict of a dict that maps for each snakefile
the compiled lines to source code lines in the snakefile.
"""
- #traceback.print_exception(type(ex), ex, ex.__traceback__)
+ tb = "Full " + "".join(traceback.format_exception(type(ex), ex, ex.__traceback__))
+ logger.debug(tb)
if isinstance(ex, SyntaxError) or isinstance(ex, IndentationError):
logger.error(format_error(ex, ex.lineno,
linemaps=linemaps,
snakefile=ex.filename,
- show_traceback=print_traceback))
+ show_traceback=True))
return
origin = get_exception_origin(ex, linemaps)
if origin is not None:
@@ -77,7 +78,7 @@ def print_exception(ex, linemaps, print_traceback=True):
logger.error(format_error(ex, lineno,
linemaps=linemaps,
snakefile=file,
- show_traceback=print_traceback))
+ show_traceback=True))
return
elif isinstance(ex, TokenError):
logger.error(format_error(ex, None, show_traceback=False))
@@ -92,12 +93,12 @@ def print_exception(ex, linemaps, print_traceback=True):
logger.error(format_error(e, e.lineno,
linemaps=linemaps,
snakefile=e.filename,
- show_traceback=print_traceback))
+ show_traceback=True))
elif isinstance(ex, WorkflowError):
logger.error(format_error(ex, ex.lineno,
linemaps=linemaps,
snakefile=ex.snakefile,
- show_traceback=print_traceback))
+ show_traceback=True))
elif isinstance(ex, KeyboardInterrupt):
logger.info("Cancelling snakemake on user request.")
else:
diff --git a/snakemake/executors.py b/snakemake/executors.py
index d04fd31..e3ce9c5 100644
--- a/snakemake/executors.py
+++ b/snakemake/executors.py
@@ -1,4 +1,5 @@
-__authors__ = ["Johannes Köster", "David Alexander"]
+__author__ = "Johannes Köster"
+__contributors__ = ["David Alexander"]
__copyright__ = "Copyright 2015, Johannes Köster"
__email__ = "koester at jimmy.harvard.edu"
__license__ = "MIT"
@@ -19,6 +20,7 @@ import subprocess
import signal
from functools import partial
from itertools import chain
+from collections import namedtuple
from snakemake.jobs import Job
from snakemake.shell import shell
@@ -201,6 +203,7 @@ class CPUExecutor(RealExecutor):
self.pool = (concurrent.futures.ThreadPoolExecutor(max_workers=workers)
if threads else ProcessPoolExecutor(max_workers=workers))
+ self.threadpool = concurrent.futures.ThreadPoolExecutor(max_workers=workers)
def run(self, job,
callback=None,
@@ -213,11 +216,13 @@ class CPUExecutor(RealExecutor):
if job.benchmark is not None:
benchmark = str(job.benchmark)
- future = self.pool.submit(
+ pool = self.threadpool if job.shellcmd is not None else self.pool
+ future = pool.submit(
run_wrapper, job.rule.run_func, job.input.plainstrings(),
job.output.plainstrings(), job.params, job.wildcards, job.threads,
job.resources, job.log.plainstrings(), job.rule.version, benchmark,
self.benchmark_repeats, self.workflow.linemaps, self.workflow.debug)
+
future.add_done_callback(partial(self._callback, job, callback,
error_callback))
@@ -257,7 +262,7 @@ class ClusterExecutor(RealExecutor):
printshellcmds=False,
latency_wait=3,
benchmark_repeats=1,
- cluster_config=None, ):
+ cluster_config=None):
super().__init__(workflow, dag,
printreason=printreason,
quiet=quiet,
@@ -289,7 +294,7 @@ class ClusterExecutor(RealExecutor):
'--wait-for-files {job.input} --latency-wait {latency_wait} '
'--benchmark-repeats {benchmark_repeats} '
'{overwrite_workdir} {overwrite_config} --nocolor '
- '--notemp --quiet --nolock {target}')
+ '--notemp --quiet --no-hooks --nolock {target}')
if printshellcmds:
self.exec_job += " --printshellcmds "
@@ -298,14 +303,21 @@ class ClusterExecutor(RealExecutor):
# disable restiction to target rule in case of dynamic rules!
self.exec_job += " --allowed-rules {job.rule.name} "
self.jobname = jobname
- self.threads = []
self._tmpdir = None
self.cores = cores if cores else ""
self.cluster_config = cluster_config if cluster_config else dict()
+ self.active_jobs = list()
+ self.lock = threading.Lock()
+ self.wait = True
+ self.wait_thread = threading.Thread(target=self._wait_for_jobs)
+ self.wait_thread.daemon = True
+ self.wait_thread.start()
+
def shutdown(self):
- for thread in self.threads:
- thread.join()
+ with self.lock:
+ self.wait = False
+ self.wait_thread.join()
shutil.rmtree(self.tmpdir)
def cancel(self):
@@ -374,6 +386,9 @@ class ClusterExecutor(RealExecutor):
return Wildcards(fromdict=cluster)
+GenericClusterJob = namedtuple("GenericClusterJob", "job callback error_callback jobscript jobfinished jobfailed")
+
+
class GenericClusterExecutor(ClusterExecutor):
def __init__(self, workflow, dag, cores,
submitcmd="qsub",
@@ -391,7 +406,7 @@ class GenericClusterExecutor(ClusterExecutor):
printshellcmds=printshellcmds,
latency_wait=latency_wait,
benchmark_repeats=benchmark_repeats,
- cluster_config=cluster_config, )
+ cluster_config=cluster_config)
self.submitcmd = submitcmd
self.external_jobid = dict()
self.exec_job += ' && touch "{jobfinished}" || touch "{jobfailed}"'
@@ -440,36 +455,39 @@ class GenericClusterExecutor(ClusterExecutor):
logger.debug("Submitted job {} with external jobid {}.".format(
jobid, ext_jobid))
- thread = threading.Thread(target=self._wait_for_job,
- args=(job, callback, error_callback,
- jobscript, jobfinished, jobfailed))
- thread.daemon = True
- thread.start()
- self.threads.append(thread)
-
submit_callback(job)
+ with self.lock:
+ self.active_jobs.append(GenericClusterJob(job, callback, error_callback, jobscript, jobfinished, jobfailed))
- def _wait_for_job(self, job, callback, error_callback, jobscript,
- jobfinished, jobfailed):
+ def _wait_for_jobs(self):
while True:
- if os.path.exists(jobfinished):
- os.remove(jobfinished)
- os.remove(jobscript)
- self.finish_job(job)
- callback(job)
- return
- if os.path.exists(jobfailed):
- os.remove(jobfailed)
- os.remove(jobscript)
- self.print_job_error(job)
- print_exception(ClusterJobException(job, self.dag.jobid(job),
- self.get_jobscript(job)),
- self.workflow.linemaps)
- error_callback(job)
- return
+ with self.lock:
+ if not self.wait:
+ return
+ active_jobs = self.active_jobs
+ self.active_jobs = list()
+ for active_job in active_jobs:
+ if os.path.exists(active_job.jobfinished):
+ os.remove(active_job.jobfinished)
+ os.remove(active_job.jobscript)
+ self.finish_job(active_job.job)
+ active_job.callback(active_job.job)
+ elif os.path.exists(active_job.jobfailed):
+ os.remove(active_job.jobfailed)
+ os.remove(active_job.jobscript)
+ self.print_job_error(active_job.job)
+ print_exception(ClusterJobException(active_job.job, self.dag.jobid(active_job.job),
+ active_job.jobscript),
+ self.workflow.linemaps)
+ active_job.error_callback(active_job.job)
+ else:
+ self.active_jobs.append(active_job)
time.sleep(1)
+SynchronousClusterJob = namedtuple("SynchronousClusterJob", "job callback error_callback jobscript process")
+
+
class SynchronousClusterExecutor(ClusterExecutor):
"""
invocations like "qsub -sync y" (SGE) or "bsub -K" (LSF) are
@@ -522,31 +540,42 @@ class SynchronousClusterExecutor(ClusterExecutor):
except AttributeError as e:
raise WorkflowError(str(e), rule=job.rule)
- thread = threading.Thread(
- target=self._submit_job,
- args=(job, callback, error_callback, submitcmd, jobscript))
- thread.daemon = True
- thread.start()
- self.threads.append(thread)
+ process = subprocess.Popen('{submitcmd} "{jobscript}"'.format(submitcmd=submitcmd,
+ jobscript=jobscript), shell=True)
submit_callback(job)
- def _submit_job(self, job, callback, error_callback, submitcmd, jobscript):
- try:
- ext_jobid = subprocess.check_output(
- '{submitcmd} "{jobscript}"'.format(submitcmd=submitcmd,
- jobscript=jobscript),
- shell=True).decode().split("\n")
- os.remove(jobscript)
- self.finish_job(job)
- callback(job)
+ with self.lock:
+ self.active_jobs.append(SynchronousClusterJob(job, callback, error_callback, jobscript, process))
+
+ def _wait_for_jobs(self):
+ while True:
+ with self.lock:
+ if not self.wait:
+ return
+ active_jobs = self.active_jobs
+ self.active_jobs = list()
+ for active_job in active_jobs:
+ exitcode = active_job.process.poll()
+ if exitcode is None:
+ # job not yet finished
+ self.active_jobs.append(active_job)
+ elif exitcode == 0:
+ # job finished successfully
+ os.remove(active_job.jobscript)
+ self.finish_job(active_job.job)
+ active_job.callback(active_job.job)
+ else:
+ # job failed
+ os.remove(active_job.jobscript)
+ self.print_job_error(active_job.job)
+ print_exception(ClusterJobException(active_job.job, self.dag.jobid(active_job.job),
+ jobscript),
+ self.workflow.linemaps)
+ active_job.error_callback(active_job.job)
+ time.sleep(1)
- except subprocess.CalledProcessError as ex:
- os.remove(jobscript)
- self.print_job_error(job)
- print_exception(ClusterJobException(job, self.dag.jobid(job),
- self.get_jobscript(job)),
- self.workflow.linemaps)
- error_callback(job)
+
+DRMAAClusterJob = namedtuple("DRMAAClusterJob", "job jobid callback error_callback jobscript")
class DRMAAExecutor(ClusterExecutor):
@@ -618,40 +647,49 @@ class DRMAAExecutor(ClusterExecutor):
self.submitted.append(jobid)
self.session.deleteJobTemplate(jt)
- thread = threading.Thread(
- target=self._wait_for_job,
- args=(job, jobid, callback, error_callback, jobscript))
- thread.daemon = True
- thread.start()
- self.threads.append(thread)
-
submit_callback(job)
+ with self.lock:
+ self.active_jobs.append(DRMAAClusterJob(job, jobid, callback, error_callback, jobscript))
+
def shutdown(self):
super().shutdown()
self.session.exit()
- def _wait_for_job(self, job, jobid, callback, error_callback, jobscript):
+ def _wait_for_jobs(self):
import drmaa
- try:
- retval = self.session.wait(jobid,
- drmaa.Session.TIMEOUT_WAIT_FOREVER)
- except drmaa.errors.InternalException as e:
- print_exception(WorkflowError("DRMAA Error: {}".format(e)),
+ while True:
+ with self.lock:
+ if not self.wait:
+ return
+ active_jobs = self.active_jobs
+ self.active_jobs = list()
+ for active_job in active_jobs:
+ try:
+ retval = self.session.wait(active_job.jobid,
+ drmaa.Session.TIMEOUT_NO_WAIT)
+ except drmaa.errors.InternalException as e:
+ print_exception(WorkflowError("DRMAA Error: {}".format(e)),
+ self.workflow.linemaps)
+ os.remove(active_job.jobscript)
+ active_job.error_callback(active_job.job)
+ continue
+ except drmaa.errors.ExitTimeoutException as e:
+ # job still active
+ self.active_jobs.append(active_job)
+ continue
+ # job exited
+ os.remove(active_job.jobscript)
+ if retval.hasExited and retval.exitStatus == 0:
+ self.finish_job(active_job.job)
+ active_job.callback(active_job.job)
+ else:
+ self.print_job_error(active_job.job)
+ print_exception(
+ ClusterJobException(active_job.job, self.dag.jobid(active_job.job), active_job.jobscript),
self.workflow.linemaps)
- os.remove(jobscript)
- error_callback(job)
- return
- os.remove(jobscript)
- if retval.hasExited and retval.exitStatus == 0:
- self.finish_job(job)
- callback(job)
- else:
- self.print_job_error(job)
- print_exception(
- ClusterJobException(job, self.dag.jobid(job), jobscript),
- self.workflow.linemaps)
- error_callback(job)
+ active_job.error_callback(active_job.job)
+ time.sleep(1)
def run_wrapper(run, input, output, params, wildcards, threads, resources, log,
@@ -696,15 +734,8 @@ def run_wrapper(run, input, output, params, wildcards, threads, resources, log,
if benchmark is not None:
try:
with open(benchmark, "w") as f:
- json.dump({
- name: {
- "s": times,
- "h:m:s": [str(datetime.timedelta(seconds=t))
- for t in times]
- }
- for name, times in zip("wall_clock_times".split(),
- [wallclock])
- }, f,
- indent=4)
+ print("s", "h:m:s", sep="\t", file=f)
+ for t in wallclock:
+ print(t, str(datetime.timedelta(seconds=t)), sep="\t", file=f)
except (Exception, BaseException) as ex:
raise WorkflowError(ex)
diff --git a/snakemake/io.py b/snakemake/io.py
index f6bd974..0ba9cbd 100644
--- a/snakemake/io.py
+++ b/snakemake/io.py
@@ -14,6 +14,18 @@ from snakemake.exceptions import MissingOutputException, WorkflowError, Wildcard
from snakemake.logging import logger
+def lstat(f):
+ return os.stat(f, follow_symlinks=os.stat not in os.supports_follow_symlinks)
+
+
+def lutime(f, times):
+ return os.utime(f, times, follow_symlinks=os.utime not in os.supports_follow_symlinks)
+
+
+def lchmod(f, mode):
+ return os.chmod(f, mode, follow_symlinks=os.chmod not in os.supports_follow_symlinks)
+
+
def IOFile(file, rule=None):
f = _IOFile(file)
f.rule = rule
@@ -53,7 +65,19 @@ class _IOFile(str):
@property
def mtime(self):
- return os.stat(self.file).st_mtime
+ # do not follow symlinks for modification time
+ return lstat(self.file).st_mtime
+
+ @property
+ def size(self):
+ # follow symlinks but throw error if invalid
+ self.check_broken_symlink()
+ return os.path.getsize(self.file)
+
+ def check_broken_symlink(self):
+ """ Raise WorkflowError if file is a broken symlink. """
+ if not self.exists and lstat(self.file):
+ raise WorkflowError("File {} seems to be a broken symlink.".format(self.file))
def is_newer(self, time):
return self.mtime > time
@@ -70,23 +94,23 @@ class _IOFile(str):
raise e
def protect(self):
- mode = (os.stat(self.file).st_mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~
+ mode = (lstat(self.file).st_mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~
stat.S_IWOTH)
if os.path.isdir(self.file):
for root, dirs, files in os.walk(self.file):
for d in dirs:
- os.chmod(os.path.join(self.file, d), mode)
+ lchmod(os.path.join(self.file, d), mode)
for f in files:
- os.chmod(os.path.join(self.file, f), mode)
+ lchmod(os.path.join(self.file, f), mode)
else:
- os.chmod(self.file, mode)
+ lchmod(self.file, mode)
def remove(self):
remove(self.file)
def touch(self):
try:
- os.utime(self.file, None)
+ lutime(self.file, None)
except OSError as e:
if e.errno == 2:
raise MissingOutputException(
@@ -432,11 +456,10 @@ class Namedlist(list):
name -- a name
index -- the item index
"""
+ self._names[name] = (index, end)
if end is None:
- self._names[name] = (index, index + 1)
setattr(self, name, self[index])
else:
- self._names[name] = (index, end)
setattr(self, name, Namedlist(toclone=self[index:end]))
def get_names(self):
@@ -463,8 +486,10 @@ class Namedlist(list):
def allitems(self):
next = 0
for name, index in sorted(self._names.items(),
- key=lambda item: item[1]):
+ key=lambda item: item[1][0]):
start, end = index
+ if end is None:
+ end = start + 1
if start > next:
for item in self[next:start]:
yield None, item
diff --git a/snakemake/jobs.py b/snakemake/jobs.py
index 287c4de..fdba8b5 100644
--- a/snakemake/jobs.py
+++ b/snakemake/jobs.py
@@ -90,7 +90,7 @@ class Job:
Input files need to be present.
"""
if self._inputsize is None:
- self._inputsize = sum(map(os.path.getsize, self.input))
+ self._inputsize = sum(f.size for f in self.input)
return self._inputsize
@property
diff --git a/snakemake/logging.py b/snakemake/logging.py
index e069bfd..c404913 100644
--- a/snakemake/logging.py
+++ b/snakemake/logging.py
@@ -113,6 +113,9 @@ class Logger:
def info(self, msg):
self.handler(dict(level="info", msg=msg))
+ def warning(self, msg):
+ self.handler(dict(level="warning", msg=msg))
+
def debug(self, msg):
self.handler(dict(level="debug", msg=msg))
@@ -187,6 +190,8 @@ class Logger:
level = msg["level"]
if level == "info":
self.logger.warning(msg["msg"])
+ if level == "warning":
+ self.logger.warning(msg["msg"])
elif level == "error":
self.logger.error(msg["msg"])
elif level == "debug":
diff --git a/snakemake/persistence.py b/snakemake/persistence.py
index 7ccb9d8..eba80c0 100644
--- a/snakemake/persistence.py
+++ b/snakemake/persistence.py
@@ -115,7 +115,8 @@ class Persistence:
self._record(self._incomplete_path, "", f)
def finished(self, job):
- version = str(job.rule.version) if job.rule.version is not None else None
+ version = str(
+ job.rule.version) if job.rule.version is not None else None
code = self._code(job.rule)
input = self._input(job)
params = self._params(job)
@@ -273,7 +274,9 @@ class Persistence:
return
def _record_path(self, subject, id):
- max_len = os.pathconf(subject, "PC_NAME_MAX")
+ max_len = os.pathconf(
+ subject,
+ "PC_NAME_MAX") if os.name == "posix" else 255 # maximum NTFS and FAT32 filename length
b64id = self._b64id(id)
# split into chunks of proper length
b64id = [b64id[i:i + max_len - 1]
diff --git a/snakemake/report.css b/snakemake/report.css
index 105c791..cab9554 100644
--- a/snakemake/report.css
+++ b/snakemake/report.css
@@ -35,19 +35,34 @@ h6 {
}
div#attachments {
+ display: inline-block;
color: gray;
- padding: 0px;
- border: 1px solid white;
+ border-width: 1px;
+ border-style: solid;
+ border-color: white;
border-radius: 4px 4px 4px 4px;
- padding-top: 20px;
+ padding: 0px;
+}
+
+div#attachments dt {
+ margin-top: 2px;
+ margin-bottom: 2px;
+}
+
+div#attachments dd p {
+ margin-top: 2px;
+ margin-bottom: 2px;
+}
+
+div#attachments :target dt {
+ font-weight: bold;
}
div#attachments :target a {
color: rgb(70, 136, 71);
- border: 1px solid rgb(221, 221, 221);
- border-radius: 4px 4px 4px 4px;
}
+
h1.title {
text-align: center;
font-size: 180%;
diff --git a/snakemake/report.py b/snakemake/report.py
index 01e7561..3fd9b2b 100644
--- a/snakemake/report.py
+++ b/snakemake/report.py
@@ -102,16 +102,23 @@ def report(text, path,
:name: attachments
""")]
- for name, file in sorted(files.items()):
- data = data_uri(file)
+ for name, _files in sorted(files.items()):
+ if not isinstance(_files, list):
+ _files = [_files]
+ links = []
+ for file in _files:
+ data = data_uri(file)
+ links.append(':raw-html:`<a href="{data}" download="{filename}" draggable="true">{filename}</a>`'.format(
+ data=data, filename=os.path.basename(file)))
+ links = "\n\n ".join(links)
attachments.append('''
.. container::
:name: {name}
- [{name}] :raw-html:`<a href="{data}" download="{filename}" draggable="true">{filename}</a>`
+ {name}:
+ {links}
'''.format(name=name,
- filename=os.path.basename(file),
- data=data))
+ links=links))
text = definitions + text + "\n\n" + "\n\n".join(attachments) + metadata
diff --git a/snakemake/scheduler.py b/snakemake/scheduler.py
index abfdd52..a3258b8 100644
--- a/snakemake/scheduler.py
+++ b/snakemake/scheduler.py
@@ -27,6 +27,7 @@ _ERROR_MSG_FINAL = ("Exiting because a job execution failed. "
class JobScheduler:
def __init__(self, workflow, dag, cores,
+ local_cores=1,
dryrun=False,
touch=False,
cluster=None,
@@ -92,9 +93,8 @@ class JobScheduler:
printshellcmds=printshellcmds,
latency_wait=latency_wait)
elif cluster or cluster_sync or (drmaa is not None):
- # TODO properly set cores
workers = min(sum(1 for _ in dag.local_needrun_jobs),
- multiprocessing.cpu_count())
+ local_cores)
self._local_executor = CPUExecutor(
workflow, dag, workers,
printreason=printreason,
diff --git a/snakemake/shell.py b/snakemake/shell.py
index 95bc729..83e73c5 100644
--- a/snakemake/shell.py
+++ b/snakemake/shell.py
@@ -26,6 +26,8 @@ class shell:
@classmethod
def executable(cls, cmd):
+ if os.path.split(cmd)[-1] == "bash":
+ cls._process_prefix = "set -o pipefail; "
cls._process_args["executable"] = cmd
@classmethod
diff --git a/snakemake/utils.py b/snakemake/utils.py
index 0908a8d..6730d95 100644
--- a/snakemake/utils.py
+++ b/snakemake/utils.py
@@ -1,4 +1,5 @@
__author__ = "Johannes Köster"
+__contributors__ = ["Per Unneberg"]
__copyright__ = "Copyright 2015, Johannes Köster"
__email__ = "koester at jimmy.harvard.edu"
__license__ = "MIT"
@@ -9,6 +10,7 @@ import re
import inspect
import textwrap
from itertools import chain
+from collections import Mapping
from snakemake.io import regex, Namedlist
from snakemake.logging import logger
@@ -218,3 +220,25 @@ def min_version(version):
snakemake.__version__) < pkg_resources.parse_version(version):
raise WorkflowError(
"Expecting Snakemake version {} or higher.".format(version))
+
+
+def update_config(config, overwrite_config):
+ """Recursively update dictionary config with overwrite_config.
+
+ See
+ http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
+ for details.
+
+ Args:
+ config (dict): dictionary to update
+ overwrite_config (dict): dictionary whose items will overwrite those in config
+
+ """
+ def _update(d, u):
+ for (key, value) in u.items():
+ if (isinstance(value, Mapping)):
+ d[key]= _update(d.get(key, {}), value)
+ else:
+ d[key] = value
+ return d
+ _update(config, overwrite_config)
diff --git a/snakemake/version.py b/snakemake/version.py
index 31b8f46..46aa803 100644
--- a/snakemake/version.py
+++ b/snakemake/version.py
@@ -1 +1 @@
-__version__ = "3.4"
+__version__ = "3.4.2"
diff --git a/snakemake/workflow.py b/snakemake/workflow.py
index 4e98568..b035bc3 100644
--- a/snakemake/workflow.py
+++ b/snakemake/workflow.py
@@ -25,6 +25,7 @@ from snakemake.parser import parse
import snakemake.io
from snakemake.io import protected, temp, temporary, expand, dynamic, glob_wildcards, flag, not_iterable, touch
from snakemake.persistence import Persistence
+from snakemake.utils import update_config
class Workflow:
@@ -159,6 +160,7 @@ class Workflow:
touch=False,
cores=1,
nodes=1,
+ local_cores=1,
forcetargets=False,
forceall=False,
forcerun=None,
@@ -199,7 +201,8 @@ class Workflow:
updated_files=None,
keep_target_files=False,
allowed_rules=None,
- greediness=1.0):
+ greediness=1.0,
+ no_hooks=False):
self.global_resources = dict() if resources is None else resources
self.global_resources["_cores"] = cores
@@ -384,6 +387,7 @@ class Workflow:
return True
scheduler = JobScheduler(self, dag, cores,
+ local_cores=local_cores,
dryrun=dryrun,
touch=touch,
cluster=cluster,
@@ -432,11 +436,11 @@ class Workflow:
logger.run_info("\n".join(dag.stats()))
elif stats:
scheduler.stats.to_json(stats)
- if not dryrun:
+ if not dryrun and not no_hooks:
self._onsuccess(logger.get_logfile())
return True
else:
- if not dryrun:
+ if not dryrun and not no_hooks:
self._onerror(logger.get_logfile())
return False
@@ -500,9 +504,8 @@ class Workflow:
""" Update the global config with the given dictionary. """
global config
c = snakemake.io.load_configfile(jsonpath)
- for key, val in c.items():
- if key not in self.overwrite_config:
- config[key] = val
+ update_config(config, c)
+ update_config(config, self.overwrite_config)
def ruleorder(self, *rulenames):
self._ruleorder.add(*rulenames)
diff --git a/tests/test_benchmark/Snakefile b/tests/test_benchmark/Snakefile
index 90a4b63..4393786 100644
--- a/tests/test_benchmark/Snakefile
+++ b/tests/test_benchmark/Snakefile
@@ -2,10 +2,10 @@
rule all:
input:
- "test.benchmark.json"
+ "test.benchmark.txt"
rule:
benchmark:
- "{v}.benchmark.json"
+ "{v}.benchmark.txt"
shell:
"sleep 1"
diff --git a/tests/test_benchmark/expected-results/test.benchmark.json b/tests/test_benchmark/expected-results/test.benchmark.txt
similarity index 100%
copy from tests/test_benchmark/expected-results/test.benchmark.json
copy to tests/test_benchmark/expected-results/test.benchmark.txt
diff --git a/tests/test_report/Snakefile b/tests/test_report/Snakefile
index 8587d45..67a51f8 100644
--- a/tests/test_report/Snakefile
+++ b/tests/test_report/Snakefile
@@ -2,7 +2,8 @@
from snakemake.utils import report
rule report:
- input: "Snakefile", fig1="fig.png", fig2="fig2.png"
+ input:
+ F1=["fig.png", "fig2.png"]
output: "report.html"
run:
report("""
@@ -13,19 +14,19 @@ rule report:
Here is an embedded image:
- .. embeddedimage:: {input.fig1}
+ .. embeddedimage:: {input.F1[0]}
:width: 200px
Here is an example embedded figure:
- .. embeddedfigure:: {input.fig2}
+ .. embeddedfigure:: {input.F1[1]}
Figure title goes here
Descriptive figure legend goes here
- Embedded data F1_ and F2_.
+ Embedded data F1_.
- """, output[0], F1=input[0], F2=input[0])
+ """, output[0], **input)
diff --git a/tests/test_symlink_temp/Snakefile b/tests/test_symlink_temp/Snakefile
new file mode 100644
index 0000000..697dcc4
--- /dev/null
+++ b/tests/test_symlink_temp/Snakefile
@@ -0,0 +1,22 @@
+rule all:
+ input: 'a.out', 'b.out'
+
+rule one: #the first rule in the work-flow
+ input: '{sample}'
+ output: temp('1.{sample}')
+ shell: 'touch {output}'
+
+rule two: #this rule simply creates symbol link, following rule one
+ input: '1.{sample}'
+ output: temp('2.{sample}')
+ shell: 'ln -s {input} {output}'
+
+rule three: #this rule simply creates symbol link, following rule two
+ input: '2.{sample}'
+ output: temp('3.{sample}')
+ shell: 'ln -s {input} {output}'
+
+rule four: #this rule creates the final output, following rule three
+ input: '3.{sample}'
+ output: '{sample}.out'
+ shell: 'touch {output}'
diff --git a/tests/test_benchmark/expected-results/test.benchmark.json b/tests/test_symlink_temp/a
similarity index 100%
copy from tests/test_benchmark/expected-results/test.benchmark.json
copy to tests/test_symlink_temp/a
diff --git a/tests/test_benchmark/expected-results/test.benchmark.json b/tests/test_symlink_temp/b
similarity index 100%
copy from tests/test_benchmark/expected-results/test.benchmark.json
copy to tests/test_symlink_temp/b
diff --git a/tests/test_benchmark/expected-results/test.benchmark.json b/tests/test_symlink_temp/expected-results/.gitignore
similarity index 100%
rename from tests/test_benchmark/expected-results/test.benchmark.json
rename to tests/test_symlink_temp/expected-results/.gitignore
diff --git a/tests/test_update_config/Snakefile b/tests/test_update_config/Snakefile
new file mode 100644
index 0000000..9dc5b77
--- /dev/null
+++ b/tests/test_update_config/Snakefile
@@ -0,0 +1,14 @@
+import yaml
+
+include: "echo.rule"
+include: "cp.rule"
+
+configfile: "test.yaml"
+
+rule all:
+ input: "test.out.copy"
+ output: yaml = "config.yaml"
+ run:
+ with open(str(output.yaml), "w") as fh:
+ fh.write(yaml.dump(config, default_flow_style=False))
+
diff --git a/tests/test_update_config/cp.rule b/tests/test_update_config/cp.rule
new file mode 100644
index 0000000..9b52be5
--- /dev/null
+++ b/tests/test_update_config/cp.rule
@@ -0,0 +1,16 @@
+# -*- snakemake -*-
+from snakemake.utils import update_config
+
+c = {
+ 'cp': {
+ 'options': '-f',
+ },
+ }
+
+update_config(config, c)
+
+rule cp:
+ params: options = config['cp']['options']
+ input: '{prefix}.out'
+ output: temporary('{prefix}.out.copy')
+ shell: 'cp {params.options} {input} {output}'
diff --git a/tests/test_update_config/echo.rule b/tests/test_update_config/echo.rule
new file mode 100644
index 0000000..31aabd5
--- /dev/null
+++ b/tests/test_update_config/echo.rule
@@ -0,0 +1,7 @@
+# -*- snakemake -*-
+configfile: 'echo.yaml'
+
+rule echo:
+ params: options = config['echo']['options']
+ output: temporary('{prefix}.out')
+ shell: 'echo "{params.options}" > {output}'
diff --git a/tests/test_update_config/echo.yaml b/tests/test_update_config/echo.yaml
new file mode 100644
index 0000000..d0e1211
--- /dev/null
+++ b/tests/test_update_config/echo.yaml
@@ -0,0 +1,2 @@
+echo:
+ options: ''
diff --git a/tests/test_update_config/expected-results/config.yaml b/tests/test_update_config/expected-results/config.yaml
new file mode 100644
index 0000000..6bd1e17
--- /dev/null
+++ b/tests/test_update_config/expected-results/config.yaml
@@ -0,0 +1,4 @@
+cp:
+ options: -u
+echo:
+ options: echo
diff --git a/tests/test_update_config/test.yaml b/tests/test_update_config/test.yaml
new file mode 100644
index 0000000..6bd1e17
--- /dev/null
+++ b/tests/test_update_config/test.yaml
@@ -0,0 +1,4 @@
+cp:
+ options: -u
+echo:
+ options: echo
diff --git a/tests/tests.py b/tests/tests.py
index 5cc9be6..37dd180 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -1,4 +1,4 @@
-__author__ = "Tobias Marschall, Marcel Martin, Johannes Köster"
+__authors__ = ["Tobias Marschall", "Marcel Martin", "Johannes Köster"]
__copyright__ = "Copyright 2015, Johannes Köster"
__email__ = "koester at jimmy.harvard.edu"
__license__ = "MIT"
@@ -18,7 +18,7 @@ from snakemake import snakemake
def dpath(path):
"""get path to a data file (relative to the directory this
test lives in)"""
- return join(os.path.dirname(__file__), path)
+ return os.path.realpath(join(os.path.dirname(__file__), path))
SCRIPTPATH = dpath("../bin/snakemake")
@@ -230,6 +230,10 @@ def test_config():
run(dpath("test_config"))
+def test_update_config():
+ run(dpath("test_update_config"))
+
+
def test_benchmark():
run(dpath("test_benchmark"), check_md5=False)
@@ -266,3 +270,11 @@ def test_cluster_sync():
run(dpath("test14"),
snakefile="Snakefile.nonstandard",
cluster_sync="./qsub")
+
+def test_symlink_temp():
+ run(dpath("test_symlink_temp"), shouldfail=True)
+
+
+if __name__ == '__main__':
+ import nose
+ nose.run(defaultTest=__name__)
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/snakemake.git
More information about the debian-med-commit
mailing list