[med-svn] [python-pbcommand] 01/03: Imported Upstream version 0.2.17
Afif Elghraoui
afif-guest at moszumanska.debian.org
Sat Nov 28 00:56:18 UTC 2015
This is an automated email from the git hooks/post-receive script.
afif-guest pushed a commit to branch master
in repository python-pbcommand.
commit a2a4df74662af1b2b33287522f93ca907967307c
Author: Afif Elghraoui <afif at ghraoui.name>
Date: Fri Nov 27 16:49:07 2015 -0800
Imported Upstream version 0.2.17
---
Makefile | 4 ++++
README.md | 35 +++++++++++++++++++++--------------
REQUIREMENTS_TEST.txt | 3 ++-
circle.yml | 1 +
extract-readme-snippets.py | 27 +++++++++++++++++++++++++++
pbcommand/__init__.py | 2 +-
pbcommand/cli/quick.py | 1 +
pbcommand/models/common.py | 15 +++++++++++----
pbcommand/models/report.py | 2 +-
9 files changed, 69 insertions(+), 21 deletions(-)
diff --git a/Makefile b/Makefile
index 399490b..3acdc74 100644
--- a/Makefile
+++ b/Makefile
@@ -40,3 +40,7 @@ run-auto-pep8:
build-java-classes:
avro-tools compile schema pbcommand/schemas java-classes/
+
+extract-readme-snippets:
+ rm -rf readme-snippet-*.py
+ pandoc -t markdown README.md | pandoc --filter ./extract-readme-snippets.py
diff --git a/README.md b/README.md
index c370780..2dca82e 100644
--- a/README.md
+++ b/README.md
@@ -35,9 +35,8 @@ registry = registry_builder("pbcommand", "python -m pbcommand.cli.examples.dev_q
def _example_main(input_files, output_files, **kwargs):
- log.info("Running example main with {i} {o} kw:{k}".format(i=input_files,
- o=output_files, k=kwargs))
- # write mock output files, otherwise the End-to-End test will fail
+ # Simple function that should be imported from your library code
+ # write mock output files for testing purposes, otherwise the End-to-End test will fail
xs = output_files if isinstance(output_files, (list, tuple)) else [output_files]
for x in xs:
with open(x, 'w') as writer:
@@ -92,6 +91,7 @@ Define a function that will add inputs, outputs and options to your parser.
```python
from pbcommand.models import FileTypes
+
def add_args_and_options(p):
# FileType, label, name, description
p.add_input_file_type(FileTypes.FASTA, "fasta_in", "Fasta File", "PacBio Spec'ed fasta file")
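The hunk above stops at the input file, but the runner functions later in the README refer to a `fasta_out` output and a `read_length` option, so the full function presumably continues along these lines (a hedged sketch; the signatures and defaults are assumptions, not part of this diff):

```python
# hedged sketch -- signatures and defaults are assumptions, not part of this diff
from pbcommand.models import FileTypes

def add_args_and_options(p):
    # FileType, label, name, description
    p.add_input_file_type(FileTypes.FASTA, "fasta_in", "Fasta File", "PacBio Spec'ed fasta file")
    # FileType, label, name, description, default file name
    p.add_output_file_type(FileTypes.FASTA, "fasta_out", "Filtered Fasta File",
                           "Filtered Fasta file", "filtered.fasta")
    # option id, argparse label, default value, name, description
    p.add_int("pbcommand.task_options.dev_read_length", "read_length", 25,
              "Min Read Length", "Minimum sequence length filter")
    return p
```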
@@ -106,8 +106,13 @@ def add_args_and_options(p):
Define Parser
```python
-from pbcommand.models import TaskTypes, ResourceTypes, SymbolTypes
+from pbcommand.models import TaskTypes, SymbolTypes, get_pbparser
+
+
def get_contract_parser():
+ tool_id = "example_namespace.tasks.my_id"
+ version = "0.1.0" # or reuse __version__
+ display_name = "My Example Tool"
# Number of processors to use, can also be SymbolTypes.MAX_NPROC
nproc = 1
# Log file, tmp dir, tmp file. See ResourceTypes in models, ResourceTypes.TMP_DIR
@@ -115,13 +120,12 @@ def get_contract_parser():
# Commandline exe to call "{exe}" /path/to/resolved-tool-contract.json
driver_exe = "python -m pbcommand.cli.example.dev_app --resolved-tool-contract "
desc = "Dev app for Testing that supports emitting tool contracts"
- task_type = TaskTypes.LOCAL
- # TaskTypes.DISTRIBUTED if you want your task to be submitted to the cluster manager (e.g., SGE) if
+ is_distributed = False
+ # set to True if you want your task to be submitted to the cluster manager (e.g., SGE) if
# one is provided to the workflow engine.
- p = get_pbparser(TOOL_ID, __version__, desc, driver_exe, task_type, nproc, resource_types)
+ p = get_pbparser(tool_id, version, display_name, desc, driver_exe, is_distributed=is_distributed, nproc=nproc, resource_types=resource_types)
add_args_and_options(p)
return p
-
```
@@ -131,13 +135,16 @@ Define a Wrapping layer to call your main from both the tool contract and raw ar
def _args_runner(args):
# this is the args from parser.parse_args()
# the properties of args are defined as "labels" in the add_args_and_options func.
- return run_my_main(args.fasta_in, fasta_out, args.read_length)
+ return run_my_main(args.fasta_in, args.fasta_out, args.read_length)
+
def _resolved_tool_contract_runner(resolved_tool_contract):
+ """
+ :type resolved_tool_contract: pbcommand.models.ResolvedToolContract"""
rtc = resolved_tool_contract
# all options are referenced by globally namespaced id. This allows tools to use other tools' options
# e.g., pbalign to use blasr defined options.
- return run_my_main(rtc.inputs[0], rtc.outputs[0], rtc.options["pbcommand.task_options.dev_read_length"])
+ return run_my_main(rtc.task.input_files[0], rtc.task.outputs[0], rtc.task.options["pbcommand.task_options.dev_read_length"])
```
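Both runners delegate to `run_my_main`, which is not shown in this excerpt. A minimal, hypothetical stand-in consistent with the mock-output behaviour of `_example_main` above:

```python
# hypothetical library function that both runners delegate to; it only writes
# a mock output so the end-to-end test can pass
def run_my_main(fasta_in, fasta_out, min_read_length):
    with open(fasta_out, 'w') as writer:
        writer.write("# mock output from {i} (min length {m})\n".format(
            i=fasta_in, m=min_read_length))
    return 0  # exit code consumed by the runner layer
```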
@@ -148,17 +155,17 @@ Add running layer
```python
import sys
import logging
-import pbcommand.utils setup_log
+from pbcommand.utils import setup_log
from pbcommand.cli import pbparser_runner
log = logging.getLogger(__name__)
+
def main(argv=sys.argv):
# New interface that supports running resolved tool contracts
- log.info("Starting {f} version {v} pbcommand example dev app".format(f=__file__, v=__version__))
- p = get_contract_parser()
+ log.info("Starting {f} version {v} pbcommand example dev app".format(f=__file__, v="0.1.0"))
return pbparser_runner(argv[1:],
- p,
+ get_contract_parser(),
_args_runner, # argparse runner func
_resolved_tool_contract_runner, # tool contract runner func
log, # log instance
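The hunk is cut off mid-call in this notice; a module run with `python -m` would typically close with a standard entry point (an assumption, not shown in this diff):

```python
# assumed module entry point (not shown in this hunk)
if __name__ == '__main__':
    sys.exit(main(sys.argv))
```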
diff --git a/REQUIREMENTS_TEST.txt b/REQUIREMENTS_TEST.txt
index 17b0395..7f3ccb2 100644
--- a/REQUIREMENTS_TEST.txt
+++ b/REQUIREMENTS_TEST.txt
@@ -1,2 +1,3 @@
nose
-tox
\ No newline at end of file
+tox
+numpy # this is required for pbreports serialization
\ No newline at end of file
diff --git a/circle.yml b/circle.yml
index 3fab715..80d0fc2 100644
--- a/circle.yml
+++ b/circle.yml
@@ -5,6 +5,7 @@ machine:
dependencies:
pre:
- pip install -r REQUIREMENTS.txt
+ - pip install -r REQUIREMENTS_TEST.txt
- pip install nose
test:
diff --git a/extract-readme-snippets.py b/extract-readme-snippets.py
new file mode 100644
index 0000000..e7b6863
--- /dev/null
+++ b/extract-readme-snippets.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+"""
+Pandoc filter to extract python code blocks and write each snippet out.
+"""
+
+from pandocfilters import toJSONFilter, Str
+
+n = 0
+
+
+def caps(key, value, format, meta):
+ global n
+ if key == "CodeBlock":
+ py_types = value[0][1][0]
+ if py_types.encode("ascii") == "python":
+ code_block = value[-1]
+ # eval(code_block)
+ with open("readme-snippet-{n}.py".format(n=n), 'a') as f:
+ f.write("# example {k}-{n}\n".format(k=key, n=n))
+ f.write("{v}\n".format(v=code_block))
+
+ n += 1
+
+
+if __name__ == "__main__":
+ toJSONFilter(caps)
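`toJSONFilter` (from pandocfilters) reads the pandoc JSON AST on stdin, applies the action to each node, and writes the AST back to stdout, which is why the new Makefile target pipes README.md through pandoc. A hedged sketch of driving the filter by hand; the paths and the "markdown" format argument are illustrative:

```python
# hedged sketch: drive the filter by hand instead of via "make extract-readme-snippets"
import subprocess

# render README.md to the pandoc JSON AST
ast_json = subprocess.check_output(["pandoc", "-t", "json", "README.md"])
# pandoc normally passes the target output format as argv[1]; "markdown" here
proc = subprocess.Popen(["python", "extract-readme-snippets.py", "markdown"],
                        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
proc.communicate(ast_json)
# each python code block from the README ends up in readme-snippet-<n>.py
```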
diff --git a/pbcommand/__init__.py b/pbcommand/__init__.py
index eaaed79..e61fceb 100644
--- a/pbcommand/__init__.py
+++ b/pbcommand/__init__.py
@@ -1,4 +1,4 @@
-VERSION = (0, 2, 16)
+VERSION = (0, 2, 17)
def get_version():
diff --git a/pbcommand/cli/quick.py b/pbcommand/cli/quick.py
index c147a82..7cb0b4d 100644
--- a/pbcommand/cli/quick.py
+++ b/pbcommand/cli/quick.py
@@ -192,6 +192,7 @@ def __args_rtc_runner(registry):
else:
exit_code = func(rtc)
log.info("Completed running {r} exitcode {e}".format(r=rtc, e=exit_code))
+ return exit_code
return _w
diff --git a/pbcommand/models/common.py b/pbcommand/models/common.py
index 30bb753..2d6c7cd 100644
--- a/pbcommand/models/common.py
+++ b/pbcommand/models/common.py
@@ -278,19 +278,21 @@ class FileTypes(object):
class DataStoreFile(object):
- def __init__(self, uuid, file_id, type_id, path):
+ def __init__(self, uuid, source_id, type_id, path, is_chunked=False):
# adding this for consistency. In the scala code, the unique id must be
# in uuid format
self.uuid = uuid
# this must be globally unique. This is used to provide context to where
# the file originated from (i.e., the tool author)
- self.file_id = file_id
+ self.file_id = source_id
# Consistent with a value in FileTypes
self.file_type_id = type_id
self.path = path
self.file_size = os.path.getsize(path)
self.created_at = datetime.datetime.fromtimestamp(os.path.getctime(path))
self.modified_at = datetime.datetime.fromtimestamp(os.path.getmtime(path))
+ # Was the file produced by a chunked task?
+ self.is_chunked = is_chunked
def __repr__(self):
_d = dict(k=self.__class__.__name__,
@@ -306,14 +308,19 @@ class DataStoreFile(object):
path=self.path,
fileSize=self.file_size,
createdAt=_datetime_to_string(self.created_at),
- modifiedAt=_datetime_to_string(self.modified_at))
+ modifiedAt=_datetime_to_string(self.modified_at),
+ isChunked=self.is_chunked)
@staticmethod
def from_dict(d):
# FIXME. This isn't quite right.
to_a = lambda x: x.encode('ascii', 'ignore')
to_k = lambda x: to_a(d[x])
- return DataStoreFile(to_k('uniqueId'), to_k('sourceId'), to_k('fileTypeId'), to_k('path'))
+ is_chunked = d.get('isChunked', False)
+ return DataStoreFile(to_k('uniqueId'),
+ to_k('sourceId'),
+ to_k('fileTypeId'),
+ to_k('path'), is_chunked=is_chunked)
def _datetime_to_string(dt):
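A small round-trip sketch for the new `is_chunked` flag (assumed usage, not part of the commit; the temporary file exists only because the constructor stats the path, and the fileTypeId string is illustrative):

```python
# assumed usage of the new is_chunked flag (not part of the commit)
import tempfile
import uuid
from pbcommand.models.common import DataStoreFile

tmp = tempfile.NamedTemporaryFile(suffix=".fasta", delete=False)
tmp.write(b">record\nACGT\n")
tmp.close()

d = {"uniqueId": str(uuid.uuid4()),
     "sourceId": "example_namespace.tasks.my_id-out-0",
     "fileTypeId": "PacBio.FileTypes.Fasta",
     "path": tmp.name,
     "isChunked": True}

ds_file = DataStoreFile.from_dict(d)
print(ds_file.is_chunked)  # True; defaults to False when isChunked is absent
```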
diff --git a/pbcommand/models/report.py b/pbcommand/models/report.py
index 8239554..7e910a3 100644
--- a/pbcommand/models/report.py
+++ b/pbcommand/models/report.py
@@ -684,7 +684,7 @@ class Report(BaseReportElement):
"""
with open(file_name, 'w') as f:
f.write(self.to_json())
- log.info("Wrote report {r}".format(r=file_name))
+ # log.info("Wrote report {r}".format(r=file_name))
@staticmethod
def from_simple_dict(report_id, raw_d, namespace):
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-pbcommand.git