[med-svn] [python-pbcore] 02/03: Drop rename-dataset.py patch in favor of build-time substitution
Afif Elghraoui
afif-guest at moszumanska.debian.org
Tue Oct 27 05:21:17 UTC 2015
This is an automated email from the git hooks/post-receive script.
afif-guest pushed a commit to branch master
in repository python-pbcore.
commit b3e49cd60d2a43496b354d1d0210b6262ebcbf44
Author: Afif Elghraoui <afif at ghraoui.name>
Date: Mon Oct 26 21:53:00 2015 -0700
Drop rename-dataset.py patch in favor of build-time substitution
Maintaining the patch is unwieldy. Making the substitution at
build time using sed should be more robust.
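For reference, the rename that the patch used to carry is now applied to the installed files instead. Roughly, the idea is the following (a sketch only; the actual command, run from the dh_install override, is in the debian/rules hunk below):

    # Rewrite every reference to the old script name in the installed tree,
    # then rename the entry point itself ($(DESTDIR) as used in debian/rules).
    find $(DESTDIR) -type f -exec sed -i 's/dataset\.py/pbdataset/g' {} +
    mv $(DESTDIR)/usr/bin/dataset.py $(DESTDIR)/usr/bin/pbdataset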
---
 debian/patches/rename-datasetpy.patch | 179 ----------------------------------
 debian/patches/series                 |   1 -
 debian/rules                          |   1 +
 3 files changed, 1 insertion(+), 180 deletions(-)
diff --git a/debian/patches/rename-datasetpy.patch b/debian/patches/rename-datasetpy.patch
deleted file mode 100644
index 04e1ebe..0000000
--- a/debian/patches/rename-datasetpy.patch
+++ /dev/null
@@ -1,179 +0,0 @@
-Description: Rename dataset.py to pbdataset
- dataset.py is not a name suitable for system-wide installation. It
- is referenced by several parts of the source distribution here (especially
- the tests), so this patch corrects those references. Upstream is not interested
- in renaming since system-wide installation was not their intention for
- this package [1].
-Author: Afif Elghraoui
-Forwarded: https://github.com/PacificBiosciences/pbcore/issues/21#issuecomment-141742545
-Last-Update: 2015-09-20
---- python-pbcore.orig/tests/test_pbdataset.py
-+++ python-pbcore/tests/test_pbdataset.py
-@@ -22,7 +22,7 @@
-
- log = logging.getLogger(__name__)
-
--datasetpy_not_available = subprocess.call(["which", "dataset.py"])
-+datasetpy_not_available = subprocess.call(["which", "pbdataset"])
-
- class TestDataSet(unittest.TestCase):
- """Unit and integrationt tests for the DataSet class and \
-@@ -86,7 +86,7 @@
- "pbdataset script is not available")
- def test_split_cli(self):
- outdir = tempfile.mkdtemp(suffix="dataset-unittest")
-- cmd = "dataset.py split --outdir {o} --contigs --chunks 2 {d}".format(
-+ cmd = "pbdataset split --outdir {o} --contigs --chunks 2 {d}".format(
- o=outdir,
- d=data.getXml(8))
- log.debug(cmd)
-@@ -102,7 +102,7 @@
- def test_create_cli(self):
- log.debug("Absolute")
- outdir = tempfile.mkdtemp(suffix="dataset-unittest")
-- cmd = "dataset.py create --type AlignmentSet {o} {i1} {i2}".format(
-+ cmd = "pbdataset create --type AlignmentSet {o} {i1} {i2}".format(
- o=os.path.join(outdir, 'pbalchemysim.alignmentset.xml'),
- i1=data.getXml(8), i2=data.getXml(11))
- log.debug(cmd)
-@@ -113,7 +113,7 @@
-
- log.debug("Relative")
- outdir = tempfile.mkdtemp(suffix="dataset-unittest")
-- cmd = ("dataset.py create --relative --type AlignmentSet "
-+ cmd = ("pbdataset create --relative --type AlignmentSet "
- "{o} {i1} {i2}".format(
- o=os.path.join(outdir, 'pbalchemysim.alignmentset.xml'),
- i1=data.getXml(8),
---- python-pbcore.orig/tests/test_pbdataset_subtypes.py
-+++ python-pbcore/tests/test_pbdataset_subtypes.py
-@@ -270,7 +270,7 @@
- outdir = tempfile.mkdtemp(suffix="dataset-unittest")
- datafile = os.path.join(outdir, "merged.bam")
- xmlfile = os.path.join(outdir, "merged.xml")
-- cmd = "dataset.py consolidate {i} {d} {x}".format(i=data.getXml(12),
-+ cmd = "pbdataset consolidate {i} {d} {x}".format(i=data.getXml(12),
- d=datafile,
- x=xmlfile)
- log.debug(cmd)
-@@ -304,7 +304,7 @@
- outdir = tempfile.mkdtemp(suffix="dataset-unittest")
- datafile = os.path.join(outdir, "merged.bam")
- xmlfile = os.path.join(outdir, "merged.xml")
-- cmd = "dataset.py consolidate --numFiles 2 {i} {d} {x}".format(
-+ cmd = "pbdataset consolidate --numFiles 2 {i} {d} {x}".format(
- i=testFile, d=datafile, x=xmlfile)
- log.debug(cmd)
- o, r, m = backticks(cmd)
---- python-pbcore.orig/doc/pbcore.io.dataset.rst
-+++ python-pbcore/doc/pbcore.io.dataset.rst
-@@ -9,20 +9,20 @@
- The API and console entry points are designed with the set operations one might
- perform on the various types of data held by a DataSet XML in mind: merge,
- split, write etc. While various types of DataSets can be found in XML files,
--the API (and in a way the console entry point, dataset.py) has DataSet as its
-+the API (and in a way the console entry point, pbdataset) has DataSet as its
- base type, with various subtypes extending or replacing functionality as
- needed.
-
-
- Console Entry Point Usage
- =============================
--The following entry points are available through the main script: dataset.py::
-+The following entry points are available through the main script: pbdataset::
-
-- usage: dataset.py [-h] [-v] [--debug]
-+ usage: pbdataset [-h] [-v] [--debug]
- {create,filter,merge,split,validate,loadstats,consolidate}
- ...
-
-- Run dataset.py by specifying a command.
-+ Run pbdataset by specifying a command.
-
- optional arguments:
- -h, --help show this help message and exit
-@@ -35,7 +35,7 @@
-
- Create::
-
-- usage: dataset.py create [-h] [--type DSTYPE] [--novalidate] [--relative]
-+ usage: pbdataset create [-h] [--type DSTYPE] [--novalidate] [--relative]
- outfile infile [infile ...]
-
- Create an XML file from a fofn or bam
-@@ -53,7 +53,7 @@
-
- Filter::
-
-- usage: dataset.py filter [-h] infile outfile filters [filters ...]
-+ usage: pbdataset filter [-h] infile outfile filters [filters ...]
-
- Add filters to an XML file
-
-@@ -67,7 +67,7 @@
-
- Union::
-
-- usage: dataset.py union [-h] outfile infiles [infiles ...]
-+ usage: pbdataset union [-h] outfile infiles [infiles ...]
-
- Combine XML (and BAM) files
-
-@@ -80,7 +80,7 @@
-
- Validate::
-
-- usage: dataset.py validate [-h] infile
-+ usage: pbdataset validate [-h] infile
-
- Validate ResourceId files (XML validation only available in testing)
-
-@@ -92,7 +92,7 @@
-
- Load PipeStats::
-
-- usage: dataset.py loadstats [-h] [--outfile OUTFILE] infile statsfile
-+ usage: pbdataset loadstats [-h] [--outfile OUTFILE] infile statsfile
-
- Load an sts.xml file into a DataSet XML file
-
-@@ -106,7 +106,7 @@
-
- Split::
-
-- usage: dataset.py split [-h] [--contigs] [--chunks CHUNKS] [--subdatasets]
-+ usage: pbdataset split [-h] [--contigs] [--chunks CHUNKS] [--subdatasets]
- [--outdir OUTDIR]
- infile ...
-
-@@ -145,8 +145,8 @@
-
- 2. Merge the files into a FOFN-like dataset (bams aren't touched)::
-
-- # dataset.py merge <out_fn> <in_fn> [<in_fn> <in_fn> ...]
-- dataset.py merge merged.alignmentset.xml movie1.alignmentset.xml movie2.alignmentset.xml
-+ # pbdataset merge <out_fn> <in_fn> [<in_fn> <in_fn> ...]
-+ pbdataset merge merged.alignmentset.xml movie1.alignmentset.xml movie2.alignmentset.xml
-
- 3. Split the dataset into chunks by contig (rname) (bams aren't touched). Note
- that supplying output files splits the dataset into that many output files
-@@ -155,7 +155,7 @@
- automatically. Specifying a number of chunks instead will produce that many
- files, with contig or even sub contig (reference window) splitting.::
-
-- dataset.py split --contigs --chunks 8 merged.alignmentset.xml
-+ pbdataset split --contigs --chunks 8 merged.alignmentset.xml
-
- 4. Quiver then consumes these chunks::
-
---- python-pbcore.orig/bin/dataset.py
-+++ python-pbcore/bin/dataset.py
-@@ -38,7 +38,7 @@
- return parser
-
- def get_parser():
-- description = 'Run dataset.py by specifying a command.'
-+ description = 'Run pbdataset by specifying a command.'
- parser = argparse.ArgumentParser(version=__VERSION__,
- description=description)
- parser.add_argument("--debug", default=False, action='store_true',
diff --git a/debian/patches/series b/debian/patches/series
index 8944739..57f5ddc 100644
--- a/debian/patches/series
+++ b/debian/patches/series
@@ -1,3 +1,2 @@
 doc-theme.patch
 enable-build-time-testing.patch
-rename-datasetpy.patch
diff --git a/debian/rules b/debian/rules
index a8eec36..f867f98 100755
--- a/debian/rules
+++ b/debian/rules
@@ -19,6 +19,7 @@ override_dh_auto_build:
 override_dh_install:
 	dh_install
+	find $(DESTDIR) -type f -exec sed -i 's/dataset\.py/pbdataset/g' {} +
 	mv $(DESTDIR)/usr/bin/.open $(DESTDIR)/usr/bin/pbopen
 	mv $(DESTDIR)/usr/bin/dataset.py $(DESTDIR)/usr/bin/pbdataset
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-pbcore.git