[med-svn] [Git][med-team/pbdagcon][master] 9 commits: Run 2to3 on Python2 files to try a Python3 port
Andreas Tille
gitlab at salsa.debian.org
Mon Aug 5 14:38:17 BST 2019
Andreas Tille pushed to branch master at Debian Med / pbdagcon
Commits:
f1293f7d by Andreas Tille at 2019-08-05T08:05:32Z
Run 2to3 on Python2 files to try a Python3 port
- - - - -
24e5ff6a by Andreas Tille at 2019-08-05T08:29:36Z
New upstream version 0.3+git20180411.c14c422+ds
- - - - -
299cc716 by Andreas Tille at 2019-08-05T08:29:37Z
Update upstream source from tag 'upstream/0.3+git20180411.c14c422+ds'
Update to upstream version '0.3+git20180411.c14c422+ds'
with Debian dir c49b142db0e8aa891047acd5af464836ba587e34
- - - - -
77a848a5 by Andreas Tille at 2019-08-05T08:29:37Z
New upstream version
- - - - -
8ced7a4d by Andreas Tille at 2019-08-05T08:29:37Z
debhelper-compat 12
- - - - -
937e3bca by Andreas Tille at 2019-08-05T08:29:45Z
Standards-Version: 4.4.0
- - - - -
f7388d53 by Andreas Tille at 2019-08-05T09:00:25Z
Adapt patches
- - - - -
eda89ff7 by Andreas Tille at 2019-08-05T09:14:08Z
Build-Depends: s/python/python3/
- - - - -
21b29a28 by Andreas Tille at 2019-08-05T09:19:49Z
Set Python3 interpreter
- - - - -
17 changed files:
- README.md
- + bamboo_build.sh
- + bamboo_test.sh
- configure.py
- debian/changelog
- − debian/compat
- debian/control
- + debian/patches/2to3.patch
- debian/patches/compiler-flags.patch
- debian/patches/series
- src/cpp/DazAlnProvider.cpp
- src/cpp/DazAlnProvider.hpp
- src/cpp/makefile
- src/filterm4.py
- src/m4topre.py
- src/q-sense.py
- test/cpp/makefile
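For context on the first commit (f1293f7d) and the 2to3.patch shipped below: such a conversion is typically driven by a single 2to3 run over the Python sources. A minimal sketch, assuming a checkout of the upstream tree; the exact invocation is not recorded in this mail, and the file list is only illustrative (taken from the changed files above):

  2to3 -w configure.py src/filterm4.py src/m4topre.py src/q-sense.py

The -w flag makes 2to3 rewrite the files in place; without it the tool only prints the proposed diff.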
Changes:
=====================================
README.md
=====================================
@@ -130,3 +130,7 @@ ga('send', 'pageview');
src/filterm4.py 'code'
[pbdagcon_wf.sh]:
src/cpp/pbdagcon_wf.sh 'code'
+
+DISCLAIMER
+----------
+THIS WEBSITE AND CONTENT AND ALL SITE-RELATED SERVICES, INCLUDING ANY DATA, ARE PROVIDED "AS IS," WITH ALL FAULTS, WITH NO REPRESENTATIONS OR WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, SATISFACTORY QUALITY, NON-INFRINGEMENT OR FITNESS FOR A PARTICULAR PURPOSE. YOU ASSUME TOTAL RESPONSIBILITY AND RISK FOR YOUR USE OF THIS SITE, ALL SITE-RELATED SERVICES, AND ANY THIRD PARTY WEBSITES OR APPLICATIONS. NO ORAL OR WRITTEN INFORMATION OR ADVICE SHALL CREATE A WARRANTY OF ANY KIND. ANY REFERENCES TO SPECIFIC PRODUCTS OR SERVICES ON THE WEBSITES DO NOT CONSTITUTE OR IMPLY A RECOMMENDATION OR ENDORSEMENT BY PACIFIC BIOSCIENCES.
=====================================
bamboo_build.sh
=====================================
@@ -0,0 +1,75 @@
+#!/bin/bash
+set -e
+NEXUS_BASEURL=http://ossnexus.pacificbiosciences.com/repository
+#NEXUS_URL=$NEXUS_BASEURL/unsupported/gcc-4.9.2
+if [ ! -e .distfiles/gtest/release-1.7.0.tar.gz ]; then
+ mkdir -p .distfiles/gtest
+ curl -sL $NEXUS_BASEURL/unsupported/distfiles/googletest/release-1.7.0.tar.gz \
+ -o .distfiles/gtest/release-1.7.0.tar.gz
+fi
+tar zxf .distfiles/gtest/release-1.7.0.tar.gz -C repos/
+ln -sfn googletest-release-1.7.0 repos/gtest
+
+rm -rf deployment
+mkdir -p deployment
+/bin/ls -t tarballs/pbbam-*.tgz | head -1 | xargs -r -n 1 cat | tar zxv --strip-components 3 -C deployment
+/bin/ls -t tarballs/blasr_libcpp-*.tgz | head -1 | xargs -r -n 1 cat | tar zxv --strip-components 3 -C deployment
+/bin/ls -t tarballs/blasr-*.tgz | head -1 | xargs -r -n 1 cat | tar zxv --strip-components 2 -C deployment
+DEPLOYMENT=$PWD/deployment
+
+export PATH=$PWD/deployment/bin:$PATH
+export LD_LIBRARY_PATH=$PWD/deployment/lib:$LD_LIBRARY_PATH
+
+type module >& /dev/null || . /mnt/software/Modules/current/init/bash
+module load git
+module load gcc
+module load ccache
+if [[ $USER == bamboo ]]; then
+ export CCACHE_DIR=/mnt/secondary/Share/tmp/bamboo.mobs.ccachedir
+fi
+module load boost
+if [[ $BOOST_ROOT =~ /include ]]; then
+ set -x
+ BOOST_ROOT=$(dirname $BOOST_ROOT)
+ set +x
+fi
+module load htslib
+module load hdf5-tools
+module load zlib
+
+DAZZDB=$PWD/repos/dazzdb
+DALIGNER=$PWD/repos/daligner
+cd repos/pbdagcon
+export CCACHE_BASEDIR=$PWD
+rm -rf build
+set -x
+mkdir -p build
+ BOOST_INCLUDE=$BOOST_ROOT/include \
+LIBPBDATA_INCLUDE=$DEPLOYMENT/include/pbdata \
+ LIBPBDATA_LIB=$DEPLOYMENT/lib \
+ LIBBLASR_INCLUDE=$DEPLOYMENT/include/alignment \
+ LIBBLASR_LIB=$DEPLOYMENT/lib \
+LIBPBIHDF_INCLUDE=$DEPLOYMENT/include/hdf \
+ LIBPBIHDF_LIB=$DEPLOYMENT/lib \
+ PBBAM_INCLUDE=$DEPLOYMENT/include \
+ PBBAM_LIB=$DEPLOYMENT/lib \
+ HTSLIB_CFLAGS=$(pkg-config --cflags htslib) \
+ HTSLIB_LIBS=$(pkg-config --libs htslib) \
+ HDF5_INCLUDE=$(pkg-config --cflags-only-I hdf5|awk '{print $1}'|sed -e 's/^-I//') \
+ HDF5_LIB=$(pkg-config --libs-only-L hdf5|awk '{print $1}'|sed -e 's/^-L//') \
+ DALIGNER_SRC=$DALIGNER \
+ DAZZ_DB_SRC=$DAZZDB \
+ GTEST_SRC=$PWD/../gtest/src \
+ GTEST_INCLUDE=$PWD/../gtest/include \
+ ZLIB_LIBFLAGS="$(pkg-config --libs zlib)" \
+./configure.py --build-dir=$PWD/build
+sed -i -e 's/-lpbihdf/-llibcpp/;s/-lblasr//;s/-lpbdata//' build/defines.mk
+make -C build
+cp -a build/src/cpp/pbdagcon $DEPLOYMENT/bin/
+cp -a build/src/cpp/dazcon $DEPLOYMENT/bin/
+cd ../..
+
+myVERSION=`pbdagcon --version|awk '/version/{print $3}'`
+#rm -rf tarballs && mkdir -p tarballs
+cd deployment
+tar zcf ../tarballs/pbdagcon-${myVERSION}.tgz bin/pbdagcon bin/dazcon
=====================================
bamboo_test.sh
=====================================
@@ -0,0 +1,12 @@
+#!/bin/bash
+export PATH=$PWD/deployment/bin:$PATH
+export LD_LIBRARY_PATH=$PWD/deployment/lib:$LD_LIBRARY_PATH
+type module >& /dev/null \
+|| . /mnt/software/Modules/current/init/bash
+module load gcc
+module load htslib
+module load hdf5-tools
+module load zlib
+
+make -C repos/pbdagcon/build check
+chmod +w -R repos/pbdagcon
=====================================
configure.py
=====================================
@@ -152,8 +152,8 @@ def compose_defines_pacbio(envin):
'LIBPBIHDF_INCLUDE', 'LIBPBIHDF_LIB', 'LIBPBIHDF_LIBFLAGS',
'HDF5_INCLUDE', 'HDF5_LIB', 'HDF5_LIBFLAGS',
'PBBAM_INCLUDE', 'PBBAM_LIB', 'PBBAM_LIBFLAGS',
- 'HTSLIB_INCLUDE', 'HTSLIB_LIB', 'HTSLIB_LIBFLAGS',
- 'BOOST_INCLUDE','PTHREAD_LIBFLAGS',
+ 'HTSLIB_CFLAGS', 'HTSLIB_LIBS',
+ 'BOOST_INCLUDE', 'PTHREAD_LIBFLAGS',
'ZLIB_LIB', 'ZLIB_LIBFLAGS',
'GCC_LIB',
'DAZZ_DB_SRC', 'DAZZ_DB_INCLUDE',
@@ -244,7 +244,7 @@ def set_defs_defaults(env, nopbbam):
pbbam_defaults = {
'LIBPBIHDF_LIBFLAGS': '-lpbihdf',
'PBBAM_LIBFLAGS': '-lpbbam',
- 'HTSLIB_LIBFLAGS': '-lhts',
+ 'HTSLIB_LIBS': '-lhts',
'HDF5_LIBFLAGS': '-lhdf5_cpp -lhdf5',
'ZLIB_LIBFLAGS': '-lz',
'PTHREAD_LIBFLAGS': '-lpthread',
=====================================
debian/changelog
=====================================
@@ -1,9 +1,14 @@
-pbdagcon (0.3+git20161121.0000000+ds-2) UNRELEASED; urgency=medium
+pbdagcon (0.3+git20180411.c14c422+ds-1) UNRELEASED; urgency=medium
* Add myself to Uploaders to have at least one human uploader after
Afif removed himself.
+ * Run 2to3 on Python2 files to try a Python3 port
+ * New upstream version
+ * debhelper-compat 12
+ * Standards-Version: 4.4.0
+ * Build-Depends: s/python/python3/
- -- Andreas Tille <tille at debian.org> Thu, 07 Feb 2019 08:22:25 +0100
+ -- Andreas Tille <tille at debian.org> Mon, 05 Aug 2019 10:29:37 +0200
pbdagcon (0.3+git20161121.0000000+ds-1.1) unstable; urgency=medium
=====================================
debian/compat deleted
=====================================
@@ -1 +0,0 @@
-12
=====================================
debian/control
=====================================
@@ -3,8 +3,8 @@ Maintainer: Debian Med Packaging Team <debian-med-packaging at lists.alioth.debian.
Uploaders: Andreas Tille <tille at debian.org>
Section: science
Priority: optional
-Build-Depends: debhelper (>= 12~),
- python,
+Build-Depends: debhelper-compat (= 12),
+ python3,
zlib1g-dev,
libhdf5-dev,
libboost-dev,
@@ -14,7 +14,7 @@ Build-Depends: debhelper (>= 12~),
libpbdata-dev,
# Test-Depends:
googletest
-Standards-Version: 4.3.0
+Standards-Version: 4.4.0
Vcs-Browser: https://salsa.debian.org/med-team/pbdagcon
Vcs-Git: https://salsa.debian.org/med-team/pbdagcon.git
Homepage: https://github.com/PacificBiosciences/pbdagcon
=====================================
debian/patches/2to3.patch
=====================================
@@ -0,0 +1,172 @@
+Author: Andreas Tille <tille at debian.org>
+Last-Update: Mon, 05 Aug 2019 10:03:14 +0200
+Description: Run 2to3 on Python2 files to try a Python3 port
+
+--- a/DALIGNER/py_utils/DAPipe.py
++++ b/DALIGNER/py_utils/DAPipe.py
+@@ -89,5 +89,5 @@ if __name__ == "__main__":
+ daligner_task = make_daligner_task ( run_daligner )
+ wf.addTask(daligner_task)
+ job_id += 1
+- print job_id
++ print(job_id)
+ wf.refreshTargets(updateFreq = 45) #all
+--- a/DALIGNER/py_utils/DBLA_to_falcon.py
++++ b/DALIGNER/py_utils/DBLA_to_falcon.py
+@@ -4,7 +4,7 @@ import LAPI
+ import sys
+
+
+-rcmap = dict(zip("ACGTacgtNn-","TGCATGCANN-"))
++rcmap = dict(list(zip("ACGTacgtNn-","TGCATGCANN-")))
+ def rc(seq):
+ return "".join([rcmap[c] for c in seq[::-1]])
+
+@@ -22,7 +22,7 @@ for aread in ovl_data:
+ LAPI.load_read(db, aread, aln.aseq, 2)
+ aseq = cast( aln.aseq, c_char_p)
+ aseq = aseq.value
+- print "%08d" % aread, aseq
++ print("%08d" % aread, aseq)
+ for aln_data in ovl_data[aread]:
+ aread, bread, acc, abpos, aepos, alen, comp, bbpos, bepos, blen = aln_data
+
+@@ -34,9 +34,9 @@ for aread in ovl_data:
+ #load_read(db, ovl.bread, aln.bseq, 2)
+ if comp == 1:
+ bseq = rc(bseq)
+- print bread, bseq
+- print "+ +"
++ print(bread, bseq)
++ print("+ +")
+ count += 1
+-print "- -"
++print("- -")
+
+ close_DB(db)
+--- a/DALIGNER/py_utils/LAPI.py
++++ b/DALIGNER/py_utils/LAPI.py
+@@ -80,7 +80,7 @@ def get_ovl_data(fn):
+ ovl = Overlap()
+ ovl_data = {}
+
+- for j in xrange(novl.value):
++ for j in range(novl.value):
+ _read_overlap(in_f, ovl)
+
+ if ovl.path.tlen > tmax:
+--- a/DALIGNER/py_utils/LAPipe.py
++++ b/DALIGNER/py_utils/LAPipe.py
+@@ -71,10 +71,10 @@ def run_consensus_task(self):
+ log_path = os.path.join( script_dir, "cp_%05d.log" % (job_id))
+
+ with open( os.path.join(cwd, "c_%05d.sh" % job_id), "w") as p_script:
+- print >> p_script, ". /mnt/secondary/Share/HBAR_03202013/bin/activate"
+- print >> p_script, "cd .."
+- print >> p_script, """./LA4Falcon -o -f:%s las_files/%s.%d.las | """ % (prefix, prefix, job_id),
+- print >> p_script, """ falcon_sense.py --trim --output_multi --min_idt 0.70 --min_cov 4 --local_match_count_threshold 3 --max_n_read 800 --n_core 8 > %s""" % fn(self.out_file)
++ print(". /mnt/secondary/Share/HBAR_03202013/bin/activate", file=p_script)
++ print("cd ..", file=p_script)
++ print("""./LA4Falcon -o -f:%s las_files/%s.%d.las | """ % (prefix, prefix, job_id), end=' ', file=p_script)
++ print(""" falcon_sense.py --trim --output_multi --min_idt 0.70 --min_cov 4 --local_match_count_threshold 3 --max_n_read 800 --n_core 8 > %s""" % fn(self.out_file), file=p_script)
+
+ script = []
+ script.append( "cd %s" % cwd )
+@@ -142,10 +142,10 @@ if __name__ == "__main__":
+ except OSError:
+ pass
+ with open("./p_%05d/p_%05d.sh" % (p_id, p_id), "w") as p_script:
+- print >> p_script, """for f in `find .. -wholename "*job*/%s.%d.%s.*.*.las"`; do ln -sf $f .; done""" % (prefix, p_id, prefix)
++ print("""for f in `find .. -wholename "*job*/%s.%d.%s.*.*.las"`; do ln -sf $f .; done""" % (prefix, p_id, prefix), file=p_script)
+ for l in s_data:
+- print >> p_script, l
+- print >> p_script, "mv %s.%d.las ../las_files" % (prefix, p_id)
++ print(l, file=p_script)
++ print("mv %s.%d.las ../las_files" % (prefix, p_id), file=p_script)
+
+ p_file = os.path.abspath( "./p_%05d/p_%05d.sh" % (p_id, p_id) )
+ job_done = makePypeLocalFile(os.path.abspath( "./p_%05d/p_%05d_done" % (p_id,p_id) ))
+@@ -173,5 +173,5 @@ if __name__ == "__main__":
+
+ c_task = make_c_task( run_consensus_task )
+ wf.addTask(c_task)
+- print p_id
++ print(p_id)
+ wf.refreshTargets(updateFreq = 15) #all
+--- a/configure.py
++++ b/configure.py
+@@ -1,11 +1,11 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ """Configure the build.
+
+ - Fetch boost/gtest.
+ - Create defines.mk
+ """
+ import argparse
+-import commands
++import subprocess
+ import contextlib
+ import os
+ import sys
+@@ -18,7 +18,7 @@ def log(msg):
+
+ def shell(cmd):
+ log(cmd)
+- status, output = commands.getstatusoutput(cmd)
++ status, output = subprocess.getstatusoutput(cmd)
+ if status:
+ raise Exception('%d <-| %r' %(status, cmd))
+ return output
+@@ -178,7 +178,7 @@ def get_make_style_env(envin, args):
+ return envout
+
+ class OsType:
+- Unknown, Linux, Darwin = range(3)
++ Unknown, Linux, Darwin = list(range(3))
+
+ def getOsType():
+ uname = shell('uname -s')
+--- a/src/filterm4.py
++++ b/src/filterm4.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+
+ # Filters for unique, highest scoring subread query/target pairs from an m4
+ # file. Helps get rid of chimeras, at the cost of some yield.
+@@ -35,7 +35,7 @@ def printUniq(qgroup, count):
+ else:
+ top[k] = q
+
+- for r in top.values():
++ for r in list(top.values()):
+ count.filt += 1
+ print(r, end=' ')
+
+--- a/src/m4topre.py
++++ b/src/m4topre.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ """Super-simple converter from blasr m4 alignments to pbdagcon 'pre'
+ alignments. For use in the pre-assembler dagcon workflow.
+ """
+@@ -7,7 +7,7 @@ from __future__ import print_function
+ import sys
+ import heapq
+ import string # pylint: disable=W0402
+-from itertools import ifilter
++
+ from collections import namedtuple, defaultdict
+ import numpy as np
+ from pbcore.io.FastaIO import FastaReader
+@@ -169,7 +169,7 @@ def main(): # pylint: disable=R0914
+
+ # take a max number of alignments for each target
+ limiter = AlnLimiter()
+- my_m4recs[:] = [x for x in ifilter(limiter, my_m4recs)]
++ my_m4recs[:] = [x for x in filter(limiter, my_m4recs)]
+
+ # load only related sequences
+ seqs = {}
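A note on the configure.py hunk above: the Python 2 'commands' module no longer exists in Python 3, and subprocess.getstatusoutput() provides the equivalent call, returning an (exit status, output) tuple. A quick illustrative check, not part of the patch:

  python3 -c 'import subprocess; print(subprocess.getstatusoutput("uname -s"))'
  # e.g. prints: (0, 'Linux')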
=====================================
debian/patches/compiler-flags.patch
=====================================
@@ -2,15 +2,15 @@ Description: Prevent override of dpkg-buildflags
Author: Afif Elghraoui <afif at debian.org>
Forwarded: no
Last-Update: 2016-10-23
---- pbdagcon.orig/src/cpp/makefile
-+++ pbdagcon/src/cpp/makefile
-@@ -8,8 +8,8 @@
+--- a/src/cpp/makefile
++++ b/src/cpp/makefile
+@@ -8,8 +8,8 @@ PBDAGCON_OBJECTS := BlasrM5AlnProvider.o
DAZCON_OBJECTS := DB.o QV.o align.o DazAlnProvider.o dazcon.o
CPPFLAGS += -MMD -MP
--CXXFLAGS = -O3 -std=c++11 -Wall -Wuninitialized -pedantic
+-CXXFLAGS = -O3 -std=c++14 -Wall -Wuninitialized -pedantic
-CFLAGS = -O3 -Wall -Wextra -fno-strict-aliasing
-+CXXFLAGS += -O3 -std=c++11 -Wall -Wuninitialized -pedantic
++CXXFLAGS += -O3 -std=c++14 -Wall -Wuninitialized -pedantic
+CFLAGS += -O3 -Wall -Wextra -fno-strict-aliasing
INCDIRS := \
=====================================
debian/patches/series
=====================================
@@ -1 +1,2 @@
compiler-flags.patch
+2to3.patch
=====================================
src/cpp/DazAlnProvider.cpp
=====================================
@@ -222,7 +222,7 @@ int TargetHit::aend() {
// daligner structures.
Target::Target(): needsFree_(false) { }
-Target::Target(HITS_DB& db, int tspace, int small) :
+Target::Target(DAZZ_DB& db, int tspace, int small) :
db_(db),
tspace_(tspace),
small_(small),
=====================================
src/cpp/DazAlnProvider.hpp
=====================================
@@ -94,7 +94,7 @@ std::ostream& operator<<(std::ostream& ostrm, TargetHit& hit);
class Target {
public:
Target();
- Target(HITS_DB& db, int tspace, int small);
+ Target(DAZZ_DB& db, int tspace, int small);
~Target();
// Initializes this target based on the given record, possibly scoring as
@@ -119,7 +119,7 @@ public:
std::vector<TargetHit> hits;
private:
- HITS_DB db_;
+ DAZZ_DB db_;
char* abuffer_, *bbuffer_;
Work_Data* work_;
int tspace_, small_;
@@ -158,7 +158,7 @@ private:
const ProgramOpts popts_;
// Dazzler-related data
- HITS_DB db_;
+ DAZZ_DB db_;
int64 novl_, covl_;
int tbytes_;
FILE* input_;
=====================================
src/cpp/makefile
=====================================
@@ -8,7 +8,7 @@ PBDAGCON_OBJECTS := BlasrM5AlnProvider.o main.o SimpleAligner.o
DAZCON_OBJECTS := DB.o QV.o align.o DazAlnProvider.o dazcon.o
CPPFLAGS += -MMD -MP
-CXXFLAGS = -O3 -std=c++11 -Wall -Wuninitialized -pedantic
+CXXFLAGS = -O3 -std=c++14 -Wall -Wuninitialized -pedantic
CFLAGS = -O3 -Wall -Wextra -fno-strict-aliasing
INCDIRS := \
@@ -18,8 +18,7 @@ INCDIRS := \
${LIBPBDATA_INCLUDE} \
${LIBPBIHDF_INCLUDE} \
${PBBAM_INCLUDE} \
- ${HDF5_INCLUDE} \
- ${HTSLIB_INCLUDE}
+ ${HDF5_INCLUDE}
SYS_INCDIRS := \
${BOOST_INCLUDE} \
${THISDIR}/third-party
@@ -29,7 +28,6 @@ LIBDIRS := \
${LIBPBIHDF_LIB} \
${PBBAM_LIB} \
${HDF5_LIB} \
- ${HTSLIB_LIB} \
${GCC_LIB} \
${ZLIB_LIB}
LDLIBS+= \
@@ -38,11 +36,11 @@ LDLIBS+= \
${LIBPBIHDF_LIBFLAGS} \
${PBBAM_LIBFLAGS} \
${HDF5_LIBFLAGS} \
- ${HTSLIB_LIBFLAGS} \
+ ${HTSLIB_LIBS} \
${ZLIB_LIBFLAGS} \
${PTHREAD_LIBFLAGS} \
${DL_LIBFLAGS}
-CPPFLAGS+=$(patsubst %,-I%,${INCDIRS}) $(patsubst %,-isystem%,${SYS_INCDIRS})
+CPPFLAGS+=$(patsubst %,-I%,${INCDIRS}) $(patsubst %,-isystem%,${SYS_INCDIRS}) $(HTSLIB_CFLAGS)
LDFLAGS+=$(patsubst %,-L %,${LIBDIRS})
LDFLAGS += ${EXTRA_LDFLAGS}
=====================================
src/filterm4.py
=====================================
@@ -3,6 +3,7 @@
# Filters for unique, highest scoring subread query/target pairs from an m4
# file. Helps get rid of chimeras, at the cost of some yield.
+from __future__ import print_function
import sys
from collections import namedtuple
@@ -36,7 +37,7 @@ def printUniq(qgroup, count):
for r in top.values():
count.filt += 1
- print r,
+ print(r, end=' ')
qgroup[:] = []
=====================================
src/m4topre.py
=====================================
@@ -2,6 +2,7 @@
"""Super-simple converter from blasr m4 alignments to pbdagcon 'pre'
alignments. For use in the pre-assembler dagcon workflow.
"""
+from __future__ import print_function
import sys
import heapq
@@ -201,8 +202,8 @@ def main(): # pylint: disable=R0914
else:
tseq = seqs[rec.tname].translate(__rc__)[::-1][tst:tnd]
- print ' '.join([rec.qname, rec.tname, strand,
- rec.tseqlength, str(tst), str(tnd), qseq, tseq])
+ print(' '.join([rec.qname, rec.tname, strand,
+ rec.tseqlength, str(tst), str(tnd), qseq, tseq]))
if __name__ == '__main__':
sys.exit(main())
=====================================
src/q-sense.py
=====================================
@@ -1,4 +1,5 @@
#!/usr/bin/env python
+from __future__ import print_function
import sys
import os
import logging
@@ -85,8 +86,8 @@ class Consensus(PBMultiToolRunner):
prefix = ".".join(prefix)
full_prefix = os.path.join(self.args.out_dir_name, prefix)
with open("%s_ref.fa" % full_prefix, "w") as f:
- print >>f ,">%s_ref" % self.args.consensus_seq_name
- print >>f, s
+ print(">%s_ref" % self.args.consensus_seq_name, file=f)
+ print(s, file=f)
hp_corr = True if self.args.enable_hp_corr else False
mark_lower_case = True if self.args.mark_lower_case else False
generate_consensus(input_fasta_name, "%s_ref.fa" % full_prefix, full_prefix, self.args.consensus_seq_name,
=====================================
test/cpp/makefile
=====================================
@@ -15,7 +15,6 @@ INCDIRS := . \
${LIBPBIHDF_INCLUDE} \
${PBBAM_INCLUDE} \
${HDF5_INCLUDE} \
- ${HTSLIB_INCLUDE} \
${BOOST_INCLUDE} \
${GTEST_INCLUDE} \
third-party
@@ -26,7 +25,6 @@ LIBDIRS := \
${LIBPBIHDF_LIB} \
${PBBAM_LIB} \
${HDF5_LIB} \
- ${HTSLIB_LIB} \
${GCC_LIB} \
${ZLIB_LIB}
@@ -37,11 +35,11 @@ LDLIBS+= \
${LIBPBIHDF_LIBFLAGS} \
${PBBAM_LIBFLAGS} \
${HDF5_LIBFLAGS} \
- ${HTSLIB_LIBFLAGS} \
+ ${HTSLIB_LIBS} \
${ZLIB_LIBFLAGS} \
${PTHREAD_LIBFLAGS} \
${DL_LIBFLAGS}
-CPPFLAGS+=$(patsubst %,-I%,${INCDIRS})
+CPPFLAGS+=$(patsubst %,-I%,${INCDIRS}) $(HTSLIB_CFLAGS)
LDFLAGS+=$(patsubst %,-L %,${LIBDIRS})
# For fused-src, gtest-all.cc includes gtest/*.
@@ -50,7 +48,7 @@ LDFLAGS+=$(patsubst %,-L %,${LIBDIRS})
GTEST_CPPFLAGS += -isystem $(GTEST_INCLUDE) -isystem ${GTEST_SRC}/..
GTEST_CXXFLAGS += -g -Wall -Wextra -pthread
-CXXFLAGS := -O3 -std=c++11
+CXXFLAGS := -O3 -std=c++14
GTEST_OBJECTS := gtest-all.o gtest_main.o
DAZCON_OBJECTS := DB.o QV.o align.o Alignment.o \
View it on GitLab: https://salsa.debian.org/med-team/pbdagcon/compare/83dbb7eea91693f73f0432b5052919075bff6a3e...21b29a28aa525fdeb46918c40817e4190ca9cbbd