[med-svn] [Git][med-team/falcon][master] networkx build error fixed, nim updated, Python 3 almost transitioned

Steffen Möller gitlab at salsa.debian.org
Thu Aug 27 17:49:34 BST 2020



Steffen Möller pushed to branch master at Debian Med / falcon


Commits:
3492618a by Steffen Moeller at 2020-08-27T18:47:23+02:00
networkx build error fixed, nim updated, Python 3 almost transitioned

Pending is the Python3-compatible C module declaration, along the lines of
https://stackoverflow.com/questions/28305731/compiler-cant-find-py-initmodule-is-it-deprecated-and-if-so-what-should-i

- - - - -


11 changed files:

- + debian/TODO
- debian/changelog
- debian/control
- + debian/patches/isNilError.patch
- + debian/patches/newNimPathSep.patch
- + debian/patches/nimTypeMismatch.patch
- + debian/patches/nimUndeclearedIdentifier.patch
- + debian/patches/nimundeclaredWordWrap.patch
- + debian/patches/python2to3.patch
- debian/patches/series
- debian/rules


Changes:

=====================================
debian/TODO
=====================================
@@ -0,0 +1,6 @@
+TODO
+====
+
+ * fix mkdir ../home in d/rules
+
+ * Needed help in nim-falcon/repos/cligen/argcvt.nim:66


=====================================
debian/changelog
=====================================
@@ -11,12 +11,16 @@ falcon (2.1.4-1) UNRELEASED; urgency=medium
   * Versioned Build-Depends: python-future (>= 0.16.0)
   * Build-Depends: python-msgpack
   
-  TODO: Test fails with
-    AttributeError: 'DiGraph' object has no attribute 'successors_iter'
-
   [ Jelmer Vernooij ]
   * Use secure copyright file specification URI.
 
+  [ Steffen Moeller ]
+  * Created d/TODO
+  * Rules-Requires-Root: no
+  * Bumped dependency python3-networkx (>= 2.5) because of previous
+    AttributeError: 'DiGraph' object has no attribute 'successors_iter'
+  * Introduced fixme to help function in nim-falcon/repos/cligen/argcvt.nim - please help
+
  -- Andreas Tille <tille at debian.org>  Tue, 07 Aug 2018 14:29:11 +0200
 
 falcon (1.8.8-1) unstable; urgency=medium


=====================================
debian/control
=====================================
@@ -1,15 +1,19 @@
 Source: falcon
 Maintainer: Debian Med Packaging Team <debian-med-packaging at lists.alioth.debian.org>
+Uploaders: Steffen Moeller <moeller at debian.org>
 Section: science
 Priority: optional
-Build-Depends: debhelper-compat (= 12),
+Build-Depends: debhelper-compat (= 13),
                dh-python,
+               cython3,
                nim,
                python3-all-dev,
                python3-future (>= 0.16.0),
                python3-msgpack,
-               python3-networkx (>= 1.7),
+               python3-networkx (>= 2.5),
                python3-nose,
+               python3-numpy,
+               python3-h5py,
                python3-pytest,
                python3-setuptools,
                rsync
@@ -18,6 +22,7 @@ Vcs-Browser: https://salsa.debian.org/med-team/falcon
 Vcs-Git: https://salsa.debian.org/med-team/falcon.git
 Homepage: https://github.com/PacificBiosciences/FALCON
 Testsuite: autopkgtest-pkg-python
+Rules-Requires-Root: no
 
 Package: falcon
 Architecture: alpha amd64 arm64 hppa kfreebsd-amd64 mips64el ppc64 ppc64el s390x sparc64


=====================================
debian/patches/isNilError.patch
=====================================
@@ -0,0 +1,241 @@
+Author: Steffen Moeller <moeller at debian.org>
+Description: Attempt to fix
+   nim-falcon/repos/msgpack4nim/msgpack4nim.nim(470, 6) Error: usage of 'isNil' is an {.error.} defined at /usr/lib/nim/system.nim(1519, 1
+   nim-falcon/src/falcon/rr_hctg_track.nim(303, 33) Error: usage of 'isNil' is an {.error.} defined at /usr/lib/nim/system.nim(1519, 1)
+   nim-falcon/src/falcon/rr_hctg_track.nim(362, 25) Error: 'nil' is now invalid for 'string'; compile with --nilseqs:on for a migration period; usage of '==' is an {.error.} defined at /usr/lib/nim/system.nim(2873, 5) 
+   nim-falcon/src/falcon/rr_hctg_track.nim(368, 33) Error: 'nil' is now invalid for 'string'; compile with --nilseqs:on for a migration period; usage of '==' is an {.error.} defined at /usr/lib/nim/system.nim(2873, 5)
+   nim-falcon/repos/msgpack4nim/msgpack4nim.nim(971, 11) Error: type mismatch: got <typeof(nil)> but expected 'seq[mytuple]'
+   nim-falcon/repos/cligen/parseopt3.nim(84, 46) Error: type mismatch: got <typeof(nil)> but expected 'seq[string]'
+   nim-falcon/repos/cligen/parseopt3.nim(104, 14) Error: type mismatch: got <seq[string], typeof(nil)>
+   nim-falcon/repos/cligen/cligen.nim(74, 19) Error: 'nil' is now invalid for 'string'; compile with --nilseqs:on for a migration period; usage of '==' is an {.error.} defined at /usr/lib/nim/system.nim(2873, 5)
+ with the insight that strings are no longer nil but of length 0 at worst and
+   nim-falcon/repos/msgpack4nim/msgpack4nim.nim(542, 33) Error: undeclared identifier: 'SomeReal'
+   nim-falcon/repos/msgpack4nim/msgpack4nim.nim(775, 39) Error: undeclared identifier: 'SomeReal'
+ with a pointer to https://github.com/nim-lang/Nim/pull/8239
+Index: falcon/nim-falcon/repos/msgpack4nim/msgpack4nim.nim
+===================================================================
+--- falcon.orig/nim-falcon/repos/msgpack4nim/msgpack4nim.nim
++++ falcon/nim-falcon/repos/msgpack4nim/msgpack4nim.nim
+@@ -467,7 +467,8 @@ proc pack_type*(s: Stream, val: char) =
+   s.pack_imp_uint8(uint8(val))
+ 
+ proc pack_type*(s: Stream, val: string) =
+-  if isNil(val): s.pack_imp_nil()
++  if ""==val:
++    s.pack_imp_nil()
+   else:
+     s.pack_string(val.len)
+     s.write(val)
+@@ -538,7 +539,7 @@ proc pack_type*(s: Stream, val: float32)
+ proc pack_type*(s: Stream, val: float64) =
+   s.pack_imp_float64(val)
+ 
+-proc pack_type*(s: Stream, val: SomeReal) =
++proc pack_type*(s: Stream, val: SomeFloat) =
+   when sizeof(val) == sizeof(float32):
+     s.pack_imp_float32(float32(val))
+   elif sizeof(val) == sizeof(float64):
+@@ -632,7 +633,8 @@ proc pack_type*[T](s: Stream, val: opena
+   for i in 0..val.len-1: s.pack_type undistinct(val[i])
+ 
+ proc pack_type*[T](s: Stream, val: seq[T]) =
+-  if isNil(val): s.pack_imp_nil()
++  if 0==val.len:
++    s.pack_imp_nil()
+   else:
+     s.pack_array(val.len)
+     for i in 0..val.len-1: s.pack_type undistinct(val[i])
+@@ -696,7 +698,7 @@ proc unpack_string*(s: Stream): int =
+ proc unpack_type*(s: Stream, val: var string) =
+   let pos = s.getPosition()
+   if s.readChar == pack_value_nil:
+-    val = nil
++    val = ""
+     return
+ 
+   s.setPosition(pos)
+@@ -771,7 +773,7 @@ proc unpack_type*(s: Stream, val: var fl
+ proc unpack_type*(s: Stream, val: var float64) =
+   val = s.unpack_imp_float64()
+ 
+-proc unpack_type*(s: Stream, val: var SomeReal) =
++proc unpack_type*(s: Stream, val: var SomeFloat) =
+   when sizeof(val) == sizeof(float32):
+     result = float32(s.unpack_imp_float32())
+   elif sizeof(val) == sizeof(float64):
+@@ -966,7 +968,7 @@ proc unpack_type*[T](s: Stream, val: var
+ proc unpack_type*[T](s: Stream, val: var seq[T]) =
+   let pos = s.getPosition()
+   if s.readChar == pack_value_nil:
+-    val = []
++    val = newSeq[T](0)
+     return
+ 
+   s.setPosition(pos)
+Index: falcon/nim-falcon/src/falcon/rr_hctg_track.nim
+===================================================================
+--- falcon.orig/nim-falcon/src/falcon/rr_hctg_track.nim
++++ falcon/nim-falcon/src/falcon/rr_hctg_track.nim
+@@ -300,7 +300,7 @@ proc stream_get_rid_to_phase(rawread_ids
+     # We could save memory in the table by using "ref Phase", but
+     # then the Phases would be reference-counted. Not a good trade-off.
+     # And the real memory problem comes from the threads/sub-procs.
+-    if rid_to_phase[rid].ctg_id.is_nil:
++    if 0==rid_to_phase[rid].ctg_id.len:
+       nout.inc
+     else:
+       nin.inc
+@@ -359,13 +359,13 @@ proc tr_stage1(la4falcon_stream: streams
+     #log(" t_id_int:", t_id_int, ", len(r2p):", len(global_rid_to_phase))
+     if t_id_int < len(global_rid_to_phase[]):
+       let t_phase = global_rid_to_phase[t_id_int]
+-      if t_phase.ctg_id != nil:
++      if "" != t_phase.ctg_id:
+         if t_phase.blockn != -1:
+             let q_id_int = strutils.parseInt(q_id)
+             #log("  q_id_int:", q_id_int)
+             if q_id_int < len(global_rid_to_phase[]):
+               let q_phase = global_rid_to_phase[q_id_int]
+-              if q_phase.ctg_id != nil:
++              if 0 < q_phase.ctg_id.len:
+                 #log("   t_phase:", t_phase)
+                 #log("   q_phase:", q_phase)
+                 if (q_phase.ctg_id == t_phase.ctg_id and q_phase.blockn == t_phase.blockn and
+Index: falcon/nim-falcon/repos/cligen/parseopt3.nim
+===================================================================
+--- falcon.orig/nim-falcon/repos/cligen/parseopt3.nim
++++ falcon/nim-falcon/repos/cligen/parseopt3.nim
+@@ -81,7 +81,7 @@ type
+ 
+ proc initOptParser*(cmdline: seq[string] = commandLineParams(),
+                     shortNoVal: set[char] = {},
+-                    longNoVal: seq[string] = nil,
++                    longNoVal: seq[string] = newSeq[string](0),
+                     requireSeparator=false,  # true imitates old parseopt2
+                     sepChars: string= "=:",
+                     stopWords: seq[string] = @[]): OptParser =
+@@ -101,7 +101,7 @@ proc initOptParser*(cmdline: seq[string]
+   ##
+   ## Parameters following either "--" or any literal parameter in stopWords are
+   ## never interpreted as options.
+-  if cmdline == nil:
++  if 0 == cmdline.len:
+     result.cmd = commandLineParams()
+     return
+   result.cmd = cmdline
+@@ -117,14 +117,14 @@ proc initOptParser*(cmdline: seq[string]
+ proc initOptParser*(cmdline: string): OptParser =
+   ## Initializes option parses with cmdline.  Splits cmdline in on spaces and
+   ## calls initOptParser(openarray[string]).  Should use a proper tokenizer.
+-  if cmdline == "": # backward compatibility
+-    return initOptParser(seq[string](nil))
++  if 0==cmdline.len: # backward compatibility
++    return initOptParser(newSeq[string](0))
+   else:
+     return initOptParser(cmdline.split)
+ 
+ proc do_short(p: var OptParser) =
+   p.kind = cmdShortOption
+-  p.val = nil
++  p.val = ""
+   p.key = p.moreShort[0..0]             # shift off first char as key
+   p.moreShort = p.moreShort[1..^1]
+   if p.moreShort.len == 0:              # param exhausted; advance param
+@@ -149,22 +149,22 @@ proc do_short(p: var OptParser) =
+ 
+ proc do_long(p: var OptParser) =
+   p.kind = cmdLongOption
+-  p.val = nil
++  p.val = ""
+   let param = p.cmd[p.pos]
+   p.pos += 1                            # always consume at least 1 param
+   let sep = find(param, p.sepChars)     # only very first occurrence of delim
+   if sep == 2:
+     echo "Empty long option key at param", p.pos - 1, " (\"", param, "\")"
+-    p.key = nil
++    p.key = ""
+     return
+   if sep > 2:
+     p.key = param[2 .. sep-1]
+     p.val = param[sep+1..^1]
+-    if p.longNoVal != nil and p.key in p.longNoVal:
++    if p.longNoVal.len>0 and p.key in p.longNoVal:
+       echo "Warning option `", p.key, "` does not expect an argument"
+     return
+   p.key = param[2..^1]                  # no sep; key is whole param past --
+-  if p.longNoVal != nil and p.key in p.longNoVal:
++  if p.longNoVal.len>0 and p.key in p.longNoVal:
+     return                              # No argument; done
+   if p.requireSep:
+     echo "Expecting option key-val separator :|= after `", p.key, "`"
+@@ -172,7 +172,7 @@ proc do_long(p: var OptParser) =
+   if p.pos < p.cmd.len:                 # Take opt arg from next param
+     p.val = p.cmd[p.pos]
+     p.pos += 1
+-  elif p.longNoVal != nil:
++  elif p.longNoVal.len>0:
+     echo "argument expected for option `", p.key, "` at end of params"
+ 
+ proc next*(p: var OptParser) =
+@@ -185,7 +185,7 @@ proc next*(p: var OptParser) =
+   if not p.cmd[p.pos].startsWith("-") or p.optsDone:  #Step3: non-option param
+     p.kind = cmdArgument
+     p.key = p.cmd[p.pos]
+-    p.val = nil
++    p.val = ""
+     if p.cmd[p.pos] in p.stopWords:     #Step4: check for stop word
+       p.optsDone = true                 # should only hit Step3 henceforth
+     p.pos += 1
+@@ -201,7 +201,7 @@ proc next*(p: var OptParser) =
+     if p.cmd[p.pos].len == 1:           #Step6a: simply "-" => non-option param
+       p.kind = cmdArgument              #  {"-" often used to indicate "stdin"}
+       p.key = p.cmd[p.pos]
+-      p.val = nil
++      p.val = ""
+       p.pos += 1
+     else:                               #Step6b: maybe a block of short options
+       p.moreShort = p.cmd[p.pos][1..^1] # slice out the initial "-"
+@@ -268,7 +268,7 @@ iterator getopt*(p: var OptParser): Geto
+ 
+ when declared(paramCount):
+   iterator getopt*(cmdline=commandLineParams(), shortNoVal: set[char] = {},
+-                   longNoVal: seq[string] = nil, requireSeparator=false,
++                   longNoVal: seq[string] = newSeq[string](0), requireSeparator=false,
+                    sepChars="=:", stopWords: seq[string] = @[]): GetoptResult =
+     ## This is an convenience iterator for iterating over the command line.
+     ## Parameters here are the same as for initOptParser.  Example:
+Index: falcon/nim-falcon/repos/cligen/cligen.nim
+===================================================================
+--- falcon.orig/nim-falcon/repos/cligen/cligen.nim
++++ falcon/nim-falcon/repos/cligen/cligen.nim
+@@ -71,7 +71,7 @@ proc collectComments(buf: var string, n:
+     for kid in n: collectComments(buf, kid, depth + 1)
+   else:
+     if n.kind == nnkCommentStmt and depth < 4:
+-      if n.strVal != nil:
++      if n.strVal.len > 0:
+         buf.add(" ")
+         buf.add(n.strVal)
+ 
+@@ -168,7 +168,7 @@ macro dispatchGen*(pro: typed, cmdName:
+   let impl = pro.symbol.getImpl
+   let fpars = formalParams(impl, toStrSeq(suppress))
+   var cmtDoc: string = $doc
+-  if cmtDoc == nil or cmtDoc.len == 0:  # allow caller to override commentDoc
++  if cmtDoc.len == 0:  # allow caller to override commentDoc
+     collectComments(cmtDoc, impl)
+     cmtDoc = strip(cmtDoc)
+   let proNm = $pro                      # Name of wrapped proc
+@@ -300,10 +300,10 @@ macro dispatchGen*(pro: typed, cmdName:
+   proc callParser(): NimNode =
+     result = quote do:
+       var exitCode = 0
+-      if `argPreP` != nil and len(`argPreP`) > 0:
++      if len(`argPreP`) > 0:
+         exitCode += parser(`argPreP`)
+       exitCode += parser()
+-      if `argPostP` != nil and len(`argPostP`) > 0:
++      if len(`argPostP`) > 0:
+         exitCode += parser(`argPostP`)
+       if exitCode != 0:
+         return exitCode


=====================================
debian/patches/newNimPathSep.patch
=====================================
@@ -0,0 +1,25 @@
+Index: falcon/nim-falcon/src/fc_rr_hctg_track2.nim
+===================================================================
+--- falcon.orig/nim-falcon/src/fc_rr_hctg_track2.nim
++++ falcon/nim-falcon/src/fc_rr_hctg_track2.nim
+@@ -1,6 +1,6 @@
+ # vim: sw=2 ts=2 sts=2 tw=80 et:
+ #import nimprof
+-from falcon.rr_hctg_track import nil
++from falcon/rr_hctg_track import nil
+ 
+ 
+ when isMainModule:
+Index: falcon/nim-falcon/repos/msgpack4nim/msgpack4nim.nim
+===================================================================
+--- falcon.orig/nim-falcon/repos/msgpack4nim/msgpack4nim.nim
++++ falcon/nim-falcon/repos/msgpack4nim/msgpack4nim.nim
+@@ -966,7 +966,7 @@ proc unpack_type*[T](s: Stream, val: var
+ proc unpack_type*[T](s: Stream, val: var seq[T]) =
+   let pos = s.getPosition()
+   if s.readChar == pack_value_nil:
+-    val = nil
++    val = []
+     return
+ 
+   s.setPosition(pos)


=====================================
debian/patches/nimTypeMismatch.patch
=====================================
@@ -0,0 +1,35 @@
+Author: Steffen Moeller
+Description: Attempt to fix errors along the line of
+ nim-falcon/src/fc_consensus.nim(193, 41) Error: type mismatch: got <int>
+Index: falcon/nim-falcon/src/falcon/consensus/falcon.nim
+===================================================================
+--- falcon.orig/nim-falcon/src/falcon/consensus/falcon.nim
++++ falcon/nim-falcon/src/falcon/consensus/falcon.nim
+@@ -98,7 +98,7 @@ type
+     score*: cdouble
+ 
+   msa_base_group_t* = object
+-    base*: array[0 .. <MSA_BASE_GROUP_LEN, align_tag_col_t] # array of length 5
++    base*: array[0 .. MSA_BASE_GROUP_LEN, align_tag_col_t] # array of length 5
+ 
+   msa_delta_group_t* = object
+     # TODO(CD): size and max_delta may be redundant now
+Index: falcon/nim-falcon/src/fc_consensus.nim
+===================================================================
+--- falcon.orig/nim-falcon/src/fc_consensus.nim
++++ falcon/nim-falcon/src/fc_consensus.nim
+@@ -190,12 +190,12 @@ proc format_seq(sequ: string, col: int):
+   var bo = 0
+   var bn = 0
+   while (bo+col) < len(sequ):
+-    result[bn .. <(bn+col)] = sequ[bo .. <(bo+col)]
++    result[bn .. (bn+col)] = sequ[bo .. (bo+col)]
+     result[(bn+col)] = '\l'
+     bo += col
+     bn += col + 1
+   var tail = len(sequ) - bo
+-  result[bn .. <(bn+tail)] = sequ[bo .. <(bo+tail)]
++  result[bn .. (bn+tail)] = sequ[bo .. (bo+tail)]
+   result.setLen(bn+tail)
+   #result[(bn+tail)] = '\l' # Python did not add final newline
+ proc write_seq(cns_seq: string, seed_id: string, seq_i: var int): bool =


=====================================
debian/patches/nimUndeclearedIdentifier.patch
=====================================
@@ -0,0 +1,16 @@
+Author: Steffen Moeller <moeller at debian.org>
+Description: Attempt to address
+   nim-falcon/repos/nim-heap/binaryheap.nim(48, 53) Error: undeclared identifier: 'expr'
+Index: falcon/nim-falcon/repos/nim-heap/binaryheap.nim
+===================================================================
+--- falcon.orig/nim-falcon/repos/nim-heap/binaryheap.nim
++++ falcon/nim-falcon/repos/nim-heap/binaryheap.nim
+@@ -45,7 +45,7 @@ proc propFulfilled[T](h: Heap[T], indPar
+   h.comp(h.data[indParent], h.data[indChild]) <= 0
+ 
+ 
+-template assertHeapProperty[T](h: Heap[T], enabled: expr = true) =
++template assertHeapProperty[T](h: Heap[T], enabled: bool) =
+   ## only for debugging: explicit check if the heap property
+   ## is fulfilled for all nodes
+   when enabled:


=====================================
debian/patches/nimundeclaredWordWrap.patch
=====================================
@@ -0,0 +1,217 @@
+Author: Steffen Moeller <moeller at debian.org>
+Description:
+ nim-falcon/repos/cligen/argcvt.nim(66, 21) Error: index out of bounds, the container is empty""[0]
+ and nim changed wordwrap to wrapWords (https://github.com/nim-lang/Nim/pull/9649/)
+Index: falcon/nim-falcon/repos/cligen/argcvt.nim
+===================================================================
+--- falcon.orig/nim-falcon/repos/cligen/argcvt.nim
++++ falcon/nim-falcon/repos/cligen/argcvt.nim
+@@ -1,6 +1,7 @@
+ from parseutils import parseInt, parseFloat
+-from strutils   import `%`, join, split, wordWrap, repeat, escape, strip
++from strutils   import `%`, join, split, repeat, escape, strip
+ from terminal   import terminalWidth
++from std/wordwrap   import wrapWords
+ 
+ proc postInc*(x: var int): int =
+   ## Similar to post-fix `++` in C languages: yield initial val, then increment
+@@ -40,7 +41,7 @@ proc alignTable*(tab: seq[array[0..3, st
+   for row in tab:
+     for c in cols[0 .. ^2]:
+       result &= row[c] & repeat(" ", wCol[c] - row[c].len + colGap)
+-    var wrapped = wordWrap(row[last], maxLineWidth = wCol[last]).split("\n")
++    var wrapped = wrapWords(row[last], maxLineWidth = wCol[last]).split("\n")
+     result &= (if wrapped.len > 0: wrapped[0] else: "") & "\n"
+     for j in 1 ..< len(wrapped):
+       result &= repeat(" ", leader) & wrapped[j] & "\n"
+@@ -62,12 +63,14 @@ template argParse*(dst: bool, key: strin
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: bool,
+                   parNm: string, sh: string, parHelp: string) =
+   helpT.add([ keys(parNm, sh, argSep=""), "toggle", $defVal, parHelp ])
+-  shortNoVal.incl(sh[0])            # bool must elide option arguments.
++
++  ## FIXME
++  ##if sh.len>0: shortNoVal.incl(sh[0])            # bool must elide option arguments.
+   longNoVal.add(parNm)              # So, add to *NoVal.
+ 
+ # string
+ template argParse*(dst: string, key: string, val: string, help: string) =
+-  if val == nil:
++  if 0==val.len:
+     argRet(1, "Bad value nil for string param \"$1\"\n$2" % [ key, help ])
+   dst = val
+ 
+@@ -77,7 +80,7 @@ template argHelp*(helpT: seq[array[0..3,
+ 
+ # cstring
+ template argParse*(dst: cstring, key: string, val: string, help: string) =
+-  if val == nil:
++  if 0==val.len:
+     argRet(1, "Bad value nil for string param \"$1\"\n$2" % [ key, help ])
+   dst = val
+ 
+@@ -87,7 +90,7 @@ template argHelp*(helpT: seq[array[0..3,
+ 
+ # char
+ template argParse*(dst: char, key: string, val: string, help: string) =
+-  if val == nil or len(val) > 1:
++  if 0==val.len or len(val) > 1:
+     argRet(1, "Bad value nil/multi-char for char param \"$1\"\n$2" %
+            [ key , help ])
+   dst = val[0]
+@@ -98,9 +101,9 @@ template argHelp*(helpT: seq[array[0..3,
+ 
+ # int
+ template argParse*(dst: int, key: string, val: string, help: string) =
+-  if val == nil or parseInt(strip(val), dst) == 0:
++  if 0==val.len or parseInt(strip(val), dst) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting int\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "" else: val), key, help ])
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: int,
+                   parNm: string, sh: string, parHelp: string) =
+@@ -109,9 +112,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # int8
+ template argParse*(dst: int8, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting int8\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = int8(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: int8,
+@@ -121,9 +124,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # int16
+ template argParse*(dst: int16, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting int16\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = int16(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: int16,
+@@ -133,9 +136,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # int32
+ template argParse*(dst: int32, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting int32\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = int32(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: int32,
+@@ -145,9 +148,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # int64
+ template argParse*(dst: int64, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting int64\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = tmp
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: int64,
+@@ -157,9 +160,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # uint
+ template argParse*(dst: uint, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting uint\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = uint(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: uint,
+@@ -169,9 +172,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # uint8
+ template argParse*(dst: uint8, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting uint8\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = uint8(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: uint8,
+@@ -181,9 +184,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # uint16
+ template argParse*(dst: uint16, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting uint16\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = uint16(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: uint16,
+@@ -193,9 +196,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # uint32
+ template argParse*(dst: uint32, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting uint32\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = uint32(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: uint32,
+@@ -205,9 +208,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # uint64
+ template argParse*(dst: uint64, key: string, val: string, help: string) =
+   var tmp: int
+-  if val == nil or parseInt(strip(val), tmp) == 0:
++  if 0==val.len or parseInt(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting uint64\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = uint64(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: uint64,
+@@ -216,9 +219,9 @@ template argHelp*(helpT: seq[array[0..3,
+ 
+ # float
+ template argParse*(dst: float, key: string, val: string, help: string) =
+-  if val == nil or parseFloat(strip(val), dst) == 0:
++  if 0==val.len or parseFloat(strip(val), dst) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting float\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: float,
+                   parNm: string, sh: string, parHelp: string) =
+@@ -227,9 +230,9 @@ template argHelp*(helpT: seq[array[0..3,
+ # float32
+ template argParse*(dst: float32, key: string, val: string, help: string) =
+   var tmp: float
+-  if val == nil or parseFloat(strip(val), tmp) == 0:
++  if 0==val.len or parseFloat(strip(val), tmp) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting float32\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+   else: dst = float32(tmp)
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: float32,
+@@ -238,9 +241,9 @@ template argHelp*(helpT: seq[array[0..3,
+ 
+ # float64
+ template argParse*(dst: float64, key: string, val: string, help: string) =
+-  if val == nil or parseFloat(strip(val), dst) == 0:
++  if 0==val.len or parseFloat(strip(val), dst) == 0:
+     argRet(1, "Bad value: \"$1\" for option \"$2\"; expecting float64\n$3" %
+-           [ (if val == nil: "nil" else: val), key, help ])
++           [ (if 0==val.len: "nil" else: val), key, help ])
+ 
+ template argHelp*(helpT: seq[array[0..3, string]], defVal: float64,
+                   parNm: string, sh: string, parHelp: string) =


=====================================
debian/patches/python2to3.patch
=====================================
@@ -0,0 +1,762 @@
+Index: falcon/pypeFLOW/makefile
+===================================================================
+--- falcon.orig/pypeFLOW/makefile
++++ falcon/pypeFLOW/makefile
+@@ -1,16 +1,16 @@
+ WHEELHOUSE?=wheelhouse
+-PIP=pip wheel --wheel-dir ${WHEELHOUSE} --find-links ${WHEELHOUSE}
++PIP=pip3 wheel --wheel-dir ${WHEELHOUSE} --find-links ${WHEELHOUSE}
+ MY_TEST_FLAGS?=-v -s --durations=0
+ 
+ default:
+ pylint:
+ 	pylint --errors-only pypeflow/ pwatcher/
+ pytest:
+-	python -c 'import pypeflow; print pypeflow'
++	python3 -c 'import pypeflow; print(pypeflow)'
+ 	py.test ${MY_TEST_FLAGS} --junit-xml=nosetests.xml --doctest-modules pypeflow/ pwatcher/ test/
+ autopep8:
+ 	autopep8 --max-line-length=120 -ir -j0 pypeflow/ pwatcher/
+ wheel:
+-	which pip
++	which pip3
+ 	${PIP} --no-deps .
+ 	ls -larth ${WHEELHOUSE}
+Index: falcon/FALCON-examples/scripts/check.py
+===================================================================
+--- falcon.orig/FALCON-examples/scripts/check.py
++++ falcon/FALCON-examples/scripts/check.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ import sys, collections, pprint, itertools
+ def ranges(covered):
+     for c, g in itertools.groupby(covered.items(), lambda x: x[1]):
+Index: falcon/FALCON-examples/scripts/circ-sim.py
+===================================================================
+--- falcon.orig/FALCON-examples/scripts/circ-sim.py
++++ falcon/FALCON-examples/scripts/circ-sim.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ from __future__ import division
+ import sys, os, random
+ from random import choice
+Index: falcon/FALCON-examples/scripts/synther.py
+===================================================================
+--- falcon.orig/FALCON-examples/scripts/synther.py
++++ falcon/FALCON-examples/scripts/synther.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ from __future__ import division
+ import sys, os, random
+ from random import choice
+Index: falcon/FALCON-make/get-pip.py
+===================================================================
+--- falcon.orig/FALCON-make/get-pip.py
++++ falcon/FALCON-make/get-pip.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ #
+ # Hi There!
+ # You may be wondering what this giant blob of binary data here is, you might
+Index: falcon/FALCON/falcon_kit/bash.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/bash.py
++++ falcon/FALCON/falcon_kit/bash.py
+@@ -176,7 +176,7 @@ def script_build_rdb(config, input_fofn_
+     params = dict(config)
+     length_cutoff = params.get('length_cutoff')
+     if int(length_cutoff) < 0:
+-        bash_cutoff = '$(python2.7 -m falcon_kit.mains.calc_cutoff --coverage {} {} <(DBstats -b1 {}))'.format(
++        bash_cutoff = '$(python3 -m falcon_kit.mains.calc_cutoff --coverage {} {} <(DBstats -b1 {}))'.format(
+             params['seed_coverage'], params['genome_size'], 'raw_reads')
+     else:
+         bash_cutoff = '{}'.format(length_cutoff)
+@@ -225,7 +225,7 @@ def script_build_pdb(config, input_fofn_
+ 
+     params.update(locals())
+     script = """\
+-python -m falcon_kit.mains.copy_fofn --in={input_fofn_bfn} --out=preads.fofn --abs
++python3 -m falcon_kit.mains.copy_fofn --in={input_fofn_bfn} --out=preads.fofn --abs
+ rm -f preads.db .preads.* # in case of re-run
+ while read fn; do fasta2DB -v preads $fn; done < preads.fofn
+ DBsplit {ovlp_DBsplit_option} preads
+@@ -341,7 +341,7 @@ def script_run_consensus(config, db_fn,
+     # to create yet another task in pbsmrtpipe.
+     length_cutoff = params.get('length_cutoff')
+     if int(length_cutoff) < 0:
+-        bash_cutoff = '$(python2.7 -m falcon_kit.mains.calc_cutoff --coverage {} {} <(DBstats -b1 {}))'.format(
++        bash_cutoff = '$(python3 -m falcon_kit.mains.calc_cutoff --coverage {} {} <(DBstats -b1 {}))'.format(
+             params['seed_coverage'], params['genome_size'], db_fn)
+     else:
+         bash_cutoff = '{}'.format(length_cutoff)
+@@ -355,7 +355,7 @@ def script_run_consensus(config, db_fn,
+         LA4Falcon_flags += 'fo'
+     if LA4Falcon_flags:
+         LA4Falcon_flags = '-' + ''.join(set(LA4Falcon_flags))
+-    run_consensus = "LA4Falcon -H$CUTOFF %s {db_fn} {las_fn} | python -m falcon_kit.mains.consensus {falcon_sense_option} >| {out_file_bfn}" % LA4Falcon_flags
++    run_consensus = "LA4Falcon -H$CUTOFF %s {db_fn} {las_fn} | python3 -m falcon_kit.mains.consensus {falcon_sense_option} >| {out_file_bfn}" % LA4Falcon_flags
+ 
+     if config.get('dazcon', False):
+         run_consensus = """
+@@ -379,31 +379,31 @@ def script_run_falcon_asm(config, las_fo
+ # write preads.ovl:
+ 
+ # mobs uses binwrappers, so it does not see our "entry-points".
+-# So, after dropping "src/py_scripts/*.py", we can call these via python -m:
++# So, after dropping "src/py_scripts/*.py", we can call these via python3 -m:
+ 
+-time python -m falcon_kit.mains.ovlp_filter --db {db_file_fn} --las-fofn {las_fofn_fn} {overlap_filtering_setting} --min_len {length_cutoff_pr} --out-fn preads.ovl
++time python3 -m falcon_kit.mains.ovlp_filter --db {db_file_fn} --las-fofn {las_fofn_fn} {overlap_filtering_setting} --min_len {length_cutoff_pr} --out-fn preads.ovl
+ 
+ ln -sf {preads4falcon_fasta_fn} ./preads4falcon.fasta
+ 
+ # Given preads.ovl,
+ # write sg_edges_list, c_path, utg_data, ctg_paths.
+-time python -m falcon_kit.mains.ovlp_to_graph {fc_ovlp_to_graph_option} --overlap-file preads.ovl >| fc_ovlp_to_graph.log
++time python3 -m falcon_kit.mains.ovlp_to_graph {fc_ovlp_to_graph_option} --overlap-file preads.ovl >| fc_ovlp_to_graph.log
+ 
+ # Given sg_edges_list, utg_data, ctg_paths, preads4falcon.fasta,
+ # write p_ctg.fa and a_ctg_all.fa,
+ # plus a_ctg_base.fa, p_ctg_tiling_path, a_ctg_tiling_path, a_ctg_base_tiling_path:
+-time python -m falcon_kit.mains.graph_to_contig
++time python3 -m falcon_kit.mains.graph_to_contig
+ 
+ # Given a_ctg_all.fa, write a_ctg.fa:
+-time python -m falcon_kit.mains.dedup_a_tigs
++time python3 -m falcon_kit.mains.dedup_a_tigs
+ 
+ # Generate a GFA of all assembly graph edges. This GFA can contain
+ # edges and nodes which are not part of primary and associate contigs.
+-time python -m falcon_kit.mains.gen_gfa_v1 >| asm.gfa
++time python3 -m falcon_kit.mains.gen_gfa_v1 >| asm.gfa
+ 
+ # Generate a GFA of all assembly graph edges. This GFA can contain
+ # edges and nodes which are not part of primary and associate contigs.
+-time python -m falcon_kit.mains.gen_gfa_v1 --add-string-graph >| sg.gfa
++time python3 -m falcon_kit.mains.gen_gfa_v1 --add-string-graph >| sg.gfa
+ 
+ #rm -f ./preads4falcon.fasta
+ """
+@@ -414,6 +414,6 @@ def script_run_report_pre_assembly(i_raw
+     params = dict()
+     params.update(locals())
+     script = """\
+-python2.7 -m falcon_kit.mains.report_pre_assembly --genome-length {genome_length} --length-cutoff {length_cutoff} --db {i_raw_reads_db_fn} --preads-fofn {i_preads_fofn_fn} --out {o_json_fn}
++python3 -m falcon_kit.mains.report_pre_assembly --genome-length {genome_length} --length-cutoff {length_cutoff} --db {i_raw_reads_db_fn} --preads-fofn {i_preads_fofn_fn} --out {o_json_fn}
+ """
+     return script.format(**params)
+Index: falcon/FALCON/falcon_kit/mains/LAmerge.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/LAmerge.py
++++ falcon/FALCON/falcon_kit/mains/LAmerge.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ """Usage:
+ 
+     LAmerge.py DB <args>
+Index: falcon/FALCON/falcon_kit/mains/LAsort.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/LAsort.py
++++ falcon/FALCON/falcon_kit/mains/LAsort.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ """Usage:
+ 
+     LAsort.py DB <args>
+Index: falcon/FALCON/falcon_kit/mains/consensus_task.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/consensus_task.py
++++ falcon/FALCON/falcon_kit/mains/consensus_task.py
+@@ -62,7 +62,7 @@ def script_run_consensus(config, db_fn,
+         LA4Falcon_flags += 'fo'
+     if LA4Falcon_flags:
+         LA4Falcon_flags = '-' + ''.join(set(LA4Falcon_flags))
+-    run_consensus = "LA4Falcon -H$CUTOFF %s {db_fn} {las_fn} | python -m falcon_kit.mains.consensus {falcon_sense_option} >| {out_file_bfn}" % LA4Falcon_flags
++    run_consensus = "LA4Falcon -H$CUTOFF %s {db_fn} {las_fn} | python3 -m falcon_kit.mains.consensus {falcon_sense_option} >| {out_file_bfn}" % LA4Falcon_flags
+ 
+     if config.get('dazcon', False):
+         run_consensus = """
+Index: falcon/FALCON/falcon_kit/mains/copy_mapped.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/copy_mapped.py
++++ falcon/FALCON/falcon_kit/mains/copy_mapped.py
+@@ -1,4 +1,4 @@
+-#!/bin/env python2.7
++#!/usr/bin/python3
+ from __future__ import absolute_import
+ 
+ 
+Index: falcon/FALCON/falcon_kit/mains/fasta2fasta.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/fasta2fasta.py
++++ falcon/FALCON/falcon_kit/mains/fasta2fasta.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ """A pre-processor for DAZZ_DB/fasta2DB.
+ 
+ Since fasta2DB has several constraints
+Index: falcon/FALCON/falcon_kit/mains/hgap4_adapt.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/hgap4_adapt.py
++++ falcon/FALCON/falcon_kit/mains/hgap4_adapt.py
+@@ -430,7 +430,7 @@ def get_parser():
+ Typically:
+     mkdir mydir/
+     cd mydir/
+-    python -m falcon_kit.mains.hgap4_adapt --job-output-dir=../job_output/
++    python3 -m falcon_kit.mains.hgap4_adapt --job-output-dir=../job_output/
+ 
+     fc_run fc_run.cfg          -- (A)
+     fc_unzip.py fc_unzip.cfg   -- (B)
+Index: falcon/FALCON/falcon_kit/mains/report_pre_assembly.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/report_pre_assembly.py
++++ falcon/FALCON/falcon_kit/mains/report_pre_assembly.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ from __future__ import absolute_import
+ 
+ from .. import stats_preassembly
+Index: falcon/FALCON/falcon_kit/mains/symlink_mapped.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/mains/symlink_mapped.py
++++ falcon/FALCON/falcon_kit/mains/symlink_mapped.py
+@@ -1,4 +1,4 @@
+-#!/bin/env python2.7
++#!/usr/bin/python3
+ from __future__ import absolute_import
+ 
+ 
+Index: falcon/FALCON/falcon_kit/pype.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/pype.py
++++ falcon/FALCON/falcon_kit/pype.py
+@@ -13,17 +13,17 @@ from . import io
+ LOG = logging.getLogger(__name__)
+ 
+ TASK_GENERIC_RUN_UNITS_SCRIPT = """\
+-python -m falcon_kit.mains.generic_run_units_of_work --nproc={params.pypeflow_nproc} --units-of-work-fn={input.units_of_work} --bash-template-fn={input.bash_template} --results-fn={output.results}
++python3 -m falcon_kit.mains.generic_run_units_of_work --nproc={params.pypeflow_nproc} --units-of-work-fn={input.units_of_work} --bash-template-fn={input.bash_template} --results-fn={output.results}
+ """
+ TASK_GENERIC_SCATTER_ONE_UOW_SCRIPT = """\
+-python -m falcon_kit.mains.generic_scatter_one_uow --all-uow-list-fn={input.all} --one-uow-list-fn={output.one} --split-idx={params.split_idx}
++python3 -m falcon_kit.mains.generic_scatter_one_uow --all-uow-list-fn={input.all} --one-uow-list-fn={output.one} --split-idx={params.split_idx}
+ """
+ TASK_GENERIC_UNSPLIT_SCRIPT = """
+-python -m falcon_kit.mains.generic_unsplit --result-fn-list-fn={output.result_fn_list} --gathered-fn={output.gathered}
++python3 -m falcon_kit.mains.generic_unsplit --result-fn-list-fn={output.result_fn_list} --gathered-fn={output.gathered}
+ """
+ #TASK_GENERIC_CHUNKING_SCRIPT = """\
+ # This is done via pbtag now, I think.
+-#python -m falcon_kit.mains.generic_chunking split-fn={input.split} --bash-template-temp-fn={input.bash_template_temp} --units-of-work-fn={output.units_of_work} --uow-template-fn={output.uow_template} --split-idx={params.split_idx}
++#python3 -m falcon_kit.mains.generic_chunking split-fn={input.split} --bash-template-temp-fn={input.bash_template_temp} --units-of-work-fn={output.units_of_work} --uow-template-fn={output.uow_template} --split-idx={params.split_idx}
+ #"""
+ 
+ 
+Index: falcon/FALCON/falcon_kit/pype_tasks.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/pype_tasks.py
++++ falcon/FALCON/falcon_kit/pype_tasks.py
+@@ -16,55 +16,55 @@ LOG = logging.getLogger(__name__)
+ 
+ 
+ #TASK_LAS_MERGE_SCATTER_SCRIPT = """\
+-#python -m falcon_kit.mains.las_merge_scatter --db-prefix={params.db_prefix} --stage={params.stage} --run-jobs-fn={input.run_jobs} --gathered-las-fn={input.gathered_las} --wildcards={params.wildcards} --scattered-fn={output.scattered}
++#python3 -m falcon_kit.mains.las_merge_scatter --db-prefix={params.db_prefix} --stage={params.stage} --run-jobs-fn={input.run_jobs} --gathered-las-fn={input.gathered_las} --wildcards={params.wildcards} --scattered-fn={output.scattered}
+ #"""
+ TASK_LAS_MERGE_SPLIT_SCRIPT = """\
+-python -m falcon_kit.mains.las_merge_split --wildcards={params.wildcards} --db-prefix={params.db_prefix} --run-jobs-fn={input.run_jobs} --gathered-las-fn={input.gathered_las} --split-fn={output.split} --bash-template-fn={output.bash_template}
++python3 -m falcon_kit.mains.las_merge_split --wildcards={params.wildcards} --db-prefix={params.db_prefix} --run-jobs-fn={input.run_jobs} --gathered-las-fn={input.gathered_las} --split-fn={output.split} --bash-template-fn={output.bash_template}
+ """
+ TASK_LAS_MERGE_SCRIPT = """\
+ # Note: HPC.daligner chooses a merged filename in its generated script, so we will symlink to it.
+-python -m falcon_kit.mains.las_merge --las-paths-fn={input.las_paths} --merge-script-fn={input.merge_script} --las-merged-fn-fn={input.merged_las_json} --las-merged-symlink-fn={output.merged_las} --job-done-fn={output.job_done} --p-id-fn={output.p_id} --p-id-num={params.p_id_num}
++python3 -m falcon_kit.mains.las_merge --las-paths-fn={input.las_paths} --merge-script-fn={input.merge_script} --las-merged-fn-fn={input.merged_las_json} --las-merged-symlink-fn={output.merged_las} --job-done-fn={output.job_done} --p-id-fn={output.p_id} --p-id-num={params.p_id_num}
+ """
+ TASK_LAS_MERGE_GATHER_SCRIPT = """\
+-python -m falcon_kit.mains.las_merge_gather --gathered-fn={input.gathered} --p-id2las-fn={output.p_id2las} --las-fn={output.las}
++python3 -m falcon_kit.mains.las_merge_gather --gathered-fn={input.gathered} --p-id2las-fn={output.p_id2las} --las-fn={output.las}
+ """
+ #TASK_CONSENSUS_SCATTER_SCRIPT = """\
+-#python -m falcon_kit.mains.consensus_scatter --las-fopfn-fn={input.las_fopfn} --db-fn={input.raw_reads_db} --length-cutoff-fn={input.length_cutoff} --config-fn={input.config} --wildcards={params.wildcards} --scattered-fn={output.scattered}
++#python3 -m falcon_kit.mains.consensus_scatter --las-fopfn-fn={input.las_fopfn} --db-fn={input.raw_reads_db} --length-cutoff-fn={input.length_cutoff} --config-fn={input.config} --wildcards={params.wildcards} --scattered-fn={output.scattered}
+ #"""
+ TASK_CONSENSUS_SPLIT_SCRIPT = """\
+-python -m falcon_kit.mains.consensus_split --wildcards={params.wildcards} --p-id2las-fn={input.p_id2las} --db-fn={input.raw_reads_db} --length-cutoff-fn={input.length_cutoff} --config-fn={input.config} --split-fn={output.split} --bash-template-fn={output.bash_template}
++python3 -m falcon_kit.mains.consensus_split --wildcards={params.wildcards} --p-id2las-fn={input.p_id2las} --db-fn={input.raw_reads_db} --length-cutoff-fn={input.length_cutoff} --config-fn={input.config} --split-fn={output.split} --bash-template-fn={output.bash_template}
+ """
+ TASK_CONSENSUS_TASK_SCRIPT = """\
+-python -m falcon_kit.mains.consensus_task --nproc={params.pypeflow_nproc} --las-fn={input.las} --db-fn={input.db} --length-cutoff-fn={input.length_cutoff} --config-fn={input.config} --fasta-fn={output.fasta}
++python3 -m falcon_kit.mains.consensus_task --nproc={params.pypeflow_nproc} --las-fn={input.las} --db-fn={input.db} --length-cutoff-fn={input.length_cutoff} --config-fn={input.config} --fasta-fn={output.fasta}
+ """
+ TASK_CONSENSUS_GATHER_SCRIPT = """\
+-python -m falcon_kit.mains.consensus_gather_fasta_fofn --gathered-fn={input.gathered} --preads-fofn-fn={output.preads_fofn}
++python3 -m falcon_kit.mains.consensus_gather_fasta_fofn --gathered-fn={input.gathered} --preads-fofn-fn={output.preads_fofn}
+ """
+ TASK_REPORT_PRE_ASSEMBLY_SCRIPT = """\
+-python -m falcon_kit.mains.task_report_pre_assembly --config-fn={input.config} --length-cutoff-fn={input.length_cutoff} --raw-reads-db-fn={input.raw_reads_db} --preads-fofn-fn={input.preads_fofn} --pre-assembly-report-fn={output.pre_assembly_report}
++python3 -m falcon_kit.mains.task_report_pre_assembly --config-fn={input.config} --length-cutoff-fn={input.length_cutoff} --raw-reads-db-fn={input.raw_reads_db} --preads-fofn-fn={input.preads_fofn} --pre-assembly-report-fn={output.pre_assembly_report}
+ """
+ TASK_BUILD_RDB_SCRIPT = """\
+-python -m falcon_kit.mains.build_rdb --input-fofn-fn={input.raw_reads_fofn} --config-fn={input.config} --run-jobs-fn={output.run_jobs} --length-cutoff-fn={output.length_cutoff} --job-done-fn={output.db_build_done}
++python3 -m falcon_kit.mains.build_rdb --input-fofn-fn={input.raw_reads_fofn} --config-fn={input.config} --run-jobs-fn={output.run_jobs} --length-cutoff-fn={output.length_cutoff} --job-done-fn={output.db_build_done}
+ touch {output.db_build_done}
+ """
+ TASK_BUILD_PDB_SCRIPT = """\
+-python -m falcon_kit.mains.build_pdb --input-fofn-fn={input.preads_fofn} --config-fn={input.config} --run-jobs-fn={output.run_jobs} --job-done-fn={output.db_build_done}
++python3 -m falcon_kit.mains.build_pdb --input-fofn-fn={input.preads_fofn} --config-fn={input.config} --run-jobs-fn={output.run_jobs} --job-done-fn={output.db_build_done}
+ # TODO: Verify that input.preads_db exists.
+ touch {output.db_build_done}
+ """
+ #TASK_DALIGNER_SCATTER_SCRIPT = """\
+-#python -m falcon_kit.mains.daligner_scatter --run-jobs-fn={input.run_jobs} --db-prefix={params.db_prefix} --db-fn={input.db} --skip-checks={params.skip_checks} --pread-aln={params.pread_aln} --stage={params.stage} --wildcards={params.wildcards} --scattered-fn={output.scattered}
++#python3 -m falcon_kit.mains.daligner_scatter --run-jobs-fn={input.run_jobs} --db-prefix={params.db_prefix} --db-fn={input.db} --skip-checks={params.skip_checks} --pread-aln={params.pread_aln} --stage={params.stage} --wildcards={params.wildcards} --scattered-fn={output.scattered}
+ #"""
+ TASK_DALIGNER_SPLIT_SCRIPT = """\
+-python -m falcon_kit.mains.daligner_split --nproc={params.pypeflow_nproc} --wildcards={params.wildcards} --db-prefix={params.db_prefix} --skip-checks={params.skip_checks} --pread-aln={params.pread_aln} --run-jobs-fn={input.run_jobs} --db-fn={input.db} --split-fn={output.split} --bash-template-fn={output.bash_template}
++python3 -m falcon_kit.mains.daligner_split --nproc={params.pypeflow_nproc} --wildcards={params.wildcards} --db-prefix={params.db_prefix} --skip-checks={params.skip_checks} --pread-aln={params.pread_aln} --run-jobs-fn={input.run_jobs} --db-fn={input.db} --split-fn={output.split} --bash-template-fn={output.bash_template}
+ """
+ TASK_DALIGNER_SCRIPT = """\
+ # Note: HPC.daligner chooses a merged filename in its generated script, so we will symlink to it.
+-python -m falcon_kit.mains.daligner --daligner-settings-fn={input.daligner_settings} --daligner-script-fn={input.daligner_script} --job-done-fn={output.job_done}
++python3 -m falcon_kit.mains.daligner --daligner-settings-fn={input.daligner_settings} --daligner-script-fn={input.daligner_script} --job-done-fn={output.job_done}
+ """
+ 
+ TASK_DALIGNER_FIND_LAS_SCRIPT = """\
+-python -m falcon_kit.mains.daligner_gather_las_list --gathered-fn={input.gathered} --las-paths-fn={output.las_paths}
++python3 -m falcon_kit.mains.daligner_gather_las_list --gathered-fn={input.gathered} --las-paths-fn={output.las_paths}
+ """
+ TASK_DUMP_RAWREAD_IDS_SCRIPT = """\
+ DBshow -n {input.rawread_db} | tr -d '>' | LD_LIBRARY_PATH= awk '{{print $1}}' > {output.rawread_id_file}
+@@ -73,7 +73,7 @@ TASK_DUMP_PREAD_IDS_SCRIPT = """\
+ DBshow -n {input.pread_db} | tr -d '>' | LD_LIBRARY_PATH= awk '{{print $1}}' > {output.pread_id_file}
+ """
+ TASK_GENERATE_READ_TO_CTG_MAP_SCRIPT = """\
+-python -m falcon_kit.mains.generate_read_to_ctg_map --rawread-id={input.rawread_id_file} --pread-id={input.pread_id_file} --sg-edges-list={input.sg_edges_list} --utg-data={input.utg_data} --ctg-paths={input.ctg_paths} --output={output.read_to_contig_map}
++python3 -m falcon_kit.mains.generate_read_to_ctg_map --rawread-id={input.rawread_id_file} --pread-id={input.pread_id_file} --sg-edges-list={input.sg_edges_list} --utg-data={input.utg_data} --ctg-paths={input.ctg_paths} --output={output.read_to_contig_map}
+ """
+ TASK_RUN_DB_TO_FALCON_SCRIPT = """\
+ # Given preads.db,
+@@ -87,31 +87,31 @@ TASK_RUN_FALCON_ASM_SCRIPT = """\
+ # write preads.ovl:
+ 
+ # mobs uses binwrappers, so it does not see our "entry-points".
+-# So, after dropping "src/py_scripts/*.py", we can call these via python -m:
++# So, after dropping "src/py_scripts/*.py", we can call these via python3 -m:
+ 
+-time python -m falcon_kit.mains.ovlp_filter --db {input.db_file} --las-fofn {input.las_fofn} {params.overlap_filtering_setting} --min-len {params.length_cutoff_pr} --out-fn preads.ovl
++time python3 -m falcon_kit.mains.ovlp_filter --db {input.db_file} --las-fofn {input.las_fofn} {params.overlap_filtering_setting} --min-len {params.length_cutoff_pr} --out-fn preads.ovl
+ 
+ ln -sf {input.preads4falcon_fasta} ./preads4falcon.fasta
+ 
+ # Given preads.ovl,
+ # write sg_edges_list, c_path, utg_data, ctg_paths.
+-time python -m falcon_kit.mains.ovlp_to_graph {params.fc_ovlp_to_graph_option} --overlap-file preads.ovl >| fc_ovlp_to_graph.log
++time python3 -m falcon_kit.mains.ovlp_to_graph {params.fc_ovlp_to_graph_option} --overlap-file preads.ovl >| fc_ovlp_to_graph.log
+ 
+ # Given sg_edges_list, utg_data, ctg_paths, preads4falcon.fasta,
+ # write p_ctg.fa and a_ctg_all.fa,
+ # plus a_ctg_base.fa, p_ctg_tiling_path, a_ctg_tiling_path, a_ctg_base_tiling_path:
+-time python -m falcon_kit.mains.graph_to_contig
++time python3 -m falcon_kit.mains.graph_to_contig
+ 
+ # Given a_ctg_all.fa, write a_ctg.fa:
+-time python -m falcon_kit.mains.dedup_a_tigs
++time python3 -m falcon_kit.mains.dedup_a_tigs
+ 
+ # Generate a GFA of all assembly graph edges. This GFA can contain
+ # edges and nodes which are not part of primary and associate contigs.
+-time python -m falcon_kit.mains.gen_gfa_v1 >| asm.gfa
++time python3 -m falcon_kit.mains.gen_gfa_v1 >| asm.gfa
+ 
+ # Generate a GFA of all assembly graph edges. This GFA can contain
+ # edges and nodes which are not part of primary and associate contigs.
+-time python -m falcon_kit.mains.gen_gfa_v1 --add-string-graph >| sg.gfa
++time python3 -m falcon_kit.mains.gen_gfa_v1 --add-string-graph >| sg.gfa
+ 
+ #rm -f ./preads4falcon.fasta
+ 
+Index: falcon/FALCON/falcon_kit/snakemake.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/snakemake.py
++++ falcon/FALCON/falcon_kit/snakemake.py
+@@ -77,7 +77,7 @@ class SnakemakeRuleWriter(object):
+ rule dynamic_%(rule_name)s_split:
+     input:  %(input_json)r
+     output: %(dynamic_output_kvs)s
+-    shell: 'python -m falcon_kit.mains.copy_mapped --special-split={input} %(wi_pattern_kvs)s'
++    shell: 'python3 -m falcon_kit.mains.copy_mapped --special-split={input} %(wi_pattern_kvs)s'
+ """%(locals())
+         self.write(rule)
+ 
+Index: falcon/FALCON/falcon_kit/stats_preassembly.py
+===================================================================
+--- falcon.orig/FALCON/falcon_kit/stats_preassembly.py
++++ falcon/FALCON/falcon_kit/stats_preassembly.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ """ PreAssembly Report.
+ 
+ See FALCON-pbsmrtpipe/pbfalcon/report_preassembly.py for XML version.
+Index: falcon/pypeFLOW/example/PypeTest.py
+===================================================================
+--- falcon.orig/pypeFLOW/example/PypeTest.py
++++ falcon/pypeFLOW/example/PypeTest.py
+@@ -263,7 +263,7 @@ if __name__ == "__main__":
+     try:
+         testDistributed(sys.argv[1], sys.argv[2])
+     except IndexError:
+-        print("try: python PypeTest.py localshell 1")
++        print("try: python3 PypeTest.py localshell 1")
+         print("running simpleTest()")
+         simpleTest()
+ 
+Index: falcon/pypeFLOW/pwatcher/fs_based.py
+===================================================================
+--- falcon.orig/pypeFLOW/pwatcher/fs_based.py
++++ falcon/pypeFLOW/pwatcher/fs_based.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ """Filesytem-based process-watcher.
+ 
+ This is meant to be part of a 2-process system. For now, let's call these processes the Definer and the Watcher.
+@@ -205,7 +205,7 @@ set -x
+     command = mjob.job.cmd
+ 
+     prog = 'heartbeat-wrapper' # missing in mobs
+-    prog = 'python2.7 -m pwatcher.mains.fs_heartbeat'
++    prog = 'python3 -m pwatcher.mains.fs_heartbeat'
+     heartbeat_wrapper_template = "{prog} --directory={metajob_rundir} --heartbeat-file={heartbeat_fn} --exit-file={exit_sentinel_fn} --rate={rate} {command} || echo 99 >| {exit_sentinel_fn}"
+     # We write 99 into exit-sentinel if the wrapper fails.
+     wrapped = heartbeat_wrapper_template.format(**locals())
+Index: falcon/pypeFLOW/pwatcher/mains/query_server.py
+===================================================================
+--- falcon.orig/pypeFLOW/pwatcher/mains/query_server.py
++++ falcon/pypeFLOW/pwatcher/mains/query_server.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python
++#!/usr/bin/python3
+ 
+ """
+ Query the heartbeat server from the command line.
+Index: falcon/pypeFLOW/pwatcher/network_based.py
+===================================================================
+--- falcon.orig/pypeFLOW/pwatcher/network_based.py
++++ falcon/pypeFLOW/pwatcher/network_based.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ """Network-based process-watcher.
+ 
+ This is meant to be part of a 2-process system. For now, let's call these
+@@ -40,7 +40,7 @@ try:
+     from shlex import quote
+ except ImportError:
+     from pipes import quote
+-import SocketServer
++import socketserver
+ import collections
+ import contextlib
+ import glob
+@@ -102,7 +102,7 @@ def socket_read(socket):
+ 
+ # TODO: have state be persistent in some fashion, so we can be killed
+ # and restarted
+-class StatusServer(SocketServer.BaseRequestHandler):
++class StatusServer(socketserver.BaseRequestHandler):
+     """input format is "command [jobid [arg1 [arg2]]]"
+     job status update commands are:
+         i <jobid> <pid> <pgid>	- initialize job
+@@ -200,7 +200,7 @@ def get_localhost_ipaddress(hostname, po
+ 
+ # if we end up restarting a partially killed process, we'll try
+ # to pick up the ongoing heartbeats
+-class ReuseAddrServer(SocketServer.TCPServer):
++class ReuseAddrServer(socketserver.TCPServer):
+     def restore_from_directories(self):
+         """
+         as our heartbeat server has been down, there's no accurate way
+@@ -231,7 +231,7 @@ class ReuseAddrServer(SocketServer.TCPSe
+         # {jobid} = [pid, pgid, heartbeat timestamp, exit rc]
+         self.server_job_list = dict()
+         self.restore_from_directories()
+-        SocketServer.TCPServer.__init__(self, server_address, RequestHandlerClass)
++        socketserver.TCPServer.__init__(self, server_address, RequestHandlerClass)
+ 
+ def start_server(server_directories, hostname='', port=0):
+     server = ReuseAddrServer((hostname, port), StatusServer, server_directories)
+@@ -429,7 +429,7 @@ def MetaJob_wrap(mjob, state):
+     jobid = mjob.job.jobid
+     exit_sentinel_fn = os.path.join(edir, jobid)
+ 
+-    prog = 'python2.7 -m pwatcher.mains.network_heartbeat'
++    prog = 'python3 -m pwatcher.mains.network_heartbeat'
+     heartbeat_wrapper_template = "{prog} --directory={metajob_rundir} --heartbeat-server={heartbeat_server} --heartbeat-port={heartbeat_port} --exit-dir={edir} --rate={rate} --jobid={jobid} {command} || echo 99 >| {exit_sentinel_fn}"
+     # We write 99 into exit-sentinel if the wrapper fails.
+     wrapped = heartbeat_wrapper_template.format(**locals())
+Index: falcon/pypeFLOW/pypeflow/do_task.py
+===================================================================
+--- falcon.orig/pypeFLOW/pypeflow/do_task.py
++++ falcon/pypeFLOW/pypeflow/do_task.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ from . import do_support, util
+ import argparse
+ import importlib
+Index: falcon/pypeFLOW/pypeflow/simple_pwatcher_bridge.py
+===================================================================
+--- falcon.orig/pypeFLOW/pypeflow/simple_pwatcher_bridge.py
++++ falcon/pypeFLOW/pypeflow/simple_pwatcher_bridge.py
+@@ -1,10 +1,10 @@
+ from __future__ import absolute_import
+-from .util import (mkdirs, system, touch, run, cd)
+ import pwatcher.blocking
+ import pwatcher.fs_based
+ import pwatcher.network_based
+ import networkx
+ import networkx.algorithms.dag #import (topological_sort, is_directed_acyclic_graph)
++from pathlib import Path
+ 
+ import hashlib
+ import json
+@@ -455,7 +455,7 @@ class PypeNode(NodeBase):
+     """
+     def generate_script(self):
+         wdir = self.wdir
+-        mkdirs(wdir)
++        Path(wdir).mkdir(parents=True, exist_ok=True)
+         pt = self.pypetask
+         assert pt.wdir == self.wdir
+         inputs = {k:v.path for k,v in pt.inputs.items()}
+@@ -486,7 +486,7 @@ class PypeNode(NodeBase):
+         task_content = json.dumps(task_desc, sort_keys=True, indent=4, separators=(',', ': ')) + '\n'
+         task_json_fn = os.path.join(wdir, 'task.json')
+         open(task_json_fn, 'w').write(task_content)
+-        python = 'python2.7' # sys.executable fails sometimes because of binwrapper: SE-152
++        python = 'python3' # sys.executable fails sometimes because of binwrapper: SE-152
+         tmpdir_flag = '--tmpdir {}'.format(self.use_tmpdir) if self.use_tmpdir else ''
+         cmd = '{} -m pypeflow.do_task {} {}'.format(python, tmpdir_flag, task_json_fn)
+         script_content = """#!/bin/bash
+@@ -494,7 +494,7 @@ onerror () {{
+   set -vx
+   echo "FAILURE. Running top in $(pwd) (If you see -terminal database is inaccessible- you are using the python bin-wrapper, so you will not get diagnostic info. No big deal. This process is crashing anyway.)"
+   rm -f top.txt
+-  which python
++  which python3
+   which top
+   env -u LD_LIBRARY_PATH top -b -n 1 >| top.txt &
+   env -u LD_LIBRARY_PATH top -b -n 1 2>&1
+Index: falcon/FALCON/makefile
+===================================================================
+--- falcon.orig/FALCON/makefile
++++ falcon/FALCON/makefile
+@@ -7,32 +7,32 @@ ifndef PYTHONUSERBASE
+ endif
+ 
+ WHEELHOUSE?=wheelhouse
+-PIP=pip wheel --wheel-dir ${WHEELHOUSE}
++PIP=pip3 wheel --wheel-dir ${WHEELHOUSE}
+ 
+ MY_TEST_FLAGS?=-v -s --durations=0
+ 
+ DOCTEST_MODULES= falcon_kit/functional.py falcon_kit/mains/consensus_task.py falcon_kit/mains/las_merge_split.py
+ 
+ install-edit:
+-	pip -v install --user --edit .
++	pip3 -v install --user --edit .
+ install: wheel
+-	pip -v install --user --use-wheel --find-links=dist/ .
++	pip3 -v install --user --use-wheel --find-links=dist/ .
+ pylint:
+ 	pylint --errors-only falcon_kit/
+ test:
+-	python -c 'import falcon_kit; print falcon_kit.falcon'
+-	which py.test || pip install --user pytest
+-	py.test ${MY_TEST_FLAGS} --junit-xml=test.xml --doctest-modules ${DOCTEST_MODULES} test/
++	PYTHONPATH=${FALCON_WORKSPACE}/pypeFLOW:${FALCON_WORKSPACE}/FALCON:${FALCON_WORKSPACE}/FALCON/build/lib.linux-x86_64-3.8/:$$PYTHONPATH python3 -c 'import falcon_kit; print(falcon_kit.falcon)'
++	which py.test || pip3 install --user pytest
++	PYTHONPATH=${FALCON_WORKSPACE}/pypeFLOW:${FALCON_WORKSPACE}/FALCON:${FALCON_WORKSPACE}/FALCON/build/lib.linux-x86_64-3.8/:$$PYTHONPATH py.test ${MY_TEST_FLAGS} --junit-xml=test.xml --doctest-modules ${DOCTEST_MODULES} test/
+ autopep8:
+ 	autopep8 --max-line-length=120 -ir -j0 falcon_kit/ examples/ test/ setup.py
+ 
+ old-wheel:
+-	pip install --upgrade --user pip
+-	python setup.py bdist_wheel
++	pip3 install --upgrade --user pip
++	python3 setup.py bdist_wheel
+ 	# Look for dist/*.whl
+ 
+ wheel:
+-	which pip
++	which pip3
+ 	${PIP} --no-deps .
+ 	ls -larth ${WHEELHOUSE}
+ 
+Index: falcon/FALCON/setup.py
+===================================================================
+--- falcon.orig/FALCON/setup.py
++++ falcon/FALCON/setup.py
+@@ -1,4 +1,4 @@
+-#!/usr/bin/env python2.7
++#!/usr/bin/python3
+ 
+ from setuptools import setup, Extension
+ import subprocess
+Index: falcon/FALCON-make/makefile
+===================================================================
+--- falcon.orig/FALCON-make/makefile
++++ falcon/FALCON-make/makefile
+@@ -20,7 +20,8 @@ all: checklist
+ checklist:
+ 	@if [ -z "$${FALCON_PREFIX}" ]; then echo 'Error: FALCON_PREFIX is not set'; exit 1; fi
+ 	@if [ ! -e "$${FALCON_PREFIX}/bin" ] ; then echo 'Error: directory FALCON_PREFIX/bin (${FALCON_PREFIX}/bin) does not exist'; exit 1; fi
+-install: install-DAZZ_DB install-DALIGNER install-DAMASKER install-DEXTRACTOR install-pypeFLOW install-FALCON install-git-sym install-nim-falcon
++install: install-DAZZ_DB install-DALIGNER install-DAMASKER install-DEXTRACTOR install-pypeFLOW install-FALCON install-nim-falcon
++	#install-git-sym # removed from above dependencies
+ install-DAZZ_DB:
+ 	${MAKE} -C ${FALCON_WORKSPACE}/DAZZ_DB all
+ 	PREFIX=${FALCON_PREFIX} ${MAKE} -C ${FALCON_WORKSPACE}/DAZZ_DB ${FALCON_INSTALL_RULE}
+@@ -34,11 +35,11 @@ install-DEXTRACTOR:
+ 	${MAKE} -C ${FALCON_WORKSPACE}/DEXTRACTOR all
+ 	PREFIX=${FALCON_PREFIX} ${MAKE} -C ${FALCON_WORKSPACE}/DEXTRACTOR ${FALCON_INSTALL_RULE}
+ install-pypeFLOW:
+-	cd ${FALCON_WORKSPACE}/pypeFLOW; python setup.py install ${FALCON_PYFLAGS} --prefix=${FALCON_PREFIX}
++	cd ${FALCON_WORKSPACE}/pypeFLOW; python3 setup.py install ${FALCON_PYFLAGS} --prefix=${FALCON_PREFIX}
+ install-FALCON: install-pypeFLOW
+-	cd ${FALCON_WORKSPACE}/FALCON; python setup.py install ${FALCON_PYFLAGS} --prefix=${FALCON_PREFIX}
++	cd ${FALCON_WORKSPACE}/FALCON; python3 setup.py install ${FALCON_PYFLAGS} --prefix=${FALCON_PREFIX}
+ install-FALCON_unzip: install-pypeFLOW
+-	cd ${FALCON_WORKSPACE}/FALCON_unzip; python setup.py install ${FALCON_PYFLAGS} --prefix=${FALCON_PREFIX}
++	cd ${FALCON_WORKSPACE}/FALCON_unzip; python3 setup.py install ${FALCON_PYFLAGS} --prefix=${FALCON_PREFIX}
+ install-nim-falcon:
+ 	make -C ${FALCON_WORKSPACE}/nim-falcon/src -j1 all
+ 	rsync ${FALCON_WORKSPACE}/nim-falcon/src/*.exe ${FALCON_PREFIX}/bin/
+@@ -47,31 +48,35 @@ install-git-sym:
+ 	ln -sf $(abspath ${FALCON_WORKSPACE}/git-sym/git-sym) ${FALCON_PREFIX}/bin/git-sym
+ 
+ install-pip:
+-	python -c 'import pip; print pip' || python get-pip.py ${FALCON_PIP_USER}
+-	pip install ${FALCON_PIP_USER} --upgrade pip
+-	pip install ${FALCON_PIP_USER} --upgrade setuptools # needed when --edit is used
+-	pip install ${FALCON_PIP_USER} nose pytest # for running tests while building
++	#python3 -c 'import pip; print(pip)' || python3 get-pip.py ${FALCON_PIP_USER}
++	#pip3 install ${FALCON_PIP_USER} --upgrade pip
++	#pip3 install ${FALCON_PIP_USER} --upgrade setuptools # needed when --edit is used
++	#pip3 install ${FALCON_PIP_USER} nose3 pytest # for running tests while building
++	echo "No need for manual installations with Debian"
+ show:
+ 	mkdir -p ${FALCON_PREFIX}/bin
+-	which python
++	which python3
+ 	echo "PYTHONUSERBASE=${PYTHONUSERBASE}"
+-	python -c 'import site; print site.USER_BASE'
++	python3 -c 'import site; print(site.USER_BASE)'
+ 	echo "FALCON_PIP_EDIT=${FALCON_PIP_EDIT}"
+ 	echo "FALCON_PIP_USER=${FALCON_PIP_USER}"
+ check:
+-	python -c 'import pypeflow.simple_pwatcher_bridge; print pypeflow.simple_pwatcher_bridge'
+-	python -c 'import falcon_kit; print falcon_kit.falcon'
++	PYTHONPATH=${FALCON_WORKSPACE}/pypeFLOW:$$PYTHONPATH python3 -c 'import pypeflow.simple_pwatcher_bridge; print(pypeflow.simple_pwatcher_bridge)'
++	# FIXME: undefined Py_initModule symbol in /home/moeller/git/med-team/falcon/FALCON/build/lib.linux-x86_64-3.8/ext_falcon.cpython-38-x86_64-linux-gnu.so
++	#PYTHONPATH=${FALCON_WORKSPACE}/pypeFLOW:${FALCON_WORKSPACE}/FALCON:${FALCON_WORKSPACE}/FALCON/build/lib.linux-x86_64-3.8/:$$PYTHONPATH python3 -c 'import falcon_kit; print(falcon_kit.falcon)'
+ extra:
+-	pip install ${FALCON_PIP_USER} Cython
+-	pip install ${FALCON_PIP_USER} numpy
+-	pip install ${FALCON_PIP_USER} h5py
+-	cd ${FALCON_WORKSPACE}/pbcommand; pip install ${FALCON_PIP_USER} ${FALCON_PIP_EDIT} .
+-	cd ${FALCON_WORKSPACE}/pbsmrtpipe; pip install ${FALCON_PIP_USER} ${FALCON_PIP_EDIT} .
++	#pip3 install ${FALCON_PIP_USER} Cython
++	#pip3 install ${FALCON_PIP_USER} numpy
++	#pip3 install ${FALCON_PIP_USER} h5py
++	#cd ${FALCON_WORKSPACE}/pbcommand; pip3 install ${FALCON_PIP_USER} ${FALCON_PIP_EDIT} .
++	#cd ${FALCON_WORKSPACE}/pbsmrtpipe; pip3 install ${FALCON_PIP_USER} ${FALCON_PIP_EDIT} .
++	echo "No need for manual installations with Debian"
++
+ test: #after 'install'
+ 	${MAKE} -C ${FALCON_WORKSPACE}/FALCON-examples test
+ clean:
+ 	cd ${FALCON_WORKSPACE}/DAZZ_DB; ${MAKE} clean
+ 	cd ${FALCON_WORKSPACE}/DALIGNER; ${MAKE} clean
+ 	cd ${FALCON_WORKSPACE}/DAMASKER; ${MAKE} clean
+-	cd ${FALCON_WORKSPACE}/pypeFLOW; python setup.py clean; rm -rf build/ dist/
+-	cd ${FALCON_WORKSPACE}/FALCON; python setup.py clean; rm -rf build/ dist/
++	cd ${FALCON_WORKSPACE}/pypeFLOW; python3 setup.py clean; rm -rf build/ dist/
++	cd ${FALCON_WORKSPACE}/FALCON; python3 setup.py clean; rm -rf build/ dist/
+Index: falcon/FALCON-examples/makefile
+===================================================================
+--- falcon.orig/FALCON-examples/makefile
++++ falcon/FALCON-examples/makefile
+@@ -13,8 +13,8 @@ test: full-test import-test ;
+ 
+ .PHONY: import-test
+ import-test:
+-	python -c 'import pypeflow.pwatcher_workflow; print pypeflow.pwatcher_workflow'
+-	python -c 'import falcon_kit; print falcon_kit.falcon'
++	python3 -c 'import pypeflow.pwatcher_workflow; print(pypeflow.pwatcher_workflow)'
++	python3 -c 'import falcon_kit; print(falcon_kit.falcon)'
+ 
+ .PHONY: full-test
+ full-test:
+Index: falcon/FALCON-examples/run/greg200k-sv2/makefile
+===================================================================
+--- falcon.orig/FALCON-examples/run/greg200k-sv2/makefile
++++ falcon/FALCON-examples/run/greg200k-sv2/makefile
+@@ -9,4 +9,4 @@ clean:
+ cleaner: clean
+ 	rm -rf 0-*/ 1-*/ 2-*/ all.log mypwatcher/ scripts/ sge_log/
+ symlink:
+-	python -m falcon_kit.mains.hgap4_adapt --job-output=jo/ --link-dir=..
++	python3 -m falcon_kit.mains.hgap4_adapt --job-output=jo/ --link-dir=..
+Index: falcon/pypeFLOW/pypeflow/util.py
+===================================================================
+--- falcon.orig/pypeFLOW/pypeflow/util.py
++++ falcon/pypeFLOW/pypeflow/util.py
+@@ -2,19 +2,19 @@
+ Prefer io.py now.
+ """
+ import logging
+ import os
+-from io import (cd, touch, mkdirs, syscall as system)
++# along suggestion from https://stackoverflow.com/questions/89228/how-to-call-an-external-command
++import subprocess
+ 
+ LOG = logging.getLogger()
+ 
+ def run(script_fn):
+     cwd, basename = os.path.split(script_fn)
+-    with cd(cwd):
+-        system('/bin/bash {}'.format(basename))
++    # run in the script's directory; os.chdir is not a context manager
++    subprocess.check_call(['/bin/bash', basename], cwd=cwd)
+ def rmdirs(path):
+     if os.path.isdir(path):
+         if len(path) < 20 and 'home' in path:
+             LOG.error('Refusing to rm {!r} since it might be your homedir.'.format(path))
+             return
+-        cmd = 'rm -rf {}'.format(path)
+-        system(cmd)
++        subprocess.check_call(['rm', '-rf', path])
+Index: falcon/pypeFLOW/pwatcher/blocking.py
+===================================================================
+--- falcon.orig/pypeFLOW/pwatcher/blocking.py
++++ falcon/pypeFLOW/pwatcher/blocking.py
+@@ -210,7 +210,7 @@ eval "$cmd"
+     log.debug('Writing wrapper "%s"' %wrapper_fn)
+     open(wrapper_fn, 'w').write(wrapped)
+     st = os.stat(wrapper_fn)
+-    os.chmod(wrapper_fn, st.st_mode | 0111)
++    os.chmod(wrapper_fn, st.st_mode | 0o111)
+ 
+ class JobThread(threading.Thread):
+     def run(self):


=====================================
debian/patches/series
=====================================
@@ -5,3 +5,9 @@ skip_checks_true.patch
 skip_fixture_test.patch
 skip_get_falcon_sense_option_test.patch
 do_not_fail_when_there_without_lustre.patch
+newNimPathSep.patch
+isNilError.patch
+nimUndeclearedIdentifier.patch
+nimundeclaredWordWrap.patch
+nimTypeMismatch.patch
+python2to3.patch


=====================================
debian/rules
=====================================
@@ -1,25 +1,26 @@
 #!/usr/bin/make -f
 
 export DH_VERBOSE=1
-include /usr/share/dpkg/default.mk
+#include /usr/share/dpkg/default.mk
 export LC_ALL=C.UTF-8
 export FALCON_WORKSPACE=$(CURDIR)
 export FALCON_PREFIX=$(CURDIR)/inst
 export FALCON_INSTALL_RULE=install
-py2vers = $(shell pyversions -rv)
-export PYTHONPATH:=$(FALCON_PREFIX)/lib/python${py2vers}/site-packages:$(PYTHONPATH)
+py3vers = $(shell py3versions -rv)
+export PYTHONPATH:=../pypeFLOW:$(FALCON_PREFIX)/lib/python${py3vers}/site-packages:$(PYTHONPATH)
 
 %:
-	dh $@ --with python3 --buildsystem=pybuild
+	dh $@ --with python3 --buildsystem=pybuild
 
 override_dh_auto_build:
-	mkdir $(FALCON_PREFIX)/lib/python${py2vers}/site-packages/ -p
+	mkdir $(FALCON_PREFIX)/lib/python${py3vers}/site-packages/ -p
 	mkdir -p \
 		$(FALCON_PREFIX)/bin \
 		$(FALCON_PREFIX)/include \
 		$(FALCON_PREFIX)/lib \
 		$(FALCON_PREFIX)/pylib
-	mkdir $(CURDIR)/../home
+	# FIXME: This is sufficiently ugly to stimulate an improvement
+	mkdir -p $(CURDIR)/../home
 	HOME=$(CURDIR)/../home $(MAKE) all
 
 override_dh_auto_test: unit-tests integration-tests ;
@@ -39,3 +40,16 @@ override_dh_install:
 override_dh_auto_clean:
 	dh_auto_clean
 	$(RM) -r $(FALCON_PREFIX)
+	find . -name __pycache__ | xargs -r $(RM) -r
+	find . -name "*.o" -delete
+	$(RM) pypeFLOW/dist/pypeflow-2.0.1-py3.8.egg
+	$(MAKE) -C DEXTRACTOR clean
+	$(MAKE) -C DAZZ_DB clean
+	$(MAKE) -C DAMASKER clean
+	$(MAKE) -C DALIGNER clean
+	$(MAKE) -C FALCON clean
+	$(RM) -r FALCON/dist FALCON/build
+	$(RM) DAZZ_DB/*.d
+	$(RM) -r pypeFLOW/build/lib
+	$(RM) -r pypeFLOW/pypeflow.egg-info
+	$(RM) -r FALCON/falcon_kit.egg-info



View it on GitLab: https://salsa.debian.org/med-team/falcon/-/commit/3492618ac8be3cb1cae995da7c7582e574494305

-- 
View it on GitLab: https://salsa.debian.org/med-team/falcon/-/commit/3492618ac8be3cb1cae995da7c7582e574494305
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20200827/615ac889/attachment-0001.html>


More information about the debian-med-commit mailing list