[med-svn] [Git][med-team/toil][master] finish updating the patches
Michael R. Crusoe
gitlab at salsa.debian.org
Tue Aug 13 12:22:04 BST 2019
Michael R. Crusoe pushed to branch master at Debian Med / toil
Commits:
51fd8343 by Michael R. Crusoe at 2019-08-13T11:15:24Z
finish updating the patches
- - - - -
7 changed files:
- debian/copyright
- debian/patches/fix_tests
- + debian/patches/remove-pathlib2-dep
- − debian/patches/reproducible-build.patch
- debian/patches/series
- debian/patches/soften-mesos-deps
- − debian/patches/soften-pytest-depends
Changes:
=====================================
debian/copyright
=====================================
@@ -56,7 +56,7 @@ Files: ./src/toil/batchSystems/lsfHelper.py
Copyright: 2013-2017 "Rory Kirchner" <rory.kirchner at gmail.com> and contributors
License: Expat
-Files: ./contrib/azure/LICENSE
+Files: ./contrib/azure/*
Copyright: 2015, Microsoft Azure
License: Expat
=====================================
debian/patches/fix_tests
=====================================
@@ -1,167 +1,16 @@
From: Michael R. Crusoe <michael.crusoe at gmail.com>
Subject: cherry pick py3 testing fixes from upstream
---- toil.orig/src/toil/lib/docker.py
-+++ toil/src/toil/lib/docker.py
-@@ -20,7 +20,6 @@
- from docker.errors import APIError
- from docker.errors import NotFound
- from docker.errors import DockerException
--from docker.utils.types import LogConfig
- from docker.api.container import ContainerApiMixin
-
- from toil.lib.retry import retry
--- toil.orig/src/toil/test/jobStores/jobStoreTest.py
+++ toil/src/toil/test/jobStores/jobStoreTest.py
-@@ -31,11 +31,11 @@
- import logging
- import threading
- import os
-+import sys
- import shutil
- import tempfile
- import time
- import uuid
--from stubserver import FTPStubServer
- from abc import abstractmethod, ABCMeta
- from itertools import chain, islice, count
- from threading import Thread
-@@ -59,7 +59,6 @@
+@@ -54,7 +54,6 @@
from toil.job import Job, JobNode
from toil.jobStores.abstractJobStore import (NoSuchJobException,
NoSuchFileException)
-from toil.jobStores.googleJobStore import googleRetry
from toil.jobStores.fileJobStore import FileJobStore
+ from toil.statsAndLogging import StatsAndLogging
from toil.test import (ToilTest,
- needs_aws,
-@@ -102,7 +101,7 @@
- @classmethod
- @memoize
- def __new__(cls, *args):
-- return super(AbstractJobStoreTest.Test, cls).__new__(*args)
-+ return super(AbstractJobStoreTest.Test, cls).__new__(cls)
-
- def _createConfig(self):
- return Config()
-@@ -403,18 +402,22 @@
- """Tests the sharing of files."""
- jobstore1 = self.jobstore_initialized
- jobstore2 = self.jobstore_resumed_noconfig
-+
-+ bar = 'bar'
-+ if sys.version_info >= (3, 0):
-+ bar = b'bar'
-
- with jobstore1.writeSharedFileStream('foo') as f:
-- f.write('bar')
-+ f.write(bar)
- # ... read that file on worker, ...
- with jobstore2.readSharedFileStream('foo') as f:
-- self.assertEquals('bar', f.read())
-+ self.assertEquals(bar, f.read())
- # ... and read it again on jobstore1.
- with jobstore1.readSharedFileStream('foo') as f:
-- self.assertEquals('bar', f.read())
-+ self.assertEquals(bar, f.read())
-
- with jobstore1.writeSharedFileStream('nonEncrypted', isProtected=False) as f:
-- f.write('bar')
-+ f.write(bar)
- self.assertUrl(jobstore1.getSharedPublicUrl('nonEncrypted'))
- self.assertRaises(NoSuchFileException, jobstore1.getSharedPublicUrl, 'missing')
-
-@@ -435,12 +438,19 @@
- # Check file exists
- self.assertTrue(jobstore2.fileExists(fileOne))
- self.assertTrue(jobstore1.fileExists(fileOne))
-+ one = 'one'
-+ two = 'two'
-+ three = 'three'
-+ if sys.version_info >= (3, 0):
-+ one = b'one'
-+ two = b'two'
-+ three = b'three'
- # ... write to the file on jobstore2, ...
- with jobstore2.updateFileStream(fileOne) as f:
-- f.write('one')
-+ f.write(one)
- # ... read the file as a stream on the jobstore1, ....
- with jobstore1.readFileStream(fileOne) as f:
-- self.assertEquals(f.read(), 'one')
-+ self.assertEquals(f.read(), one)
-
- # ... and copy it to a temporary physical file on the jobstore1.
- fh, path = tempfile.mkstemp()
-@@ -452,27 +462,27 @@
- shutil.copyfile(tmpPath, path)
- finally:
- os.unlink(tmpPath)
-- with open(path, 'r+') as f:
-- self.assertEquals(f.read(), 'one')
-+ with open(path, 'rb+') as f:
-+ self.assertEquals(f.read(), one)
- # Write a different string to the local file ...
- f.seek(0)
- f.truncate(0)
-- f.write('two')
-+ f.write(two)
- # ... and create a second file from the local file.
- fileTwo = jobstore1.writeFile(path, jobOnJobStore1.jobStoreID)
- with jobstore2.readFileStream(fileTwo) as f:
-- self.assertEquals(f.read(), 'two')
-+ self.assertEquals(f.read(), two)
- # Now update the first file from the local file ...
- jobstore1.updateFile(fileOne, path)
- with jobstore2.readFileStream(fileOne) as f:
-- self.assertEquals(f.read(), 'two')
-+ self.assertEquals(f.read(), two)
- finally:
- os.unlink(path)
- # Create a third file to test the last remaining method.
- with jobstore2.writeFileStream(jobOnJobStore1.jobStoreID) as (f, fileThree):
-- f.write('three')
-+ f.write(three)
- with jobstore1.readFileStream(fileThree) as f:
-- self.assertEquals(f.read(), 'three')
-+ self.assertEquals(f.read(), three)
- # Delete a file explicitly but leave files for the implicit deletion through the parent
- jobstore2.deleteFile(fileOne)
-
-@@ -510,22 +520,28 @@
-
- # Collects stats and logging messages.
- stats = set()
--
-+ one = 'one'
-+ two = 'two'
-+ three = 'three'
-+ if sys.version_info >= (3, 0):
-+ one = b'one'
-+ two = b'two'
-+ three = b'three'
- # No stats or logging added yet. Expect nothing.
- self.assertEquals(0, jobstore1.readStatsAndLogging(callback))
- self.assertEquals(set(), stats)
-
- # Test writing and reading.
-- jobstore2.writeStatsAndLogging('1')
-+ jobstore2.writeStatsAndLogging(one)
- self.assertEquals(1, jobstore1.readStatsAndLogging(callback))
-- self.assertEquals({'1'}, stats)
-+ self.assertEquals({one}, stats)
- self.assertEquals(0, jobstore1.readStatsAndLogging(callback)) # readStatsAndLogging purges saved stats etc
-
-- jobstore2.writeStatsAndLogging('1')
-- jobstore2.writeStatsAndLogging('2')
-+ jobstore2.writeStatsAndLogging(one)
-+ jobstore2.writeStatsAndLogging(two)
- stats = set()
- self.assertEquals(2, jobstore1.readStatsAndLogging(callback))
-- self.assertEquals({'1', '2'}, stats)
-+ self.assertEquals({one, two}, stats)
-
- largeLogEntry = os.urandom(self._largeLogEntrySize())
- stats = set()
-@@ -634,7 +650,7 @@
+@@ -656,7 +655,7 @@
@classmethod
def makeImportExportTests(cls):
@@ -170,119 +19,7 @@ Subject: cherry pick py3 testing fixes from upstream
activeTestClassesByName = {testCls.__name__: testCls
for testCls in testClasses
-@@ -714,7 +730,13 @@
- assignedPort = http.server_address[1]
- url = 'http://localhost:%d' % assignedPort
- with self.jobstore_initialized.readFileStream(self.jobstore_initialized.importFile(url)) as readable:
-- self.assertEqual(readable.read(), StubHttpRequestHandler.fileContents)
-+ f1 = readable.read()
-+ f2 = StubHttpRequestHandler.fileContents
-+ if isinstance(f1, bytes) and not isinstance(f2, bytes):
-+ f1 = f1.decode()
-+ if isinstance(f2, bytes) and not isinstance(f1, bytes):
-+ f1 = f1.encode()
-+ self.assertEqual(f1, f2)
- finally:
- http.shutdown()
- httpThread.join()
-@@ -723,6 +745,7 @@
-
- def testImportFtpFile(self):
- '''Test importing a file over FTP'''
-+ from stubserver import FTPStubServer
- file = {'name':'foo', 'content':'foo bar baz qux'}
- ftp = FTPStubServer(0)
- ftp.run()
-@@ -785,7 +808,7 @@
- checksumThread = Thread(target=checksumThreadFn)
- checksumThread.start()
- try:
-- with open(random_device) as readable:
-+ with open(random_device, 'rb') as readable:
- with self.jobstore_initialized.writeFileStream(job.jobStoreID) as (writable, fileId):
- for i in range(int(partSize * partsPerFile / bufSize)):
- buf = readable.read(bufSize)
-@@ -813,8 +836,8 @@
- checksum = hashlib.md5()
- fh, path = tempfile.mkstemp()
- try:
-- with os.fdopen(fh, 'r+') as writable:
-- with open(random_device) as readable:
-+ with os.fdopen(fh, 'wb+') as writable:
-+ with open(random_device, 'rb') as readable:
- for i in range(int(partSize * partsPerFile / bufSize)):
- buf = readable.read(bufSize)
- writable.write(buf)
-@@ -842,11 +865,11 @@
- job = self.jobstore_initialized.create(self.arbitraryJob)
- nullFile = self.jobstore_initialized.writeFile('/dev/null', job.jobStoreID)
- with self.jobstore_initialized.readFileStream(nullFile) as f:
-- self.assertEquals(f.read(), "")
-+ assert not f.read()
- with self.jobstore_initialized.writeFileStream(job.jobStoreID) as (f, nullStream):
- pass
- with self.jobstore_initialized.readFileStream(nullStream) as f:
-- self.assertEquals(f.read(), "")
-+ assert not f.read()
- self.jobstore_initialized.delete(job.jobStoreID)
-
- @slow
-@@ -856,7 +879,7 @@
- dirPath = self._createTempDir()
- filePath = os.path.join(dirPath, 'large')
- hashIn = hashlib.md5()
-- with open(filePath, 'w') as f:
-+ with open(filePath, 'wb') as f:
- for i in range(0, 10):
- buf = os.urandom(self._partSize())
- f.write(buf)
-@@ -874,7 +897,7 @@
-
- # Reread the file to confirm success.
- hashOut = hashlib.md5()
-- with open(filePath, 'r') as f:
-+ with open(filePath, 'rb') as f:
- while True:
- buf = f.read(self._partSize())
- if not buf:
-@@ -962,11 +985,15 @@
- # will get blocked on the write. Technically anything
- # greater than the pipe buffer size plus the libc
- # buffer size (64K + 4K(?)) should trigger this bug,
-- # but this gives us a lot of extra room just to be
-- # sure.
-- f.write('a' * 300000)
-+ # but this gives us a lot of extra room just to be sure.
-+
-+ # python 3 requires self.fileContents to be a bytestring
-+ a = 'a'
-+ if sys.version_info >= (3, 0):
-+ a = b'a'
-+ f.write(a * 300000)
- with self.jobstore_initialized.readFileStream(fileID) as f:
-- self.assertEquals(f.read(1), "a")
-+ self.assertEquals(f.read(1), a)
- # If it times out here, there's a deadlock
-
- @abstractmethod
-@@ -1091,14 +1118,14 @@
- return url
- else:
- content = os.urandom(size)
-- with open(localFilePath, 'w') as writable:
-+ with open(localFilePath, 'wb') as writable:
- writable.write(content)
-
- return url, hashlib.md5(content).hexdigest()
-
- def _hashTestFile(self, url):
- localFilePath = FileJobStore._extractPathFromUrl(urlparse.urlparse(url))
-- with open(localFilePath, 'r') as f:
-+ with open(localFilePath, 'rb') as f:
- return hashlib.md5(f.read()).hexdigest()
-
- def _createExternalStore(self):
-@@ -1119,54 +1146,6 @@
+@@ -1151,54 +1150,6 @@
os.unlink(path)
@@ -337,61 +74,3 @@ Subject: cherry pick py3 testing fixes from upstream
@needs_aws
class AWSJobStoreTest(AbstractJobStoreTest.Test):
-@@ -1448,6 +1427,9 @@
- self.send_header("Content-type", "text/plain")
- self.send_header("Content-length", len(self.fileContents))
- self.end_headers()
-+ # python 3 requires self.fileContents to be a bytestring
-+ if sys.version_info >= (3, 0):
-+ self.fileContents = self.fileContents.encode('utf-8')
- self.wfile.write(self.fileContents)
-
-
---- toil.orig/src/toil/test/src/fileStoreTest.py
-+++ toil/src/toil/test/src/fileStoreTest.py
-@@ -1338,10 +1338,13 @@
- """
- for name, kind, clazz, value in inspect.classify_class_attrs(cls):
- if kind == 'static method':
-- method = value.__func__
-- args = inspect.getargspec(method).args
-- if args and args[0] == 'job':
-- globals()[name] = method
-+ method = value
-+ try:
-+ args = inspect.getargspec(method).args
-+ if args and args[0] == 'job':
-+ globals()[name] = method
-+ except TypeError:
-+ pass
-
-
- _exportStaticMethodAsGlobalFunctions(hidden.AbstractFileStoreTest)
---- toil.orig/src/toil/test/src/jobFileStoreTest.py
-+++ toil/src/toil/test/src/jobFileStoreTest.py
-@@ -134,13 +134,24 @@
- with open(tempFile, 'w') as fH:
- fH.write(testString)
- #Write a local copy of the file using the local file
-- outputFileStoreIds.append(job.fileStore.writeGlobalFile(tempFile))
-+ fileStoreID = job.fileStore.writeGlobalFile(tempFile)
-+
-+ # Make sure it returned a valid and correct FileID with the right size
-+ assert isinstance(fileStoreID, FileID)
-+ assert fileStoreID.size == len(testString.encode('utf-8'))
-+
-+ outputFileStoreIds.append(fileStoreID)
- else:
- #Use the writeGlobalFileStream method to write the file
- with job.fileStore.writeGlobalFileStream() as (fH, fileStoreID):
- fH.write(testString.encode('utf-8'))
- outputFileStoreIds.append(fileStoreID)
-
-+
-+ #Make sure it returned a valid and correct FileID with the right size
-+ assert isinstance(fileStoreID, FileID)
-+ assert fileStoreID.size == len(testString.encode('utf-8'))
-+
- if chainLength > 0:
- #Make a child that will read these files and check it gets the same results
- job.addChildJobFn(fileTestJob, outputFileStoreIds, testStrings, chainLength-1)
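
The recurring idiom in this refreshed patch is choosing str on Python 2 but bytes on Python 3 before writing to binary streams, since binary-mode file handles on Python 3 reject str. A minimal standalone sketch of that idiom (not part of the patch; the temp file is for illustration only):

    import sys
    import tempfile

    # Binary handles want bytes on Python 3 but accept str on Python 2,
    # so pick the matching literal at runtime, as the patched tests do.
    payload = 'bar'
    if sys.version_info >= (3, 0):
        payload = b'bar'

    with tempfile.NamedTemporaryFile(mode='wb+') as f:
        f.write(payload)
        f.seek(0)
        assert f.read() == payload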
=====================================
debian/patches/remove-pathlib2-dep
=====================================
@@ -0,0 +1,20 @@
+--- toil.orig/setup.py
++++ toil/setup.py
+@@ -47,7 +47,6 @@
+ dateutil = 'python-dateutil'
+ addict = 'addict<=2.2.0'
+ sphinx = 'sphinx==1.7.5'
+- pathlib2 = 'pathlib2==2.3.2'
+
+ core_reqs = [
+ dill,
+@@ -58,8 +57,7 @@
+ dateutil,
+ psutil,
+ subprocess32,
+- sphinx,
+- pathlib2]
++ sphinx]
+
+ mesos_reqs = [
+ addict,
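
Dropping the pathlib2 pin is safe for a Python 3-only build: pathlib has been in the standard library since Python 3.4, and pathlib2 only backports it to Python 2. A short sketch of the stdlib replacement (path is illustrative):

    from pathlib import Path  # stdlib since Python 3.4; pathlib2 backports this

    # Same API the pathlib2 backport provides on Python 2.
    print(Path('/usr/share').joinpath('doc').exists())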
=====================================
debian/patches/reproducible-build.patch deleted
=====================================
@@ -1,24 +0,0 @@
-Subject: Make the build reproducible
-From: Chris Lamb <lamby at debian.org>
-Forwarded: https://github.com/DataBiosphere/toil/pull/2562
-
---- toil-3.18.0.orig/docs/conf.py
-+++ toil-3.18.0/docs/conf.py
-@@ -18,6 +18,7 @@ import inspect
- import re
- from datetime import datetime
- import toil.version
-+import time
-
- # This makes the modules located in docs/vendor/sphinxcontrib available to import
- sphinxPath = os.path.abspath(os.path.join(os.path.pardir, os.path.dirname('docs/vendor/sphinxcontrib/')))
-@@ -100,7 +101,8 @@ master_doc = 'index'
-
- # General information about the project.
- project = u'Toil'
--copyright = u'2015 – %i UCSC Computational Genomics Lab' % datetime.now().year
-+build_date = datetime.utcfromtimestamp(int(os.environ.get('SOURCE_DATE_EPOCH', time.time())))
-+copyright = u'2015 – %i UCSC Computational Genomics Lab' % build_date.year
- author = u'UCSC Computational Genomics Lab'
-
- # The version info for the project you're documenting, acts as replacement for
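
For context, the deleted patch implemented the SOURCE_DATE_EPOCH convention from the Reproducible Builds project: if the build environment exports a fixed timestamp, use it instead of the wall clock so repeated builds embed the same copyright year. The core of that technique as a standalone sketch:

    import os
    import time
    from datetime import datetime

    # Prefer the timestamp pinned by the build environment; fall back to
    # "now" for interactive builds.
    build_date = datetime.utcfromtimestamp(
        int(os.environ.get('SOURCE_DATE_EPOCH', time.time())))
    print(u'2015 – %i UCSC Computational Genomics Lab' % build_date.year)

SOURCE_DATE_EPOCH is the standard hook honoured by Debian's build tooling for reproducible builds.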
=====================================
debian/patches/series
=====================================
@@ -5,6 +5,5 @@ no_galaxy_lib
debianize_docs
adjust_to_newer_cwltool
fix_tests
-soften-pytest-depends
soften-mesos-deps
-reproducible-build.patch
+remove-pathlib2-dep
=====================================
debian/patches/soften-mesos-deps
=====================================
@@ -2,14 +2,25 @@ Author: Michael R. Crusoe <michael.crusoe at gmail.com>
Subject: Use Debian's newer python3-psutil
--- toil.orig/setup.py
+++ toil/setup.py
-@@ -24,8 +24,8 @@
- boto3 = 'boto3>=1.7.50, <2.0'
+@@ -25,7 +25,7 @@
futures = 'futures==3.1.1'
pycryptodome = 'pycryptodome==3.5.1'
+ pymesos = 'pymesos==0.3.7'
- psutil = 'psutil==3.0.1'
-- protobuf = 'protobuf==3.5.1'
+ psutil = 'psutil>=3.0.1'
-+ protobuf = 'protobuf>=3.5.1'
azureCosmosdbTable = 'azure-cosmosdb-table==0.37.1'
azureAnsible = 'ansible[azure]==2.5.0a1'
azureStorage = 'azure-storage==0.35.1'
+@@ -58,11 +58,11 @@
+ dateutil,
+ psutil,
+ subprocess32,
+- addict,
+ sphinx,
+ pathlib2]
+
+ mesos_reqs = [
++ addict,
+ pymesos,
+ psutil]
+ aws_reqs = [
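
The effect of the softened pin: '==3.0.1' accepts exactly one release, while '>=3.0.1' also accepts Debian's newer python3-psutil. A sketch of how such setuptools-style specifiers evaluate (the version numbers are illustrative assumptions, not the actual Debian package versions):

    from pkg_resources import Requirement

    pinned = Requirement.parse('psutil==3.0.1')
    softened = Requirement.parse('psutil>=3.0.1')

    # Membership tests a candidate version string against the specifier.
    print('5.4.2' in pinned)    # False: an exact pin rejects a newer package
    print('5.4.2' in softened)  # True: the floor admits newer releases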
=====================================
debian/patches/soften-pytest-depends deleted
=====================================
@@ -1,24 +0,0 @@
-Author: Michael R. Crusoe <michael.crusoe at gmail.com>
-Description: No need to depend on testing libraries
---- toil.orig/setup.py
-+++ toil/setup.py
-@@ -45,8 +45,6 @@
- docker = 'docker==2.5.1'
- subprocess32 = 'subprocess32<=3.5.2'
- dateutil = 'python-dateutil'
-- pytest = 'pytest==3.7.4'
-- pytest_cov = 'pytest-cov==2.5.1'
-
- core_reqs = [
- dill,
-@@ -56,9 +54,7 @@
- docker,
- dateutil,
- psutil,
-- subprocess32,
-- pytest,
-- pytest_cov]
-+ subprocess32]
-
- mesos_reqs = [
- psutil,
View it on GitLab: https://salsa.debian.org/med-team/toil/commit/51fd8343cfeae5155caf8d3da8d5f1ed7127536c