[med-svn] [Git][med-team/biomaj3][upstream] New upstream version 3.1.8

Olivier Sallou gitlab at salsa.debian.org
Sat Mar 9 10:22:12 GMT 2019


Olivier Sallou pushed to branch upstream at Debian Med / biomaj3


Commits:
2fd6682f by Olivier Sallou at 2019-03-09T10:18:43Z
New upstream version 3.1.8
- - - - -


6 changed files:

- .travis.yml
- CHANGES.txt
- biomaj/process/metaprocess.py
- biomaj/workflow.py
- requirements.txt
- setup.py


Changes:

=====================================
.travis.yml
=====================================
@@ -1,36 +1,33 @@
 language: python
 sudo: false
 python:
-  - "2.7"
-  - "3.4"
-  - "3.5"
-  - "3.6"
+- '2.7'
+- '3.4'
+- '3.5'
+- '3.6'
 services:
-  - mongodb
-  - elasticsearch
-# Apply only on main branches
+- mongodb
+- elasticsearch
 branches:
   except:
-    - /^feature.*$/
-# command to install dependencies
-#before_install:
-#  - "sudo apt-get update -qq"
-#  - "sudo apt-get install -qq libldap2-dev libsasl2-dev"
+  - "/^feature.*$/"
 install:
-  - "pip install flake8"
-  - "pip install -r requirements.txt"
-  - "pip install coverage"
-  - "pip install python-coveralls"
-  - "python setup.py -q install"
-#  - "echo data_file=$TRAVIS_BUILD_DIR/.coverage >> .coveragerc"
-# command to run tests
+- pip install flake8
+- pip install -r requirements.txt
+- pip install coverage
+- pip install python-coveralls
+- python setup.py -q install
 before_script:
-  - sleep 10
-#script: nosetests --with-coverage --cover-package=biomaj -a '!network'
-#script: nosetests --with-coverage --cover-package=biomaj
+- sleep 10
 script:
-  - python setup.py test
-  - flake8 --ignore E501,E123 biomaj
-#after_success:
-#  - coveralls
-
+- nosetests -a '!network'
+- flake8 --ignore E501,E123 biomaj
+deploy:
+  provider: pypi
+  skip_existing: true
+  user: osallou
+  password:
+    secure: UrZI37Ab+Zz4XtpcTIwuWorV+7iY26Mbfga7Rw5TK0OTJsiI3oZiy0CBFkSO0IGsh8nea6mPZ8KYqzHhLN1EK+mCT0uej09jMIVlI23wGQSigwinO+2nA664bbikcKhsAy5NNlqB73VGyw4eBUf2XXV+dPMXj8aaVM0LZ148Qag=
+  distributions: sdist bdist_wheel
+  on:
+    tags: true


=====================================
CHANGES.txt
=====================================
@@ -1,3 +1,8 @@
+3.1.8:
+  Fix uncompress when saved files contain a subdirectory
+3.1.7:
+  Fix utf/ascii encoding issue with python3
+  In case of uncompress failure, put back all compressed files to avoid redownload
 3.1.6:
   Fix #100 Catch error and log error if biomaj fails to connect to InfluxDB
   Add history to update/remove operations


=====================================
biomaj/process/metaprocess.py
=====================================
@@ -139,7 +139,7 @@ class MetaProcess(threading.Thread):
 
     def set_progress(self, name, status=None):
         '''
-        Update progress on download
+        Update progress on execution
 
         :param name: name of process
         :type name: str


=====================================
biomaj/workflow.py
=====================================
@@ -11,6 +11,7 @@ import re
 import traceback
 import json
 import hashlib
+import sys
 
 from biomaj_core.utils import Utils
 from biomaj_download.downloadclient import DownloadClient
@@ -682,7 +683,11 @@ class UpdateWorkflow(Workflow):
                 # Download and extract
                 tmp_dir = tempfile.mkdtemp('biomaj')
                 rel_files = release_downloader.download(tmp_dir)
-                rel_file = open(tmp_dir + '/' + rel_files[0]['name'])
+                rel_file = None
+                if (sys.version_info > (3, 0)):
+                    rel_file = open(tmp_dir + '/' + rel_files[0]['name'], encoding='utf-8')
+                else:
+                    rel_file = open(tmp_dir + '/' + rel_files[0]['name'])
                 rel_content = rel_file.read()
                 rel_file.close()
                 shutil.rmtree(tmp_dir)
@@ -1480,13 +1485,43 @@ class UpdateWorkflow(Workflow):
             return True
         no_extract = self.session.config.get('no.extract')
         if no_extract is None or no_extract == 'false':
+            archives = []
             for file in self.downloaded_files:
                 if 'save_as' not in file:
                     file['save_as'] = file['name']
                 nb_try = 1
+                origFile = self.session.get_offline_directory() + '/' + file['save_as']
+                is_archive = False
+                if origFile.endswith('.tar.gz'):
+                    is_archive = True
+                elif origFile.endswith('.tar'):
+                    is_archive = True
+                elif origFile.endswith('.bz2'):
+                    is_archive = True
+                elif origFile.endswith('.gz'):
+                    is_archive = True
+                elif origFile.endswith('.zip'):
+                    is_archive = True
+
+                logging.info('Workflow:wf_uncompress:Uncompress:' + origFile)
+                if not os.path.exists(origFile):
+                    logging.warn('Workflow:wf_uncompress:NotExists:' + origFile)
+                    continue
+
+                tmpCompressedFile = origFile
+                if is_archive:
+                    tmpFileNameElts = file['save_as'].split('/')
+                    tmpFileNameElts[len(tmpFileNameElts) - 1] = 'tmp_' + tmpFileNameElts[len(tmpFileNameElts) - 1]
+                    tmpCompressedFile = self.session.get_offline_directory() + '/' + '/'.join(tmpFileNameElts)
+                    archives.append({'from': origFile, 'to': tmpCompressedFile})
+                else:
+                    continue
+
+                shutil.copy(origFile, tmpCompressedFile)
+
                 not_ok = True
                 while nb_try < 3 and not_ok:
-                    status = Utils.uncompress(self.session.get_offline_directory() + '/' + file['save_as'])
+                    status = Utils.uncompress(origFile)
                     if status:
                         not_ok = False
                     else:
@@ -1494,7 +1529,17 @@ class UpdateWorkflow(Workflow):
                         nb_try += 1
                 if not_ok:
                     logging.error('Workflow:wf_uncompress:Failure:' + file['name'])
+                    # Revert archive files
+                    for archive in archives:
+                        if os.path.exists(archive['to']):
+                            logging.info("Workflow:wf_uncompress:RevertArchive:" + archive['from'])
+                            shutil.move(archive['to'], archive['from'])
                     return False
+            for archive in archives:
+                if os.path.exists(archive['to']):
+                    logging.info("Workflow:wf_uncompress:RemoveAfterExtract:" + archive['to'])
+                    os.remove(archive['to'])
+
         else:
             logging.info("Workflow:wf_uncompress:NoExtract")
         return True


=====================================
requirements.txt
=====================================
@@ -1,7 +1,7 @@
 biomaj_core
 biomaj_user
-biomaj_download>=3.0.18
-biomaj_process
+biomaj_download>=3.0.20
+biomaj_process>=3.0.12
 biomaj_cli
 mock
 nose


=====================================
setup.py
=====================================
@@ -35,7 +35,7 @@ config = {
     'url': 'http://biomaj.genouest.org',
     'download_url': 'http://biomaj.genouest.org',
     'author_email': 'olivier.sallou at irisa.fr',
-    'version': '3.1.6',
+    'version': '3.1.8',
      'classifiers': [
         # How mature is this project? Common values are
         #   3 - Alpha



View it on GitLab: https://salsa.debian.org/med-team/biomaj3/commit/2fd6682f7717986c2a50c12ea76953d1ddd838e8

-- 
View it on GitLab: https://salsa.debian.org/med-team/biomaj3/commit/2fd6682f7717986c2a50c12ea76953d1ddd838e8
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20190309/4b0c362a/attachment-0001.html>


More information about the debian-med-commit mailing list