[med-svn] [Git][med-team/biomaj3][upstream] New upstream version 3.1.4
Andreas Tille
gitlab at salsa.debian.org
Fri Jun 8 12:08:55 BST 2018
Andreas Tille pushed to branch upstream at Debian Med / biomaj3
Commits:
4c891770 by Andreas Tille at 2018-06-08T10:51:32+02:00
New upstream version 3.1.4
- - - - -
7 changed files:
- CHANGES.txt
- biomaj/bank.py
- biomaj/process/metaprocess.py
- biomaj/workflow.py
- global.properties.example
- requirements.txt
- setup.py
Changes:
=====================================
CHANGES.txt
=====================================
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,3 +1,9 @@
+3.1.4:
+ Fix #88 Unset 'last_update_session' when found in pending sessions using --remove-pending
+ Add formats in bank info request
+ Add checks for some production fields before display
+ Add irods download support
+
3.1.3:
Remove post-install step for automatic upgrades, not supported by wheel package
=====================================
biomaj/bank.py
=====================================
--- a/biomaj/bank.py
+++ b/biomaj/bank.py
@@ -8,7 +8,6 @@ import json
from datetime import datetime
import redis
-from influxdb import InfluxDBClient
from biomaj.mongo_connector import MongoConnector
from biomaj.session import Session
@@ -164,7 +163,7 @@ class Bank(object):
str(last_update),
str(release)])
# Bank production info header
- prod_info.append(["Session", "Remote release", "Release", "Directory", "Freeze"])
+ prod_info.append(["Session", "Remote release", "Release", "Directory", "Freeze", "Format(s)"])
for prod in _bank['production']:
data_dir = self.config.get('data.dir')
dir_version = self.config.get('dir.version')
@@ -172,20 +171,32 @@ class Bank(object):
data_dir = prod['data.dir']
if 'dir.version' in prod:
dir_version = prod['dir.version']
+ if not prod['prod_dir'] or not dir_version or not data_dir:
+ continue
release_dir = os.path.join(data_dir,
dir_version,
prod['prod_dir'])
date = datetime.fromtimestamp(prod['session']).strftime('%Y-%m-%d %H:%M:%S')
+ formats = ""
+ # Check the value exists, is not empty, and is a list.
+ if 'formats' in prod and prod['formats'] and isinstance(prod['formats'], list):
+ formats = str(','.join(prod['formats']))
prod_info.append([date,
prod['remoterelease'],
prod['release'],
release_dir,
- 'yes' if 'freeze' in prod and prod['freeze'] else 'no'])
+ 'yes' if 'freeze' in prod and prod['freeze'] else 'no',
+ formats])
# Bank pending info header
if 'pending' in _bank and len(_bank['pending']) > 0:
pend_info.append(["Pending release", "Last run"])
for pending in _bank['pending']:
- run = datetime.fromtimestamp(pending['id']).strftime('%Y-%m-%d %H:%M:%S')
+ run = ""
+ try:
+ run = datetime.fromtimestamp(pending['id']).strftime('%Y-%m-%d %H:%M:%S')
+ except Exception as e:
+ logging.error('BANK:ERROR:invalid pending id: ' + str(pending['id']))
+ logging.error('BANK:ERROR:invalid pending id: ' + str(e))
pend_info.append([pending['release'], run])
info['info'] = bank_info
@@ -959,6 +970,9 @@ class Bank(object):
if 'pending' not in self.bank:
return True
pendings = self.bank['pending']
+ last_update = None
+ if 'last_update_session' in self.bank:
+ last_update = self.bank['last_update_session']
for pending in pendings:
# Only work with pending for argument release
@@ -979,6 +993,10 @@ class Bank(object):
logging.debug("Remove:Pending:Dir:" + session.get_full_release_directory())
shutil.rmtree(session.get_full_release_directory())
self.remove_session(pending['id'])
+ if last_update and last_update == pending_session_id:
+ self.banks.update({'name': self.name},
+ {'$unset': {'last_update_session': ''}})
+
# If no release ask for deletion, remove all pending
if not release:
self.banks.update({'name': self.name}, {'$set': {'pending': []}})
@@ -1097,6 +1115,11 @@ class Bank(object):
'''
Send stats to Influxdb if enabled
'''
+ try:
+ from influxdb import InfluxDBClient
+ except Exception as e:
+ logging.error('Cannot load influxdb library' + str(e))
+ return
db_host = self.config.get('influxdb.host', default=None)
if not db_host:
return
=====================================
biomaj/process/metaprocess.py
=====================================
--- a/biomaj/process/metaprocess.py
+++ b/biomaj/process/metaprocess.py
@@ -193,6 +193,10 @@ class MetaProcess(threading.Thread):
# bank_env=None, log_dir=None,
# rabbit_mq=None, rabbit_mq_port=5672, rabbit_mq_user=None, rabbit_mq_password=None, rabbit_mq_virtualhost=None,
# proxy=None, bank=None):
+ proxy = self.bank.config.get('micro.biomaj.proxy.process')
+ if not proxy:
+ proxy = self.bank.config.get('micro.biomaj.proxy')
+
use_sudo = self.bank.config.get_bool('docker.sudo', default=True)
bmaj_process = RemoteProcess(
meta + '_' + name,
@@ -210,7 +214,7 @@ class MetaProcess(threading.Thread):
rabbit_mq_user=self.bank.config.get('micro.biomaj.rabbit_mq_user'),
rabbit_mq_password=self.bank.config.get('micro.biomaj.rabbit_mq_password'),
rabbit_mq_virtualhost=self.bank.config.get('micro.biomaj.rabbit_mq_virtualhost', default='/'),
- proxy=self.bank.config.get('micro.biomaj.proxy'),
+ proxy=proxy,
bank=self.bank.name
)
else:
=====================================
biomaj/workflow.py
=====================================
--- a/biomaj/workflow.py
+++ b/biomaj/workflow.py
@@ -549,7 +549,11 @@ class UpdateWorkflow(Workflow):
)
else:
dserv = DownloadClient()
- proxy = self.bank.config.get('micro.biomaj.proxy')
+
+ proxy = self.bank.config.get('micro.biomaj.proxy.download')
+ if not proxy:
+ proxy = self.bank.config.get('micro.biomaj.proxy')
+
session = dserv.create_session(self.name, proxy)
logging.info("Workflow:wf_release:DownloadSession:" + str(session))
@@ -586,13 +590,6 @@ class UpdateWorkflow(Workflow):
params = None
keys = cf.get('url.params')
- if keys is not None:
- params = {}
- keys = keys.split(',')
- for key in keys:
- param = cf.get(key.strip() + '.value')
- params[key.strip()] = param.strip()
-
credentials = cf.get('server.credentials')
if cf.get('release.credentials') is not None:
credentials = cf.get('release.credentials')
@@ -600,12 +597,32 @@ class UpdateWorkflow(Workflow):
save_as = None
method = 'GET'
if protocol == 'directhttp' or protocol == 'directhttps' or protocol == 'directftp':
+ keys = cf.get('url.params')
+ if keys is not None:
+ params = {}
+ keys = keys.split(',')
+ for key in keys:
+ param = cf.get(key.strip() + '.value')
+ params[key.strip()] = param.strip()
+
save_as = cf.get('release.file')
remotes = [remote_dir]
remote_dir = '/'
method = cf.get('url.method')
if cf.get('release.url.method') is not None:
method = cf.get('release.url.method')
+ # add params for irods to get port, password, user, zone
+ if protocol == 'irods':
+ keys = None
+ keys = str(str(cf.get('irods.user')) + ',' + str(cf.get('irods.password')) + ',' + str(cf.get('irods.port')) + ',' + str(cf.get('irods.protocol')))
+ if keys is not None:
+ params = {}
+ keys = str(keys).split(',')
+ params['user'] = str(cf.get('irods.user')).strip()
+ params['password'] = str(cf.get('irods.password')).strip()
+ params['port'] = str(cf.get('irods.port')).strip()
+ params['protocol'] = str(cf.get('irods.protocol')).strip()
+ params['zone'] = str(cf.get('irods.zone')).strip()
release_downloader = dserv.get_handler(
protocol,
@@ -938,7 +955,10 @@ class UpdateWorkflow(Workflow):
if pool_size:
dserv.set_queue_size(int(pool_size))
- proxy = self.bank.config.get('micro.biomaj.proxy')
+ proxy = self.bank.config.get('micro.biomaj.proxy.download')
+ if not proxy:
+ proxy = self.bank.config.get('micro.biomaj.proxy')
+
session = dserv.create_session(self.name, proxy)
logging.info("Workflow:wf_download:DownloadSession:" + str(session))
@@ -1055,14 +1075,6 @@ class UpdateWorkflow(Workflow):
server = cf.get('server')
params = None
- keys = cf.get('url.params')
- if keys is not None:
- params = {}
- keys = keys.split(',')
- for key in keys:
- param = cf.get(key.strip() + '.value')
- params[key.strip()] = param.strip()
-
method = cf.get('url.method')
if method is None:
method = 'GET'
@@ -1071,8 +1083,28 @@ class UpdateWorkflow(Workflow):
remote_dir = cf.get('remote.dir')
if protocol == 'directhttp' or protocol == 'directhttps' or protocol == 'directftp':
+ keys = cf.get('url.params')
+ if keys is not None:
+ params = {}
+ keys = keys.split(',')
+ for key in keys:
+ param = cf.get(key.strip() + '.value')
+ params[key.strip()] = param.strip()
+
remotes = [cf.get('remote.dir')[:-1]]
remote_dir = '/'
+ # add params for irods to get port, password, user, zone
+ if protocol == 'irods':
+ keys = None
+ keys = str(str(cf.get('irods.user')) + ',' + str(cf.get('irods.password')) + ',' + str(cf.get('irods.port')) + ',' + str(cf.get('irods.protocol')))
+ if keys is not None:
+ params = {}
+ keys = str(keys).split(',')
+ params['user'] = str(cf.get('irods.user')).strip()
+ params['password'] = str(cf.get('irods.password')).strip()
+ params['port'] = str(cf.get('irods.port')).strip()
+ params['protocol'] = str(cf.get('irods.protocol')).strip()
+ params['zone'] = str(cf.get('irods.zone')).strip()
save_as = cf.get('target.name')
@@ -1311,7 +1343,10 @@ class UpdateWorkflow(Workflow):
if pool_size:
dserv.set_queue_size(int(pool_size))
- proxy = self.bank.config.get('micro.biomaj.proxy')
+ proxy = self.bank.config.get('micro.biomaj.proxy.download')
+ if not proxy:
+ proxy = self.bank.config.get('micro.biomaj.proxy')
+
session = dserv.create_session(self.name, proxy)
logging.info("Workflow:wf_download:DownloadSession:" + str(session))
@@ -1636,14 +1671,17 @@ class UpdateWorkflow(Workflow):
nb_prod = len(self.bank.bank['production'])
# save session during delete workflow
keep_session = self.bank.session
-
+ old_deleted = False
if nb_prod > keep:
for prod in self.bank.bank['production']:
if prod['release'] == keep_session.get('release'):
+ logging.info('Release %s tagged as keep_session, skipping' % (str(prod['release'])))
continue
if 'freeze' in prod and prod['freeze']:
+ logging.info('Release %s tagged as freezed, skipping' % (str(prod['release'])))
continue
if self.bank.bank['current'] == prod['session']:
+ logging.info('Release %s tagged as current, skipping' % (str(prod['release'])))
continue
if nb_prod - keep > 0:
nb_prod -= 1
@@ -1670,10 +1708,14 @@ class UpdateWorkflow(Workflow):
res = self.bank.start_remove(session)
if not res:
logging.error('Workflow:wf_delete_old:ErrorDelete:' + prod['release'])
+ else:
+ old_deleted = True
else:
break
# Set session back
self.bank.session = keep_session
+ if old_deleted:
+ self.bank.session._session['remove'] = True
return True
=====================================
global.properties.example
=====================================
--- a/global.properties.example
+++ b/global.properties.example
@@ -52,6 +52,9 @@ influxdb.db=biomaj
#micro.biomaj.service.user=1
#micro.biomaj.service.daemon=1
+## Optional
+# micro.biomaj.proxy.[user,cron,release,daemon,download,process]=http://127.0.0.1:5000
+
auto_publish=1
########################
=====================================
requirements.txt
=====================================
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
biomaj_core
biomaj_user
-biomaj_download>=3.0.10
+biomaj_download>=3.0.17
biomaj_process
biomaj_cli
mock
=====================================
setup.py
=====================================
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ config = {
'url': 'http://biomaj.genouest.org',
'download_url': 'http://biomaj.genouest.org',
'author_email': 'olivier.sallou at irisa.fr',
- 'version': '3.1.3',
+ 'version': '3.1.4',
'classifiers': [
# How mature is this project? Common values are
# 3 - Alpha
View it on GitLab: https://salsa.debian.org/med-team/biomaj3/commit/4c891770b000954594fbc7214c8c0b013493da50
--
View it on GitLab: https://salsa.debian.org/med-team/biomaj3/commit/4c891770b000954594fbc7214c8c0b013493da50
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20180608/9cce85fb/attachment-0001.html>
More information about the debian-med-commit
mailing list