[med-svn] [Git][med-team/biomaj3][upstream] New upstream version 3.1.18

Andreas Tille gitlab at salsa.debian.org
Sun Jan 17 08:51:19 GMT 2021



Andreas Tille pushed to branch upstream at Debian Med / biomaj3


Commits:
3710e5bc by Andreas Tille at 2021-01-17T09:28:32+01:00
New upstream version 3.1.18
- - - - -


16 changed files:

- .travis.yml
- CHANGES.txt
- README.md
- biomaj/bank.py
- biomaj/notify.py
- biomaj/session.py
- biomaj/workflow.py
- docs/global.advanced.properties
- global.properties.example
- requirements.txt
- setup.py
- tests/biomaj_tests.py
- tests/global.properties
- + tools/examples/body.jinja2
- tools/examples/global.properties
- + tools/examples/subject.jinja2


Changes:

=====================================
.travis.yml
=====================================
@@ -1,7 +1,6 @@
 language: python
 sudo: false
 python:
-- '2.7'
 - '3.6'
 - '3.7'
 - '3.8'


=====================================
CHANGES.txt
=====================================
@@ -1,3 +1,31 @@
+3.1.18:
+  Python 3 support only
+  If multiple files match release.file, take most recent one
+  If mail template not found, log and use default
+3.1.17:
+  Fix regression when saving file with a different structure such as xxx/(FASTA)/(file.txt) to save under FASTA/file.txt
+  Send removal mail for --remove-all option
+  #119 add support for custom notification emails with templates and log tail/attach options
+    New optional fields in global.properties (or per bank properties):
+      mail.body.tail=0
+      mail.body.attach=9000000
+      mail.template.subject=file_path_to_subject.jinja2
+      mail.template.body=file_path_to_body.jinja2
+    Available variables:
+            'log_file': path to log file
+            'log_tail': last lines of log file
+            'bank': bank name
+            'release': release related to operation
+            'status': operation status (true/false)
+            'modified': did operation modify bank (true/false)
+            'update': was operation an update
+            'remove': was operation a removal
+3.1.16:
+  Fix status check of process for --from-task postprocess
+  #118 Rename protocol options to options
+  Add more debug logging
+3.1.15:
+  #117 Fix incorrect behavior with --post-process
 3.1.14:
   Add repair option
 3.1.13:


=====================================
README.md
=====================================
@@ -16,7 +16,8 @@ downloaded again only if a change is detected.
 
 More documentation is available in wiki page.
 
-BioMAJ is python 2 and 3 compatible.
+BioMAJ is python 2 and 3 compatible until release 3.1.17.
+After 3.1.17, only python 3 is supported.
 
 Getting started
 ===============
@@ -124,9 +125,9 @@ The tools/process contains example process files (python and shell).
 Docker
 ======
 
-You can use BioMAJ with Docker (genouest/biomaj)
+You can use BioMAJ with Docker (osallou/biomaj-docker)
 
-    docker pull genouest/biomaj
+    docker pull osallou/biomaj-docker
     docker pull mongo
     docker run --name biomaj-mongodb -d mongo
     # Wait ~10 seconds for mongo to initialize
@@ -134,7 +135,6 @@ You can use BioMAJ with Docker (genouest/biomaj)
     # *local_path*
     docker run --rm -v local_path:/var/lib/biomaj --link biomaj-mongodb:biomaj-mongodb osallou/biomaj-docker --help
 
-
 Copy your bank properties in directory *local_path*/conf and post-processes (if any) in *local_path*/process
 
 You can override global.properties in /etc/biomaj/global.properties (-v xx/global.properties:/etc/biomaj/global.properties)


=====================================
biomaj/bank.py
=====================================
@@ -16,6 +16,7 @@ from biomaj.workflow import RemoveWorkflow
 from biomaj.workflow import RepairWorkflow
 from biomaj.workflow import Workflow
 from biomaj.workflow import ReleaseCheckWorkflow
+from biomaj.notify import Notify
 from biomaj_core.config import BiomajConfig
 from biomaj.options import Options
 from biomaj.process.processfactory import ProcessFactory
@@ -957,30 +958,36 @@ class Bank(object):
                 logging.error('Cannot remove bank, some production directories are freezed, use force if needed')
                 return False
 
+        self.load_session()
+
         self.banks.remove({'name': self.name})
         BmajIndex.delete_all_bank(self.name)
         bank_data_dir = self.get_data_dir()
         logging.warn('DELETE ' + bank_data_dir)
+        err_msg = ''
         if os.path.exists(bank_data_dir):
             try:
                 shutil.rmtree(bank_data_dir)
             except Exception:
                 logging.exception('Failed to delete bank directory: ' + bank_data_dir)
-                logging.error('Bank will be deleted but some files/dirs may still be present on system, you can safely manually delete them')
+                logging.error('Bank will be deleted but some files/dirs may still be present on %s, you can safely manually delete them' % bank_data_dir)
+                err_msg = 'Bank will be deleted but some files/dirs may still be present on %s, you can safely manually delete them' % bank_data_dir
         bank_offline_dir = os.path.join(self.config.get('data.dir'), self.config.get('offline.dir.name'))
         if os.path.exists(bank_offline_dir):
             try:
                 shutil.rmtree(bank_offline_dir)
             except Exception:
                 logging.exception('Failed to delete bank offline directory: ' + bank_offline_dir)
-                logging.error('Bank will be deleted but some files/dirs may still be present on system, you can safely manually delete them')
+                logging.error('Bank will be deleted but some files/dirs may still be present on %s, you can safely manually delete them' % bank_offline_dir)
+                err_msg = 'Bank will be deleted but some files/dirs may still be present on %s, you can safely manually delete them' % bank_offline_dir
         bank_log_dir = os.path.join(self.config.get('log.dir'), self.name)
         if os.path.exists(bank_log_dir) and self.no_log:
             try:
                 shutil.rmtree(bank_log_dir)
             except Exception:
                 logging.exception('Failed to delete bank log directory: ' + bank_log_dir)
-                logging.error('Bank will be deleted but some files/dirs may still be present on system, you can safely manually delete them')
+                logging.error('Bank will be deleted but some files/dirs may still be present on %s, you can safely manually delete them' % bank_log_dir)
+                err_msg = 'Bank will be deleted but some files/dirs may still be present on %s, you can safely manually delete them' % bank_log_dir
         end_time = datetime.now()
         end_time = time.mktime(end_time.timetuple())
         self.history.insert({
@@ -991,6 +998,12 @@ class Bank(object):
             'action': 'remove',
             'updated': None
         })
+
+        self.session._session['release'] = 'all'
+        self.session._session['status'][Workflow.FLOW_OVER] = True
+        self.session._session['update'] = False
+        self.session._session['remove'] = True
+        Notify.notifyBankAction(self, with_log=False, with_msg=err_msg)
         return True
 
     def get_status(self):
@@ -1218,7 +1231,7 @@ class Bank(object):
             set_to_false = False
             for task in self.session.flow:
                 # If task was in False status (KO) and we ask to start after this task, exit
-                if not set_to_false and not self.session.get_status(task['name']) and \
+                if not set_to_false and self.session.get_status(task['name']) is False and \
                         task['name'] != self.options.get_option('from_task'):
                     logging.error(
                         'Previous task ' + task['name'] + ' was not successful, cannot restart after this task')


=====================================
biomaj/notify.py
=====================================
@@ -4,7 +4,13 @@ import smtplib
 import email.utils
 from biomaj.workflow import Workflow
 import logging
+import os
 import sys
+from email.mime.base import MIMEBase
+from email.mime.multipart import MIMEMultipart
+from email import encoders
+from jinja2 import Template
+
 if sys.version < '3':
     from email.MIMEText import MIMEText
 else:
@@ -17,7 +23,7 @@ class Notify(object):
     """
 
     @staticmethod
-    def notifyBankAction(bank):
+    def notifyBankAction(bank, with_log=True, with_msg=''):
         if not bank.config.get('mail.smtp.host') or bank.session is None:
             logging.info('Notify:none')
             return
@@ -29,32 +35,111 @@ class Notify(object):
         logging.info('Notify:' + bank.config.get('mail.admin'))
         mfrom = bank.config.get('mail.from')
         log_file = bank.config.log_file
-        msg = MIMEText('')
-        if log_file:
-            fp = None
-            if sys.version < '3':
-                fp = open(log_file, 'rb')
+
+        msg = MIMEMultipart()
+
+        log_tail = ''
+        log_file_size = 0
+        if log_file and with_log and os.path.exists(log_file):
+            log_file_size = os.path.getsize(log_file)
+            max_tail = bank.config.get('mail.body.tail', default=None)
+            if max_tail:
+                max_tail_length = min(2000000, log_file_size)
+                try:
+                    max_tail_length = int(max_tail)
+                except Exception:
+                    logging.exception("invalid mail.body.tail value")
+                if max_tail_length > 0:
+                    fp = None
+                    if sys.version < '3':
+                        fp = open(log_file, 'rb')
+                    else:
+                        fp = open(log_file, 'r')
+                    log_tail = fp.read(max_tail_length)
+                    fp.close()
+
+        log_attach = bank.config.get('mail.body.attach', default=None)
+        if log_attach and with_log and os.path.exists(log_file):
+            log_attach_max = 0
+            try:
+                log_attach_max = int(log_attach)
+            except Exception:
+                logging.exception("invalid mail.body.attach value")
+            if log_attach_max > 0 and log_file_size < log_attach_max:
+                logging.debug("attach log file to mail")
+                part = None
+                with open(log_file, "rb") as attachment:
+                    # Add file as application/octet-stream
+                    # Email client can usually download this automatically as attachment
+                    part = MIMEBase("application", "octet-stream")
+                    part.set_payload(attachment.read())
+
+                if part:
+                    # Encode file in ASCII characters to send by email
+                    encoders.encode_base64(part)
+                    part.add_header(
+                        "Content-Disposition",
+                        "attachment; filename=%s" % log_file,
+                    )
+                    msg.attach(part)
+
+        template_info = {
+            'message': with_msg,
+            'log_file': log_file,
+            'log_tail': log_tail,
+            'bank': bank.name,
+            'release': str(bank.session.get('release')),
+            'status': bank.session.get_status(Workflow.FLOW_OVER),
+            'modified': bank.session.get('update') or bank.session.get('remove'),
+            'update': bank.session.get('update'),
+            'remove': bank.session.get('remove')
+        }
+
+        template_file = bank.config.get('mail.template.subject', default=None)
+        if template_file and not os.path.exists(template_file):
+            logging.error('Template file not found: %s' % template_file)
+            template_file = None
+        if template_file:
+            template = None
+            with open(template_file) as file_:
+                template = Template(file_.read())
+            if template:
+                msg['Subject'] = template.render(template_info)
             else:
-                fp = open(log_file, 'r')
-            msg = MIMEText(fp.read(2000000))
-            fp.close()
+                logging.error('Failed to render email subject template')
+                msg['Subject'] = 'BANK[' + bank.name + '] - STATUS[' + str(bank.session.get_status(Workflow.FLOW_OVER)) + '] - UPDATE[' + str(bank.session.get('update')) + '] - REMOVE[' + str(bank.session.get('remove')) + ']' + ' - RELEASE[' + str(bank.session.get('release')) + ']'
+        else:
+            msg['Subject'] = 'BANK[' + bank.name + '] - STATUS[' + str(bank.session.get_status(Workflow.FLOW_OVER)) + '] - UPDATE[' + str(bank.session.get('update')) + '] - REMOVE[' + str(bank.session.get('remove')) + ']' + ' - RELEASE[' + str(bank.session.get('release')) + ']'
 
-        msg['From'] = email.utils.formataddr(('Author', mfrom))
-        msg['Subject'] = 'BANK[' + bank.name + '] - STATUS[' + str(bank.session.get_status(Workflow.FLOW_OVER)) + '] - UPDATE[' + str(bank.session.get('update')) + '] - REMOVE[' + str(bank.session.get('remove')) + ']' + ' - RELEASE[' + str(bank.session.get('release')) + ']'
+        template_file = bank.config.get('mail.template.body', None)
+        if template_file and not os.path.exists(template_file):
+            logging.error('Template file not found: %s' % template_file)
+            template_file = None
+        if template_file:
+            template = None
+            with open(template_file) as file_:
+                template = Template(file_.read())
+            if template:
+                msg.attach(MIMEText(template.render(template_info), "plain"))
+            else:
+                logging.error('Failed to render email subject template')
+        else:
+            msg.attach(MIMEText(log_tail, "plain"))
 
+        msg['From'] = email.utils.formataddr(('BioMAJ', mfrom))
         logging.info(msg['subject'])
         server = None
         for mto in admin_list:
             msg['To'] = email.utils.formataddr(('Recipient', mto))
             try:
-                server = smtplib.SMTP(bank.config.get('mail.smtp.host'))
+                server = smtplib.SMTP(bank.config.get('mail.smtp.host'), int(bank.config.get('mail.smtp.port', default=25)))
                 if bank.config.get('mail.tls') is not None and str(bank.config.get('mail.tls')) == 'true':
                     server.starttls()
                 if bank.config.get('mail.user') is not None and str(bank.config.get('mail.user')) != '':
                     server.login(bank.config.get('mail.user'), bank.config.get('mail.password'))
                 server.sendmail(mfrom, [mto], msg.as_string())
             except Exception as e:
-                logging.error('Could not send email: ' + str(e))
+                logging.exception('Could not send email: ' + str(e))
             finally:
                 if server is not None:
                     server.quit()


=====================================
biomaj/session.py
=====================================
@@ -130,8 +130,12 @@ class Session(object):
                 self._session['process']['postprocess'] = self.reload_postprocess_in_order(self._session['process']['postprocess'])
                 self.reset_meta(self._session['process']['postprocess'][proc])
             else:
+                isInMeta = False
                 for elt in list(self._session['process']['postprocess'].keys()):
-                    self.reset_meta(self._session['process']['postprocess'][elt], proc)
+                    if isInMeta:
+                        self.reset_meta(self._session['process']['postprocess'][elt], None)
+                    else:
+                        isInMeta = self.reset_meta(self._session['process']['postprocess'][elt], proc)
         elif type_proc == Workflow.FLOW_PREPROCESS:
             self._session['process']['preprocess'] = self.reload_in_order('db.pre.process', self._session['process']['preprocess'])
             self.reset_meta(self._session['process']['preprocess'])
@@ -147,9 +151,12 @@ class Session(object):
         if proc in metas:
             for metaproc in list(metas[proc].keys()):
                 self.reset_process(metas[proc], metaproc)
+                return True
         else:
             for meta in list(metas.keys()):
                 self.reset_process(metas[meta], proc)
+                return True
+        return False
 
     def reset_process(self, processes, proc=None):
         """


=====================================
biomaj/workflow.py
=====================================
@@ -707,18 +707,18 @@ class UpdateWorkflow(Workflow):
                     params['protocol'] = str(cf.get('irods.protocol')).strip()
                     params['zone'] = str(cf.get('irods.zone')).strip()
 
-            # Protocol options: as for params, a field contains the name
+            # Options: as for params, a field contains the name
             # of the options (options.names) and the values are in another
             # field named options.<option_name>.
-            protocol_options = {}
+            options = {}
             option_names = cf.get('options.names')
             if option_names is not None:
                 option_names = option_names.split(',')
                 for option_name in option_names:
                     option_name = option_name.strip()
                     param = cf.get('options.' + option_name)
-                    protocol_options[option_name] = param.strip()
-                logging.debug("Protocol options: " + str(protocol_options))
+                    options[option_name] = param.strip()
+                logging.debug("Options: " + str(options))
 
             release_downloader = dserv.get_handler(
                 protocol,
@@ -733,7 +733,7 @@ class UpdateWorkflow(Workflow):
                 save_as=save_as,
                 timeout_download=cf.get('timeout.download'),
                 offline_dir=self.session.get_offline_directory(),
-                protocol_options=protocol_options
+                options=options
             )
 
             if protocol in ['directftp', 'directftps', 'directhttp', 'directhttps']:
@@ -753,23 +753,31 @@ class UpdateWorkflow(Workflow):
                 return False
 
             release_downloader.match([cf.get('release.file')], file_list, dir_list)
+
             if len(release_downloader.files_to_download) == 0:
                 logging.error('release.file defined but does not match any file')
                 self._close_download_service(dserv)
                 return False
-            if len(release_downloader.files_to_download) > 1:
-                logging.error('release.file defined but matches multiple files')
-                self._close_download_service(dserv)
-                return False
+
             if cf.get('release.regexp') is None or not cf.get('release.regexp'):
                 # Try to get from regexp in file name
-                rel = re.search(cf.get('release.file'), release_downloader.files_to_download[0]['name'])
+                rel = None
+                # if multiple files match release, get most recent file
+                if len(release_downloader.files_to_download) > 1:
+                    most_recent = Utils.get_more_recent_file(release_downloader.files_to_download)
+                    rel = re.search(cf.get('release.file'), most_recent['file']['name'])
+                else:
+                    rel = re.search(cf.get('release.file'), release_downloader.files_to_download[0]['name'])
                 if rel is None:
-                    logging.error('release.file defined but does not match any file')
+                    logging.error('release.file defined but could not match any file')
                     self._close_download_service(dserv)
                     return False
                 release = rel.group(1)
             else:
+                if len(release_downloader.files_to_download) > 1:
+                    logging.error('release.file defined but matches multiple files')
+                    self._close_download_service(dserv)
+                    return False
                 # Download and extract
                 tmp_dir = tempfile.mkdtemp('biomaj')
                 rel_files = release_downloader.download(tmp_dir)
@@ -879,14 +887,16 @@ class UpdateWorkflow(Workflow):
 
     def _load_local_files_from_session(self, session_id):
         """
-        Load lccal files for sessions from cache directory
+        Load local files for sessions from cache directory
         """
         cache_dir = self.bank.config.get('cache.dir')
         f_local_files = None
         file_path = os.path.join(cache_dir, 'local_files_' + str(session_id))
         if not os.path.exists(file_path):
+            logging.info("Workflow:wf_download:Cache:No cache file found for session %s, skipping" % (session_id))
             return f_local_files
 
+        logging.info("Workflow:wf_download:Cache:using %s" % (file_path))
         with open(file_path) as data_file:
             f_local_files = json.load(data_file)
 
@@ -1159,18 +1169,18 @@ class UpdateWorkflow(Workflow):
                 if cf.get('remote.file.' + str(i) + '.name'):
                     save_as = cf.get('remote.file.' + str(i) + '.name')
 
-                # Protocol options: as for params, a field contains the name
+                # Options: as for params, a field contains the name
                 # of the options (options.names) and the values are in another
                 # field named options.<option_name>.
-                protocol_options = {}
+                options = {}
                 option_names = cf.get('remote.file.' + str(i) + '.options.names')
                 if option_names is not None:
                     option_names = option_names.split(',')
                     for option_name in option_names:
                         option_name = option_name.strip()
                         param = cf.get('remote.file.' + str(i) + '.options.' + option_name)
-                        protocol_options[option_name] = param.strip()
-                    logging.debug("Protocol options: " + str(protocol_options))
+                        options[option_name] = param.strip()
+                    logging.debug("Options: " + str(options))
 
                 subdownloader = dserv.get_handler(
                     protocol,
@@ -1185,7 +1195,7 @@ class UpdateWorkflow(Workflow):
                     save_as=save_as,
                     timeout_download=cf.get('timeout.download'),
                     offline_dir=self.session.get_offline_directory(),
-                    protocol_options=protocol_options
+                    options=options
                 )
                 subdownloader.set_files_to_download(remotes)
 
@@ -1235,18 +1245,18 @@ class UpdateWorkflow(Workflow):
 
             save_as = cf.get('target.name')
 
-            # Protocol options: as for params, a field contains the name
+            # Options: as for params, a field contains the name
             # of the options (options.names) and the values are in another
             # field named options.<option_name>.
-            protocol_options = {}
+            options = {}
             option_names = cf.get('options.names')
             if option_names is not None:
                 option_names = option_names.split(',')
                 for option_name in option_names:
                     option_name = option_name.strip()
                     param = cf.get('options.' + option_name)
-                    protocol_options[option_name] = param.strip()
-                logging.debug("Protocol options: " + str(protocol_options))
+                    options[option_name] = param.strip()
+                logging.debug("Options: " + str(options))
 
             downloader = dserv.get_handler(
                 protocol,
@@ -1261,7 +1271,7 @@ class UpdateWorkflow(Workflow):
                 save_as=save_as,
                 timeout_download=cf.get('timeout.download'),
                 offline_dir=self.session.get_offline_directory(),
-                protocol_options=protocol_options
+                options=options
             )
 
             if protocol in ['directftp', 'directftps', 'directhttp', 'directhttps']:
@@ -1308,17 +1318,17 @@ class UpdateWorkflow(Workflow):
             for f in downloader.files_to_download:
                 if 'save_as' not in f or not f['save_as']:
                     f['save_as'] = f['name']
-                    for p in cf.get('remote.files', default='.*').split():
-                        if p == '.*' or p == '**/*':
-                            continue
-                        if p.startswith('^'):
-                            p = p.replace('^', '^/')
-                        else:
-                            p = '/' + p
-                        res = re.match(p, f['name'])
-                        if res is not None and res.groups() is not None and len(res.groups()) >= 1:
-                            f['save_as'] = '/'.join(res.groups())
-                            break
+                for p in cf.get('remote.files', default='.*').split():
+                    if p == '.*' or p == '**/*':
+                        continue
+                    if p.startswith('^'):
+                        p = p.replace('^', '^/')
+                    else:
+                        p = '/' + p
+                    res = re.match(p, f['name'])
+                    if res is not None and res.groups() is not None and len(res.groups()) >= 1:
+                        f['save_as'] = '/'.join(res.groups())
+                        break
 
             files_to_download += downloader.files_to_download
 
@@ -1458,6 +1468,8 @@ class UpdateWorkflow(Workflow):
             last_production_files = None
             if len(last_production_session['sessions']) > 0:
                 last_production_files = self._load_local_files_from_session(last_production_session['sessions'][0]['id'])
+            else:
+                logging.warn('Workflow:wf_download:no session found for last production id %s' % (last_production['session']))
 
             if not cf.get_bool('copy.skip', default=False):
                 for downloader in downloaders:
@@ -1475,6 +1487,7 @@ class UpdateWorkflow(Workflow):
             logging.debug('Workflow:wf_download:Copy files from ' + last_production_dir)
             for downloader in downloaders:
                 copied_files += downloader.files_to_copy
+                logging.info('Workflow:wf_download:Copying %d files from %s' % (len(downloader.files_to_copy), last_production_dir))
                 Utils.copy_files(
                     downloader.files_to_copy, offline_dir,
                     use_hardlinks=cf.get_bool('use_hardlinks', default=False)
@@ -1520,7 +1533,7 @@ class UpdateWorkflow(Workflow):
                 message.bank = self.name
                 message.session = session
                 message.local_dir = offline_dir
-                message.protocol_options.update(downloader.protocol_options)
+                message.options.update(downloader.options)
                 remote_file = downmessage_pb2.DownloadFile.RemoteFile()
                 protocol = downloader.protocol
                 remote_file.protocol = downmessage_pb2.DownloadFile.Protocol.Value(protocol.upper())


=====================================
docs/global.advanced.properties
=====================================
@@ -45,12 +45,20 @@ auto_publish=1
 #Uncomment thes lines if you want receive mail when the workflow is finished
 
 mail.smtp.host=
-#mail.stmp.host=
+#mail.smtp.port=25
 mail.admin=
 mail.from=biomaj at localhost
 mail.user=
 mail.password=
 mail.tls=
+# tail last X bytes of log in mail body , 0 = no tail
+# mail.body.tail=2000000
+# attach log file if size < X bytes, 0 for no attach
+#mail.body.attach=4000000
+# path to jinja template for subject, leave empty for defaults
+#mail.template.subject=
+# path to jinja template for body, leave empty for default
+#mail.template.body=
 
 #---------------------
 #Proxy authentification


=====================================
global.properties.example
=====================================
@@ -88,6 +88,9 @@ mail.user=
 mail.password=
 mail.tls=true
 
+mail.body.tail=2000000
+mail.body.attach=9000000
+
 #---------------------
 #Proxy authentification
 #---------------------


=====================================
requirements.txt
=====================================
@@ -1,6 +1,6 @@
-biomaj_core>=3.0.19
+biomaj_core>=3.0.24
 biomaj_user
-biomaj_download>=3.1.0
+biomaj_download>=3.2.1
 biomaj_process>=3.0.12
 biomaj_cli
 mock
@@ -16,3 +16,4 @@ requests
 redis
 influxdb
 Yapsy==1.12.2
+Jinja2


=====================================
setup.py
=====================================
@@ -36,7 +36,7 @@ config = {
     'url': 'http://biomaj.genouest.org',
     'download_url': 'http://biomaj.genouest.org',
     'author_email': 'olivier.sallou at irisa.fr',
-    'version': '3.1.14',
+    'version': '3.1.18',
      'classifiers': [
         # How mature is this project? Common values are
         #   3 - Alpha
@@ -53,10 +53,8 @@ config = {
         'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
         # Specify the Python versions you support here. In particular, ensure
         # that you indicate whether you support Python 2, Python 3 or both.
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4'
+        'Programming Language :: Python :: 3.6'
     ],
     'install_requires': [
                          'biomaj_cli',


=====================================
tests/biomaj_tests.py
=====================================
@@ -244,6 +244,7 @@ class TestBiomajSetup(unittest.TestCase):
     self.assertTrue(res)
     self.assertTrue(b.session._session['release'] is not None)
 
+  @attr('network')
   def test_remove_session(self):
     b = Bank('alu')
     for i in range(1,5):
@@ -774,6 +775,7 @@ class TestBiomajFunctional(unittest.TestCase):
 
 
   @attr('directrelease')
+  @attr('network')
   def test_directhttp_release(self):
       b = Bank('directhttp')
       res = b.update()


=====================================
tests/global.properties
=====================================
@@ -50,8 +50,17 @@ auto_publish=0
 #Uncomment thes lines if you want receive mail when the workflow is finished
 
 mail.smtp.host=
+#mail.smtp.port=
 mail.admin=
 mail.from=
+# tail last X bytes of log in mail body , 0 = no tail
+# mail.body.tail=2000000
+# attach log file if size < X bytes, 0 for no attach
+#mail.body.attach=4000000
+# path to jinja template for subject, leave empty for defaults
+#mail.template.subject=
+# path to jinja template for body, leave empty for default
+#mail.template.body=
 
 #---------------------
 #Proxy authentification


=====================================
tools/examples/body.jinja2
=====================================
@@ -0,0 +1,6 @@
+[BioMAJ]
+Bank: {{ bank }} - Version {{ release }}
+Log file: {{ log_file }}
+
+{{ log_tail }}
+


=====================================
tools/examples/global.properties
=====================================
@@ -76,11 +76,20 @@ influxdb.db=biomaj
 #Uncomment thes lines if you want receive mail when the workflow is finished
 
 mail.smtp.host=localhost
+#mail.smtp.port=25
 mail.admin=
 mail.from=biomaj at localhost
 mail.user=
 mail.password=
 mail.tls=
+# tail last X bytes of log in mail body , 0 = no tail
+# mail.body.tail=2000000
+# attach log file if size < X bytes, 0 for no attach
+#mail.body.attach=4000000
+# path to jinja template for subject, leave empty for defaults
+#mail.template.subject=
+# path to jinja template for body, leave empty for default
+#mail.template.body=
 
 
 #Number of thread during the download


=====================================
tools/examples/subject.jinja2
=====================================
@@ -0,0 +1 @@
+[BioMAJ]{% if modified %} Bank {{ bank }} - Version {{ release }}{% if status %}{% if update %}updated {% endif %}{% if remove %} removed {% endif %}successfully{% else %}{% if update %}Update{% endif %}{% if remove %}Remove{% endif %}failed for bank {{ bank }} version {{ release }}{% endif %}{% else %} {{bank }} does not need update{% endif %}



View it on GitLab: https://salsa.debian.org/med-team/biomaj3/-/commit/3710e5bcd0a10b3f771253cbed3690a6cbcade75

-- 
View it on GitLab: https://salsa.debian.org/med-team/biomaj3/-/commit/3710e5bcd0a10b3f771253cbed3690a6cbcade75
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20210117/46c678d1/attachment-0001.html>


More information about the debian-med-commit mailing list