[Blends-commit] [SCM] website branch, master, updated. 40975993322361968bedc028899ba85a1bb5980c

Ole Streicher ole at aip.de
Wed Mar 9 11:06:49 UTC 2016


The following commit has been merged in the master branch:
commit 40975993322361968bedc028899ba85a1bb5980c
Author: Ole Streicher <ole at aip.de>
Date:   Wed Mar 9 12:05:04 2016 +0100

    Merge much of the code between webtools and webtools_py3.
    Mainly the UDD handling still differs.

diff --git a/webtools/blendslanguages.py b/webtools/blendslanguages.py
index a0ea90a..6d33728 100644
--- a/webtools/blendslanguages.py
+++ b/webtools/blendslanguages.py
@@ -1,47 +1,69 @@
-#!/usr/bin/python
+'''Define languages which are supported in web sentinel pages'''
 # Copyright 2010: Andreas Tille <tille at debian.org>
 # License: GPL
 
-# Define languages which are supported in web sentinel pages
-
 from genshi import Markup
 
-# The keys of the following dictionary are used to address the translation files of DDTP
+# The keys of the following dictionary are used to
+# address the translation files of DDTP
 # The values contain dictionaries in turn with the following meaning:
 #  'short'    = shortcut of language as extension of the output files
 #  'htaccess' = language definition in htaccess
 #  'title'    = English name of the language used in title attribut of link
 #  'printed'  = printed name for links
 language_dict = {
-    'en'    : {'short':'en', 'htaccess':'en'   , 'title':'English',         'printed':Markup('English')},
-    'cs'    : {'short':'cs', 'htaccess':'cs'   , 'title':'Czech',           'printed':Markup('česky')},
-    'da'    : {'short':'da', 'htaccess':'da'   , 'title':'Danish' ,         'printed':Markup('dansk')},
-    'de'    : {'short':'de', 'htaccess':'de'   , 'title':'German',          'printed':Markup('Deutsch')},
-    'es'    : {'short':'es', 'htaccess':'es'   , 'title':'Spanish',         'printed':Markup('español')},
-    'fi'    : {'short':'fi', 'htaccess':'fi'   , 'title':'Finnish',         'printed':Markup('suomi')},
-    'fr'    : {'short':'fr', 'htaccess':'fr'   , 'title':'French',          'printed':Markup('français')},
-    'hu'    : {'short':'hu', 'htaccess':'hu'   , 'title':'Hungarian',       'printed':Markup('magyar')},
-    'it'    : {'short':'it', 'htaccess':'it'   , 'title':'Italian',         'printed':Markup('Italiano')},
-    'ja'    : {'short':'ja', 'htaccess':'ja'   , 'title':'Japanese',        'printed':Markup('日本語 (Nihongo)')},
-    'ko'    : {'short':'ko', 'htaccess':'ko'   , 'title':'Korean',          'printed':Markup('한국어 (Hangul)')},
-    'nl'    : {'short':'nl', 'htaccess':'nl'   , 'title':'Dutch',           'printed':Markup('Nederlands')},
-    'pl'    : {'short':'pl', 'htaccess':'pl'   , 'title':'Polish',          'printed':Markup('polski')},
-    'pt_BR' : {'short':'pt', 'htaccess':'pt-BR', 'title':'Portuguese',      'printed':Markup('Português')},
-    'ru'    : {'short':'ru', 'htaccess':'ru'   , 'title':'Russian',         'printed':Markup('Русский (Russkij)')},
-    'uk'    : {'short':'uk', 'htaccess':'uk'   , 'title':'Ukrainian',       'printed':Markup("українська (ukrajins'ka)")},
-    'sk'    : {'short':'sk', 'htaccess':'sk'   , 'title':'Slovenian',       'printed':Markup('slovensky')},
-    'sr'    : {'short':'sr', 'htaccess':'sr'   , 'title':'Serbian',         'printed':Markup('српски(srpski)')},
-    'sv'    : {'short':'sv', 'htaccess':'sv'   , 'title':'Swedish',         'printed':Markup('svenska')},
-    'vi'    : {'short':'vi', 'htaccess':'vi'   , 'title': 'Vietnamese',     'printed':Markup('Tiếng Việt')},
-    'zh_CN' : {'short':'zh_cn', 'htaccess':'zh-CN', 'title':'Chinese (China)',  'printed':Markup('中文(简)')},
-    'zh_TW' : {'short':'zh_tw', 'htaccess':'zh-TW', 'title':'Chinese (Taiwan)', 'printed':Markup('中文(正)')},
+    'en': {'short': 'en', 'htaccess': 'en', 'title': 'English',
+           'printed': Markup('English')},
+    'cs': {'short': 'cs', 'htaccess': 'cs', 'title': 'Czech',
+           'printed': Markup('česky')},
+    'da': {'short': 'da', 'htaccess': 'da', 'title': 'Danish',
+           'printed': Markup('dansk')},
+    'de': {'short': 'de', 'htaccess': 'de', 'title': 'German',
+           'printed': Markup('Deutsch')},
+    'es': {'short': 'es', 'htaccess': 'es', 'title': 'Spanish',
+           'printed': Markup('español')},
+    'fi': {'short': 'fi', 'htaccess': 'fi', 'title': 'Finnish',
+           'printed': Markup('suomi')},
+    'fr': {'short': 'fr', 'htaccess': 'fr', 'title': 'French',
+           'printed': Markup('français')},
+    'hu': {'short': 'hu', 'htaccess': 'hu', 'title': 'Hungarian',
+           'printed': Markup('magyar')},
+    'it': {'short': 'it', 'htaccess': 'it', 'title': 'Italian',
+           'printed': Markup('Italiano')},
+    'ja': {'short': 'ja', 'htaccess': 'ja', 'title': 'Japanese',
+           'printed': Markup('日本語 (Nihongo)')},
+    'ko': {'short': 'ko', 'htaccess': 'ko', 'title': 'Korean',
+           'printed': Markup('한국어 (Hangul)')},
+    'nl': {'short': 'nl', 'htaccess': 'nl', 'title': 'Dutch',
+           'printed': Markup('Nederlands')},
+    'pl': {'short': 'pl', 'htaccess': 'pl', 'title': 'Polish',
+           'printed': Markup('polski')},
+    'pt_BR': {'short': 'pt', 'htaccess': 'pt-BR', 'title': 'Portuguese',
+              'printed': Markup('Português')},
+    'ru': {'short': 'ru', 'htaccess': 'ru', 'title': 'Russian',
+           'printed': Markup('Русский (Russkij)')},
+    'uk': {'short': 'uk', 'htaccess': 'uk', 'title': 'Ukrainian',
+           'printed': Markup("українська (ukrajins'ka)")},
+    'sk': {'short': 'sk', 'htaccess': 'sk', 'title': 'Slovenian',
+           'printed': Markup('slovensky')},
+    'sr': {'short': 'sr', 'htaccess': 'sr', 'title': 'Serbian',
+           'printed': Markup('српски(srpski)')},
+    'sv': {'short': 'sv', 'htaccess': 'sv', 'title': 'Swedish',
+           'printed': Markup('svenska')},
+    'vi': {'short': 'vi', 'htaccess': 'vi', 'title': 'Vietnamese',
+           'printed': Markup('Tiếng Việt')},
+    'zh_CN': {'short': 'zh_cn', 'htaccess': 'zh-CN',
+              'title': 'Chinese (China)',
+              'printed': Markup('中文(简)')},
+    'zh_TW': {'short': 'zh_tw', 'htaccess': 'zh-TW',
+              'title': 'Chinese (Taiwan)',
+              'printed': Markup('中文(正)')},
 }
+
 # global languages
-_languages = language_dict.keys()
-_languages.sort()
 # make sure 'en' comes first because the first language determines the default
-languages = ['en']
-for l in _languages:
-    if l == 'en':
-        continue
-    languages.append(l)
+languages = ['en'] + sorted(l for l in language_dict.keys() if l != 'en')
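The rewritten one-liner keeps 'en' pinned to the front and sorts the remaining keys alphabetically, so the first entry, and therefore the default language, is always English. A minimal, self-contained sketch of that ordering (the short key list below only stands in for language_dict.keys()):

    language_keys = ['de', 'en', 'cs', 'zh_TW']   # abbreviated stand-in
    languages = ['en'] + sorted(l for l in language_keys if l != 'en')
    # -> ['en', 'cs', 'de', 'zh_TW']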
diff --git a/webtools/blendsmarkdown.py b/webtools/blendsmarkdown.py
index 2023dfc..d3c5b7c 100644
--- a/webtools/blendsmarkdown.py
+++ b/webtools/blendsmarkdown.py
@@ -1,58 +1,57 @@
-#!/usr/bin/python
+'''To enable better formatting of long descriptions we use Markdown
+and some preprocessing of the RFC822 formatted descriptions'''
 # Copyright 2008: Andreas Tille <tille at debian.org>
 # License: GPL
 
-# To enable better formatting of long descriptions we use Markdown
-# and some preprocessing of the RFC822 formatted descriptions
-
 import re
-
-from sys import stderr, exit
+from sys import stderr
 from markdown import markdown            # Markdown
 from docutils.core import publish_parts  # alternatively reST
 from genshi import Markup
 
 detect_list_start_re = re.compile("^\s+[-*+]\s+")
 detect_code_start_re = re.compile("^\s")
-detect_code_end_re   = re.compile("^[^\s]")
-detect_url_re        = re.compile("[fh]t?tp://")
+detect_code_end_re = re.compile("^[^\s]")
+detect_url_re = re.compile("[fh]t?tp://")
 
 
 def PrepareMarkdownInput(lines):
-    ret    = ''
+    ret = ''
     inlist = 0
     incode = 0
     for line in lines:
         # strip leading space from description as well as useless trailing
         line = re.sub('^ ', '', line.rstrip())
 
-        # a '^\.$' marks in descriptions a new paragraph, markdown
-        # uses an empty line here
+        # a '^\.$' marks in descriptions a new paragraph,
+        # markdown uses an empty line here
         line = re.sub('^\.$', '', line)
 
         # In long descriptions 'o' and '.' are quite frequently used
-        # as bullet in lists which is not recognised by markdown.  So
-        # just turn '[.o]' into '*' to let markdown do its work
-        # successfully.
+        # as bullet in lists which is not recognised by markdown.
+        # So just turn '[.o]' into '*' to let markdown do
+        # its work successfully.
         line = re.sub('^(\s*)[.o]\s+', '\\1* ', line)
 
-        # To enable Markdown debugging and verbose output in remarks a
-        # 'o'/'.' is inserted as '\o'/'\.' in remarks - the original
-        # is restored here:
+        # To enable Markdown debugging and verbose output in remarks
+        # a 'o'/'.' is inserted as '\o'/'\.' in remarks
+        # - the original is restored here:
         line = re.sub('^(\s*)\\\\([.o]\s+)', '\\1\\2', line)
 
         if detect_code_start_re.search(line):
-            if incode == 0:  # If a list or verbatim mode starts MarkDown needs an empty line
+            # If a list or verbatim mode starts MarkDown needs an empty line
+            if incode == 0:
                 ret += "\n"
                 incode = 1
                 if detect_list_start_re.search(line):
                     inlist = 1
         if incode == 1 and inlist == 0:
-            ret += "\t"  # Add a leading tab if in verbatim but not in list mode
-        # If there is an empty line or a not indented line the list or
-        # verbatim text ends It is important to check for empty lines
-        # because some descriptions would insert more lines than
-        # needed in verbose mode (see for instance glam2)
+            # Add a leading tab if in verbatim but not in list mode
+            ret += "\t"
+        # If there is an empty line or a not indented line the list
+        # or verbatim text ends. It is important to check for empty lines
+        # because some descriptions would insert more lines than needed
+        # in verbose mode (see for instance glam2)
         if (detect_code_end_re.search(line) or line == '') and incode == 1:
             inlist = 0  # list ends if indentation stops
             incode = 0  # verbatim mode ends if indentation stops
@@ -64,21 +63,22 @@ def PrepareMarkdownInput(lines):
         if detect_url_re.search(line):
             # some descriptions put URLs in '<>' which is unneeded and might
             # confuse the parsing of '&' in URLs which is needed sometimes
-            line = re.sub('<*([fh]t?tp://[-./\w?=~;&%]+)>*', '[\\1](\\1)', line)
+            line = re.sub('<*([fh]t?tp://[-./\w?=~;&%]+)>*',
+                          '[\\1](\\1)', line)
         ret += line + "\n"
     return ret
 
 
 def render_longdesc(lines):
-    MarkDownInput  = PrepareMarkdownInput(lines)
+    MarkDownInput = PrepareMarkdownInput(lines)
 
     global rendering_lib
     if rendering_lib == 'rest':
         try:
             LongDesc = publish_parts(MarkDownInput, writer_name='html')['body']
         except:
-            stderr.write("Unable to render the following prepared text:\n%s\n" %
-                         MarkDownInput)
+            stderr.write("Unable to render the following prepared text:\n%s\n"
+                         % MarkDownInput)
             LongDesc = "Problems in rendering description using reST"
     else:  # by default use Markdown
         LongDesc = markdown(MarkDownInput)
@@ -90,31 +90,36 @@ def SplitDescription(description):
 
     lines = description.splitlines()
 
-    ShortDesc = lines[0].replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
-    LongDesc  = render_longdesc(lines[1:])
+    ShortDesc = lines[0].replace("&", "&amp;") \
+                        .replace("<", "&lt;") \
+                        .replace(">", "&gt;")
+    LongDesc = render_longdesc(lines[1:])
 
     return (ShortDesc, LongDesc)
 
 
 def MarkupString(string, pkg, elem, lang='en'):
     # Genshi does not touch strings that are marked with "Markup()"
-    # This function does the actual Markup call for any string with error checking
+    # This function does the actual Markup call
+    # for any string with error checking
 
     if string is None:
         return None
     try:
         string = Markup(string)
     except UnicodeDecodeError as err:
-        stderr.write("----> %s UnicodeDecodeError in %s (lang='%s'): '%s'; Err: %s\n"
+        stderr.write("----> %s UnicodeDecodeError in %s (lang='%s'):" +
+                     " '%s'; Err: %s\n"
                      % (elem, pkg, lang, 'debug-string', str(err)))
         try:
             string = Markup(unicode(string, 'utf-8'))
         except TypeError as err:
-            stderr.write("====> %s TypeError in %s (lang='%s'): '%s'; Err: %s\n"
+            stderr.write("====> %s TypeError in %s (lang='%s'):" +
+                         " '%s'; Err: %s\n"
                          % (elem, pkg, lang, 'debug-string', str(err)))
     except TypeError as err:
         stderr.write("----> %s TypeError in %s (lang='%s'): '%s'; Err: %s\n"
                      % (elem, pkg, lang, 'debug-string', str(err)))
     return string
 
-rendering_lib  = ''
+rendering_lib = ''
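The module is fed package descriptions whose first line is the short description and whose remaining lines follow the control-file conventions handled above ('.' as paragraph separator, 'o'/'.' as list bullets). A usage sketch with a made-up description (the package text is illustrative only):

    from blendsmarkdown import SplitDescription

    description = ("Example tool (illustration only)\n"
                   " This paragraph describes the hypothetical package.\n"
                   " .\n"
                   " o first feature\n"
                   " o second feature\n")
    short, long_html = SplitDescription(description)
    # 'short' is the escaped first line; 'long_html' is the remainder rendered
    # to HTML via Markdown after PrepareMarkdownInput() rewrote the bullets to
    # '*' and turned the '.' line into an empty line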
diff --git a/webtools/blendstasktools.py b/webtools/blendstasktools.py
index 30c2283..4e2c2cf 100644
--- a/webtools/blendstasktools.py
+++ b/webtools/blendstasktools.py
@@ -1,17 +1,16 @@
-#!/usr/bin/python
-# Copyright 2008-2012: Andreas Tille <tille at debian.org>
-# License: GPL
-
-# Blends metapackages are listing a set of Dependencies
-# These might be fullfilled by the Debian package
-# set or not.
-#
-# This interface provides some classes that contains
-# all available information about this Dependency ,
-# like whether it is an official package or not,
-# in which distribution it is contained
-# or if it is not contained it obtains information
-# from tasks file about home page, license, WNPP etc.
+'''Blends metapackages list a set of Dependencies.
+These might be fulfilled by the Debian package
+set or not.
+
+This interface provides some classes that contain
+all available information about such a Dependency,
+like whether it is an official package or not,
+in which distribution it is contained,
+or, if it is not contained, it obtains information
+from the tasks file about home page, license, WNPP etc.
+
+Copyright 2008-2012: Andreas Tille <tille at debian.org>
+License: GPL
+'''
 
 from sys import stderr, exit
 from subprocess import Popen, PIPE
@@ -22,13 +21,13 @@ import stat
 import gzip
 import bz2
 import re
-import email.Utils
+import email.utils
 
 import psycopg2
 import gettext
 
 from genshi import Markup
-# ccording to http://genshi.edgewall.org/wiki/GenshiFaq#HowcanIincludeliteralXMLintemplateoutput
+# According to http://genshi.edgewall.org/wiki/GenshiFaq#HowcanIincludeliteralXMLintemplateoutput
 # there are different options to prevent escaping '<' / '>' but HTML does not work ...
 # from genshi.input import HTML
 from blendsmarkdown import SplitDescription, MarkupString, render_longdesc
@@ -95,7 +94,7 @@ pkgstatus = {
         'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package and has high relevance</a>',
         'order'        : 1
     },
-    'official_low' : {  # official package with low priority dependency
+    'official_low': {  # official package with low priority dependency
         'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
         'components'   : ('main', ),
         'dependencies' : ('Suggests', ),
@@ -103,7 +102,7 @@ pkgstatus = {
         'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package but has lower relevance</a>',
         'order'        : 2
     },
-    'non-free'     : {  # package in contrib or non-free, priority decreased to Suggests in any case
+    'non-free': {  # package in contrib or non-free, priority decreased to Suggests in any case
         'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
         'component'    : ('contrib', 'non-free'),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -111,7 +110,7 @@ pkgstatus = {
         'colorcode'    : 'Green: The project is <a href="#%s">available in Debian packaging pool but is not in Debian main</a>',
         'order'        : 3
     },
-    'experimental' : {  # package which exists only in experimental
+    'experimental': {  # package which exists only in experimental
         'releases'     : ('experimental', ),
         'component'    : ('main', 'contrib', 'non-free'),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -119,7 +118,7 @@ pkgstatus = {
         'colorcode'    : 'Yellow: The project is <a href="#%s">available in Debian packaging pool but is regarded as experimental</a>',
         'order'        : 4
     },
-    'new'          : {  # package in new queue
+    'new': {  # package in new queue
         'releases'     : ('new', ),
         'component'    : ('main', 'contrib', 'non-free'),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -127,7 +126,7 @@ pkgstatus = {
         'colorcode'    : 'Yellow: A package of project is <a href="#%s">is in Debian New queue and hopefully available soon</a>',
         'order'        : 5
     },
-    'pkgvcs'       : {  # Not yet packaged but packaging code in Vcs
+    'pkgvcs': {  # Not yet packaged but packaging code in Vcs
         'releases'     : (),
         'component'    : (),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -135,7 +134,7 @@ pkgstatus = {
         'colorcode'    : 'Yellow: The packaging of project is <a href="#%s">has started and a developer might try the packaging code in VCS or help packaging.</a>',
         'order'        : 6
     },
-    'unofficial'   : {  # unofficial packages outside Debian
+    'unofficial': {  # unofficial packages outside Debian
         'releases'     : (),
         'component'    : (),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -143,7 +142,7 @@ pkgstatus = {
         'colorcode'    : 'Yellow: There exists an <a href="#%s">unofficial package</a> of the project',
         'order'        : 7
     },
-    'wnpp'         : {  # project which has at least a WNPP bug filed
+    'wnpp': {  # project which has at least a WNPP bug filed
         'releases'     : (),
         'component'    : (),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -151,7 +150,7 @@ pkgstatus = {
         'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a> but there is some record of interest (WNPP bug).',
         'order'        : 8
     },
-    'prospective'  : {  # projects which might be interesting for a Blend but no work is done yet
+    'prospective': {  # projects which might be interesting for a Blend but no work is done yet
         'releases'     : (),
         'component'    : (),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -159,7 +158,7 @@ pkgstatus = {
         'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a>.',
         'order'        : 9
     },
-    'ignore'       : {  # Package inside Debian which is "under observation"
+    'ignore': {  # Package inside Debian which is "under observation"
         'releases'     : (releases.keys()),
         'component'    : ('main', 'contrib', 'non-free'),
         'dependencies' : ('Ignore', ),
@@ -167,7 +166,7 @@ pkgstatus = {
         'colorcode'    : '%s',
         'order'        : 10
     },
-    'avoid'        : {  # Package inside Debian which should not go to a install medium of the Blend
+    'avoid': {  # Package inside Debian which should not go to a install medium of the Blend
         'releases'     : (releases.keys()),
         'component'    : ('main', 'contrib', 'non-free'),
         'dependencies' : ('Avoid', ),
@@ -175,7 +174,7 @@ pkgstatus = {
         'colorcode'    : '%s',
         'order'        : 11
     },
-    'unknown'      : {  # Everything else
+    'unknown': {  # Everything else
         'releases'     : (),
         'component'    : (),
         'dependencies' : ('Depends', 'Recommends', 'Suggests'),
@@ -262,7 +261,7 @@ def UnlockBlendsTools():
 
 
 def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
-    #  Create a list of status of dependencies out of pkgstatus dictionary
+    # Create a list of status of dependencies out of pkgstatus dictionary
     use_dependencystatus = []
     if dependencystatus == []:
         for pkgstat in pkgstatus_sortedkeys:
@@ -272,9 +271,9 @@ def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
                 continue
             use_dependencystatus.append(pkgstat)
     else:
-        #  verify correctly given dependencies
+        # verify correctly given dependencies
         for pkgstat in dependencystatus:
-            if pkgstat in pkgstatus.keys():
+            if pkgstat in pkgstatus:
                 use_dependencystatus.append(pkgstat)
             else:
                 logger.error("Unknown dependencystatus %s" % pkgstat)
@@ -404,7 +403,7 @@ query = """PREPARE query_new (text[]) AS SELECT
     LEFT OUTER JOIN bibref bibeprint  ON p.source = bibeprint.source  AND bibeprint.rank = 0  AND bibeprint.key  = 'eprint'  AND bibeprint.package = ''
                    WHERE (p.package, p.version) IN
                          (SELECT package, max(version) FROM
-                   new_packages WHERE package = ANY ($1) GROUP BY package)"""
+                   new_packages WHERE package = ANY ($1) GROUP BY package) ORDER BY p.package"""
 _execute_udd_query(query)
 
 query = """PREPARE query_vcs (text[]) AS SELECT
@@ -442,7 +441,7 @@ query = """PREPARE query_vcs (text[]) AS SELECT
     LEFT OUTER JOIN bibref bibnumber  ON p.source = bibnumber.source  AND bibnumber.rank = 0  AND bibnumber.key  = 'number'  AND bibnumber.package = ''
     LEFT OUTER JOIN bibref bibpages   ON p.source = bibpages.source   AND bibpages.rank = 0   AND bibpages.key   = 'pages'   AND bibpages.package = ''
     LEFT OUTER JOIN bibref bibeprint  ON p.source = bibeprint.source  AND bibeprint.rank = 0  AND bibeprint.key  = 'eprint'  AND bibeprint.package = ''
-                   WHERE p.package = ANY ($1)"""
+                   WHERE p.package = ANY ($1) ORDER BY p.package"""
 _execute_udd_query(query)
 
 # This prepared statement is called only once but it makes sense to mention it in the
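The prepared statements above are later executed with an array of package names, and the added ORDER BY makes the row order deterministic. For context, a generic psycopg2 sketch of preparing and executing such a statement (the connection string and the table/column names are placeholders, not the ones used by this code):

    import psycopg2

    conn = psycopg2.connect("service=udd")   # placeholder connection settings
    curs = conn.cursor()
    curs.execute("PREPARE demo_query (text[]) AS "
                 "SELECT package FROM packages "
                 "WHERE package = ANY ($1) ORDER BY package")
    # psycopg2 adapts a Python list to a PostgreSQL array literal
    curs.execute("EXECUTE demo_query (%s)", (['0ad', 'zsh'],))
    rows = curs.fetchall()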
@@ -654,8 +653,7 @@ def FetchTasksFiles(data):
 
 
 def RowDictionaries(cursor):
-    """Return a list of dictionaries which specify the values by their
-    column names"""
+    """Return a list of dictionaries which specify the values by their column names"""
 
     if not cursor.description:
         # even if there are no data sets to return the description should contain the table structure.  If not something went
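RowDictionaries() maps each result row onto the column names reported in cursor.description. A minimal sketch of that idea (not necessarily the exact body used in this file):

    def row_dictionaries_sketch(cursor):
        # cursor.description is filled after a successful query; the first
        # element of each entry is the column name
        if not cursor.description:
            return []
        columns = [col[0] for col in cursor.description]
        return [dict(zip(columns, row)) for row in cursor.fetchall()]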
@@ -788,11 +786,10 @@ class DependantPackage:
             ret += ", versions: "     + str(self.version)
         if self.desc:
             ret += ", desc: "         + str(self.desc)
-        for prop in self.properties.keys():
+        for prop in self.properties:
             ret += ", %s: %s" % (prop, str(self.properties[prop]))
         try:
-            ret += ", popcon = %i (%i)" % (self.popcon['vote'],
-                                           self.popcon['recent'])
+            ret += ", popcon = %i (%i)" % (self.popcon['vote'], self.popcon['recent'])
         except:
             pass
         ret += ", debtags = " + str(self.debtags)
@@ -873,8 +870,7 @@ class Tasks:
 
         os.system("mkdir -p logs")
         LOG_FILENAME = os.path.join('logs', blendname + '.log')
-        handler = logging.handlers.RotatingFileHandler(filename=LOG_FILENAME,
-                                                       mode='w')
+        handler = logging.handlers.RotatingFileHandler(filename=LOG_FILENAME, mode='w')
         formatter = logging.Formatter("%(levelname)s - %(filename)s (%(lineno)d): %(message)s")
         handler.setFormatter(formatter)
         logger.addHandler(handler)
@@ -931,6 +927,8 @@ class Tasks:
         # one hand is a sign that this Blend has uploaded metapackages at all and on the
         # other hand gives a clue about whether it makes sense to query for description
         # translations
+        # To verify whether a blend has uploaded a metapackage or not we can
+        # check the boolean column "metapackage" in the blends_tasks table
         query = "SELECT COUNT(*) FROM descriptions WHERE package = ANY ('%s')" % List2PgArray(metapackages)
         _execute_udd_query(query)
         if curs.rowcount > 0:
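A hedged sketch of the check suggested in the comment above, reusing the module-level UDD cursor; only the boolean "metapackage" column is named in the comment, while the "blend" and "task" column names and the blend value are assumptions for illustration:

    curs.execute("SELECT task FROM blends_tasks "
                 "WHERE blend = %s AND metapackage", ('debian-astro',))
    uploaded_metapackages = [row[0] for row in curs.fetchall()]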
@@ -951,9 +949,10 @@ class Tasks:
             if pkgname in metapkg_translations:
                 translations = metapkg_translations[pkgname]
             td.SetMetapackageInfo(pkgname, translations)
+            logger.debug("Task : %s " % task)
             if td.GetTaskDependencies(source):
                 self.tasks[task] = td
-            else:  # Kick file that is obviosely no task file from metapackage list
+            else:  # Kick file that is obviously no task file from metapackage list
                 self.metapackagekeys = [name for name in self.metapackagekeys if name != task]
 
         if source == 0:
@@ -1003,47 +1002,6 @@ class Tasks:
                 ret[task] = list
         return ret
 
-    def GetNamesAndSourceDict(self, dependencystatus=()):
-        # For the bugs pages we need not only the binary package names but the
-        # source package as well to be able to link to the QA page
-        # The lists are tripels with first value package name and the second source name
-        # The last value characterises the strength of dependency: Possible values
-        # are 'suggested' for Suggested packages, and
-        ret = {}
-        if dependencystatus == ():
-            # see above in GetNamesOnlyDict() ... but when we are looking for bugs a
-            # reasonable default is to use only official dependencystatus
-            dependencystatus = ('official_high', 'official_low', 'non-free',
-                                'experimental')
-
-        for task in self.metapackagekeys:
-            tdeps = self.tasks[task]
-            list = []
-            for dep in dependencystatus:
-                if dep != 'official_high':
-                    bugrelevantdependency = SUGGESTED
-                else:
-                    bugrelevantdependency = DEPENDENT
-                for tdep in tdeps.dependencies[dep]:
-                    bugreldep = bugrelevantdependency
-                    # packages outside main can not be Depends but only Suggests
-                    if bugreldep == DEPENDENT and tdep.component != 'main':
-                        bugreldep = SUGGESTED
-                    list.append({
-                        'pkgname'              : tdep.pkg,
-                        'source'               : tdep.properties['source'],
-                        'homepage'             : tdep.properties['homepage'],
-                        'vcs-browser'          : tdep.properties['vcs-browser'],
-                        'maintainer'           : tdep.responsible,
-                        'bugrelevantdependency': bugreldep
-                    })
-            ret[task] = list
-        return ret
-
-    def GetTaskDescDict(self):
-        # Return dictionary with description information of all tasks of a Blend
-        return self.tasks
-
     def GetAllDependentPackagesOfBlend(self, dependencystatus=[]):
         # David Paleino needs for his DDTP web tool a list of
         # all available Dependencies.
@@ -1068,6 +1026,10 @@ class Tasks:
         self.alldeps_in_main.sort()
         return self.alldeps_in_main
 
+    def GetTaskDescDict(self):
+        # Return dictionary with description information of all tasks of a Blend
+        return self.tasks
+
     def MarkupPreformatedStringsBlend(self):
         # Genshi does not touch strings that are marked with "Markup()" - so just
         # mark the strings that are ready formatted for the whole Blend
@@ -1082,14 +1044,14 @@ class Tasks:
         # otherwise
         for task in self.metapackagekeys:
             tdeps = self.tasks[task]
-            for dependency in tdeps.dependencies.keys():
+            for dependency in tdeps.dependencies:
                 for dep in tdeps.dependencies[dependency]:
                     if dep.properties['Enhances'] != {}:
                         logger.debug("Package %s is enhanced by:" % dep.pkg)
-                        for enh in dep.properties['Enhances'].keys():
+                        for enh in dep.properties['Enhances']:
                             # seek for Enhances on same page
                             found = 0
-                            for seek_dependency in tdeps.dependencies.keys():
+                            for seek_dependency in tdeps.dependencies:
                                 for enhdep in tdeps.dependencies[seek_dependency]:
                                     if enh == enhdep.pkg:
                                         dep.properties['Enhances'][enh] = '#' + enh
@@ -1100,7 +1062,7 @@ class Tasks:
                                     if enhtask == task:
                                         continue
                                     enhtdeps = self.tasks[enhtask]
-                                    for seek_dependency in enhtdeps.dependencies.keys():
+                                    for seek_dependency in enhtdeps.dependencies:
                                         for enhdep in enhtdeps.dependencies[seek_dependency]:
                                             if enh == enhdep.pkg:
                                                 dep.properties['Enhances'][enh] = './' + enhtask + '#' + enh
@@ -1121,31 +1083,7 @@ class Tasks:
                     deps += d
             self._packageByName = dict((dep.pkg, dep) for dep in deps)
             return self._packageByName
-
-    def __str__(self):
-        ret = "Blendname: "       + self.blendname  + ", " \
-              "Metapackagekeys: " + str(self.metapackagekeys) + ", "
-        tab = "\nTasks: "
-        for task in self.metapackagekeys:
-            ret += tab
-            semikolon = ''
-            for pstatus in self.tasks[task].dependencies.keys():
-                if self.tasks[task].dependencies[pstatus] == []:
-                    continue
-                ret += semikolon + pstatus + ': ['
-                semikolon = '; '
-                komma = ''
-                for dep in self.tasks[task].dependencies[pstatus]:
-                    ret += komma + dep.pkg
-                    komma = ', '
-                ret += ']'
-            tab  = "\n       "
-        ret += "\nAll deps in main:" + str(self.alldeps_in_main)
-        ret += ",\nAll deps in main Info:" + str(self.alldeps_in_main_info)
-
-        return ret
-
-
+
+
 class TaskDependencies:
     # List of depencencies defined in one metapackage
     def __init__(self, blendname, task, tasksdir=None):
@@ -1205,6 +1143,7 @@ class TaskDependencies:
         if dep is None:
             return
         if dep.dep_strength == 'Ignore' or dep.dep_strength == 'Avoid':
+            logger.debug("Ignore/Avoid package : %s" % dep.pkg)
             return
         if source != 1:
             # In general we can just add the dependency to the list
@@ -1595,24 +1534,30 @@ class TaskDependencies:
                     if dep.properties['vcs-browser'] == HOMEPAGENONE:
                         dep.properties['vcs-browser'] = BrowserFromVcsURL(dep.properties['vcs-type'], dep.properties['vcs-url'])
 
-                if row['enhanced']:
+                if row.get('enhanced'):
                     for pkg in row['enhanced']:
                         dep.properties['Enhances'][pkg] = PKGURLMASK % pkg
                         enhancing_pkgs.append(pkg)
 
-                for i in range(len(row['releases'])):
-                    dep.version.append({
-                        'release': row['releases'][i],
-                        'version': row['versions'][i],
-                        'archs': row['architectures'][i]
-                    })
+                if 'releases' in row:
+                    for rel, ver, arch in sorted(zip(row['releases'], row['versions'],
+                                                     row['architectures']),
+                                                 key=lambda x: release_order[x[0]]):
+                        dep.version.append({
+                            'release': rel,
+                            'version': ver,
+                            'archs': arch
+                        })
 
-                dep.popcon['vote']   = row['vote']
-                dep.popcon['recent'] = row['recent']
-                dep.popcon['insts'] = row['insts']
+                if 'vote' in row:
+                    dep.popcon['vote']   = row['vote']
+                if 'recent' in row:
+                    dep.popcon['recent'] = row['recent']
+                if 'insts' in row:
+                    dep.popcon['insts'] = row['insts']
 
                 # Debtags as sorted list of dict fields
-                if row['debtags']:
+                if row.get('debtags'):
                     if dep.debtags:  # there is no reasonable way that debtags was set before - so something is wrong here and a warning should be issued
                         logger.warning("Debtags for package '%s' was just set.  A duplicated result from database query is suspected.  Please check the result!" % dep.pkg)
                     tagdict = {}
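The releases, versions and architectures columns arrive as parallel arrays, so the new code zips them into triples and sorts them by release. A self-contained sketch (the contents of release_order are an assumed example; the real mapping is defined elsewhere in this module):

    release_order = {'oldstable': 0, 'stable': 1, 'testing': 2, 'unstable': 3}
    releases = ['unstable', 'stable', 'testing']
    versions = ['2.0-1', '1.0-1', '1.5-1']
    architectures = ['amd64 i386', 'amd64', 'amd64 i386']
    triples = sorted(zip(releases, versions, architectures),
                     key=lambda x: release_order[x[0]])
    # -> [('stable', '1.0-1', 'amd64'),
    #     ('testing', '1.5-1', 'amd64 i386'),
    #     ('unstable', '2.0-1', 'amd64 i386')]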
@@ -1624,13 +1569,13 @@ class TaskDependencies:
                         else:
                             tagdict[tag]  = value
                             taglist.append(tag)
-                        if taglist:
-                            taglist.sort()
+                    if taglist:
+                        taglist.sort()
                     for tag in taglist:
                         dep.debtags.append({'tag': tag, 'value': tagdict[tag]})
 
                 # screenshots
-                if row['icon']:
+                if row.get('icon'):
                     dep.icon           = row['icon'][0]
                     dep.image          = row['image'][0]
                     dep.screenshot_url = 'http://screenshots.debian.net/package/' + dep.pkg
@@ -1641,19 +1586,15 @@ class TaskDependencies:
                         })
 
                 # it might be that the new upstream goes to experimental - this should be ignored here
-                if row['unstable_parsed_version']:
+                if row.get('unstable_parsed_version'):
                     dep.outdated['release']       = 'upstream'
                     dep.outdated['version']       = row['unstable_upstream']
                     dep.outdated['architectures'] = ''
 
                 if row['changed_by']:
-                    try:
-                        changed = row['changed_by']
-                    except TypeError as err:
-                        changed = None
-                        logger.warning("Encoding problem for last uploader of package '%s' in task %s (%s)" % (dep.pkg, dep.taskname, err))
+                    changed = row['changed_by']
                     if changed:
-                        (_name, _url) = email.Utils.parseaddr(changed)
+                        (_name, _url) = email.utils.parseaddr(changed)
                         changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
                         dep.properties['changed_by']    = MarkupString(changed, dep.pkg, 'changed_by')
                         dep.properties['last_uploader'] = changed
@@ -1662,9 +1603,9 @@ class TaskDependencies:
                 # link to packages.debian.org search page to see overview about all
                 # package versions in all releases
                 dep.properties['pkg-url'] = PKGURLMASK % dep.pkg
-                dep.SetPublications(row)
+
                 for l in languages:
-                    if row['description_' + l]:
+                    if row.get('description_' + l):
                         dep.desc[l] = {}
                         dep.desc[l]['short'] = MarkupString(row['description_' + l],
                                                             dep.pkg, 'ShortDesc')
@@ -1673,13 +1614,17 @@ class TaskDependencies:
                 if 'short' not in dep.desc['en']:
                     logger.error("Dep has no English short description: %s", dep.pkg)
                     dep.desc['en']['short'] = "??? missing short description for package %s :-(" % dep.pkg
+
                 (_name, _url) = email.Utils.parseaddr(row['maintainer'])
                 dep.properties['maintainer'] = row['maintainer']
                 dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, _name)
 
-                if row['edam_topics']:
+                if row.get('edam_topics'):
                     logger.info("Edam topics found for package %s: %s" % (dep.pkg, str(row['edam_topics'])))
 
+                # Publications
+                dep.SetPublications(row)
+
                 pkgs_in_pool.append(dep.pkg)
                 # DEBUG
                 # print dep
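The switch from email.Utils to email.utils uses the Python 3 spelling of the module; parseaddr() splits an RFC 822 address into a (name, address) pair. A quick sketch with a made-up maintainer address:

    from email.utils import parseaddr

    name, addr = parseaddr('Jane Maintainer <jane@example.org>')
    # name == 'Jane Maintainer', addr == 'jane@example.org'
    link = '<a href="mailto:%s">%s</a>' % (addr, name)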
@@ -1956,15 +1901,15 @@ class TaskDependencies:
                 else:
                     logger.warning("Dependency with unknown status: %s (Task %s)" % (dep.pkg, dep.taskname))
 
-        for dependency in self.dependencies.keys():
-            self.dependencies[dependency].sort()
+        for dependency in self.dependencies.values():
+            dependency.sort()
 
     def MarkupPreformatedStrings(self):
         # Genshi does not touch strings that are marked with "Markup()" - so just
         # mark the strings that are ready formatted
 
-        for dependency in self.dependencies.keys():
-            for dep in self.dependencies[dependency]:
+        for dependency in self.dependencies.values():
+            for dep in dependency:
                 dep.responsible = MarkupString(dep.responsible, dep.pkg, 'responsible')
                 if dep.desc['en'] != {}:
                     dep.desc['en']['short'] = MarkupString(dep.desc['en']['short'], dep.pkg, 'pkgShortDesc')
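The two loops above were switched from iterating over .keys() and indexing back into the dictionary to iterating over .values() directly, which avoids the extra lookup. A tiny equivalent sketch (toy dictionary for illustration only):

    dependencies = {'Depends': ['pkg-b', 'pkg-a'], 'Suggests': ['pkg-c']}

    for status in dependencies.keys():        # old style
        dependencies[status].sort()

    for deps in dependencies.values():        # new style, same effect
        deps.sort()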
diff --git a/webtools/tasks.py b/webtools/tasks.py
index c0eb244..c9c8d5c 100755
--- a/webtools/tasks.py
+++ b/webtools/tasks.py
@@ -13,7 +13,7 @@ import gettext
 
 import time
 from datetime import datetime
-from email.Utils import formatdate
+from email.utils import formatdate
 
 from genshi.template import TemplateLoader
 from genshi import Markup
@@ -23,9 +23,9 @@ from blendstasktools import Tasks, GetDependencies2Use, pkgstatus, pkgstatus_sor
 from blendslanguages import languages, language_dict
 
 if len(argv) <= 1:
-    stderr.write("Usage: %s <Blend name>\n" +
-                 "       The <Blend name> needs a matching config file webconf/<Blend name>.conf\n"
-                 % argv[0])
+    stderr.write('Usage: {0} <Blend name>\n'.format(argv[0])
+                 + 'The <Blend name> needs a matching config file '
+                 + 'webconf/<Blend name>.conf\n')
     exit(-1)
 
 # LockBlendsTools() #  logger handler not defined at this moment, needs rethinking ... FIXME
@@ -64,7 +64,6 @@ l10nstring = dict((lang, gettext.translation(domain, locale_dir,
 # gettext working with genshi all are collected here even if the additional
 # attributes to blendstasktools.pkgstatus rather should go blendstasktools.py
 
-
 # initialize gensi
 loader = TemplateLoader([template_dir], auto_reload=True, default_encoding="utf-8")
 
@@ -81,7 +80,7 @@ with open(os.path.join(outputdir, 'tasks.json'), 'w') as fp:
     json.dump(data, fp, indent=4, sort_keys=True, cls=DefaultEncoder)
 
 t = datetime.now()
-htaccess = outputdir + '/.htaccess'
+htaccess = os.path.join(outputdir, '.htaccess')
 htafp = open(htaccess, 'w')
 htafp.write("DirectoryIndex index index.html\nOptions +MultiViews\n")
 
@@ -130,7 +129,7 @@ for lang in languages:
         # If data['advertising'] is enclosed in _() gettext tries to ask for translations of 'advertising'
         # which makes no sense.  That's why this is masked by an extra string variable
         advertising = data['advertising']
-        # #data['projectadvertising'] = _(advertising) # Hopefully translation will work this way ...
+        # data['projectadvertising'] = _(advertising) # Hopefully translation will work this way ...
         # Genshi needs explicite information that it is dealing with an UTF-8 string which should not be changed
         advertising = _(advertising)
         data['projectadvertising'] = Markup(advertising)
@@ -186,7 +185,6 @@ for lang in languages:
     with codecs.open(outputfile, 'w', 'utf-8') as f:
             f.write(template.generate(**data).render('xhtml'))
     SetFilePermissions(outputfile)
-
     try:
         template = loader.load('%s_idx.xhtml' % tasks.blendname)
         outputfile = os.path.join(tasks.data['outputdir'],
@@ -215,65 +213,54 @@ for lang in languages:
 
     for task in data['taskskeys']:
         data['task']               = task
-        # Keep the Dependency lists per task to be able to loop over all tasks in plain package list
+        # Keep the Dependency lists per task to be able to loop over
+        # all tasks in plain package list
         data['dependencies'][task] = []
-        found_status = {}
         for status in use_dependencystatus:
             if len(tasks.tasks[task].dependencies[status]) > 0:
-                if status == 'unknown':
-                    # Just print an error message if there are packages with unknown status
-                    if lang == 'en':
-                        # ... but only once and not per language
-                        for dep in tasks.tasks[task].dependencies[status]:
-                            stderr.write("Warning: Dependency with unknown status: %s\n" % dep.pkg)
-                else:
+                if status != 'unknown':
                     data['dependencies'][task].append(status)
-                    if status in data['dependencies'][task]:
-                        if status not in found_status:
-                            found_status[status] = 1
+                elif lang == 'en':
+                    # Just print an error message if there are packages with
+                    # unknown status but only once and not per language
+                    for dep in tasks.tasks[task].dependencies[status]:
+                        stderr.write("Warning: Dependency with unknown status: %s\n"
+                                     % dep.pkg)
         # Keep the Project lists per task to be able to loop over all tasks in plain package list
         data['projects'][task] = tasks.tasks[task].dependencies
         data['othertasks']     = _("Links to other tasks")
         data['indexlink']      = _("Index of all tasks")
 
-        outputfile = os.path.join(
-            outputdir,
-            '{0}.{1}.html'.format(task, language_dict[lang]['short']))
-
         if data['projectname'] in ('Debian Astro', 'Debian Hamradio'):
             template = loader.load('packages.xhtml')
         else:
             template = loader.load('tasks.xhtml')
 
-        # We had to mask ampersand ('&') from Genshi but even if the browser
-        # shows the correct character packages.debian.org gets confused.
-        # So turn it back here
+        outputfile = os.path.join(
+            outputdir, '{0}.{1}.html'.format(task, language_dict[lang]['short']))
         with codecs.open(outputfile, "w", "utf-8") as f:
-            for l in template.generate(**data).render('xhtml').split('\n'):
-                if detect_ampersand_code_re.search(l):
-                    l = l.replace('%26', '&')
-                f.write(l + '\n')
+            # We had to mask ampersand ('&') from Genshi but even if the browser shows
+            # the correct character packages.debian.org gets confused - so turn it back here
+            for line in template.generate(**data).render('xhtml').splitlines():
+                if detect_ampersand_code_re.search(line):
+                    line = line.replace('%26', '&')
+                f.write(line + '\n')
         SetFilePermissions(outputfile)
 
     template = loader.load('packagelist.xhtml')
 
-    outputfile = os.path.join(outputdir,
-                              'packagelist.{0}.html'.format(lang))
-
     data['projectsintasks'] = []
     for task in data['taskskeys']:
         data['projectsintasks']     = tasks.tasks[task].dependencies
 
+    outputfile = os.path.join(outputdir, 'packagelist.{0}.html'.format(lang))
     with codecs.open(outputfile, 'w', 'utf-8') as f:
         f.write(template.generate(**data).render('xhtml'))
     SetFilePermissions(outputfile)
 
 
-htafp.write("LanguagePriority")
-for lang in languages:
-    htafp.write(' ' + language_dict[lang]['htaccess'])
-htafp.write("\n")
-
+htafp.write("LanguagePriority {0}\n".format(
+    ' '.join(language_dict[lang]['htaccess']) for lang in languages))
 htafp.close()
 SetFilePermissions(htaccess)
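The .htaccess generated here enables content negotiation; it would start roughly like this (abbreviated; the per-language directives written inside the loop are omitted, and the language list follows the order defined in blendslanguages.py):

    DirectoryIndex index index.html
    Options +MultiViews
    ...
    LanguagePriority en cs da de es fi fr hu it ja ko nl pl pt-BR ru sk sr sv uk vi zh-CN zh-TW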
 
diff --git a/webtools_py3/blendslanguages.py b/webtools_py3/blendslanguages.py
deleted file mode 100644
index f4e85b5..0000000
--- a/webtools_py3/blendslanguages.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/python3
-# Copyright 2010: Andreas Tille <tille at debian.org>
-# License: GPL
-
-# Define languages which are supported in web sentinel pages
-
-from genshi import Markup
-
-# The keys of the following dictionary are used to
-# address the translation files of DDTP
-# The values contain dictionaries in turn with the following meaning:
-#  'short'    = shortcut of language as extension of the output files
-#  'htaccess' = language definition in htaccess
-#  'title'    = English name of the language used in title attribut of link
-#  'printed'  = printed name for links
-language_dict = {'en': {'short': 'en', 'htaccess': 'en', 'title': 'English',
-                        'printed': Markup('English')},
-                 'cs': {'short': 'cs', 'htaccess': 'cs', 'title': 'Czech',
-                        'printed': Markup('česky')},
-                 'da': {'short': 'da', 'htaccess': 'da', 'title': 'Danish',
-                        'printed': Markup('dansk')},
-                 'de': {'short': 'de', 'htaccess': 'de', 'title': 'German',
-                        'printed': Markup('Deutsch')},
-                 'es': {'short': 'es', 'htaccess': 'es', 'title': 'Spanish',
-                        'printed': Markup('español')},
-                 'fi': {'short': 'fi', 'htaccess': 'fi', 'title': 'Finnish',
-                        'printed': Markup('suomi')},
-                 'fr': {'short': 'fr', 'htaccess': 'fr', 'title': 'French',
-                        'printed': Markup('français')},
-                 'hu': {'short': 'hu', 'htaccess': 'hu', 'title': 'Hungarian',
-                        'printed': Markup('magyar')},
-                 'it': {'short': 'it', 'htaccess': 'it', 'title': 'Italian',
-                        'printed': Markup('Italiano')},
-                 'ja': {'short': 'ja', 'htaccess': 'ja', 'title': 'Japanese',
-                        'printed': Markup('日本語\
-                                         (Nihongo)')},
-                 'ko': {'short': 'ko', 'htaccess': 'ko', 'title': 'Korean',
-                        'printed': Markup('한국어\
-                                         (Hangul)')},
-                 'nl': {'short': 'nl', 'htaccess': 'nl', 'title': 'Dutch',
-                        'printed': Markup('Nederlands')},
-                 'pl': {'short': 'pl', 'htaccess': 'pl', 'title': 'Polish',
-                        'printed': Markup('polski')},
-                 'pt_BR': {'short': 'pt', 'htaccess': 'pt-BR',
-                           'title': 'Portuguese',
-                           'printed': Markup('Português')},
-                 'ru': {'short': 'ru', 'htaccess': 'ru', 'title': 'Russian',
-                        'printed': Markup('Русск\
-                                        ий (Russkij)')},
-                 'uk': {'short': 'uk', 'htaccess': 'uk', 'title': 'Ukrainian',
-                        'printed': Markup("украї\
-                                        нська\
-                                         (ukrajins'ka)")},
-                 'sk': {'short': 'sk', 'htaccess': 'sk', 'title': 'Slovenian',
-                        'printed': Markup('slovensky')},
-                 'sr': {'short': 'sr', 'htaccess': 'sr', 'title': 'Serbian',
-                        'printed': Markup('српск\
-                                        и(srpski)')},
-                 'sv': {'short': 'sv', 'htaccess': 'sv', 'title': 'Swedish',
-                        'printed': Markup('svenska')},
-                 'vi': {'short': 'vi', 'htaccess': 'vi', 'title': 'Vietnamese',
-                        'printed': Markup('Tiếng Việt')},
-                 'zh_CN': {'short': 'zh_cn', 'htaccess': 'zh-CN',
-                           'title': 'Chinese (China)',
-                           'printed': Markup('中文(简)')},
-                 'zh_TW': {'short': 'zh_tw', 'htaccess': 'zh-TW',
-                           'title': 'Chinese (Taiwan)',
-                           'printed': Markup('中文(正)')}, }
-# global languages
-_languages = list(language_dict.keys())
-_languages.sort()
-# make sure 'en' comes first because the first language determines the default
-languages = ['en']
-for l in _languages:
-    if l == 'en':
-        continue
-    languages.append(l)
diff --git a/webtools_py3/blendslanguages.py b/webtools_py3/blendslanguages.py
new file mode 120000
index 0000000..271f07a
--- /dev/null
+++ b/webtools_py3/blendslanguages.py
@@ -0,0 +1 @@
+../webtools/blendslanguages.py
\ No newline at end of file
diff --git a/webtools_py3/blendsmarkdown.py b/webtools_py3/blendsmarkdown.py
deleted file mode 100644
index 8c3a973..0000000
--- a/webtools_py3/blendsmarkdown.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/python3
-# Copyright 2008: Andreas Tille <tille at debian.org>
-# License: GPL
-
-# To enable better formatting of long descriptions we use Markdown
-# and some preprocessing of the RFC822 formatted descriptions
-
-
-from __future__ import print_function
-
-import re
-from sys import stderr
-from markdown import markdown            # Markdown
-from docutils.core import publish_parts  # alternatively reST
-from genshi import Markup
-
-detect_list_start_re = re.compile("^\s+[-*+]\s+")
-detect_code_start_re = re.compile("^\s")
-detect_code_end_re = re.compile("^[^\s]")
-detect_url_re = re.compile("[fh]t?tp://")
-
-
-def PrepareMarkdownInput(lines):
-    ret = ''
-    inlist = 0
-    incode = 0
-    for line in lines:
-        # strip leading space from description as well as useless trailing
-        line = re.sub('^ ', '', line.rstrip())
-
-        # a '^\.$' marks in descriptions a new paragraph,
-        # markdown uses an empty line here
-        line = re.sub('^\.$', '', line)
-
-        # In long descriptions 'o' and '.' are quite frequently used
-        # as bullet in lists which is not recognised by markdown.
-        # So just turn '[.o]' into '*' to let markdown do
-        # its work successfully.
-        line = re.sub('^(\s*)[.o]\s+', '\\1* ', line)
-
-        # To enable Markdown debugging and verbose output in remarks
-        # a 'o'/'.' is inserted as '\o'/'\.' in remarks
-        # - the original is restored here:
-        line = re.sub('^(\s*)\\\\([.o]\s+)', '\\1\\2', line)
-
-        if detect_code_start_re.search(line):
-            # If a list or verbatim mode starts MarkDown needs an empty line
-            if incode == 0:
-                ret += "\n"
-                incode = 1
-                if detect_list_start_re.search(line):
-                    inlist = 1
-        if incode == 1 and inlist == 0:
-            # Add a leading tab if in verbatim but not in list mode
-            ret += "\t"
-        # If there is an empty line or a not indented line the list
-        # or verbatim text ends. It is important to check for empty lines
-        # because some descriptions would insert more lines than needed
-        # in verbose mode (see for instance glam2)
-        if (detect_code_end_re.search(line) or line == '') and incode == 1:
-            inlist = 0  # list ends if indentation stops
-            incode = 0  # verbatim mode ends if indentation stops
-        # Mask # at first character in line which would lead to
-        #   MARKDOWN-CRITICAL: "We've got a problem header!"
-        # otherwise
-        if line.startswith('#'):
-            ret += '\\'
-        if detect_url_re.search(line):
-            # some descriptions put URLs in '<>' which is unneeded and might
-            # confuse the parsing of '&' in URLs which is needed sometimes
-            line = re.sub('<*([fh]t?tp://[-./\w?=~;&%]+)>*',
-                          '[\\1](\\1)', line)
-        ret += line + "\n"
-    return ret
-
-
-def render_longdesc(lines):
-    MarkDownInput = PrepareMarkdownInput(lines)
-
-    global rendering_lib
-    if rendering_lib == 'rest':
-        try:
-            LongDesc = publish_parts(MarkDownInput, writer_name='html')['body']
-        except:
-            print("Unable to render the following prepared text: \n %s"
-                  % (MarkDownInput), file=stderr)
-            LongDesc = "Problems in rendering description using reST"
-    else:  # by default use Markdown
-        LongDesc = markdown(MarkDownInput)
-    return LongDesc
-
-
-def SplitDescription(description):
-    # Split first line of Description value as short description
-
-    lines = description.splitlines()
-
-    ShortDesc = lines[0].replace("&", "&amp;")\
-        .replace("<", "&lt;").replace(">", "&gt;")
-    LongDesc = render_longdesc(lines[1:])
-
-    return (ShortDesc, LongDesc)
-
-
-def MarkupString(string, pkg, elem, lang='en'):
-    # Genshi does not touch strings that are marked with "Markup()"
-    # This function does the actual Markup call
-    # for any string with error checking
-
-    if string is None:
-        return None
-    try:
-        string = Markup(string)
-    except UnicodeDecodeError as errtxt:
-        print("----> %s UnicodeDecodeError in %s (lang='%s'): '%s'; ErrTxt: %s"
-              % (elem, pkg, lang, 'debug-string', errtxt), file=stderr)
-        try:
-            string = Markup(str(string, 'utf-8'))
-        except TypeError as errtxt:
-            print("====> %s TypeError in %s (lang='%s'): '%s'; ErrTxt: %s"
-                  % (elem, pkg, lang, 'debug-string', errtxt), file=stderr)
-    except TypeError as errtxt:
-        print("----> %s TypeError in %s (lang='%s'): '%s'; ErrTxt: %s"
-              % (elem, pkg, lang, 'debug-string', errtxt), file=stderr)
-    return string
-
-rendering_lib = ''
diff --git a/webtools_py3/blendsmarkdown.py b/webtools_py3/blendsmarkdown.py
new file mode 120000
index 0000000..d501f7c
--- /dev/null
+++ b/webtools_py3/blendsmarkdown.py
@@ -0,0 +1 @@
+../webtools/blendsmarkdown.py
\ No newline at end of file
diff --git a/webtools_py3/blendstasktools_udd.py b/webtools_py3/blendstasktools_udd.py
index 7772308..f524f0d 100644
--- a/webtools_py3/blendstasktools_udd.py
+++ b/webtools_py3/blendstasktools_udd.py
@@ -1,15 +1,23 @@
-#PORT=5441
-UDDPORT=5452
-PORT=UDDPORT
-DEFAULTPORT=5432
+'''Blends metapackages list a set of Dependencies.
+These might be fulfilled by the Debian package
+set or not.
+
+This interface provides some classes that contain
+all available information about such a Dependency,
+like whether it is an official package or not,
+in which distribution it is contained,
+or, if it is not contained, it obtains information
+from the tasks file about home page, license, WNPP etc.
+
+Copyright 2008-2012: Andreas Tille <tille at debian.org>
+License: GPL
+'''
 
 from sys import stderr, exit
 from subprocess import Popen, PIPE
+import codecs
 import os
 import grp
 import stat
-import urllib.request, urllib.parse, urllib.error
-import io
 import gzip
 import bz2
 import re
@@ -28,7 +36,6 @@ try:
     from debian import deb822
 except:
     from debian_bundle import deb822
-from blendsunicode import to_unicode
 from blendslanguages import languages
 
 import logging
@@ -37,167 +44,175 @@ logger = logging.getLogger('blends')
 logger.setLevel(logging.INFO)
 # logger.setLevel(logging.DEBUG)
 
+# PORT = 5441
+UDDPORT = 5452
+PORT = UDDPORT
+DEFAULTPORT = 5432
+
 # Seems to have problems on 17.04.2009
 # BASEURL  = 'http://ftp.debian.org/debian'
-BASEURL  = 'http://ftp.de.debian.org/debian'
-KEYSTOIGNORE = ( 'Architecture', 'Comment', 'Leaf', 'NeedConfig', 'Note', 'Section',
-                 'Needconfig', 'DontAvoid',
-                 'Enhances', 'Test-always-lang', 'Metapackage')
+BASEURL = 'http://ftp.de.debian.org/debian'
+KEYSTOIGNORE = ('Architecture', 'Comment', 'Leaf', 'NeedConfig', 'Note', 'Section',
+                'Needconfig', 'DontAvoid',
+                'Enhances', 'Test-always-lang', 'Metapackage')
 
 CONFDIR = 'webconf'
 
-COMPRESSIONEXTENSION='bz2'
-# COMPRESSIONEXTENSION='gz' # Translations are only available as bz2 since April 2009
+COMPRESSIONEXTENSION = 'bz2'
+# COMPRESSIONEXTENSION = 'gz' # Translations are only available as bz2 since April 2009
 
 HOMEPAGENONE = '#'
-HOMEPAGENONEFIELDS = ('homepage', 
-                      'pkg-url',     # Link to packages.debian.org search interface with exact
-                                     # package matches or URL to inofficial package
-                      'vcs-browser', # Browser-URL to packaging stuff in Vcs
-                     )
+HOMEPAGENONEFIELDS = (
+    'homepage',
+    'pkg-url',      # Link to packages.debian.org search interface with exact
+                    # package matches or URL to unofficial package
+    'vcs-browser',  # Browser-URL to packaging stuff in Vcs
+)
 
 PKGURLMASK = 'http://packages.debian.org/search?keywords=%s%%26searchon=names%%26exact=1%%26suite=all%%26section=all'
 
 DEPENDENT  = 0
 SUGGESTED  = 1
 DONE       = 2
-BUGLISTCAT = (DEPENDENT, SUGGESTED, DONE )
+BUGLISTCAT = (DEPENDENT, SUGGESTED, DONE)
 
 # FIXME: Obtain releases from UDD table releases (is this used at all???)
-releases  = {'oldstable'    : ('wheezy', 'wheezy-proposed-updates', 'wheezy-security'),
-             'stable'       : ('jessie', 'jessie-proposed-updates', 'jessie-security'),
-             'testing'      : ('stretch'),
-             'unstable'     : ('sid'),
-             'experimental' : ('experimental')
-            }
-
-pkgstatus = {'official_high' : # official package with high priority dependency
-                               { 'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
-                                 'components'   : ('main', ),
-                                 'dependencies' : ('Depends', 'Recommends'),
-                                 'fields-set'   : (),
-                                 'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package and has high relevance</a>',
-                                 'headline'     : 'Official Debian packages with high relevance',
-                                 'pdolinkname'  : 'Official Debian package',
-                                 'order'        : 1
-                               },
-             'official_low'  : # official package with low priority dependency
-                               { 'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
-                                 'components'   : ('main', ),
-                                 'dependencies' : ('Suggests', ),
-                                 'fields-set'   : (),
-                                 'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package but has lower relevance</a>',
-                                 'headline'     : 'Official Debian packages with lower relevance',
-                                 'pdolinkname'  : 'Official Debian package',
-                                 'order'        : 2
-                               },
-             'non-free'      : # package in contrib or non-free, priority decreased to Suggests in any case
-                               { 'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
-                                 'component'    : ('contrib', 'non-free'),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : (),
-                                 'colorcode'    : 'Green: The project is <a href="#%s">available in Debian packaging pool but is not in Debian main</a>',
-                                 'headline'     : 'Debian packages in contrib or non-free',
-                                 'pdolinkname'  : 'Debian package in contrib/non-free',
-                                 'order'        : 3
-                               },
-             'experimental'  : # package which exists only in experimental
-                               { 'releases'     : ('experimental', ),
-                                 'component'    : ('main', 'contrib', 'non-free'),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : (),
-                                 'colorcode'    : 'Yellow: The project is <a href="#%s">available in Debian packaging pool but is regarded as experimental</a>',
-                                 'headline'     : 'Debian packages in experimental',
-                                 'pdolinkname'  : 'Debian package in experimental',
-                                 'order'        : 4
-                               },
-             'new'           : # package in new queue
-                               { 'releases'     : ('new', ),
-                                 'component'    : ('main', 'contrib', 'non-free'),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : (),
-                                 'colorcode'    : 'Yellow: A package of project is <a href="#%s">is in Debian New queue and hopefully available soon</a>',
-                                 'headline'     : 'Debian packages in New queue (hopefully available soon)',
-                                 'pdolinkname'  : 'New Debian package',
-                                 'order'        : 5
-                               },
-             'pkgvcs'        : # Not yet packaged but packaging code in Vcs
-                               { 'releases'     : (),
-                                 'component'    : (),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : ('vcs-svn', 'vcs-git', 'vcs-browser'),
-                                 'colorcode'    : 'Yellow: The packaging of project is <a href="#%s">has started and a developer might try the packaging code in VCS or help packaging.</a>',
-                                 'headline'     : 'Packaging has started and developers might try the packaging code in VCS',
-                                 'pdolinkname'  : 'New Debian package',
-                                 'order'        : 6
-                               },
-             'unofficial'    : # unofficial packages outside Debian
-                               { 'releases'     : (),
-                                 'component'    : (),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : ('pkg-url', ),
-                                 'colorcode'    : 'Yellow: There exists an <a href="#%s">unofficial package</a> of the project',
-                                 'headline'     : 'Unofficial packages built by somebody else',
-                                 'pdolinkname'  : 'Unofficial Debian package',
-                                 'order'        : 7
-                               },
-             'wnpp'          : # project which has at least a WNPP bug filed
-                               { 'releases'     : (),
-                                 'component'    : (),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : ('wnpp', ),
-                                 'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a> but there is some record of interest (WNPP bug).',
-                                 'headline'     : 'No known packages available but some record of interest (WNPP bug)',
-                                 'pdolinkname'  : 'Debian package not available',
-                                 'order'        : 8
-                               },
-             'prospective'   : # projects which might be interesting for a Blend but no work is done yet
-                               { 'releases'     : (),
-                                 'component'    : (),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : ('homepage', ),  # TODO: a description should be set as well ...
-                                 'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a>.',
-                                 'headline'     : 'Should not show up here',
-                                 'pdolinkname'  : '',
-                                 'order'        : 9
-                               },
-             'ignore'        : # Package inside Debian which is "under observation"
-                               { 'releases'     : (list(releases.keys())),
-                                 'component'    : ('main', 'contrib', 'non-free'),
-                                 'dependencies' : ('Ignore', ),
-                                 'fields-set'   : (),
-                                 'colorcode'    : '%s',
-                                 'headline'     : 'Should not show up here',
-                                 'pdolinkname'  : '',
-                                 'order'        : 10
-                               },
-             'avoid'         : # Package inside Debian which should not go to a install medium of the Blend
-                               { 'releases'     : (list(releases.keys())),
-                                 'component'    : ('main', 'contrib', 'non-free'),
-                                 'dependencies' : ('Avoid', ),
-                                 'fields-set'   : (),
-                                 'colorcode'    : '%s',
-                                 'headline'     : 'Should not show up here',
-                                 'pdolinkname'  : '',
-                                 'order'        : 11
-                               },
-             'unknown'       : # Everything else
-                               { 'releases'     : (),
-                                 'component'    : (),
-                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
-                                 'fields-set'   : (),
-                                 'colorcode'    : 'White: The project has an %s status.',
-                                 'headline'     : 'Should not show up here',
-                                 'pdolinkname'  : '',
-                                 'order'        : 100
-                               },
-             }
+releases = {
+    'oldstable'   : ('wheezy', 'wheezy-proposed-updates', 'wheezy-security'),
+    'stable'      : ('jessie', 'jessie-proposed-updates', 'jessie-security'),
+    'testing'     : ('stretch', ),
+    'unstable'    : ('sid', ),
+    'experimental': ('experimental', )
+}
+
+pkgstatus = {
+    'official_high': {  # official package with high priority dependency
+        'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
+        'components'   : ('main', ),
+        'dependencies' : ('Depends', 'Recommends'),
+        'fields-set'   : (),
+        'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package and has high relevance</a>',
+        'headline'     : 'Official Debian packages with high relevance',
+        'pdolinkname'  : 'Official Debian package',
+        'order'        : 1
+    },
+    'official_low': {  # official package with low priority dependency
+        'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
+        'components'   : ('main', ),
+        'dependencies' : ('Suggests', ),
+        'fields-set'   : (),
+        'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package but has lower relevance</a>',
+        'headline'     : 'Official Debian packages with lower relevance',
+        'pdolinkname'  : 'Official Debian package',
+        'order'        : 2
+    },
+    'non-free': {  # package in contrib or non-free, priority decreased to Suggests in any case
+        'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
+        'component'    : ('contrib', 'non-free'),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : (),
+        'colorcode'    : 'Green: The project is <a href="#%s">available in Debian packaging pool but is not in Debian main</a>',
+        'headline'     : 'Debian packages in contrib or non-free',
+        'pdolinkname'  : 'Debian package in contrib/non-free',
+        'order'        : 3
+    },
+    'experimental': {  # package which exists only in experimental
+        'releases'     : ('experimental', ),
+        'component'    : ('main', 'contrib', 'non-free'),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : (),
+        'colorcode'    : 'Yellow: The project is <a href="#%s">available in Debian packaging pool but is regarded as experimental</a>',
+        'headline'     : 'Debian packages in experimental',
+        'pdolinkname'  : 'Debian package in experimental',
+        'order'        : 4
+    },
+    'new': {  # package in new queue
+        'releases'     : ('new', ),
+        'component'    : ('main', 'contrib', 'non-free'),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : (),
+        'colorcode'    : 'Yellow: A package of the project <a href="#%s">is in the Debian New queue and hopefully available soon</a>',
+        'headline'     : 'Debian packages in New queue (hopefully available soon)',
+        'pdolinkname'  : 'New Debian package',
+        'order'        : 5
+    },
+    'pkgvcs': {  # Not yet packaged but packaging code in Vcs
+        'releases'     : (),
+        'component'    : (),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : ('vcs-svn', 'vcs-git', 'vcs-browser'),
+        'colorcode'    : 'Yellow: The packaging of the project <a href="#%s">has started and a developer might try the packaging code in VCS or help packaging</a>.',
+        'headline'     : 'Packaging has started and developers might try the packaging code in VCS',
+        'pdolinkname'  : 'New Debian package',
+        'order'        : 6
+    },
+    'unofficial': {  # unofficial packages outside Debian
+        'releases'     : (),
+        'component'    : (),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : ('pkg-url', ),
+        'colorcode'    : 'Yellow: There exists an <a href="#%s">unofficial package</a> of the project',
+        'headline'     : 'Unofficial packages built by somebody else',
+        'pdolinkname'  : 'Unofficial Debian package',
+        'order'        : 7
+    },
+    'wnpp': {  # project which has at least a WNPP bug filed
+        'releases'     : (),
+        'component'    : (),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : ('wnpp', ),
+        'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a> but there is some record of interest (WNPP bug).',
+        'headline'     : 'No known packages available but some record of interest (WNPP bug)',
+        'pdolinkname'  : 'Debian package not available',
+        'order'        : 8
+    },
+    'prospective': {  # projects which might be interesting for a Blend but no work is done yet
+        'releases'     : (),
+        'component'    : (),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : ('homepage', ),  # TODO: a description should be set as well ...
+        'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a>.',
+        'headline'     : 'Should not show up here',
+        'pdolinkname'  : '',
+        'order'        : 9
+    },
+    'ignore': {  # Package inside Debian which is "under observation"
+        'releases'     : tuple(releases.keys()),
+        'component'    : ('main', 'contrib', 'non-free'),
+        'dependencies' : ('Ignore', ),
+        'fields-set'   : (),
+        'colorcode'    : '%s',
+        'headline'     : 'Should not show up here',
+        'pdolinkname'  : '',
+        'order'        : 10
+    },
+    'avoid': {  # Package inside Debian which should not go to an install medium of the Blend
+        'releases'     : tuple(releases.keys()),
+        'component'    : ('main', 'contrib', 'non-free'),
+        'dependencies' : ('Avoid', ),
+        'fields-set'   : (),
+        'colorcode'    : '%s',
+        'headline'     : 'Should not show up here',
+        'pdolinkname'  : '',
+        'order'        : 11
+    },
+    'unknown': {  # Everything else
+        'releases'     : (),
+        'component'    : (),
+        'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+        'fields-set'   : (),
+        'colorcode'    : 'White: The project has an %s status.',
+        'headline'     : 'Should not show up here',
+        'pdolinkname'  : '',
+        'order'        : 100
+    },
+}
 
 # http://wiki.python.org/moin/HowTo/Sorting#Sortingbykeys
-_tmplist=[]
-for key in list(pkgstatus.keys()):
-    _tmplist.append((key,pkgstatus[key]['order']))
-_tmpsorted = sorted(_tmplist, key=lambda x:(x[1], x[0]))
+_tmplist = []
+for key in pkgstatus.keys():
+    _tmplist.append((key, pkgstatus[key]['order']))
+_tmpsorted = sorted(_tmplist, key=lambda x: (x[1], x[0]))
 pkgstatus_sortedkeys = []
 for _tmp in _tmpsorted:
     pkgstatus_sortedkeys.append(_tmp[0])
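For reference, the same ordering can be expressed in a single sorted() call; a minimal sketch equivalent to the loop above, using the pkgstatus dictionary defined earlier:

    pkgstatus_sortedkeys = sorted(pkgstatus, key=lambda k: (pkgstatus[k]['order'], k))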
@@ -208,45 +223,48 @@ for pkgstat in pkgstatus:
         if dep not in dep_strength_keys:
             dep_strength_keys.append(dep)
 
-rmpub = open('remove-publications-from-tasks-files.dat','w+')
+rmpub = codecs.open('remove-publications-from-tasks-files.dat', 'w+', 'utf-8')
 
-license_in_component = {'main'     : 'DFSG free',
-                        'contrib'  : 'DFSG free, but needs non-free components',
-                        'non-free' : 'non-free'
-            }
+license_in_component = {
+    'main'    : 'DFSG free',
+    'contrib' : 'DFSG free, but needs non-free components',
+    'non-free': 'non-free'
+}
 
 try:
     import psutil
-    has_psutils=True
+    has_psutils = True
 except ImportError:
-    has_psutils=False
+    has_psutils = False
+
 
 def SetFilePermissions(usefile):
     try:
         blendsgid = grp.getgrnam("blends").gr_gid
         os.chown(usefile, -1, blendsgid)
-        os.chmod(usefile,  stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH )
+        os.chmod(usefile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP |
+                 stat.S_IWGRP | stat.S_IROTH)
         # os.system("ls -l %s" % usefile)
     except KeyError:
         # if groups 'blends' does not exist on the machine we are simply testing and setting group permissions is not needed
         pass
 
-LOCKFILE='/var/lock/blends.lock'
+
+LOCKFILE = '/var/lock/blends.lock'
 def LockBlendsTools():
     """Locking mechanism to make sure the scripts will not run in parallel
        which happened because of IO problems on udd.debian.org"""
     if not has_psutils:
-	    logger.warning("Package python-psutil is missing.  No locking support available")
-	    return
+        logger.warning("Package python-psutil is missing.  No locking support available")
+        return
     if os.path.exists(LOCKFILE):
         try:
-            lf = open(LOCKFILE, 'r')
-            pid = int(lf.readline())
-            lf.close()
-            try: # psutils has changed interfacde and get_pid_list() does not exist any more in newer implementations
-                pidlist=psutil.get_pid_list()
+            with open(LOCKFILE, 'r') as lf:
+                pid = int(lf.readline())
+            try:  # psutil has changed its interface and get_pid_list() does not exist any more in newer implementations
+                pidlist = psutil.get_pid_list()
             except AttributeError:
-                pidlist=psutil.pids()
+                pidlist = psutil.pids()
             if pid in pidlist:
                 logger.error("Another process rebuilding web sentinel pages with PID %i is running. Exit." % pid)
                 exit()
@@ -255,17 +273,17 @@ def LockBlendsTools():
                 os.unlink(LOCKFILE)
         except IOError as e:
             pass
-    pid = os.getpid()
-    lf = open(LOCKFILE, 'w')
-    print(pid, file=lf)
-    lf.close()
+    with open(LOCKFILE, 'w') as lf:
+        lf.write('%i\n' % os.getpid())
     SetFilePermissions(LOCKFILE)
 
+
 def UnlockBlendsTools():
     """Unlock previousely locked file"""
     if os.path.exists(LOCKFILE):
         os.unlink(LOCKFILE)
 
+
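The locking helpers above are presumably wrapped around a whole rebuild run; a minimal usage sketch (the rebuild step itself is hypothetical):

    LockBlendsTools()            # exits if another rebuild with a live PID holds the lock
    try:
        pass                     # ... rebuild the web sentinel pages here (hypothetical work) ...
    finally:
        UnlockBlendsTools()      # always remove /var/lock/blends.lock afterwards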
 def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
     # Create a list of status of dependencies out of pkgstatus dictionary
     use_dependencystatus = []
@@ -279,7 +297,7 @@ def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
     else:
         # verify correctly given dependencies
         for pkgstat in dependencystatus:
-            if pkgstat in list(pkgstatus.keys()):
+            if pkgstat in pkgstatus:
                 use_dependencystatus.append(pkgstat)
             else:
                 logger.error("Unknown dependencystatus %s" % pkgstat)
@@ -309,16 +327,16 @@ except psycopg2.OperationalError as err:
 curs = conn.cursor()
 # uddlog = open('logs/uddquery.log', 'w')
 
+
 def _execute_udd_query(query):
     try:
         curs.execute(query)
         logger.debug(query)
     except psycopg2.ProgrammingError as err:
-        print("Problem with query\n%s" % ((query)), file=stderr)
-        print(err, file=stderr)
+        stderr.write("Problem with query\n%s\n%s\n" % (query, str(err)))
         exit(-1)
     except psycopg2.DataError as err:
-        print("%s; query was\n%s" % (err, query), file=stderr)
+        stderr.write("%s; query was\n%s\n" % (str(err), query))
 
 query = """PREPARE query_pkgs (text[],text[],text) AS
         SELECT * FROM blends_query_packages($1,$2,$3) AS (
@@ -327,10 +345,10 @@ query = """PREPARE query_pkgs (text[],text[],text) AS
           source text, section text, task text, homepage text,
           maintainer_name text, maintainer_email text,
           "vcs-type" text, "vcs-url" text, "vcs-browser" text,
-      changed_by text,
+          changed_by text,
           enhanced text[],
           releases text[], versions text[], architectures text[],
-      unstable_upstream text, unstable_parsed_version text, unstable_status text,
+          unstable_upstream text, unstable_parsed_version text, unstable_status text,
           vote int, recent int, insts int, -- popcon
           debtags text[],
           screenshot_versions text[], image text[], icon text[],
@@ -392,7 +410,7 @@ query = """PREPARE query_new (text[]) AS SELECT
          bibpages.value   AS "pages",
          bibeprint.value  AS "eprint"
                    FROM new_packages p
-           JOIN new_sources s ON p.source = s.source AND p.version = s.version
+                   JOIN new_sources s ON p.source = s.source AND p.version = s.version
     LEFT OUTER JOIN bibref bibyear    ON p.source = bibyear.source    AND bibyear.rank = 0    AND bibyear.key    = 'year'    AND bibyear.package = ''
     LEFT OUTER JOIN bibref bibtitle   ON p.source = bibtitle.source   AND bibtitle.rank = 0   AND bibtitle.key   = 'title'   AND bibtitle.package = ''
     LEFT OUTER JOIN bibref bibauthor  ON p.source = bibauthor.source  AND bibauthor.rank = 0  AND bibauthor.key  = 'author'  AND bibauthor.package = ''
@@ -404,7 +422,7 @@ query = """PREPARE query_new (text[]) AS SELECT
     LEFT OUTER JOIN bibref bibnumber  ON p.source = bibnumber.source  AND bibnumber.rank = 0  AND bibnumber.key  = 'number'  AND bibnumber.package = ''
     LEFT OUTER JOIN bibref bibpages   ON p.source = bibpages.source   AND bibpages.rank = 0   AND bibpages.key   = 'pages'   AND bibpages.package = ''
     LEFT OUTER JOIN bibref bibeprint  ON p.source = bibeprint.source  AND bibeprint.rank = 0  AND bibeprint.key  = 'eprint'  AND bibeprint.package = ''
-                   WHERE (p.package, p.version) IN  
+                   WHERE (p.package, p.version) IN
                          (SELECT package, max(version) FROM
                    new_packages WHERE package = ANY ($1) GROUP BY package) ORDER BY p.package"""
 _execute_udd_query(query)
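The prepared statements are presumably invoked later via EXECUTE, with array arguments built by the List2PgArray helper defined further down; a minimal sketch with hypothetical package names (the EXECUTE call pattern itself is an assumption):

    pkgs = List2PgArray(['foo-tool', 'bar-viewer'])        # hypothetical names -> '{"foo-tool","bar-viewer"}'
    _execute_udd_query("EXECUTE query_new ('%s')" % pkgs)
    for row in curs.fetchall():
        print(row)                                         # one tuple per package found in the NEW queue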
@@ -481,15 +499,15 @@ _execute_udd_query(query)
 # Sometimes the tasks file contains dependencies from virtual packages and we have to
 # obtain the real packages which provide this dependency.
 # First check whether there are such packages (only names)
-query = """PREPARE query_provides (text[]) AS 
+query = """PREPARE query_provides (text[]) AS
            SELECT DISTINCT package, provides FROM packages WHERE provides IS NOT NULL AND package = ANY($1) ;"""
 _execute_udd_query(query)
 
 # Obtain more detailed information about packages that might provide a dependency
-#query = """PREPARE query_provides_version_release (text) AS 
+# query = """PREPARE query_provides_version_release (text) AS
 #           SELECT package, version, release FROM packages WHERE provides = $1
 #                  GROUP BY version, package, release ORDER BY version DESC;"""
-#_execute_udd_query(query)
+# _execute_udd_query(query)
 
 # Obtain the releases featuring a certain package, in case a package might show up in different components when
 # considering different releases we apply a preference for main over contrib over non-free.  If this is the case
@@ -516,29 +534,17 @@ if curs.rowcount > 0:
     for release in curs.fetchall():
         release_order[release[0]] = release[1]
 
-def List2PgArray(list):
+def List2PgArray(l):
     # turn a list of strings into the syntax for a PostgreSQL array:
     # {"string1","string2",...,"stringN"}
-    if not list:
-        return '{}'
-    komma='{'
-    PgArray=''
-    for s in list:
-        PgArray=PgArray+komma+'"'+s+'"'
-        komma=','
-    return PgArray+'}'
-
-def List2PgSimilarArray(list):
+    return '{' + ','.join(('"' + s + '"') for s in l) + '}'
+
+
+def List2PgSimilarArray(l):
     # turn a list of strings surrounded by '%' into the syntax for a PostgreSQL array to enable LIKE conditions:
     # {"%string1%","%string2%",...,"%stringN%"}
-    if not list:
-        return '{}'
-    komma='{'
-    PgSimArray=''
-    for s in list:
-        PgSimArray=PgSimArray+komma+'"%'+s+'%"'
-        komma=','
-    return PgSimArray+'}'
+    return '{' + ','.join(('"%' + s + '%"') for s in l) + '}'
+
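For example (the package names are arbitrary examples):

    List2PgArray(['r-base', 'python3-numpy'])         # -> '{"r-base","python3-numpy"}'
    List2PgSimilarArray(['r-base', 'python3-numpy'])  # -> '{"%r-base%","%python3-numpy%"}'
    List2PgArray([])                                  # -> '{}' (same as the old loop version)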
 
 def ReadConfig(blendname=''):
     # Try to read config file CONFDIR/<blendname>.conf
@@ -547,26 +553,28 @@ def ReadConfig(blendname=''):
         # if config file can not be found in local dir, try /etc/blends/webconf as fallback
         conffile_default = '/etc/blends/webconf/' + blendname + '.conf'
         if not os.access(conffile_default, os.R_OK):
-            logger.error("Unable to open config file %s or %s." % (conffile, conffile_default))
+            logger.error("Unable to open config file %s or %s."
+                         % (conffile, conffile_default))
             exit(-1)
         conffile = conffile_default
     f = open(conffile, 'r')
-    ret = { 'Blend'       : '',
-            'projectname' : '',
-            'projecturl'  : '',
-            'homepage'    : '',
-            'aliothurl'   : '',
-            'projectlist' : '',
-            'pkglist'     : '',
-            'logourl'     : '',
-            'css'         : '',
-            'outputdir'   : '',
-            'datadir'     : '',
-            'advertising' : None,  # headline with advertising string is optional
-            'ubuntuhome'  : None,
-            'projectubuntu':None,
-          }
-    for stanza in deb822.Sources.iter_paragraphs(f, shared_storage=False):    
+    ret = {
+        'Blend'        : '',
+        'projectname'  : '',
+        'projecturl'   : '',
+        'homepage'     : '',
+        'aliothurl'    : '',
+        'projectlist'  : '',
+        'pkglist'      : '',
+        'logourl'      : '',
+        'css'          : '',
+        'outputdir'    : '',
+        'datadir'      : '',
+        'advertising'  : None,  # headline with advertising string is optional
+        'ubuntuhome'   : None,
+        'projectubuntu': None,
+    }
+    for stanza in deb822.Sources.iter_paragraphs(f, shared_storage=False):
         ret['Blend']       = stanza['blend']        # short name of the project
         ret['projectname'] = stanza['projectname']  # Printed name of the project
         ret['projecturl']  = stanza['projecturl']   # Link to the developer page with dynamic content
@@ -597,6 +605,7 @@ def ReadConfig(blendname=''):
 
     return ret
 
+
 def CheckOrCreateOutputDir(maindir, subdir):
     outputdir = maindir + '/' + subdir
     if not os.access(outputdir, os.W_OK):
@@ -607,32 +616,23 @@ def CheckOrCreateOutputDir(maindir, subdir):
             try:
                 os.system("mkdir -p %s" % outputdir)
             except:
-                logger.error("Unable to create output dir " + outputdir)
+                logger.error("Unable to create output dir %s" % outputdir)
                 return None
     return outputdir
 
 def RowDictionaries(cursor):
     """Return a list of dictionaries which specify the values by their column names"""
 
-    description = cursor.description
-    if not description:
+    if not cursor.description:
         # even if there are no data sets to return the description should contain the table structure.  If not something went
-        # wrong and we return NULL as to represent a problem
-        return NULL
-    if cursor.rowcount <= 0:
-	# if there are no rows in the cursor we return an empty list
-        return []
-
-    data = cursor.fetchall()
-    result = []
-    for row in data:
-        resultrow = {}
-        i = 0
-        for dd in description:
-            resultrow[dd[0]] = row[i]
-            i += 1
-        result.append(resultrow)
-    return result
+        # wrong and we return None as to represent a problem
+        return None
+
+    return [dict((dd[0],
+                  dv.decode('utf-8') if isinstance(dv, bytes) else dv)
+                 for (dd, dv) in zip(cursor.description, row))
+            for row in cursor]
+
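A hedged usage sketch of RowDictionaries(); the query text is hypothetical, only the column-name access pattern matters:

    curs.execute("SELECT package, version FROM packages LIMIT 3")   # hypothetical query
    for row in RowDictionaries(curs) or []:                         # 'or []' guards the None case
        print(row['package'], row['version'])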
 
 def BrowserFromVcsURL(vcs_type, vcs_url):
     # Guess Vcs-Browser URL from VCS URL
@@ -667,6 +667,7 @@ detect_vcs_cvs_re        = re.compile("://.*cvs")
 detect_vcs_svn_re        = re.compile("://.*svn")
 detect_vcs_git_re        = re.compile("://.*git")
 
+
 def VcsTypeFromBrowserURL(vcs_browser):
     # Guess Vcs-Type from vcs_browser
     if detect_vcs_cvs_re.search(vcs_browser):
@@ -677,13 +678,13 @@ def VcsTypeFromBrowserURL(vcs_browser):
         return 'Git'
     return 'Unknown VCS'
 
-
 # The following keys will be mostly used for programs that
 # are not yet existing in Debian and will go to our todo list
-PROPERTIES=('homepage', # Homepage of program
-            'section',  # Section of package in the Debian hierarchy
-            'source',   # Keep the source package name which is needed for ddpo subscription
-           )
+PROPERTIES = (
+    'homepage',  # Homepage of program
+    'section',   # Section of package in the Debian hierarchy
+    'source',    # Keep the source package name which is needed for ddpo subscription
+)
 
 ###################################################################################################################
 #=================================================================================================================#
@@ -692,68 +693,63 @@ PROPERTIES=('homepage', # Homepage of program
 #######################################################
 
 
-def GetPkgstat(strength):        
-        if strength == 'd' or strength == 'r':
-            pkgstat = 'official_high'
-        elif strength == 's':
-            pkgstat = 'official_low'
-        elif strength == 'i':
-            pkgstat = 'ignore'
-        elif strength == 'a':
-            pkgstat = 'avoid'
-        else:
-            pkgstat = 'unknown'
-        return pkgstat
+def GetPkgstat(strength):
+    return {
+        'r': 'official_high',
+        'd': 'official_high',
+        's': 'official_low',
+        'i': 'ignore',
+        'a': 'avoid',
+        }.get(strength, 'unknown')
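The lookup above maps a dependency-strength letter to a pkgstatus key; for example:

    GetPkgstat('d')   # -> 'official_high'
    GetPkgstat('s')   # -> 'official_low'
    GetPkgstat('x')   # -> 'unknown' (any unrecognised strength)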
 
 
 class DependantPackage:
     # Hold information about a package that is in dependency list
 
     def __init__(self, blendname=None, taskname=None):
-        self.blendname      = blendname # Blend that includes the package in dependency list
-        self.taskname       = taskname  # Task which includes the Dependency
-        self.pkg            = None # Name of dependant package
-        self.source         = None # Name of source package of the dependency
-        self.PrintedName    = None # Only for Meta package names - no use for a real dependant package
-                                   # FIXME -> object model
-        self.pkgstatus      = 'unknown' # global pkgstatus: characterizes status of dependency, release, packaging status
-        self.releases       = []   # List of releases a package might be in
-        self.component      = None # One of: 'main', 'contrib', 'non-free', if a package shows up in several components which
-                                   # might happen over different releases, just prefer main over contrib over non-free
-        self.why            = None # basically used as comment
+        self.blendname      = blendname  # Blend that includes the package in dependency list
+        self.taskname       = taskname   # Task which includes the Dependency
+        self.pkg            = None  # Name of dependent package
+        self.source         = None  # Name of source package of the dependency
+        self.PrintedName    = None  # Only for Meta package names - no use for a real dependent package
+                                    # FIXME -> object model
+        self.pkgstatus      = 'unknown'  # global pkgstatus: characterizes status of dependency, release, packaging status
+        self.releases       = []    # List of releases a package might be in
+        self.component      = None  # One of: 'main', 'contrib', 'non-free', if a package shows up in several components which
+                                    # might happen over different releases, just prefer main over contrib over non-free
+        self.why            = None  # basically used as comment
 
         self.properties     = {}
         self.properties['license']     = 'unknown'
         for field in HOMEPAGENONEFIELDS:
             self.properties[field]    = HOMEPAGENONE
-        self.properties['Enhances'] = {} # Dictionary Enhancing pkg name as key, Link to package information as value; empty in most cases
-                                         # because Enhances relations are quite seldom
-        self.properties['stable_testing_version'] = [] # (release, version) tuples where release is codename for stable and testing
-        self.vcs_found      = 0    # we need a flag to store the fact whether Vcs information of a package is in UDD
-        self.version        = []   # list of {'release', 'version', 'archs'} dictionary containing version and architecture information
-        self.outdated       = {}   # If not empty directory then release='upstream' and package is outdated
-        self.popcon         = {}   # dictionary containing vote and recnt values of popcon information
-        self.popconsubmit   = 0    # number of popcon submissions - in case popcon import into UDD might be broken this remains 0
-        self.debtags        = []   # list of {'tag', 'value'} dictionary containing debtag information
-        self.screenshots    = []   # list of {'version', 'url'} dictionary containing screenshot information
-        self.icon           = None # URL of small screenshot icon
-        self.screenshot_url = None # URL to screenshots.debian.net
-        self.responsible    = None # E-Mail address of issuer of ITP or some person
-                                   # who volunteered to care for this program
-        self.filename       = None # Filename of package in the Debian pool
-        self.desc           = {}   # Prospective packages should have a description ...
-                                   # ... which could be copied to (or from if exists)
-                                   # WNPP bug and finally can be used for packaging
-        self.desc['en']     = {}   # An English description should be available in any case
-        self.experimental   = 0    # Set to 1 if package *only* in experimental but not in unstable/testing/stable
-        self.remark         = {}   # Optional remark for a package
-        self.dep_strength   = 0    # Type of Dependency (Depends, Recommends, Suggests, Experimental, New, Avoid, Ignore, WNPP
-
+        self.properties['Enhances'] = {}  # Dictionary Enhancing pkg name as key, Link to package information as value; empty in most cases
+                                          # because Enhances relations are quite seldom
+        self.properties['stable_testing_version'] = []  # (release, version) tuples where release is codename for stable and testing
+        self.vcs_found      = 0     # we need a flag to store the fact whether Vcs information of a package is in UDD
+        self.version        = []    # list of {'release', 'version', 'archs'} dictionary containing version and architecture information
+        self.outdated       = {}    # If not empty dictionary then release='upstream' and package is outdated
+        self.popcon         = {}    # dictionary containing vote and recent values of popcon information
+        self.popconsubmit   = 0     # number of popcon submissions - in case popcon import into UDD might be broken this remains 0
+        self.debtags        = []    # list of {'tag', 'value'} dictionary containing debtag information
+        self.screenshots    = []    # list of {'version', 'url'} dictionary containing screenshot information
+        self.icon           = None  # URL of small screenshot icon
+        self.screenshot_url = None  # URL to screenshots.debian.net
+        self.responsible    = None  # E-Mail address of issuer of ITP or some person
+                                    # who volunteered to care for this program
+        self.filename       = None  # Filename of package in the Debian pool
+        self.desc           = {}    # Prospective packages should have a description ...
+                                    # ... which could be copied to (or from if exists)
+                                    # WNPP bug and finally can be used for packaging
+        self.desc['en']     = {}    # An English description should be available in any case
+        self.experimental   = 0     # Set to 1 if package *only* in experimental but not in unstable/testing/stable
+        self.remark         = {}    # Optional remark for a package
+        self.dep_strength   = 0     # Type of Dependency (Depends, Recommends, Suggests, Experimental, New, Avoid, Ignore, WNPP)
 
     # sort these objects according to the package name
     def __cmp__(self, other):
         # Comparing with None object has to return something reasonable
-        if other == None:
+        if other is None:
             return -2
         # Sort according to package name
         return cmp(self.pkg, other.pkg)
@@ -774,11 +770,8 @@ class DependantPackage:
             ret += ", versions: "     + str(self.version)
         if self.desc:
             ret += ", desc: "         + str(self.desc)
-        for prop in list(self.properties.keys()):
-            try:
-                ret += ", %s: %s" % (prop, self.properties[prop])
-            except UnicodeEncodeError:            
-                ret += ", %s: <UnicodeEncodeError>" % (prop)
+        for prop in self.properties:
+            ret += ", %s: %s" % (prop, str(self.properties[prop]))
         try:
             ret += ", popcon = %i (%i)" % (self.popcon['vote'], self.popcon['recent'])
         except:
@@ -794,7 +787,8 @@ class DependantPackage:
         return ret
 
     def SetPublications(self, row):
-        for pub in ("year", "title", "authors", "doi", "pubmed", "url", "journal", "volume", "number", "pages", "eprint" ):
+        for pub in ("year", "title", "authors", "doi", "pubmed", "url",
+                    "journal", "volume", "number", "pages", "eprint"):
             if row[pub]:
                 if pub == "pages":
                     row[pub] = re.sub("--", "-", row[pub])
@@ -831,13 +825,20 @@ class DependantPackage:
                     if row[pub] != authors_string:
                         # emergency brake if algorithm fails to detect non-names like '1000 Genome Project Data Processing Subgroup'
                         if authors_string.count(',') > row[pub].count(' and '):
-                            logger.warning("Refuse to change Author string in %s: '%s'(%i) -> '%s'(%i)", \
-                                            self.pkg, to_unicode(row[pub]), row[pub].count(' and '), to_unicode(authors_string), authors_string.count(','))
+                            logger.warning("Refuse to change Author string in %s: '%s'(%i) -> '%s'(%i)"
+                                           % (self.pkg, row[pub], row[pub].count(' and '), authors_string, authors_string.count(',')))
                         else:
-                            logger.debug("Author string changed in %s: '%s' -> '%s'", self.pkg, to_unicode(row[pub]), to_unicode(authors_string))
+                            logger.debug("Author string changed in %s: '%s' -> '%s'"
+                                         % (self.pkg, row[pub], authors_string))
                             row[pub] = authors_string
                 if 'published' not in self.properties:
                     self.properties['published'] = {}
+                if pub in self.properties['published']:
+                    if self.properties['published'][pub] == row[pub]:
+                        rmpub.write("%s: %s: Published-%s: %s\n" % (self.taskname, self.pkg, pub, row[pub]))
+                        logger.info("%s/%s: Publication-%s = %s can be removed" % (self.taskname, self.pkg, pub, row[pub]))
+                    else:
+                        logger.info("%s conflicting fields Publication-%s in tasks file with value '%s' and in UDD with value '%s'" % (self.pkg, pub, self.properties['published'][pub], row[pub]))
                 self.properties['published'][pub] = row[pub]
 
 
@@ -846,14 +847,14 @@ class Tasks:
     # This class concerns _all_ tasks of a Blend and is the most
     # complete source of information.  If only a single task
     # should be handled by a tool that uses blendtasktools
-    # probably the class  (see below) is
+    # probably the class TaskDependencies (see below) is
     # your friend
 
     def __init__(self, blendname):
 
         os.system("mkdir -p logs")
-        LOG_FILENAME = 'logs/'+blendname+'.log'
-        handler = logging.handlers.RotatingFileHandler(filename=LOG_FILENAME,mode='w')
+        LOG_FILENAME = os.path.join('logs', blendname + '.log')
+        handler = logging.handlers.RotatingFileHandler(filename=LOG_FILENAME, mode='w')
         formatter = logging.Formatter("%(levelname)s - %(filename)s (%(lineno)d): %(message)s")
         handler.setFormatter(formatter)
         logger.addHandler(handler)
@@ -866,24 +867,20 @@ class Tasks:
         self.data            = ReadConfig(blendname)
         self.blendname       = self.data['Blend']
         self._InitMetapackages()
-        self.tasks           = {} # Dictionary of TasksDependency objects
-        self.alldeps_in_main = [] # sorted string list of package names with all packages
-                                  # relevant for a Blend that are in main Debian (for use in DDTP)
-        self.alldeps_in_main_info = {} # complete dictionary with package information
-                                  # with all packages relevant for a Blend that are in
-                                  # main to easily feed DDTP translation into the structures
-                                  # -->
-                                  # self.alldeps_in_main = self.alldeps_in_main_info.keys().sort()
+        self.tasks           = {}  # Dictionary of TasksDependency objects
+        self.alldeps_in_main = []  # sorted string list of package names with all packages
+                                   # relevant for a Blend that are in main Debian (for use in DDTP)
+        self.alldeps_in_main_info = {}  # complete dictionary with package information
+                                   # with all packages relevant for a Blend that are in
+                                   # main to easily feed DDTP translation into the structures
+                                   # -->
+                                   # self.alldeps_in_main = self.alldeps_in_main_info.keys().sort()
 
     def _InitMetapackages(self):
         # sorted list of metapackage names
-        self.metapackagekeys = []
         query = "SELECT task FROM blends_tasks WHERE blend = '%s' ORDER BY task" % (self.blendname)
         _execute_udd_query(query)
-        if curs.rowcount > 0:
-            temp = curs.fetchall()
-            self.metapackagekeys = [t[0] for t in temp]
-
+        self.metapackagekeys = [t[0] for t in curs.fetchall()]
 
     def GetAllDependencies(self, source=0):
         # If we want to subscribe ddpo we need the source package names.
@@ -924,7 +921,6 @@ class Tasks:
             translations = None
             if pkgname in metapkg_translations:
                 translations = metapkg_translations[pkgname]
-        
             td.SetMetapackageInfo(pkgname, translations)
             logger.debug("Task : %s " % task)
             if td.GetTaskDependencies(source):
@@ -948,7 +944,7 @@ class Tasks:
         # datastructure
         ret = {}
         use_dependencystatus = GetDependencies2Use(dependencystatus, 'experimental')
-             
+
         for task in self.metapackagekeys:
             tdeps = self.tasks[task]
             list = []
@@ -982,7 +978,6 @@ class Tasks:
         self.alldeps_in_main.sort()
         return self.alldeps_in_main
 
-
     def GetTaskDescDict(self):
         # Return dictionary with description information of all tasks of a Blend
         return self.tasks
@@ -1001,29 +996,29 @@ class Tasks:
         # otherwise
         for task in self.metapackagekeys:
             tdeps = self.tasks[task]
-            for dependency in list(tdeps.dependencies.keys()):
+            for dependency in tdeps.dependencies:
                 for dep in tdeps.dependencies[dependency]:
                     if dep.properties['Enhances'] != {}:
                         logger.debug("Package %s is enhanced by:" % dep.pkg)
-                        for enh in sorted(list(dep.properties['Enhances'].keys())):
+                        for enh in dep.properties['Enhances']:
                             # seek for Enhances on same page
                             found = 0
-                            for seek_dependency in list(tdeps.dependencies.keys()):
+                            for seek_dependency in tdeps.dependencies:
                                 for enhdep in tdeps.dependencies[seek_dependency]:
                                     if enh == enhdep.pkg:
-                                        dep.properties['Enhances'][enh] = '#'+enh
-                                        found = 1 # found enhances in same task
+                                        dep.properties['Enhances'][enh] = '#' + enh
+                                        found = 1  # found enhances in same task
                                         break
-                            if found == 0: # If not found seek in other tasks
+                            if found == 0:  # If not found seek in other tasks
                                 for enhtask in self.metapackagekeys:
                                     if enhtask == task:
                                         continue
                                     enhtdeps = self.tasks[enhtask]
-                                    for seek_dependency in list(enhtdeps.dependencies.keys()):
+                                    for seek_dependency in enhtdeps.dependencies:
                                         for enhdep in enhtdeps.dependencies[seek_dependency]:
                                             if enh == enhdep.pkg:
                                                 dep.properties['Enhances'][enh] = './' + enhtask + '#' + enh
-                                                found = 1 # found enhances in other task
+                                                found = 1  # found enhances in other task
                                                 break
                                     if found == 1:
                                         break
@@ -1066,72 +1061,34 @@ class TaskDependencies:
         # This is NOT YET implemented
         self.metadepends     = None
 
-
     def SetMetapackageInfo(self, pkgname, ddtptranslations=None):
         # Gather information (specifically description translations if exists) about metapackage itself
         self.metapkg             = DependantPackage(self.blendname, self.task)
-	#print "self.metapkg", self.metapkg
         self.metapkg.pkg         = pkgname
         self.metapkg.source      = self.blendname
         if not ddtptranslations:
             return
         for lang in languages:
-            if ddtptranslations['description_'+lang]:
+            if ddtptranslations['description_' + lang]:
                 self.metapkg.desc[lang] = {}
+                short = ddtptranslations['description_' + lang]
+                self.metapkg.desc[lang]['short'] = MarkupString(short, self.metapkg.pkg, 'taskShortDesc', lang)
                 try:
-                    short = (ddtptranslations['description_'+lang])
-                    self.metapkg.desc[lang]['short'] = MarkupString(short, self.metapkg.pkg, 'taskShortDesc', lang)
-
-                except UnicodeEncodeError as err:
-                    logger.error("===> UnicodeDecodeError in metapackage %s (lang='%s'): '%s'; ErrTxt: %s" % \
-                                     (self.metapkg.pkg, lang, ddtptranslations['description_'+lang], err))
-                    short = ddtptranslations['description_'+lang]
-                    self.metapkg.desc[lang]['short'] = MarkupString(short, self.metapkg.pkg, 'taskShortDesc' + lang)
-
-                try:
-                    self.metapkg.desc[lang]['long'] = Markup(render_longdesc(ddtptranslations['long_description_'+lang].splitlines()))
-
-                except UnicodeDecodeError as err:
-                    logger.error("===> UnicodeDecodeError in metapackage long %s (lang='%s'): '%s'; ErrTxt: %s" % \
-                                     (self.metapkg.pkg, lang, ddtptranslations['long_description_'+lang], err))
-                    self.metapkg.desc[lang]['long'] = 'UnicodeDecodeError'
-
+                    self.metapkg.desc[lang]['long'] = Markup(render_longdesc(ddtptranslations['long_description_' + lang].splitlines()))
                 except AttributeError as err:
-                    logger.error("===> AttributeError in metapackage long %s (lang='%s'): '%s'; ErrTxt: %s" % \
-                                     (self.metapkg.pkg, lang, ddtptranslations['long_description_'+lang], err))
+                    logger.error("===> AttributeError in metapackage long %s (lang='%s'): '%s'; ErrTxt: %s"
+                                 % (self.metapkg.pkg, lang, ddtptranslations['long_description_' + lang], err))
                     self.metapkg.desc[lang]['long'] = 'Missing long description'
 
-
     def _AppendDependency2List(self, dep):
         # Append dependency which was found in the tasks file if not Ignore / Avoid and
         # no duplication in case of source dependencies
-        if dep == None:
+        if dep is None:
             return
         if dep.dep_strength == 'i' or dep.dep_strength == 'a':
             logger.debug("Ignore/Avoid package : %s" % dep.pkg)
             return
 
-	    # Solves UnicodeEncodeError because of characters present in the
-        # long description of the dependecies in 'unoficial','prospective' packages
-        # try:
-        #    dep.desc['en']['long'] = dep.desc['en']['long'].encode('ascii','xmlcharrefreplace')
-        # except:
-        #    pass
-
-        # Solves UnicodeEncodeError because of characters present in the
-        # authors name of the dependencies in 'wnpp' packages
-        # try:
-        #    dep.properties['published']['authors'] = dep.properties['published']['authors'].encode('ascii','xmlcharrefreplace')
-        # except:
-        #   pass
-
-        # Solves UnicodeEncodeError because of characters present in the title of 
-        # dependencies in blends debian-science, debian-pan
-        # try:
-        #    dep.properties['published']['title'] = dep.properties['published']['title'].encode('ascii','xmlcharrefreplace')
-        # except:
-        #    pass
-
         logger.debug("Appending package to list : %s" % dep.pkg)
         self.dependencies[dep.pkgstatus].append(dep)
         return
@@ -1183,7 +1140,7 @@ class TaskDependencies:
                 dep.version = [row['version'],]
             
             # if a package is released *only* in experimental decrease package status
-            if 'release' in row and row['release'] == 'experimental':
+            if row.get('release') == 'experimental':
                 dep.pkgstatus = 'experimental'
 
             if dep.dep_strength == 'i':
@@ -1210,17 +1167,21 @@ class TaskDependencies:
                     dep.properties['vcs-browser'] = BrowserFromVcsURL(dep.properties['vcs-type'], dep.properties['vcs-url'])
 
             # enhanced by
-            if 'enhanced' in row and row['enhanced'] != None:
+            if row.get('enhanced'):
                 for pkg in row['enhanced']:
                     dep.properties['Enhances'][pkg] = PKGURLMASK % pkg
 
             if 'releases' in row:
                 # reorder the 'releases' based on the release order (as per 'sort' field in releases table)
                 # also reorder the corresponding 'versions' and 'architectures' with the 'releases'
-                row['releases'], row['versions'], row['architectures'] = (list(r) for r in zip(*sorted(zip(row['releases'], row['versions'], row['architectures']),                                                                                                 key=lambda x: release_order[x[0]])))
-                for i in range(len(row['releases'])): 
-                    dep.version.append({'release':row['releases'][i], 'version': row['versions'][i], 'archs':row['architectures'][i]})
-
+                for rel, ver, arch in sorted(zip(row['releases'], row['versions'],
+                                                 row['architectures']),
+                                             key=lambda x: release_order[x[0]]):
+                    dep.version.append({
+                        'release': rel,
+                        'version': ver,
+                        'archs': arch
+                    })
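A self-contained illustration of the reordering above; the release_order values are hypothetical placeholders for the 'sort' column loaded from UDD:

    release_order = {'oldstable': 1, 'stable': 2, 'testing': 3, 'unstable': 4}   # hypothetical sort values
    rels  = ['unstable', 'stable', 'testing']
    vers  = ['2.0-1', '1.0-1', '1.5-1']
    archs = ['amd64 i386', 'amd64', 'amd64 i386']
    sorted(zip(rels, vers, archs), key=lambda x: release_order[x[0]])
    # -> [('stable', '1.0-1', 'amd64'), ('testing', '1.5-1', 'amd64 i386'), ('unstable', '2.0-1', 'amd64 i386')]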
             
             if 'vote' in row:
                 dep.popcon['vote']   = row['vote']
@@ -1230,68 +1191,60 @@ class TaskDependencies:
                 dep.popcon['insts'] = row['insts']
                            
             # Debtags as sorted list of dict fields
-            if 'debtags' in row:
-                if row['debtags']:
-                    if dep.debtags: # there is no reasonable way that debtags was set before - so something is wrong here and a warning should be issued
-                        logger.warning("Debtags for package '%s' was just set.  A duplicated result from database query is suspected.  Please check the result!" % dep.pkg)
-                    tagdict = {}
-                    taglist = []
-                    for debtag in row['debtags']:
-                        (tag,value) = debtag.split('::')
-                        if tag in tagdict:
-                            tagdict[tag] += ', ' + value
-                        else:
-                            tagdict[tag]  = value
-                            taglist.append(tag)
-                    if taglist:
-                        taglist.sort()
-                                
-                    for tag in taglist:
-                        dep.debtags.append({'tag':tag, 'value':tagdict[tag]})
+            if row.get('debtags'):
+                if dep.debtags:  # debtags should never have been set at this point - if they were, something is wrong and a warning should be issued
+                    logger.warning("Debtags for package '%s' were already set.  A duplicated result from the database query is suspected.  Please check the result!" % dep.pkg)
+                tagdict = {}
+                taglist = []
+                for debtag in row['debtags']:
+                    (tag, value) = debtag.split('::')
+                    if tag in tagdict:
+                        tagdict[tag] += ', ' + value
+                    else:
+                        tagdict[tag]  = value
+                        taglist.append(tag)
+                if taglist:
+                    taglist.sort()
+                for tag in taglist:
+                    dep.debtags.append({'tag': tag, 'value': tagdict[tag]})
 
             # screenshots
-            if 'icon' in row:
-                if row['icon']:
-                    dep.icon           = row['icon'][0]                        
-                    dep.image          = row['image'][0]                        
-                    dep.screenshot_url = 'http://screenshots.debian.net/package/' + dep.pkg                        
-                    for i in range(1,len(row['image'])):
-                        dep.screenshots.append({'version':row['screenshot_versions'][i], 'url':row['image'][i]})
-                                
+            if row.get('icon'):
+                dep.icon           = row['icon'][0]
+                dep.image          = row['image'][0]
+                dep.screenshot_url = 'http://screenshots.debian.net/package/' + dep.pkg
+                for i in range(1, len(row['image'])):
+                    dep.screenshots.append({
+                        'version': row['screenshot_versions'][i],
+                        'url': row['image'][i]
+                    })
+
             # it might be that the new upstream goes to experimental - this should be ignored here
-            if 'unstable_parsed_version' in row:
-                if row['unstable_parsed_version']:
-                    dep.outdated['release']       = 'upstream'
-                    dep.outdated['version']       = row['unstable_upstream']
-                    dep.outdated['architectures'] = ''
+            if row.get('unstable_parsed_version'):
+                dep.outdated['release']       = 'upstream'
+                dep.outdated['version']       = row['unstable_upstream']
+                dep.outdated['architectures'] = ''
 
             if row['changed_by']:
-                try:
-                    changed = row['changed_by']
-                except TypeError as err:
-                    changed = None
-                    logger.warning("Encoding problem for last uploader of package '%s' in task %s (%s)" % (dep.pkg, dep.taskname, err))
+                changed = row['changed_by']
                 if changed:
-                    try:
-                        (_name, _url) = email.utils.parseaddr(changed)
-                        changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
-                        dep.properties['changed_by']    = MarkupString(changed, dep.pkg, 'changed_by')                                
-                        dep.properties['last_uploader'] = (changed)                                  
-                        dep.properties['last_uploader_simple'] = ('%s <%s>' % (_name, _url))
-                    except UnicodeDecodeError as err:
-                        logger.error("Encoding problem for last uploader - assume same as maintainer for package %s (%s)", dep.pkg, err)
+                    (_name, _url) = email.utils.parseaddr(changed)
+                    changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
+                    dep.properties['changed_by']    = MarkupString(changed, dep.pkg, 'changed_by')
+                    dep.properties['last_uploader'] = changed
+                    dep.properties['last_uploader_simple'] = '%s <%s>' % (_name, _url)
 
             # link to packages.debian.org search page to see overview about all
             # package versions in all releases
             dep.properties['pkg-url'] = PKGURLMASK % dep.pkg
 
             for l in languages:
-                if 'description_'+l in row and row['description_'+l] != None:
+                if row.get('description_' + l):
                     dep.desc[l] = {}
-                    dep.desc[l]['short'] = MarkupString((row['description_'+l]), dep.pkg, 'ShortDesc')
-                    if row['long_description_'+l]:
-                        dep.desc[l]['long']  = Markup(render_longdesc(row['long_description_'+l].splitlines()))
-            
+                    dep.desc[l]['short'] = MarkupString((row['description_' + l]),
+                                                        dep.pkg, 'ShortDesc')
+                    if row['long_description_' + l]:
+                        dep.desc[l]['long']  = Markup(render_longdesc(row['long_description_' + l].splitlines()))
             if 'short' not in dep.desc['en']:
                 logger.error("Dep has no English short description: %s", dep.pkg)
                 dep.desc['en']['short'] = "??? missing short description for package %s :-(" % dep.pkg
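
The uploader handling above relies on email.utils.parseaddr() to split an RFC 2822 style "Name <address>" string before building the mailto link. A quick illustration with a placeholder address:

    # Sketch: turn an address header value into a mailto link as done for changed_by.
    import email.utils

    changed = 'Jane Developer <jane@example.org>'          # placeholder address
    _name, _url = email.utils.parseaddr(changed)
    link = '<a href="mailto:%s">%s</a>' % (_url, _name)
    print(link)  # <a href="mailto:jane@example.org">Jane Developer</a>
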
@@ -1300,13 +1253,10 @@ class TaskDependencies:
             dep.properties['maintainer'] = (row['maintainer'])
             dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, (_name))
 
-            try:
-                if row['edam_topics']:
-                    logger.info("Edam topics found for package %s: %s" % (dep.pkg, str(row['edam_topics'])))
-            except KeyError:
-                pass
+            if row.get('edam_topics'):
+                logger.info("Edam topics found for package %s: %s" % (dep.pkg, str(row['edam_topics'])))
 
-            if 'remark' in row and row['remark']:
+            if row.get('remark'):
                 (short, long) = SplitDescription(row['remark'])
                 dep.remark['short'] = MarkupString(short.encode('utf-8'), dep.pkg, 'RemarkShort')
                 dep.remark['long']  = MarkupString(long.encode('utf-8'),  dep.pkg, 'RemarkLong')
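
Several of the former "'key' in row and row['key'] != None" tests are shortened to row.get(key) in this file; the shorter form additionally treats empty strings and empty lists as absent, which is harmless for the fields handled here. In isolation, with made-up row contents:

    # Sketch: dict.get() as a combined presence/None/emptiness test.
    row = {'remark': 'Needs packaging love', 'enhanced': [], 'release': None}

    for key in ('remark', 'enhanced', 'release', 'missing'):
        if row.get(key):        # False for absent keys, None, '' and []
            print('%s -> %r' % (key, row[key]))
    # only 'remark' is printed
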
@@ -1418,13 +1368,13 @@ class TaskDependencies:
         # Genshi does not touch strings that are marked with "Markup()" - so just
         # mark the strings that are already formatted
 
-        for dependency in list(self.dependencies.keys()):
-            for dep in self.dependencies[dependency]:
-                dep.responsible         = MarkupString(dep.responsible, dep.pkg, 'responsible')
+        for dependency in self.dependencies.values():
+            for dep in dependency:
+                dep.responsible = MarkupString(dep.responsible, dep.pkg, 'responsible')
                 if dep.desc['en'] != {}:
                     dep.desc['en']['short'] = MarkupString(dep.desc['en']['short'], dep.pkg, 'pkgShortDesc')
                     try:
-                        dep.desc['en']['long']  = MarkupString(dep.desc['en']['long'],  dep.pkg, 'pkgLongDesc')
+                        dep.desc['en']['long'] = MarkupString(dep.desc['en']['long'], dep.pkg, 'pkgLongDesc')
                     except KeyError:
                         logger.error("Dep has no English long description: %s", dep.pkg)
                         dep.desc['en']['long'] = "??? Missing long description for package %s" % dep.pkg
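
Marking pre-rendered strings with Markup() is what keeps Genshi from escaping them a second time (the MarkupString helper from blendsmarkdown presumably ends up doing the same). A minimal Genshi demonstration, using a throwaway template and a placeholder link:

    # Sketch: Genshi escapes plain strings but passes Markup() through verbatim.
    from genshi import Markup
    from genshi.template import MarkupTemplate

    tmpl = MarkupTemplate('<div>$link</div>')
    plain = '<a href="mailto:jane@example.org">Jane</a>'

    print(tmpl.generate(link=plain).render('xhtml'))          # &lt;a href=... (escaped)
    print(tmpl.generate(link=Markup(plain)).render('xhtml'))  # <a href=...   (verbatim)
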
diff --git a/webtools_py3/bugs.py b/webtools_py3/bugs.py
deleted file mode 100755
index 892640d..0000000
--- a/webtools_py3/bugs.py
+++ /dev/null
@@ -1,484 +0,0 @@
-#!/usr/bin/python
-# Copyright 2013: Andreas Tille <tille at debian.org>
-# License: GPL
-
-#PORT=5441
-UDDPORT=5452
-PORT=UDDPORT
-DEFAULTPORT=5432
-
-SLOWQUERYREPORTLIMIT=30
-
-debug=0
-
-from sys import argv, stderr, exit
-import os
-import psycopg2
-import json
-import re
-import time
-from datetime import datetime
-from email.Utils import formatdate
-import email.Utils
-import gettext
-
-from genshi.template import TemplateLoader
-from genshi import Markup
-from genshi.template.eval import UndefinedError
-
-from blendsunicode   import to_unicode
-from blendstasktools import ReadConfig, RowDictionaries, CheckOrCreateOutputDir, SetFilePermissions
-from blendsmarkdown  import MarkupString
-
-###########################################################################################
-# Define several prepared statements to query UDD
-try:
-  conn = psycopg2.connect(host="localhost",port=PORT,user="guest",database="udd")
-except psycopg2.OperationalError as err:
-  try:
-    conn = psycopg2.connect("service=udd")
-  except psycopg2.OperationalError as err:
-    # logger not known at this state: logger.warning
-    print("Service=udd seems not to be installed on this host.\tMessage: %s" % (str(err)), file=stderr)
-    try:
-        conn = psycopg2.connect(host="localhost",port=DEFAULTPORT,user="guest",database="udd")
-    except psycopg2.OperationalError:
-	# Hmmm, I observed a really strange behaviour on one of my machines where connecting to
-	# localhost does not work but 127.0.0.1 works fine.  No idea why ... but this should
-	# do the trick for the moment
-	conn = psycopg2.connect(host="127.0.0.1",port=DEFAULTPORT,user="guest",database="udd")
-
-curs = conn.cursor()
-# uddlog = open('logs/uddquery.log', 'w')
-
-def _execute_udd_query(query):
-    try:
-        t = time.time()
-        curs.execute(query)
-        elapsed_time = time.time() - t
-        if elapsed_time > SLOWQUERYREPORTLIMIT: # report what query took longer than SLOWQUERYREPORTLIMIT seconds
-            print("Time: %s\nQuery: %s" % (str(elapsed_time), query))
-    except psycopg2.ProgrammingError as err:
-        print("Problem with query\n%s" % (query), file=stderr)
-        print(err, file=stderr)
-        exit(-1)
-    except psycopg2.DataError as err:
-        print("%s; query was\n%s" % (err, query), file=stderr)
-
-
-def main():
-
-    if len(argv) <= 1:
-        print("Usage: %s <Blend name>" % argv[0], file=stderr)
-        exit(-1)
-
-    blendname = argv[1]
-    config = ReadConfig(blendname)
-
-    # Metadata of packages that might have bugs
-    query = """PREPARE query_bug_packages (text) AS
-      SELECT distinct sources.source, task, CASE WHEN (tasks.dependency = 'd' OR tasks.dependency = 'r') AND component = 'main' AND experimental_flag > 0 THEN 'depends' ELSE 'suggests' END AS status,
-                      homepage, CASE WHEN vcs_browser IS NULL THEN '#' ELSE vcs_browser END AS vcs_browser, maintainer
-        FROM (
-          SELECT s.source, b.component, s.homepage, s.vcs_browser, s.maintainer, s.version, row_number() OVER (PARTITION BY s.source ORDER BY s.version DESC)
-            FROM blends_dependencies b
-            JOIN packages p ON p.package = b.package
-            JOIN bugs bu    ON bu.source = p.source
-            JOIN sources s  ON s.source  = p.source
-            WHERE blend = $1 AND b.distribution = 'debian'
-        ) sources
-        -- check status of dependency relation because only suggested packages are less important for bugs sentinel
-        LEFT OUTER JOIN (
-          SELECT source, task, dependency FROM (
-            SELECT p.source, b.task, bdp.dependency, row_number() OVER (PARTITION BY p.source, b.task ORDER BY bdp.priority)
-              FROM packages p
-              JOIN blends_dependencies b ON b.package = p.package
-              JOIN sources s ON p.source = s.source AND p.release = s.release
-              JOIN blends_dependencies_priorities bdp ON b.dependency = bdp.dependency
-              WHERE b.blend = $1
-          ) tmp
-          WHERE row_number = 1
-        ) tasks ON sources.source = tasks.source
-        -- Check, whether a package is in experimental only which makes it less important for bugs sentinel
-        LEFT OUTER JOIN (
-          SELECT source, MAX(sort) AS experimental_flag FROM sources s
-            JOIN releases r ON s.release = r.release
-          GROUP BY source
-        ) exp ON sources.source = exp.source
-        WHERE row_number = 1
-        ORDER BY source;
-    """
-    _execute_udd_query(query)
-
-    # Actual bugs in packages of Blends dependencies
-    query = """PREPARE query_bugs (text) AS
-      SELECT source, bu.id, title, severity, status, done_by, tags FROM (
-        SELECT distinct bu.source, bu.id, bu.title, bu.severity, bu.status, bu.done AS done_by
-          FROM blends_dependencies b
-          JOIN packages p ON p.package = b.package
-          JOIN bugs bu    ON bu.source = p.source
-          WHERE blend = $1 AND b.distribution = 'debian'
-        ) bu
-        LEFT OUTER JOIN (
-          SELECT id, array_agg(tag) AS tags FROM bugs_tags GROUP BY id
-        ) bt ON bu.id = bt.id
-        ORDER BY source, bu.id;
-    """
-    _execute_udd_query(query)
-    
-    # What tasks are involved
-    query = """PREPARE query_get_tasks (text) AS
-      SELECT task, title, description, long_description FROM blends_tasks WHERE blend = $1 ORDER BY task;
-    """
-    _execute_udd_query(query)
-
-    STATES = ['depends', 'suggests', 'done']
-    SEVERITIES = ('critical', 'grave', 'serious', 'important', 'normal', 'minor', 'wishlist')
-
-    # Sense of weight: We want to find a measure how much care a metapackage needs.
-    # So we build the weighted sums of bugs and define limits for the status
-    # The weights below are used for suggested packages.  In case a package has
-    # a stronger dependency (Depends, Recommends) the weight is multiplied by 3
-    WEIGHT = { 'critical'  : 10,
-               'grave'     : 10,
-               'serious'   : 10,
-               'important' :  5,
-               'normal'    :  3,
-               'minor'     :  1,
-               'wishlist'  :  0
-             }
-    BAD          = 100 # if weighted bug sum >= BAD, the meta package is in a bad shape
-                       # Dependent packages might have 3 or more 5 RC bugs
-    PASS         =  70 # this deserves a look - potentially two RC bugs in dependent packages
-    SATISFACTORY =  50 # consider looking at this
-    GOOD         =  30 # at least no RC bug in a dependent package
-    VERYGOOD     =  10 # nothing burning
-    EXCELLENT    =   5 # There is no real need to look at this meta package
-
-    # initialise bugs_data dictionary for all tasks
-    _execute_udd_query( "EXECUTE query_get_tasks('%s')" % blendname)
-    bugs_data = {}
-    if curs.rowcount > 0:
-        for t in RowDictionaries(curs):
-            task = t['task']
-            bugs_data[task] = {}
-            bugs_data[task]['title']		= t['title']
-            bugs_data[task]['description']	= t['description']
-            bugs_data[task]['long_description']	= to_unicode(t['long_description'])
-            bugs_data[task]['nopenbugs']	= 0
-            bugs_data[task]['ndonebugs']	= 0
-            bugs_data[task]['weighttask']	= 0
-            for status in STATES:
-                bugs_data[task][status + '_l']     = [] # enable sorting
-                bugs_data[task][status]            = {}
-                bugs_data[task][status]['sources'] = []
-                if status != 'done':
-                    bugs_data[task][status]['severitysummary'] = '' # string listing number of bugs in different severity / dependency classes
-                    bugs_data[task][status]['severities'] = {}
-                    for s in SEVERITIES:
-                        bugs_data[task][status]['severities'][s] = 0
-    else:
-        print("No tasks metadata received for Blend", blendname, file=stderr)
-        exit(1)
-
-    # Fetch bugs of all Blends dependencies and store them in a dictionary
-    _execute_udd_query( "EXECUTE query_bugs('%s')" % blendname)
-    bugs = {}
-    if curs.rowcount > 0:
-        for bug in RowDictionaries(curs):
-            if bug['source'] not in bugs:
-                bugs[bug['source']] = {}
-                bugs[bug['source']]['severities'] = {}
-                for s in SEVERITIES:
-                    bugs[bug['source']]['severities'][s] = 0
-                bugs[bug['source']]['nopenbugs'] = 0
-                bugs[bug['source']]['ndonebugs'] = 0
-                bugs[bug['source']]['open'] = []
-                bugs[bug['source']]['done'] = []
-            b = {}
-            for k in list(bug.keys()):
-                if k in ('source', 'status') :
-                    continue
-                if k == 'title':
-                    b[k] = to_unicode(bug[k])
-                elif k == 'tags':
-                    komma = ''
-                    b['tags'] = ''
-                    if bug[k]:
-                        for tag in bug[k]:
-                           b['tags'] += komma + tag
-                           komma      = ', '
-                else:
-                    b[k] = bug[k]
-            if bug['status'] == 'done':
-                bugs[bug['source']]['done'].append(b)
-                bugs[bug['source']]['ndonebugs'] += 1
-            else:
-                bugs[bug['source']]['open'].append(b)
-                bugs[bug['source']]['nopenbugs'] += 1
-                bugs[bug['source']]['severities'][bug['severity']] += 1
-    else:
-        print("No bug data received for Blend", blendname, file=stderr)
-        exit(1)
-
-    # Merge metadata of packages and bugs together in bugs_data dictionary, also do statistics about bugs
-    _execute_udd_query( "EXECUTE query_bug_packages('%s')" % blendname)
-    if curs.rowcount > 0:
-        for pkg in RowDictionaries(curs):
-                task = pkg['task']
-                sources = {}
-                sources['source']      = pkg['source']
-                sources['homepage']    = pkg['homepage']
-                sources['vcs_browser'] = pkg['vcs_browser']
-                (_name, _url) = email.Utils.parseaddr(pkg['maintainer'])
-                sources['maintainer_email'] = _url
-                sources['maintainer_name']  = to_unicode(_name)
-                sources['maintainer']  = MarkupString('<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name)), pkg['source'], 'maintainer')
-                if pkg['status'] == 'depends':
-                    sources['bugs'] = bugs[pkg['source']]['open']
-                    bugs_data[task][pkg['status']+'_l'].append(pkg['source'])
-                    for s in SEVERITIES:
-                        bugs_data[task][pkg['status']]['severities'][s] += bugs[pkg['source']]['severities'][s]
-                        bugs_data[task]['weighttask'] += 3 * WEIGHT[s] * bugs[pkg['source']]['severities'][s]
-                elif pkg['status'] == 'suggests':
-                    sources['bugs'] = bugs[pkg['source']]['open']
-                    bugs_data[task][pkg['status']+'_l'].append(pkg['source'])
-                    for s in SEVERITIES:
-                        bugs_data[task][pkg['status']]['severities'][s] += bugs[pkg['source']]['severities'][s]
-                        bugs_data[task]['weighttask'] += 1 * WEIGHT[s] * bugs[pkg['source']]['severities'][s]
-                else:
-                    print("%s: Wrong status %s in task %s for source %s" % (blendname, pkg['status'], task, pkg['source']), file=stderr)
-                    exit(1)
-                bugs_data[task][pkg['status']]['sources'].append(sources)
-                bugs_data[task]['nopenbugs'] += bugs[pkg['source']]['nopenbugs']
-                bugs_data[task]['ndonebugs'] += bugs[pkg['source']]['ndonebugs']
-                if bugs[pkg['source']]['done']:
-                    sources = {}
-                    sources['source']      = pkg['source']
-                    sources['homepage']    = pkg['homepage']
-                    sources['vcs_browser'] = pkg['vcs_browser']
-                    (_name, _url) = email.Utils.parseaddr(pkg['maintainer'])
-                    sources['maintainer_email'] = _url
-                    sources['maintainer_name']  = to_unicode(_name)
-                    sources['maintainer']  = MarkupString('<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name)), pkg['source'], 'maintainer')
-                    sources['bugs']        = bugs[pkg['source']]['done']
-                    bugs_data[task]['done_l'].append(pkg['source'])
-                    bugs_data[task]['done']['sources'].append(sources)
-    else:
-        print("No information about buggy packages received for Blend", blendname, file=stderr)
-        exit(1)
-
-    # Define directories used
-    current_dir  = os.path.dirname(__file__)
-    # locale_dir   = os.path.join(current_dir, 'locale')
-    template_dir = os.path.join(current_dir, 'templates')
-    
-    # initialize gensi
-    loader = TemplateLoader([template_dir], auto_reload=True)
-    
-    outputdir = CheckOrCreateOutputDir(config['outputdir'],'bugs') # FIXME: as long as we are not finished use different dir
-    if outputdir == None:
-            exit(-1)
-    
-    t = datetime.now()
-    
-    # Initialize i18n
-    domain = 'blends-webtools'
-    gettext.install(domain)
-
-    data={}
-    data['projectname'] = blendname
-    data['bugs_data']   = bugs_data
-    if 'advertising' in config and config['advertising'] != None:
-        # we have to remove the gettext _() call which was inserted into the config
-        # file to enable easy input for config file editors - but the call has to
-        # be made explicitely in the python code
-        advertising = re.sub('_\(\W(.+)\W\)', '\\1', config['advertising'])
-        # gettext needs to escape '"' thus we need to remove the escape character '\'
-        data['projectadvertising'] = Markup(to_unicode(re.sub('\\\\"', '"', advertising)))
-    else:
-        data['projectadvertising'] = None
-
-    data['summary']           = to_unicode(_('Summary'))
-    data['idxsummary']        = to_unicode(_("""A %sDebian Pure Blend%s is a Debian internal project which assembles
-a set of packages that might help users to solve certain tasks of their work.  The list on
-the right shows the tasks of %s.""" ) \
-                                      % ('<a href="http://blends.alioth.debian.org/blends/">', '</a>', data['projectname']))
-    data['idxsummary']        = Markup(to_unicode(data['idxsummary']))
-
-    t = datetime.now()
-    data['lang']              = 'en'
-    data['othertasks']        = _("Links to other tasks")
-    data['taskslink']         = _("Tasks")
-    data['bugslink']          = _("Tasks overview")
-    data['legend']            = _("Legend")
-    data['bugsoftask']        = _("Bugs of task")
-    data['totalbugs']         = _("Total bugs")
-    data['openbugs']          = _("Open bugs")
-    data['fixedbugs']         = _("Fixed bugs")
-    data['summary']           = _('Summary')
-    data['bugssummary']       = _("""A %sDebian Pure Blend%s is a Debian internal project which assembles
-    a set of packages that might help users to solve certain tasks of their work.  This page should be helpful
-    to track down the bugs of packages that are interesting for the %s project to enable developers a quick
-    overview about possible problems.""" ) \
-                                          % ('<a href="http://blends.alioth.debian.org/blends/">', '</a>', data['projectname'])
-    data['bugssummary']        = Markup(data['bugssummary'])
-    data['gtstrBugsPage']     = _("Bugs page")
-    data['gtstrListOfBugspages'] = _("This is a list of metapackages.  The links are leading to the respective bugs page.")
-    data['timestamp']   = formatdate(time.mktime(t.timetuple()))
-    data['weightexplanation'] = _("""To estimate the overall status of the packages in the dependencies of
-    a metapackage a weighted severity is calculated.  Done bugs are ignored and bugs in dependent and
-    recommended packages are weighted by factor three compared to suggested packages.  Release critical
-    bugs have a much larger weight than important, while the contribution of normal bugs is even smaller
-    and minor bugs have a very small weight.  Wishlist bugs are ignored in this calculation.  The resulting
-    sum is compared to some boundaries to find a verbal form.  The actual numbers need some adjustment
-    to make real sense - this evaluation method is in testing phase.""")
-    data['weightdetails']     = _("The severities of bugs are weighted as follows")
-    
-    data['assessments']     = [ (EXCELLENT,    'excellent'),
-                                (VERYGOOD,     'verygood'),
-                                (GOOD,         'good'),
-                                (SATISFACTORY, 'satisfactory'),
-                                (PASS,         'pass'),
-                                (BAD,          'bad')
-                              ]
-    
-    for task in bugs_data:
-        if bugs_data[task]['weighttask'] < data['assessments'][0][0]:
-            bugs_data[task]['weightedsev']   = _('Task is in excellent shape')
-            bugs_data[task]['weightedclass'] = data['assessments'][0][1]
-        elif bugs_data[task]['weighttask'] < data['assessments'][1][0]:
-            bugs_data[task]['weightedsev']   = _('Task is in very good shape')
-            bugs_data[task]['weightedclass'] = data['assessments'][1][1]
-        elif bugs_data[task]['weighttask'] < data['assessments'][2][0]:
-            bugs_data[task]['weightedsev']   = _('Task is in good shape')
-            bugs_data[task]['weightedclass'] = data['assessments'][2][1]
-        elif bugs_data[task]['weighttask'] < data['assessments'][3][0]:
-            bugs_data[task]['weightedsev']   = _('Consider looking into bugs of this task')
-            bugs_data[task]['weightedclass'] = data['assessments'][3][1]
-        elif bugs_data[task]['weighttask'] < data['assessments'][4][0]:
-            bugs_data[task]['weightedsev']   = _('Looking into bugs of this task is recommended')
-            bugs_data[task]['weightedclass'] = data['assessments'][4][1]
-        else:
-            bugs_data[task]['weightedsev']   = _('Immediately looking into bugs of the dependencies of this task is advised')
-            bugs_data[task]['weightedclass'] = data['assessments'][5][1]
-        bugs_data[task]['weightedsev']   +=  ' (%i)' % bugs_data[task]['weighttask']
-
-    # Debuging output in JSON file
-    if debug > 0:
-        f = open(blendname+'_bugs.json', 'w')
-        if debug > 1:
-            for task in bugs_data:
-                print("*** %s ***" % task, file=f)
-                for status in STATES:
-                    if status in bugs_data[task]:
-                        print(status, file=f)
-                        print(json.dumps(bugs_data[task][status]), file=f)
-                print(file=f)
-        print(json.dumps(bugs_data), file=f)
-        f.close()
-        SetFilePermissions(blendname+'_bugs.json')
-
-    nbugs           = {}
-    ndone           = {}
-    buglist         = {}
-    weightedsev     = {} # verbal interpretation of weighted bugs
-    weightedclass   = {} # CSS class according bug weight
-    weighttask      = {} # weighted severity as number per task
-    
-    wsev = 0  # sumarise weighted severities
-    for task in bugs_data:
-    	for status in STATES:
-            if status != 'done':
-                komma  = ''
-                for s in SEVERITIES:
-    		     if bugs_data[task][status]['severities'][s] != 0:
-    		        bugs_data[task][status]['severitysummary'] += '%s %i %s' % (komma, bugs_data[task][status]['severities'][s], s)
-    		        komma = ','
-    	if   wsev < data['assessments'][0][0]:
-    		weightedsev[task]   = _('Metapackage is in excellent shape')
-    		weightedclass[task] = data['assessments'][0][1]
-    	elif wsev < data['assessments'][1][0]:
-    		weightedsev[task]   = _('Metapackage is in very good shape')
-    		weightedclass[task] = data['assessments'][1][1]
-    	elif wsev < data['assessments'][2][0]:
-    		weightedsev[task]   = _('Metapackage is in good shape')
-    		weightedclass[task] = data['assessments'][2][1]
-    	elif wsev < data['assessments'][3][0]:
-    		weightedsev[task]   = _('Consider looking into bugs of this metapackage')
-    		weightedclass[task] = data['assessments'][3][1]
-    	elif wsev < data['assessments'][4][0]:
-    		weightedsev[task]   = _('Looking into bugs of this metapackage is recommended')
-    		weightedclass[task] = data['assessments'][4][1]
-    	else:
-    		weightedsev[task]   = _('Immediately looking into bugs of the dependencies of this metapackage is advised')
-    		weightedclass[task] = data['assessments'][5][1]
-    	weightedsev[task] += ' (%i)' % wsev
-    	weighttask[task]   = wsev
-    
-    data['headings'] = {'dependent' : _('Open bugs in dependent packages'),
-                        'suggested' : _('Open bugs in suggested packages'),
-                        'done'      : _('Done bugs')
-                       }
-    data['nobugs']   = {'dependent' : _('No open bugs in dependent packages'),
-                        'suggested' : _('No open bugs in suggested packages'),
-                        'done'      : _('No done bugs')
-                       }
-    data['cssclass'] = {'dependent' : 'bugsdependent',
-                        'suggested' : 'bugssuggested',
-                        'done'      : 'bugsdone'
-                       }
-    # FIXME: just always use 'depends' or 'dependent' etc.  This translation is just to be able to compare with old output
-    data['category'] = {'depends'   : 'dependent',
-                        'suggests'  : 'suggested',
-                        'done'      : 'done'
-                       }
-    
-    data['nbugs']           = nbugs
-    data['ndone']           = ndone
-    data['weight']          = WEIGHT
-    data['severities']      = SEVERITIES
-    data['states']          = STATES
-    data['nohomepage']      = _('Homepage not available')
-    data['novcsbrowser']    = _('Not maintained in Vcs')
-    data['vcslocation']     = _('Vcs')
-    
-    data['weighttask']      = weighttask
-    data['weightedclass']   = weightedclass
-
-    for key in ('css', 'homepage', 'projecturl', 'projectname', 'logourl', 'ubuntuhome', 'projectubuntu'):
-        data[key] = config[key]
-    
-    for task in bugs_data:
-    	data['task']            = task
-    	#data['buglist']         = buglist[task]
-    	#data['weightedsev']     = weightedsev[task]
-    	#data['severitystat']    = severitystat[task]
-    
-    	template = loader.load('bugs.xhtml')
-    	f = open(outputdir + '/' + task + '.html', 'w')
-    	try:
-            print(template.generate(**data).render('xhtml'), file=f)
-        except UnicodeDecodeError as err:
-            fd = open('debug_'+blendname+'_bugs.json', 'w')
-            print(json.dumps(bugs_data[task]), file=fd)
-            fd.close()
-            SetFilePermissions(outputdir + '/' + task + '.html')
-            print(err)
-    
-    	f.close()
-        SetFilePermissions(outputdir + '/' + task + '.html')
-    
-    template = loader.load('bugs_idx.xhtml')
-    outputfile = outputdir + '/index.html'
-    try:
-    	os.unlink(outputfile)
-    except: # simply continue if file does not exist
-    	pass
-    f = open(outputfile, 'w')
-    print(template.generate(**data).render('xhtml'), file=f)
-    f.close()
-    SetFilePermissions(outputfile)
-
-if __name__ == '__main__':
-    main()
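
The bugs.py removed above (it becomes a symlink to the shared webtools/bugs.py below) classifies each task by a weighted bug sum. The weights and thresholds in this sketch are the ones defined in the deleted code; the bug list is invented for illustration:

    # Sketch: weighted bug severity per task (weights/limits taken from the code above).
    WEIGHT = {'critical': 10, 'grave': 10, 'serious': 10,
              'important': 5, 'normal': 3, 'minor': 1, 'wishlist': 0}
    LIMITS = [(5, 'excellent'), (10, 'verygood'), (30, 'good'),
              (50, 'satisfactory'), (70, 'pass')]

    def assess(bugs):
        """bugs: list of (severity, strong_dependency) tuples - invented input."""
        weight = sum((3 if strong else 1) * WEIGHT[sev] for sev, strong in bugs)
        for limit, verdict in LIMITS:
            if weight < limit:
                return weight, verdict
        return weight, 'bad'

    print(assess([('serious', True), ('normal', False)]))  # (33, 'satisfactory')
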
diff --git a/webtools_py3/bugs.py b/webtools_py3/bugs.py
new file mode 120000
index 0000000..bee0c3b
--- /dev/null
+++ b/webtools_py3/bugs.py
@@ -0,0 +1 @@
+../webtools/bugs.py
\ No newline at end of file
diff --git a/webtools_py3/ddpo_register.py b/webtools_py3/ddpo_register.py
deleted file mode 100755
index 322d2b9..0000000
--- a/webtools_py3/ddpo_register.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/python
-# create set of subscribe statements for ddtp for those packages
-# that are listed in the Dependencies in the tasks files of a project
-# use ddpo_register to finally send the mail
-
-from sys import argv, exit, stderr
-
-from blendstasktools import Tasks
-
-if len(argv) <= 1:
-	print("Usage: %s <Blend name>\n       The <Blend name> needs a matching config file webconf/<Blend name>.conf"\
-                        % argv[0], file=stderr)
-	exit(-1)
-
-tasks  = Tasks(argv[1])
-if tasks.data['pkglist'] == '':
-	print("Config file webconf/%s.conf is lacking pkglist field." % (argv[1]), file=stderr)
-	exit(-1)
-tasks.GetAllDependencies(source=1)
-packages = tasks.GetNamesOnlyDict(dependencystatus=['official_high', 'official_low', 'non-free', 'experimental'])
-
-print("user", tasks.data['pkglist'])
-for task in list(packages.keys()):
-	for pkg in packages[task]:
-		print("subscribe %s %s" % ( pkg, task ))
-print("thanks")
-
-
-# Perhaps we should also send a mail to pts at qa.debian.org
-#     keyword %(pkg) %(list) = bts bts-control upload-source katie-other summary default cvs ddtp derivatives contact
-# to make sure the mailing list gets full information about packages including new upstream etc
-# see http://www.debian.org/doc/manuals/developers-reference/resources.html#pkg-tracking-system
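
The removed ddpo_register.py (replaced below by a symlink to the shared copy in webtools/) only prints PTS subscribe commands. Its core loop boils down to this sketch with invented task and package names and a placeholder list address:

    # Sketch: emit PTS subscribe statements per task (all values invented).
    packages = {'bio': ['r-base', 'bwa'], 'imaging': ['imagej']}
    pkglist = 'blend-packaging@lists.example.org'   # placeholder for tasks.data['pkglist']

    print("user", pkglist)
    for task in sorted(packages):
        for pkg in packages[task]:
            print("subscribe %s %s" % (pkg, task))
    print("thanks")
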
diff --git a/webtools_py3/ddpo_register.py b/webtools_py3/ddpo_register.py
new file mode 120000
index 0000000..c508582
--- /dev/null
+++ b/webtools_py3/ddpo_register.py
@@ -0,0 +1 @@
+../webtools/ddpo_register.py
\ No newline at end of file
diff --git a/webtools_py3/new_upstream.py b/webtools_py3/new_upstream.py
deleted file mode 100755
index 37795bf..0000000
--- a/webtools_py3/new_upstream.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Check for packages of Blend which are not up to date and send
-# E-Mail to Blend developer list
-
-from sys import argv, exit, stderr
-
-from blendstasktools import Tasks
-from blendsunicode import to_unicode
-
-if len(argv) <= 1:
-	print("Usage: %s <Blend name>\n       The <Blend name> needs a matching config file webconf/<Blend name>.conf"\
-                        % argv[0], file=stderr)
-	exit(-1)
-
-tasks  = Tasks(argv[1])
-if tasks.data['pkglist'] == '':
-	print("Config file webconf/%s.conf is lacking pkglist field." % (argv[1]), file=stderr)
-	exit(-1)
-tasks.GetAllDependencies(source=1)
-packages = tasks.GetUpdatablePackages(dependencystatus=['official_high', 'official_low', 'non-free', 'experimental'])
-
-for task in list(packages.keys()):
-	print("Updatable packages in Task", task)
-	for pkg_v_o in packages[task]:
-		printstring = "\t%s:\n\t\tHighest version in Debian is %s\n\t\tUpstream has %s\n\t\tMaintainer is %s" % ( pkg_v_o[0] )
-		if pkg_v_o[1]:
-			printstring = printstring + "\n\t\tLast uploader was " + pkg_v_o[1]
-		try:
-			print(printstring)
-		except UnicodeEncodeError as err:
-			print("\t%s: (Problem printing UTF-8 data)\n\t\tHighest version in Debian is %s\n\t\tUpstream has %s\n" % \
-			    ( pkg_v_o[0][0], pkg_v_o[0][1], pkg_v_o[0][2] ))
-			try:
-				print("type(pkg_v_o[0][3]) =", type(pkg_v_o[0][3]))
-				print("type(pkg_v_o[1]) =", type(pkg_v_o[1]))
-				# maintainerfield of package gnudatalanguage which is
-				#   Gürkan Sengün <gurkan at phys.ethz.ch>
-				# breaks print for some reason I do not understand because everything should be UTF-8
-				# error is: 'ascii' codec can't encode character u'\xfc' in position 104: ordinal not in range(128)
-				# just stick to the e-mail address to do something useful ...
-				print_wo_maintainer = pkg_v_o[0][3][0] # re.sub('^.+(<.+ at .+>.*)', '\\1', pkg_v_o[0][3])
-				print("\t\tMaintainer is ... %s" % ( print_wo_maintainer ))
-				# print print_wo_maintainer[0:80]
-				if pkg_v_o[1]:
-					printstring = printstring + "\n\t\tLast uploader was " + pkg_v_o[1]
-			except:
-				pass
-
-# Perhaps we should also send a mail to pts at qa.debian.org
-#     keyword %(pkg) %(list) = bts bts-control upload-source katie-other summary default cvs ddtp derivatives contact
-# to make sure the mailing list gets full information about packages including new upstream etc
-# see http://www.debian.org/doc/manuals/developers-reference/resources.html#pkg-tracking-system
diff --git a/webtools_py3/new_upstream.py b/webtools_py3/new_upstream.py
new file mode 120000
index 0000000..ed9d2c9
--- /dev/null
+++ b/webtools_py3/new_upstream.py
@@ -0,0 +1 @@
+../webtools/new_upstream.py
\ No newline at end of file
diff --git a/webtools_py3/tasks_udd.py b/webtools_py3/tasks_udd.py
index f49791b..cb234c3 100755
--- a/webtools_py3/tasks_udd.py
+++ b/webtools_py3/tasks_udd.py
@@ -3,7 +3,9 @@
 import apt
 import apt_pkg
 import apt_inst
+import codecs
 
+import json
 from sys import argv, exit, stderr
 import os
 import re
@@ -18,7 +20,6 @@ from genshi import Markup
 from genshi.template.eval import UndefinedError
 
 from blendstasktools_udd import Tasks, GetDependencies2Use, pkgstatus, pkgstatus_sortedkeys, UnlockBlendsTools, CheckOrCreateOutputDir, SetFilePermissions
-from blendsunicode   import to_unicode
 from blendslanguages import languages, language_dict
 
 if len(argv) <= 1:
@@ -31,7 +32,6 @@ if len(argv) <= 1:
 tasks    = Tasks(argv[1])
 tasks.GetAllDependencies()
 packages = tasks.GetNamesOnlyDict()
-# print ("pacakges : \n", packages)
 tasks.GetAllDependentPackagesOfBlend()
 tasks.MarkupPreformatedStringsBlend()
 
@@ -44,7 +44,7 @@ data['PackageByName']    = tasks.PackageByName
 data['pkgcount']         = len(data['PackageByName'])
 try:
     data['popconsubmit']     = tasks.popconsubmit
-except: 
+except:
     data['popconsubmit']     = 'unknown'
 data['languages']        = languages
 data['language_dict']    = language_dict
@@ -59,22 +59,30 @@ template_dir = os.path.join(current_dir, 'templates')
 # Initialize i18n
 domain = 'blends-webtools'
 gettext.install(domain)
-l10nstring = {}
-for lang in languages:
-    l10nstring[lang] = gettext.translation(domain, locale_dir, languages=[lang], fallback = True)
-
+l10nstring = dict((lang, gettext.translation(domain, locale_dir,
+                                             languages=[lang], fallback=True))
+                  for lang in languages)
 # Strings regarding the categorising of dependencies need to be translated,
 # and because I did not find a working solution to get gettext working with
 # genshi, all of them are collected here even if the additional attributes
 # of blendstasktools.pkgstatus should rather go into blendstasktools.py
 
 # initialize gensi
-loader = TemplateLoader([template_dir], auto_reload=True)
+loader = TemplateLoader([template_dir], auto_reload=True, default_encoding="utf-8")
 
 outputdir = CheckOrCreateOutputDir(tasks.data['outputdir'],'tasks_udd')
-if outputdir == None:
+if outputdir is None:
     exit(-1)
 
+
+class DefaultEncoder(json.JSONEncoder):
+    def default(self, obj):
+        return obj.__dict__
+
+with open(os.path.join(outputdir, 'tasks.json'), 'w') as fp:
+    json.dump(data, fp, indent=4, sort_keys=True, cls=DefaultEncoder)
+
+t = datetime.now()
 htaccess = os.path.join(outputdir, 'htaccess')
 htafp = open(htaccess, 'w')
 htafp.write("DirectoryIndex index index.html\nOptions +MultiViews\n")
@@ -96,7 +104,7 @@ for lang in languages:
         # data['projectadvertising'] = _(advertising) # Hopefully translation will work this way ...
         # Genshi needs explicit information that it is dealing with an UTF-8 string which should not be changed
         # advertising = _(advertising)
-        data['projectadvertising'] = Markup((advertising))
+        data['projectadvertising'] = Markup(advertising)
     else:
         data['projectadvertising'] = None
 
@@ -106,7 +114,7 @@ for lang in languages:
     # Create the index page
     # Make sure that strings that need to be rendered as they are because they might
     # contain several html tags will be marked correctly
-    VERBATIM=('projectadvertising', )
+    VERBATIM = ('projectadvertising', )
     for verbatim in VERBATIM:
         if verbatim in data and data[verbatim] != None:
             data[verbatim] = Markup(data[verbatim])
@@ -114,7 +122,7 @@ for lang in languages:
     template = loader.load('tasks_idx.xhtml')
 
     outputfile = os.path.join(outputdir, 'index.{0}.html'.format(lang))
-    with open(outputfile, 'w') as f:
+    with codecs.open(outputfile, 'w', 'utf-8') as f:
         try:
             f.write(template.generate(**data).render('xhtml'))
         except UnicodeDecodeError as err:
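
Switching the output files to codecs.open(..., 'utf-8') makes the encoding explicit instead of depending on the locale: a plain open() would fall back to the locale's default encoding and can fail on non-ASCII translations. A minimal sketch with a throwaway file name:

    # Sketch: write unicode template output through codecs.open (hypothetical file).
    import codecs

    page = u'<p>espa\u00f1ol, \u65e5\u672c\u8a9e</p>'   # rendered XHTML snippet

    with codecs.open('index.es.html', 'w', 'utf-8') as f:
        f.write(page)               # encoded to UTF-8 regardless of the locale
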
@@ -126,7 +134,7 @@ for lang in languages:
         template = loader.load('%s_idx.xhtml' % tasks.blendname)
         outputfile = os.path.join(tasks.data['outputdir'],
                                   'index.{0}.html'.format(lang))
-        with open(outputfile, 'w') as f:
+        with codecs.open(outputfile, "w", "utf-8") as f:
             try:
                 f.write(template.generate(**data).render('xhtml'))
             except UnicodeDecodeError as err:
@@ -146,8 +154,8 @@ for lang in languages:
 
     # I18n for headlines, link description, maintainer
     for status in use_dependencystatus:
-        data['headline'][status]   = Markup(pkgstatus[status]['headline'])
-        data['pdolinkname'][status]= Markup((pkgstatus[status]['pdolinkname']))
+        data['headline'][status]    = Markup(pkgstatus[status]['headline'])
+        data['pdolinkname'][status] = Markup(pkgstatus[status]['pdolinkname'])
         if pkgstatus[status]['order'] <= pkgstatus['experimental']['order']:
             data['maintainer'][status] = ('Maintainer')
         else:
@@ -166,9 +174,8 @@ for lang in languages:
                     # Just print an error message if there are packages with
                     # unknown status but only once and not per language
                     for dep in tasks.tasks[task].dependencies[status]:
-                        stderr.write("Warning: Dependency with unknown status: "
-                                     + dep.pkg)
-
+                        stderr.write("Warning: Dependency with unknown status: %s\n"
+                                     % dep.pkg)
         # Keep the Project lists per task to be able to loop over all tasks in plain package list
         data['projects'][task] = tasks.tasks[task].dependencies
 
@@ -178,7 +185,7 @@ for lang in languages:
             template = loader.load('tasks.xhtml')
 
         outputfile = os.path.join(outputdir, '{0}.{1}.html'.format(task, lang))
-        with open(outputfile, "w") as f:
+        with codecs.open(outputfile, "w", "utf-8") as f:
             try:
 # We had to mask ampersand ('&') from Genshi but even if the browser shows
 # the correct character packages.debian.org gets confused - so turn it back here
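
The ampersand un-masking referred to in the comment above happens outside this hunk; as an assumption only, it amounts to a plain string replacement on the rendered page:

    # Sketch (assumption): turn the masked entity back into a literal ampersand.
    rendered = '<a href="https://packages.debian.org/search?keywords=foo&amp;searchon=names">foo</a>'
    print(rendered.replace('&amp;', '&'))
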
@@ -203,7 +210,7 @@ for lang in languages:
         data['projectsintasks']     = tasks.tasks[task].dependencies
 
     outputfile = os.path.join(outputdir, 'packagelist.{0}.html'.format(lang))
-    with open(outputfile, 'w') as f:
+    with codecs.open(outputfile, 'w', 'utf-8') as f:
         try:
             f.write(template.generate(**data).render('xhtml'))
         except UnicodeDecodeError as err:
@@ -221,6 +228,8 @@ for lang in languages:
     htafp.write(language_dict[lang]['htaccess'] + ' ')
 htafp.write("\n")
 
+htafp.write("LanguagePriority {0}\n".format(
+    ' '.join(language_dict[lang]['htaccess']) for lang in languages))
 htafp.close()
 SetFilePermissions(htaccess)
 
diff --git a/webtools_py3/thermometer.py b/webtools_py3/thermometer.py
deleted file mode 100755
index 9def0fe..0000000
--- a/webtools_py3/thermometer.py
+++ /dev/null
@@ -1,426 +0,0 @@
-#!/usr/bin/python
-# Copyright 2013: Andreas Tille <tille at debian.org>
-# License: GPL
-
-#PORT=5441
-UDDPORT=5452
-PORT=UDDPORT
-DEFAULTPORT=5432
-
-from sys import argv, stderr, exit
-import os
-import psycopg2
-import json
-import re
-import time
-from datetime import datetime
-from email.Utils import formatdate
-import gettext
-
-from genshi.template import TemplateLoader
-from genshi import Markup
-from genshi.template.eval import UndefinedError
-
-from blendsunicode   import to_unicode
-from blendstasktools import ReadConfig, RowDictionaries, CheckOrCreateOutputDir, SetFilePermissions
-
-###########################################################################################
-# Define several prepared statements to query UDD
-try:
-  conn = psycopg2.connect(host="localhost",port=PORT,user="guest",database="udd")
-except psycopg2.OperationalError as err:
-  try:
-    conn = psycopg2.connect("service=udd")
-  except psycopg2.OperationalError as err:
-    # logger not known at this state: logger.warning
-    print("Service=udd seems not to be installed on this host.\tMessage: %s" % (str(err)), file=stderr)
-    try:
-        conn = psycopg2.connect(host="localhost",port=DEFAULTPORT,user="guest",database="udd")
-    except psycopg2.OperationalError:
-	# Hmmm, I observed a really strange behaviour on one of my machines where connecting to
-	# localhost does not work but 127.0.0.1 works fine.  No idea why ... but this should
-	# do the trick for the moment
-	conn = psycopg2.connect(host="127.0.0.1",port=DEFAULTPORT,user="guest",database="udd")
-
-curs = conn.cursor()
-# uddlog = open('logs/uddquery.log', 'w')
-
-def _execute_udd_query(query):
-    try:
-        curs.execute(query)
-    except psycopg2.ProgrammingError as err:
-        print("Problem with query\n%s" % (query), file=stderr)
-        print(err, file=stderr)
-        exit(-1)
-    except psycopg2.DataError as err:
-        print("%s; query was\n%s" % (err, query), file=stderr)
-
-
-def main():
-
-    if len(argv) <= 1:
-        print("Usage: %s <Blend name>" % argv[0], file=stderr)
-        exit(-1)
-
-    blendname = argv[1]
-    config = ReadConfig(blendname)
-
-    # obtain release names automatically
-    query = "SELECT release FROM ubuntu_packages WHERE NOT release LIKE '%-%' GROUP BY release ORDER BY release DESC LIMIT 3"
-    _execute_udd_query(query)
-    if curs.rowcount > 0:
-        releasenames = curs.fetchall()
-        latestubuntu = releasenames[0][0]
-        ubuntuprev1  = releasenames[1][0]
-        ubuntuprev2  = releasenames[2][0]
-    else:
-        print("Failed to obtain Ubuntu release names.", file=stderr)
-        exit(1)
-
-    query = """PREPARE query_thermometer (text) AS
-     SELECT b.source,
-           stable.version AS stable,
-           testing.version AS testing,
-           unstable.version AS unstable,
-           stable_bpo.version AS "stable_bpo",
-           experimental.version AS experimental,
-           new.version AS "NEW",
-           unreleased.version AS "UNRELEASED",
-           ubuntuprev2.version AS ubuntuprev2,
-           ubuntuprev1.version AS ubuntuprev1,
-           latestubuntu.version AS latestubuntu,
-           d.upstream_version AS upstream,
-           -- map values from former dehs to upstream table values
-           CASE WHEN d.status IS NULL OR
-                     d.status = '' OR
-                     d.status = 'error'					THEN 'none'
-                WHEN d.status = 'Newer version available'		THEN 'outdated'
-                WHEN d.status = 'up to date'				THEN 'upToDate'
-                WHEN d.status = 'Debian version newer than remote site'	THEN 'newer-in-debian'
-                ELSE 'unknown' -- should not occure!
-           END AS upstreamstatus,
-           homepage,
-           wnpp,
-           is_in_debian,
-           vcs_browser,
-           tasks.tasks,
-           CASE WHEN stable.version >= unstable.version THEN 'upToDate'
-                WHEN stable.version <  unstable.version THEN 'debianOutOfDate'
-                WHEN stable.version IS NOT NULL AND unstable.version IS NULL THEN 'obsolete'
-                WHEN stable.version IS NULL AND testing.version IS NULL AND unstable.version IS NULL AND new.version IS NULL THEN 'unpackaged'
-                WHEN new.version IS NULL AND (experimental.version IS NOT NULL OR unreleased.version IS NOT NULL) THEN 'workInProgress'
-                WHEN new.version IS NOT NULL THEN 'new'
-                ELSE 'unknown' END AS debianstatus,
-           CASE WHEN latestubuntu.version >= unstable.version THEN 'upToDate'
-                WHEN latestubuntu.version <  unstable.version THEN 'ubuntuOutOfDate'
-                WHEN stable.version IS NOT NULL AND unstable.version IS NULL THEN 'obsolete'
-                WHEN stable.version IS NULL AND testing.version IS NULL AND unstable.version IS NULL THEN 'unpackaged'
-                WHEN experimental.version IS NOT NULL OR unreleased.version IS NOT NULL THEN 'workInProgress'
-                ELSE 'unknown' END AS ubuntustatus
-     FROM (
-      SELECT DISTINCT p.source, '' AS wnpp FROM packages p
-      JOIN blends_dependencies bd ON bd.package = p.package
-      JOIN releases r ON p.release = r.release
-      WHERE bd.blend = $1 AND
-            (r.sort >= (SELECT sort FROM releases WHERE role = 'stable') OR r.sort = 0) -- forget older releases than stable but allow experimental
-      UNION
-      SELECT DISTINCT n.source, '' AS wnpp FROM new_packages n
-      JOIN blends_dependencies bd ON bd.package = n.package
-      WHERE bd.blend = $1 AND bd.distribution = 'new'
-      UNION
-      SELECT DISTINCT u.source, '' AS wnpp FROM ubuntu_packages u
-      JOIN blends_dependencies bd ON bd.package = u.package
-      WHERE bd.blend = $1 AND bd.distribution = 'ubuntu'
-      UNION
-      SELECT DISTINCT pr.source, CASE WHEN wnpp!=0 THEN CAST(pr.wnpp AS text) ELSE '' END AS wnpp FROM blends_prospectivepackages pr
-      JOIN blends_dependencies bd ON bd.package = pr.package
-      WHERE bd.blend = $1 AND bd.distribution = 'prospective'
-     ) b
-     LEFT OUTER JOIN ( SELECT source, homepage FROM (
-      SELECT source, homepage, row_number() OVER (PARTITION BY source ORDER BY version DESC) FROM (
-       SELECT DISTINCT p.source, p.homepage, p.version FROM packages p
-         JOIN blends_dependencies bd ON bd.package = p.package
-         JOIN releases r ON p.release = r.release
-         WHERE bd.blend = $1 AND
-            (r.sort >= (SELECT sort FROM releases WHERE role = 'stable') OR r.sort = 0) -- forget older releases than stable but allow experimental
-       UNION
-       SELECT DISTINCT n.source, n.homepage, n.version FROM new_packages n
-         JOIN blends_dependencies bd ON bd.package = n.package
-         WHERE bd.blend = $1 AND bd.distribution = 'new'
-       UNION
-       SELECT DISTINCT u.source, u.homepage, u.version FROM ubuntu_packages u
-         JOIN blends_dependencies bd ON bd.package = u.package
-         WHERE bd.blend = $1 AND bd.distribution = 'ubuntu'
-         UNION
-       SELECT DISTINCT pr.source, pr.homepage, pr.chlog_version as version FROM blends_prospectivepackages pr
-         JOIN blends_dependencies bd ON bd.package = pr.package
-         WHERE bd.blend = $1 AND bd.distribution = 'prospective'
-       ) hpversion
-      GROUP BY source, homepage, version
-      ) tmp
-      WHERE row_number = 1
-     ) homepage ON b.source = homepage.source
-     LEFT OUTER JOIN ( SELECT source, vcs_browser FROM (
-      SELECT source, vcs_browser, row_number() OVER (PARTITION BY source ORDER BY version DESC) FROM (
-       SELECT DISTINCT p.source, s.vcs_browser, p.version FROM packages p
-         JOIN blends_dependencies bd ON bd.package = p.package
-         JOIN sources s ON p.source = s.source AND p.release = s.release
-         JOIN releases r ON p.release = r.release
-         WHERE bd.blend = $1 AND
-            (r.sort >= (SELECT sort FROM releases WHERE role = 'stable') OR r.sort = 0) -- forget older releases than stable but allow experimental
-       UNION
-       SELECT DISTINCT np.source, us.vcs_browser, np.version FROM new_packages np
-         JOIN blends_dependencies bd ON bd.package = np.package
-         JOIN new_sources us ON np.source = us.source
-         WHERE bd.blend = $1 AND bd.distribution = 'new'
-       UNION
-       SELECT DISTINCT up.source, us.vcs_browser, up.version FROM ubuntu_packages up
-         JOIN blends_dependencies bd ON bd.package = up.package
-         JOIN ubuntu_sources us ON up.source = us.source AND up.release = us.release
-         WHERE bd.blend = $1 AND bd.distribution = 'ubuntu'
-       UNION
-       SELECT DISTINCT pr.source, pr.vcs_browser,  pr.chlog_version as version FROM blends_prospectivepackages pr
-         JOIN blends_dependencies bd ON bd.package = pr.package
-         WHERE bd.blend = $1 AND bd.distribution = 'prospective'
-       ) hpversion
-      GROUP BY source, vcs_browser, version
-      ) tmp
-      WHERE row_number = 1
-     ) vcs ON b.source = vcs.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT p.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM packages p
-        JOIN releases r ON p.release = r.release
-        JOIN blends_dependencies b ON b.package = p.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN sources s ON p.source = s.source AND p.release = s.release
-        WHERE b.blend = $1 AND r.role = 'unstable' AND p.distribution = 'debian'
-        GROUP BY p.source
-        ORDER BY p.source
-      ) unstable ON b.source = unstable.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT p.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM packages p
-        JOIN releases r ON p.release = r.release
-        JOIN blends_dependencies b ON b.package = p.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN sources s ON p.source = s.source AND p.release = s.release
-        WHERE b.blend = $1 AND r.role = 'testing' AND p.distribution = 'debian'
-        GROUP BY p.source
-        ORDER BY p.source
-      ) testing ON b.source = testing.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT p.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM packages p
-        JOIN releases r ON p.release = r.release
-        JOIN blends_dependencies b ON b.package = p.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN sources s ON p.source = s.source AND p.release = s.release
-        WHERE b.blend = $1 AND r.role = 'stable' AND p.distribution = 'debian'
-        GROUP BY p.source
-        ORDER BY p.source
-      ) stable ON b.source = stable.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT p.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM packages p
-        JOIN releases r ON p.release = r.release
-        JOIN blends_dependencies b ON b.package = p.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN sources s ON p.source = s.source AND p.release = s.release
-        WHERE b.blend = $1 AND r.role = 'experimental' AND p.distribution = 'debian'
-        GROUP BY p.source
-        ORDER BY p.source
-      ) experimental ON b.source = experimental.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT p.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM packages p
-        JOIN releases r ON p.release = r.release
-        JOIN blends_dependencies b ON b.package = p.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN sources s ON p.source = s.source AND p.release = s.release
-        WHERE b.blend = $1 AND p.release = (SELECT release FROM releases WHERE role='stable') || '-backports' AND p.distribution = 'debian'
-        GROUP BY p.source
-        ORDER BY p.source
-      ) stable_bpo ON b.source = stable_bpo.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT p.source, 1 AS is_in_debian FROM packages p
-     ) is_in_debian ON b.source = is_in_debian.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT np.source, strip_binary_upload(MAX(np.version)) AS version
-        FROM new_packages np
-        JOIN blends_dependencies b ON b.package = np.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN new_sources ns ON np.source = ns.source 
-        WHERE b.blend = $1 AND b.distribution = 'new'
-        GROUP BY np.source
-        ORDER BY np.source
-     ) new ON b.source = new.source
-     LEFT OUTER JOIN ( -- an 'UNRELEASED' version can be due to not yet finished work in VCS or not yet uploaded at all
-      SELECT DISTINCT source, version FROM (
-       SELECT DISTINCT p.source, strip_binary_upload(MAX(v.version)) AS version
-        FROM packages p
-        JOIN blends_dependencies b ON b.package = p.package
-        -- make sure we get the right source version that does not necessarily match binary version
-        JOIN sources s ON p.source = s.source AND p.release = s.release
-        JOIN vcs v ON s.source = v.source
-        WHERE b.blend = $1 AND v.distribution = 'UNRELEASED'
-        GROUP BY p.source
-       UNION
-       SELECT DISTINCT pr.source, strip_binary_upload(chlog_version) AS version
-        FROM blends_dependencies b
-        JOIN blends_prospectivepackages pr ON b.package = pr.package
-        WHERE b.blend = $1 AND b.distribution != 'new'
-       ) tmp
-      ) unreleased ON b.source = unreleased.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT u.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM ubuntu_packages u
-        JOIN blends_dependencies b ON b.package = u.package
-        JOIN ubuntu_sources s ON u.source = s.source AND u.release = s.release
-        WHERE b.blend = $1 AND u.release = '%s'
-        GROUP BY u.source
-        ORDER BY u.source
-      ) ubuntuprev2 ON b.source = ubuntuprev2.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT u.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM ubuntu_packages u
-        JOIN blends_dependencies b ON b.package = u.package
-        JOIN ubuntu_sources s ON u.source = s.source AND u.release = s.release
-        WHERE b.blend = $1 AND u.release = '%s'
-        GROUP BY u.source
-        ORDER BY u.source
-      ) ubuntuprev1 ON b.source = ubuntuprev1.source
-     LEFT OUTER JOIN (
-      SELECT DISTINCT u.source, strip_binary_upload(MAX(s.version)) AS version
-        FROM ubuntu_packages u
-        JOIN blends_dependencies b ON b.package = u.package
-        JOIN ubuntu_sources s ON u.source = s.source AND u.release = s.release
-        WHERE b.blend = $1 AND u.release = '%s'
-        GROUP BY u.source
-        ORDER BY u.source
-      ) latestubuntu ON b.source = latestubuntu.source
-      LEFT OUTER JOIN (
-        SELECT source, array_agg(task) AS tasks FROM (
-          SELECT DISTINCT p.source, b.task
-            FROM all_packages p                                  -- needs 'all_packages' / 'all_sources' and must NOT check releases, so that Ubuntu-only packages are fetched too
-            JOIN blends_dependencies b ON b.package = p.package
-            JOIN all_sources s ON p.source = s.source
-            WHERE b.blend = $1
-          UNION
-          SELECT DISTINCT np.source, b.task
-            FROM new_packages np
-            JOIN blends_dependencies b ON b.package = np.package
-            JOIN new_sources ns ON np.source = ns.source
-            WHERE b.blend = $1
-          UNION
-            SELECT DISTINCT pr.source, bd.task FROM blends_prospectivepackages pr
-              JOIN blends_dependencies bd ON bd.package = pr.package
-              WHERE bd.blend = $1 AND bd.distribution = 'prospective'
-          ) tmp
-        GROUP BY source
-      ) tasks ON b.source = tasks.source
-      LEFT OUTER JOIN (SELECT source, upstream_version, status FROM upstream WHERE release = 'sid') d ON b.source = d.source
-      ORDER BY b.source
-    ;
-    """ % ( ubuntuprev2, ubuntuprev1, latestubuntu )
-    _execute_udd_query(query)
-    
-    _execute_udd_query( "EXECUTE query_thermometer('%s')" % blendname)
-    
-    if curs.rowcount > 0:
-        blend_data = RowDictionaries(curs)
-        #f = open(blendname+'_thermometer.json', 'w')
-        #print >>f, json.dumps(blend_data)
-        #f.close()
-        #SetFilePermissions(blendname+'_thermometer.json')
-    else:
-        print("No data received for Blend", blendname, file=stderr)
-        exit(1)
-
-    # Define directories used
-    current_dir  = os.path.dirname(__file__)
-    # locale_dir   = os.path.join(current_dir, 'locale')
-    template_dir = os.path.join(current_dir, 'templates')
-    
-    # initialize Genshi
-    loader = TemplateLoader([template_dir], auto_reload=True)
-    
-    outputdir = CheckOrCreateOutputDir(config['outputdir'],'thermometer')
-    if outputdir is None:
-        exit(-1)
-    
-    t = datetime.now()
-    
-    # Initialize i18n
-    domain = 'blends-webtools'
-    gettext.install(domain)
-
-    data={}
-    data['projectname'] = blendname
-    data['blend_data']  = blend_data
-    if 'advertising' in config and config['advertising'] is not None:
-        # we have to remove the gettext _() call which was inserted into the config
-        # file to enable easy input for config file editors - but the call has to
-        # be made explicitly in the Python code
-        advertising = re.sub(r'_\(\W(.+)\W\)', r'\1', config['advertising'])
-        # gettext requires '"' to be escaped, so we have to remove the escape character '\' again
-        data['projectadvertising'] = Markup(to_unicode(re.sub(r'\\"', '"', advertising)))
-    else:
-        data['projectadvertising'] = None
-
-    legend = [
-               ['upToDate',        'Up to date'],
-               ['debianOutOfDate', 'Debian stable behind unstable'],
-               ['ubuntuOutOfDate', 'Ubuntu behind Debian unstable'],
-               ['new',             'Waiting in NEW'],
-               ['unpackaged',      'Not packaged'],
-               ['obsolete',        'Obsolete'],
-               ['newer-in-debian', 'Upstream behind unstable'],
-               ['uptodate',        'Unstable fits upstream'],
-               ['outdated',        'Unstable behind upstream'],
-             ]
-
-    ulegend = legend[:]
-    ulegend.remove(['new', 'Waiting in NEW'])
-
-
-    data['ubuntuprev2']       = ubuntuprev2
-    data['ubuntuprev1']       = ubuntuprev1
-    data['latestubuntu']      = latestubuntu
-    data['legend']            = legend
-    data['ulegend']           = ulegend
-
-    for key in ('homepage', 'projecturl', 'projectname', 'logourl', 'ubuntuhome', 'projectubuntu'):
-        data[key] = config[key]
-    data['timestamp']   = formatdate(time.mktime(t.timetuple()))
-
-    data['thermometer']  = blendname + '_thermometer.html'
-    os.system("ln -sf %s %s/index.html" % (data['thermometer'], outputdir))
-    data['uthermometer'] = 'ubuntu_' + blendname + '_thermometer.html'
-    outputfile  = outputdir + '/' + data['thermometer']
-    uoutputfile = outputdir + '/' + data['uthermometer']
-    try:
-        os.unlink(outputfile)
-        os.unlink(uoutputfile)
-    except OSError: # simply continue if the file does not exist
-        pass
-
-    f = open(outputfile, 'w')
-    template = loader.load('thermometer.xhtml')
-    try:
-        print(template.generate(**data).render('xhtml'), file=f)
-    except TypeError as err:
-        print("Problem creating thermometer.html.\tMessage: %s" % (str(err)), file=stderr)
-    f.close()
-    SetFilePermissions(outputfile)
-    f = open(uoutputfile, 'w')
-    utemplate = loader.load('uthermometer.xhtml')
-    try:
-        print(utemplate.generate(**data).render('xhtml'), file=f)
-    except TypeError as err:
-         print("Problem creating uthermometer.html.\tMessage: %s" % (str(err)), file=stderr)
-    f.close()
-    SetFilePermissions(uoutputfile)
-
-if __name__ == '__main__':
-    main()
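
A note on the code dropped above: the advertising clean-up first strips the
gettext _() wrapper from the configured string and then un-escapes the double
quotes. A minimal sketch of that transformation in plain Python (the sample
value and variable name below are invented for illustration; the regex comes
from the removed code):

    import re

    # Hypothetical value as it could appear in a Blend's config file;
    # the _("...") wrapper only marks the text for translation.
    advertising = '_("Free \\"blend\\" of science software")'

    # Drop the _( ... ) wrapper and keep the quoted text
    # (same regex idea as in thermometer.py).
    advertising = re.sub(r'_\(\W(.+)\W\)', r'\1', advertising)

    # Remove the backslashes that escape '"' in the config file.
    advertising = re.sub(r'\\"', '"', advertising)

    print(advertising)   # Free "blend" of science software
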
diff --git a/webtools_py3/thermometer.py b/webtools_py3/thermometer.py
new file mode 120000
index 0000000..65c8e69
--- /dev/null
+++ b/webtools_py3/thermometer.py
@@ -0,0 +1 @@
+../webtools/thermometer.py
\ No newline at end of file
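
The HTML pages themselves are produced with Genshi: a template is loaded,
filled from the data dictionary and serialized as XHTML. A self-contained
sketch of that pattern with an inline template (the real script loads
thermometer.xhtml and uthermometer.xhtml from its templates directory; the
sample data below is invented):

    from genshi.template import MarkupTemplate

    # Inline stand-in for templates/thermometer.xhtml; only the ${...}
    # substitution is shown, the real templates are far larger.
    template = MarkupTemplate(
        '<p>Thermometer for ${projectname}, generated ${timestamp}</p>'
    )

    data = {'projectname': 'debian-science', 'timestamp': '2016-03-09'}
    # encoding=None makes render() return a text string instead of bytes
    print(template.generate(**data).render('xhtml', encoding=None))
    # -> <p>Thermometer for debian-science, generated 2016-03-09</p>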

-- 
Static and dynamic websites for Debian Pure Blends


