[Blends-commit] r2604 - in /blends/trunk/webtools: blendstasktoolsold.py blendsunicodeold.py update-all-tasks

tille at users.alioth.debian.org tille at users.alioth.debian.org
Tue Jan 18 07:53:07 UTC 2011


Author: tille
Date: Tue Jan 18 07:53:01 2011
New Revision: 2604

URL: http://svn.debian.org/wsvn/blends/?sc=1&rev=2604
Log:
Really keep the old code for the bugs pages (as long as it is not rewritten)

Added:
    blends/trunk/webtools/blendstasktoolsold.py
Removed:
    blends/trunk/webtools/blendsunicodeold.py
Modified:
    blends/trunk/webtools/update-all-tasks

Added: blends/trunk/webtools/blendstasktoolsold.py
URL: http://svn.debian.org/wsvn/blends/blends/trunk/webtools/blendstasktoolsold.py?rev=2604&op=file
==============================================================================
--- blends/trunk/webtools/blendstasktoolsold.py (added)
+++ blends/trunk/webtools/blendstasktoolsold.py Tue Jan 18 07:53:01 2011
@@ -1,0 +1,1701 @@
+#!/usr/bin/python
+# Copyright 2008: Andreas Tille <tille at debian.org>
+# License: GPL
+
+# Blends metapackages list a set of dependencies.
+# These might be fulfilled by the Debian package
+# set or not.
+#
+# This interface provides some classes that contain
+# all available information about such a dependency,
+# like whether it is an official package or not
+# and in which distribution it is contained;
+# if it is not contained, the information is obtained
+# from the tasks file (home page, license, WNPP etc.).
+
+PORT=5441
+DEFAULTPORT=5432
+
+from sys import stderr, exit
+from subprocess import Popen, PIPE
+import os
+import urllib
+import StringIO
+import gzip
+import bz2
+import re
+import email.Utils
+
+import psycopg2
+import gettext
+
+from genshi import Markup
+# According to http://genshi.edgewall.org/wiki/GenshiFaq#HowcanIincludeliteralXMLintemplateoutput
+# there are different options to prevent escaping '<' / '>' but HTML does not work ...
+# from genshi.input import HTML
+from blendsmarkdown import SplitDescription, MarkupString, render_longdesc
+
+from debian_bundle import deb822
+from blendsunicode import to_unicode
+
+import logging
+import logging.handlers
+logger = logging.getLogger('blends')
+logger.setLevel(logging.INFO)
+
+# Seems to have problems on 17.04.2009
+# BASEURL  = 'http://ftp.debian.org/debian'
+BASEURL  = 'http://ftp.de.debian.org/debian'
+# SVNHOST  = 'svn+ssh://svn.debian.org'
+SVNHOST  = 'svn://svn.debian.org'
+KEYSTOIGNORE = ( 'Architecture', 'Comment', 'Leaf', 'NeedConfig', 'Note', 'Section',
+                 'Needconfig', 'DontAvoid',
+                 'Enhances', 'Test-always-lang', 'Metapackage')
+# DDTP now exports to official debian mirror
+#DDTPURL = "http://ddtp.debian.net/debian/dists/"
+DDTPURL = "http://ftp.de.debian.org/debian/dists/"
+DDTPDIR = "/i18n/Translation-"
+DDTPLISTS = ('etch', 'lenny', 'sid')
+
+CONFDIR = 'webconf'
+
+COMPRESSIONEXTENSION='bz2'
+# COMPRESSIONEXTENSION='gz' # Translations are only available as bz2 since April 2009
+
+HOMEPAGENONE = '#'
+HOMEPAGENONEFIELDS = ('homepage', 
+                      'pkg-url',     # Link to packages.debian.org search interface with exact
+                                     # package matches or URL to unofficial package
+                      'vcs-browser', # Browser-URL to packaging stuff in Vcs
+                     )
+
+PKGURLMASK = 'http://packages.debian.org/search?keywords=%s%%26searchon=names%%26exact=1%%26suite=all%%26section=all'
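+# Note: the doubled '%%' survives the later %-formatting as a literal '%', so
+# expanding the mask yields a URL-encoded query string.  Illustrative sketch
+# (hypothetical package name):
+#   PKGURLMASK % 'foo'
+#   -> 'http://packages.debian.org/search?keywords=foo%26searchon=names%26exact=1%26suite=all%26section=all'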
+
+DEPENDENT  = 0
+SUGGESTED  = 1
+DONE       = 2
+BUGLISTCAT = (DEPENDENT, SUGGESTED, DONE )
+
+releases  = {'oldstable'    : ('etch', 'etch-proposed-updates', 'etch-security'),
+             'stable'       : ('lenny', 'lenny-proposed-updates', 'lenny-security'),
+             'testing'      : ('squeeze', ),
+             'unstable'     : ('sid', ),
+             'experimental' : ('experimental', )
+            }
+
+pkgstatus = {'official_high' : # official package with high priority dependency
+                               { 'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
+                                 'components'   : ('main', ),
+                                 'dependencies' : ('Depends', 'Recommends'),
+                                 'fields-set'   : (),
+                                 'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package and has high relevance</a>',
+                                 'order'        : 1
+                               },
+             'official_low'  : # official package with low priority dependency
+                               { 'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
+                                 'components'   : ('main', ),
+                                 'dependencies' : ('Suggests', ),
+                                 'fields-set'   : (),
+                                 'colorcode'    : 'Green: The project is <a href="#%s">available as an official Debian package but has lower relevance</a>',
+                                 'order'        : 2
+                               },
+             'non-free'      : # package in contrib or non-free, priority decreased to Suggests in any case
+                               { 'releases'     : ('oldstable', 'stable', 'testing', 'unstable'),
+                                 'component'    : ('contrib', 'non-free'),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : (),
+                                 'colorcode'    : 'Green: The project is <a href="#%s">available in Debian packaging pool but is not in Debian main</a>',
+                                 'order'        : 3
+                               },
+             'experimental'  : # package which exists only in experimental
+                               { 'releases'     : ('experimental', ),
+                                 'component'    : ('main', 'contrib', 'non-free'),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : (),
+                                 'colorcode'    : 'Yellow: The project is <a href="#%s">available in Debian packaging pool but is regarded as experimental</a>',
+                                 'order'        : 4
+                               },
+             'new'           : # package in new queue
+                               { 'releases'     : ('new', ),
+                                 'component'    : ('main', 'contrib', 'non-free'),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : (),
+                                 'colorcode'    : 'Yellow: A package of the project <a href="#%s">is in the Debian NEW queue and hopefully available soon</a>',
+                                 'order'        : 5
+                               },
+             'pkgvcs'        : # Not yet packaged but packaging code in Vcs
+                               { 'releases'     : (),
+                                 'component'    : (),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : ('vcs-svn', 'vcs-git', 'vcs-browser'),
+                                 'colorcode'    : 'Yellow: The packaging of this project <a href="#%s">has started and a developer might try the packaging code in VCS or help packaging.</a>',
+                                 'order'        : 6
+                               },
+             'unofficial'    : # unofficial packages outside Debian
+                               { 'releases'     : (),
+                                 'component'    : (),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : ('pkg-url', ),
+                                 'colorcode'    : 'Yellow: There exists an <a href="#%s">unofficial package</a> of the project',
+                                 'order'        : 7
+                               },
+             'wnpp'          : # project which has at least a WNPP bug filed
+                               { 'releases'     : (),
+                                 'component'    : (),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : ('wnpp', ),
+                                 'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a> but there is some record of interest (WNPP bug).',
+                                 'order'        : 8
+                               },
+             'prospective'   : # projects which might be interesting for a Blend but no work is done yet
+                               { 'releases'     : (),
+                                 'component'    : (),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : ('homepage', ),  # TODO: a description should be set as well ...
+                                 'colorcode'    : 'Red: The project is <a href="#%s">not (yet) available as a Debian package</a>.',
+                                 'order'        : 9
+                               },
+             'ignore'        : # Package inside Debian which is "under observation"
+                               { 'releases'     : (releases.keys()),
+                                 'component'    : ('main', 'contrib', 'non-free'),
+                                 'dependencies' : ('Ignore', ),
+                                 'fields-set'   : (),
+                                 'colorcode'    : '%s',
+                                 'order'        : 10
+                               },
+             'avoid'         : # Package inside Debian which should not go to an install medium of the Blend
+                               { 'releases'     : (releases.keys()),
+                                 'component'    : ('main', 'contrib', 'non-free'),
+                                 'dependencies' : ('Avoid', ),
+                                 'fields-set'   : (),
+                                 'colorcode'    : '%s',
+                                 'order'        : 11
+                               },
+             'unknown'       : # Everything else
+                               { 'releases'     : (),
+                                 'component'    : (),
+                                 'dependencies' : ('Depends', 'Recommends', 'Suggests'),
+                                 'fields-set'   : (),
+                                 'colorcode'    : 'White: The project has an %s status.',
+                                 'order'        : 100
+                               },
+             }
+
+# http://wiki.python.org/moin/HowTo/Sorting#Sortingbykeys
+_tmplist=[]
+for key in pkgstatus.keys():
+    _tmplist.append((key,pkgstatus[key]['order']))
+_tmpsorted = sorted(_tmplist, key=lambda x:(x[1], x[0]))
+pkgstatus_sortedkeys = []
+for _tmp in _tmpsorted:
+    pkgstatus_sortedkeys.append(_tmp[0])
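+# With the 'order' values defined above this should yield:
+#   pkgstatus_sortedkeys == ['official_high', 'official_low', 'non-free',
+#                            'experimental', 'new', 'pkgvcs', 'unofficial',
+#                            'wnpp', 'prospective', 'ignore', 'avoid', 'unknown']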
+
+dep_strength_keys = []
+for pkgstat in pkgstatus:
+    for dep in pkgstatus[pkgstat]['dependencies']:
+        if dep not in dep_strength_keys:
+            dep_strength_keys.append(dep)
+
+license_in_component = {'main'     : 'DFSG free',
+                        'contrib'  : 'DFSG free, but needs non-free components',
+                        'non-free' : 'non-free'
+            }
+
+def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
+    # Create a list of status of dependencies out of pkgstatus dictionary
+    use_dependencystatus = []
+    if dependencystatus == []:
+        for pkgstat in pkgstatus_sortedkeys:
+            # By default, i.e. if no explicit dependencystatus is given, we are only interested in
+            # dependencies which are of the given max_order or better
+            if pkgstatus[pkgstat]['order'] > pkgstatus[max_order]['order']:
+                continue
+            use_dependencystatus.append(pkgstat)
+    else:
+        # verify correctly given dependencies
+        for pkgstat in dependencystatus:
+            if pkgstat in pkgstatus.keys():
+                use_dependencystatus.append(pkgstat)
+            else:
+                logger.error("Unknown dependencystatus %s" % pkgstat)
+        if use_dependencystatus == []:
+            logger.error("No valid dependencystatus in", dependencystatus)
+    return use_dependencystatus
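+# Usage sketch (illustrative, hypothetical arguments):
+#   GetDependencies2Use([], 'experimental')
+#   -> ['official_high', 'official_low', 'non-free', 'experimental']
+#   GetDependencies2Use(['official_high', 'nonsense'])
+#   -> ['official_high']   (an error is logged for the unknown status)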
+
+###########################################################################################
+# Define several prepared statements to query UDD
+try:
+    conn = psycopg2.connect(host="localhost",port=PORT,user="guest",database="udd")
+except psycopg2.OperationalError:
+    logger.debug("PostgreSQL does not seem to run on port %i .. trying default port %i." % (PORT, DEFAULTPORT))
+    try:
+        conn = psycopg2.connect(host="localhost",port=DEFAULTPORT,user="guest",database="udd")
+    except psycopg2.OperationalError:
+	# Hmmm, I observed a really strange behaviour on one of my machines where connecting to
+	# localhost does not work but 127.0.0.1 works fine.  No idea why ... but this should
+	# do the trick for the moment
+	conn = psycopg2.connect(host="127.0.0.1",port=DEFAULTPORT,user="guest",database="udd")
+
+curs = conn.cursor()
+query = """PREPARE query_pkg (text) AS SELECT 
+                   distribution, component, version, architecture, maintainer,
+                   source, section, task, distribution, release, component, homepage,
+                   description, long_description FROM packages
+                   WHERE package = $1 ORDER BY version"""
+curs.execute(query)
+
+query = """PREPARE query_new (text) AS SELECT 
+                   distribution, component, version, architecture, maintainer,
+                   source, section, distribution, 'new' AS release, component, homepage,
+                   description, long_description FROM new_packages
+                   WHERE package = $1 ORDER BY version LIMIT 1"""
+curs.execute(query)
+
+# Sometimes the tasks file contains dependencies from virtual packages and we have to
+# obtain the real packages which provide this dependency.
+# First check whether there are such packages (only names)
+query = """PREPARE query_provides (text) AS 
+           SELECT package FROM packages WHERE provides = $1 GROUP BY package;"""
+curs.execute(query)
+
+# Obtain more detailed information about packages that might provide a dependency
+#query = """PREPARE query_provides_version_release (text) AS 
+#           SELECT package, version, release FROM packages WHERE provides = $1
+#                  GROUP BY version, package, release ORDER BY version DESC;"""
+#curs.execute(query)
+
+# Obtain the component a certain package might be in
+query = "PREPARE pkg_component (text) AS SELECT component FROM packages WHERE package = $1 GROUP BY component "
+curs.execute(query)
+
+# Obtain the releases featuring a certain package.  A package might show up in different components when
+# considering different releases; in that case we apply a preference of main over contrib over non-free
+# and mention only the releases of the selected component.
+query = "PREPARE pkg_releases (text, text) AS SELECT release FROM packages WHERE package = $1 AND component = $2 GROUP BY release "
+curs.execute(query)
+
+# Obtain available versions including the architectures where this version is available
+#query = """PREPARE pkg_versions_arch (text, text) AS
+#   SELECT release, regexp_replace(version, '^[0-9]:', '') AS version, array_to_string(array_accum(architecture),',') AS architectures FROM
+#     ( SELECT architecture, version,
+#          release || CASE WHEN char_length(substring(distribution from '-.*')) > 0 THEN substring(distribution from '-.*') ELSE '' END AS release FROM packages
+#          WHERE package = $1 AND component = $2
+#          GROUP BY architecture, version, release, distribution
+#          ORDER BY architecture
+#     ) AS av
+#     GROUP BY version, release ORDER BY version DESC;"""
+query = """PREPARE pkg_versions_arch (text) AS
+   SELECT r as release, version, archs, component
+     FROM versions_archs_component($1) AS (r text, version text, archs text, component text)
+          -- you have to specify the column names because plain RECORD type is returned
+     JOIN releases ON releases.release = r
+          -- JOIN with release table to enable reasonable sorting
+    WHERE r NOT LIKE '%-%'
+          -- ignore releases like *-security etc.
+    ORDER BY releases.sort ASC, version DESC;"""
+curs.execute(query)
+
+query = """PREPARE pkg_versions_stable_testing (text) AS
+   SELECT release,  regexp_replace(regexp_replace(debversion, '-.*', ''), '[.+~]dfsg.*', '') AS version, debversion
+     FROM (SELECT r AS release, MAX(version) AS debversion
+             FROM versions_archs_component($1) AS (r text, version text, archs text, component text)
+            WHERE r IN ('lenny', 'squeeze') GROUP BY r) AS zw;"""  # Change the releases here once Squeeze is released
+curs.execute(query)
+
+# Obtain upstream version in case it is newer than Debian version
+query = """PREPARE pkg_dehs (text) AS
+   SELECT DISTINCT d.source, unstable_upstream, unstable_parsed_version, unstable_status, experimental_parsed_version, experimental_status FROM dehs d
+      JOIN  packages p ON p.source = d.source
+      WHERE p.package = $1 AND unstable_status = 'outdated' ;"""
+curs.execute(query)
+
+# Obtain Vcs information from the source table - use only the latest version
+query = """PREPARE src_vcs (text) AS
+           SELECT vcs_type AS "vcs-type", vcs_url AS "vcs-url", vcs_browser AS "vcs-browser" FROM sources
+            WHERE source = $1 AND (vcs_type IS NOT NULL OR vcs_url IS NOT NULL OR vcs_browser IS NOT NULL)
+            ORDER BY version desc LIMIT 1"""
+curs.execute(query)
+
+# Obtain description in foreign language from DDTP project if available
+# For the moment just query for the highest available version of the description
+query = """PREPARE ddtp_description (text) AS
+           SELECT language, description, long_description, version FROM DdtpLanguageMaxVersion($1)"""
+curs.execute(query)
+
+# Obtain popcon data:
+#   inst     : number of people who installed this package;
+# * vote     : number of people who use this package regularly;
+#   old      : number of people who installed, but don't use this package regularly;
+# * recent   : number of people who upgraded this package recently;
+#   no-files : number of people whose entry didn't contain enough information (atime and ctime were 0).
+query = "PREPARE popcon (text) AS SELECT vote, recent FROM popcon WHERE package = $1"
+curs.execute(query)
+
+# Number of submissions
+query = "PREPARE popcon_submissions AS SELECT vote FROM popcon WHERE package = '_submissions'"
+curs.execute(query)
+
+# Debtags
+query = """PREPARE debtags (text) AS 
+            SELECT * FROM debtags WHERE
+                package = $1 AND
+                tag NOT LIKE 'implemented-in::%' AND
+                tag NOT LIKE 'protocol::%' AND
+                tag NOT LIKE '%::TODO' AND
+                tag NOT LIKE '%not-yet-tagged%';"""
+curs.execute(query)
+
+query = """PREPARE query_screenshots (text) AS
+           SELECT screenshot_url, large_image_url AS image, small_image_url AS icon, version
+                  FROM screenshots WHERE package = $1
+                  ORDER BY version DESC, image ;"""
+curs.execute(query)
+
+# This query might result in more packages than wanted, because when seeking for a
+# package 'foo'  it also matches an enhances field of 'bar, xfoo-xx, foo-doc, bazz'
+# and thus we get a false positive.  We have to preprocess the resultset by splitting
+# it into single packages and check again the whole word for matching.  This is
+# implemented below in Python
+######################################################################################
+# ATTENTION: Call this "EXECUTE query_check_enhances('%"+pkg+"%')"                   #
+#            I have no idea how to otherwise mask the '%' in the prepared statement. #
+######################################################################################
+query = """PREPARE query_check_enhances (text) AS
+            SELECT DISTINCT package, enhances FROM packages WHERE enhances LIKE $1"""
+curs.execute(query)
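+# Usage sketch for the workaround above (illustrative, hypothetical package name):
+# the '%' wildcards are added at EXECUTE time, not in the prepared statement:
+#   curs.execute("EXECUTE query_check_enhances ('%foo%')")
+# The resultset still needs the Python-side whole-word check described above.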
+
+# Obtain e-mail address of latest uploader of source package
+query = """PREPARE query_get_latest_uploader (text) AS
+           SELECT changed_by FROM upload_history WHERE source = $1 ORDER BY version DESC LIMIT 1;"""
+curs.execute(query)
+
+#########################################################################################
+
+def ReadConfig(blendname=''):
+    # Try to read config file CONFDIR/<blendname>.conf
+    conffile = CONFDIR + '/' + blendname + '.conf'
+    if not os.access(conffile, os.R_OK):
+        # if config file can not be found in local dir, try /etc/blends/webconf as fallback
+        conffile_default = '/etc/blends/webconf/' + blendname + '.conf'
+        if not os.access(conffile_default, os.R_OK):
+            logger.error("Unable to open config file %s or %s." % (conffile, conffile_default))
+            exit(-1)
+        conffile = conffile_default
+    f = open(conffile, 'r')
+    ret = { 'Blend'       : '',
+            'projectname' : '',
+            'projecturl'  : '',
+            'homepage'    : '',
+            'aliothurl'   : '',
+            'projectlist' : '',
+            'pkglist'     : '',
+            'logourl'     : '',
+            'css'         : '',
+            'outputdir'   : '',
+            'datadir'     : '',
+            'advertising' : None,  # headline with advertising string is optional
+          }
+    for stanza in deb822.Sources.iter_paragraphs(f, shared_storage=False):    
+        ret['Blend']       = stanza['blend']        # short name of the project
+        ret['projectname'] = stanza['projectname']  # Printed name of the project
+        ret['projecturl']  = stanza['projecturl']   # Link to the developer page with dynamic content
+                                                    # like for instance these tasks pages
+        ret['homepage']    = stanza['homepage']     # Homepage with general information about the project
+                                                    # for instance at www.debian.org or wiki.debian.org
+        ret['aliothurl']   = stanza['aliothurl']    # Link to the Alioth page of the project
+        ret['projectlist'] = stanza['projectlist']  # Mailinglist of the project
+        if stanza.has_key('pkglist'):
+    	    ret['pkglist'] = stanza['pkglist']      # Packaging Mailinglist = Maintainer of group maintained packages
+        if stanza.has_key('logourl'):
+            ret['logourl'] = stanza['logourl']      # URL to logo image (might be missing)
+        ret['css']         = stanza['css']          # (relative) URL to CSS file
+        ret['outputdir']   = stanza['outputdir']    # Dir for storing output HTML files
+        ret['datadir']     = stanza['datadir']      # Dir for storing SVN information about project
+        ret['vcsdir']      = stanza['vcsdir']       # Path to Blend information files at svn.debian.org
+        if stanza.has_key('advertising'):
+            # we have to remove the gettext _() call which was inserted into the config
+            # file to enable easy input for config file editors - but the call has to
+            # be made explicitly in the Python code
+            advertising = re.sub('_\(\W(.+)\W\)', '\\1', stanza['advertising'])
+            # gettext needs to escape '"' thus we need to remove the escape character '\'
+            ret['advertising'] = re.sub('\\\\"', '"', advertising)
+
+    return ret
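+# Illustrative sketch of a webconf stanza as read by ReadConfig() - the field
+# names are taken from the code above, all values are made-up examples:
+#   Blend: med
+#   Projectname: Debian Med
+#   Projecturl: http://debian-med.alioth.debian.org/
+#   Homepage: http://www.debian.org/devel/debian-med/
+#   Aliothurl: http://alioth.debian.org/projects/debian-med/
+#   Projectlist: debian-med@lists.debian.org
+#   Css: debian-med.css
+#   Outputdir: /var/www/debian-med
+#   Datadir: tasks/med
+#   Vcsdir: blends/projects/med/trunk/debian-med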
+
+def FetchTasksFiles(data):
+    # Fetch tasks files from SVN of a Blend
+    # The specification of the repository containing the tasks files
+    # of a Blend can be done in webconf/<Blend>.conf
+
+    # tasks directory to obtain dependencies and debian/control to obtain meta
+    # information like the metapackage prefix
+    for dir in ('tasks', 'debian'):
+        tasksdir = data['datadir'] + '/' + dir
+        if not os.access(tasksdir, os.W_OK):
+            try:
+		os.makedirs(tasksdir)
+            except:
+		logger.error("Unable to create data directory %s" % tasksdir)
+        # Checkout/Update tasks from SVN
+        svncommand = "svn %%s %s/%s/%s %s >> /dev/null" % (SVNHOST, data['vcsdir'], dir, tasksdir)
+        if os.path.isdir(tasksdir+'/.svn'):
+    	    svncommand = svncommand % 'up'
+        else:
+            os.system("mkdir -p %s" % (tasksdir))
+            svncommand = svncommand % 'co'
+        if os.system(svncommand):
+    	    logger.error("SVN command %s failed" % (svncommand))
+    	    if os.path.isdir(tasksdir+'/.svn'):
+    		logger.error("Trying old files in %s ..." % tasksdir)
+    	    else:
+    		logger.error("There are no old files in %s -> giving up" % tasksdir)
+    		exit(-1)
+    return data['datadir'] + '/tasks'
+
+def RowDictionaries(cursor):
+    """Return a list of dictionaries which specify the values by their column names"""
+
+    description = cursor.description
+    if not description:
+        # even if there are no data sets to return, the description should contain the table structure.  If not, something went
+        # wrong and we return None to represent a problem
+        return None
+    if cursor.rowcount <= 0:
+        # if there are no rows in the cursor we return an empty list
+        return []
+
+    data = cursor.fetchall()
+    result = []
+
+    for row in data:
+        resultrow = {}
+        i = 0
+        for dd in description:
+            resultrow[dd[0]] = row[i]
+            i += 1
+        result.append(resultrow)
+    return result
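+# Usage sketch (illustrative, hypothetical package name): after executing a
+# statement the rows can be accessed by column name:
+#   curs.execute("EXECUTE popcon ('foo')")
+#   for row in RowDictionaries(curs):
+#       print row['vote'], row['recent']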
+
+def BrowserFromVcsURL(vcs_type, vcs_url):
+    # Guess Vcs-Browser URL from VCS URL
+    if vcs_type.lower().startswith('svn'):
+        ret_url = re.sub('^svn:', 'http:', vcs_url)
+        ret_url = re.sub('/svn/', '/wsvn/', ret_url)
+        ret_url = re.sub('$', '?rev=0&sc=0', ret_url)
+    elif vcs_type.lower().startswith('git'):
+        ret_url = re.sub('^git:', 'http:', vcs_url)
+        ret_url = re.sub('/git/', '/?p=', ret_url)
+    elif vcs_type.lower().startswith('hg'):
+        # Seems that vcs_browser = vcs_url in Mercurial
+        return vcs_url
+    elif vcs_type.lower().startswith('bzr') or vcs_type.lower().startswith('cvs'):
+        logger.warning("No idea how to guess vcs_browser for %s URLS" % vcs_type)
+        return
+    else:
+        logger.warning("Unknown VCS for " + vcs_url)
+        return HOMEPAGENONE
+
+    if ret_url == vcs_url:
+        logger.warning("Unable to obtain Vcs-Browser from " + vcs_url)
+        return HOMEPAGENONE
+    return ret_url
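+# Illustrative sketch (hypothetical repository URL):
+#   BrowserFromVcsURL('Svn', 'svn://svn.debian.org/svn/blends/foo')
+#   -> 'http://svn.debian.org/wsvn/blends/foo?rev=0&sc=0'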
+
+detect_vcs_cvs_re        = re.compile("://.*cvs")
+detect_vcs_svn_re        = re.compile("://.*svn")
+detect_vcs_git_re        = re.compile("://.*git")
+
+def VcsTypeFromBrowserURL(vcs_browser):
+    # Guess Vcs-Type from vcs_browser
+    if detect_vcs_cvs_re.search(vcs_browser):
+        return 'Cvs'
+    if detect_vcs_svn_re.search(vcs_browser):
+        return 'Svn'
+    if detect_vcs_git_re.search(vcs_browser):
+        return 'Git'
+    return 'Unknown VCS'
+
+# The following keys will mostly be used for programs that
+# do not yet exist in Debian and will go to our todo list
+PROPERTIES=('homepage', # Homepage of program
+            'section',  # Section of package in the Debian hierarchy
+            'source',   # Keep the source package name which is needed for ddpo subscription
+           )
+
+class DependantPackage:
+    # Hold information about a package that is in dependency list
+
+    def __init__(self, blendname=None, taskname=None):
+        self.blendname      = blendname # Blend that includes the package in dependency list
+        self.taskname       = taskname  # Task which includes the Dependency
+        self.pkg            = None # Name of dependant package
+        self.PrintedName    = None # Only for Meta package names - no use for a real dependant package
+                                   # FIXME -> object model
+        self.pkgstatus      = 'unknown' # global pkgstatus: characterizes status of dependency, release, packaging status
+        self.releases       = []   # List of releases a package might be in
+        self.component      = None # One of: 'main', 'contrib', 'non-free', if a package shows up in several components which
+                                   # might happen over different releases, just prefer main over contrib over non-free
+        self.why            = None # basically used as comment
+
+        self.properties     = {}
+        self.properties['license']     = 'unknown'
+        for field in HOMEPAGENONEFIELDS:
+            self.properties[field]    = HOMEPAGENONE
+        self.properties['Enhances'] = {} # Dictionary with enhancing pkg name as key and link to package information as value; empty in most cases
+                                         # because Enhances relations are quite rare
+        self.properties['stable_testing_version'] = [] # (release, version) tuples where release is codename for stable and testing
+        self.vcs_found      = 0    # we need a flag to store the fact whether Vcs information of a package is in UDD
+        self.version        = []   # list of {'release', 'version', 'archs'} dictionary containing version and architecture information
+        self.outdated       = {}   # If not an empty dictionary then release='upstream' and package is outdated
+        self.popcon         = {}   # dictionary containing vote and recent values of popcon information
+        self.popconsubmit   = 0    # number of popcon submissions - in case popcon import into UDD might be broken this remains 0
+        self.debtags        = []   # list of {'tag', 'value'} dictionary containing debtag information
+        self.screenshots    = []   # list of {'version', 'url'} dictionary containing screenshot information
+        self.icon           = None # URL of small screenshot icon
+        self.screenshot_url = None # URL to screenshots.debian.net
+        self.responsible    = None # E-Mail address of issuer of ITP or some person
+                                   # who volunteered to care for this program
+        self.filename       = None # Filename of package in the Debian pool
+        self.desc           = {}   # Prospective packages should have a description ...
+                                   # ... which could be copied to (or from if exists)
+                                   # WNPP bug and finally can be used for packaging
+        self.desc['en']     = {}   # An English description should be available in any case
+        self.experimental   = 0    # Set to 1 if package *only* in experimental but not in unstable/testing/stable
+        self.remark         = {}   # Optional remark for a package
+        self.dep_strength   = 0    # Type of dependency (Depends, Recommends, Suggests, Experimental, New, Avoid, Ignore, WNPP)
+
+    def _QueryUDD4Package(self, source):
+        # Query UDD for several properties of a package
+        query = "EXECUTE pkg_component ('%s')" % self.pkg
+        curs.execute(query)
+        if curs.rowcount == 0:
+            # the package does not exist in UDD table packages
+            # verify whether we have some information in NEW queue
+            query = "EXECUTE query_new ('%s')" % self.pkg
+            curs.execute(query)
+            if curs.rowcount == 0:
+                return
+
+            row = RowDictionaries(curs)[0]
+            self.component = row['component']
+            self.version   = [{'release':'new', 'version': row['version'], 'archs':row['architecture']} , ]
+            self.pkgstatus = 'new'
+            self.properties['pkg-url'] = 'http://ftp-master.debian.org/new/%s_%s.html' % (row['source'], row['version'])
+            for prop in PROPERTIES:
+                self.properties[prop] = row[prop]
+            self.desc['en']['short'] = MarkupString(to_unicode(row['description']), self.pkg, 'ShortDesc - New')
+            self.desc['en']['long']  = Markup(render_longdesc(row['long_description'].splitlines()))
+            (_name, _url) = email.Utils.parseaddr(row['maintainer'])
+            self.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+            logger.info("The package %s is not yet in Debian but it is just in the new queue. (Task %s)" % (self.pkg, self.taskname))
+
+            return
+        # = the package exists in UDD table packages
+        else:
+            # This might happen in cases where a package was in non-free and moved to main later or
+            # something like this.  If we find a certain package in more than one components we
+            # prefer main over contrib over non-free.
+            if curs.rowcount > 1:
+                in_main = 0
+                in_contrib = 0
+                for component in curs.fetchall():
+                    if component[0] == 'main':
+                        self.component = 'main'
+                        in_main       = 1
+                        break
+                    if component[0] == 'contrib':
+                        in_contrib    = 1
+                if in_main == 0:
+                    if in_contrib == 1:
+                        self.component = 'contrib'
+                    else:
+                        self.component = 'non-free'
+            else:
+                self.component = curs.fetchone()[0]
+        # Debian Edu contains packages from main/debian-installer
+        if self.component == 'main/debian-installer':
+                self.component = 'main'
+        self.properties['license'] = license_in_component[self.component]
+
+        query = "EXECUTE pkg_releases ('%s', '%s')" % (self.pkg, self.component)
+        curs.execute(query)
+        has_official = 0
+        for rel in curs.fetchall():
+            self.releases.append(rel[0])
+            if rel[0] != 'experimental':
+                has_official = 1
+
+        # query = "EXECUTE pkg_versions_arch ('%s', '%s')" % (self.pkg, self.component)
+        query = "EXECUTE pkg_versions_arch ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            for row in RowDictionaries(curs):
+                if self.component.startswith(row['component']):
+                    self.version.append({'release':row['release'], 'version': row['version'], 'archs':row['archs']})
+                else:
+                    self.version.append({'release':row['release'], 'version': row['version'] + ' (' + row['component'] + ')', 'archs':row['archs']})
+
+        # Version in stable / testing for long table
+        query = "EXECUTE pkg_versions_stable_testing ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            prefix = 'Versions: '
+            for row in RowDictionaries(curs):
+                self.properties['stable_testing_version'].append((row['release'], row['debversion'], row['version'], prefix))
+                prefix = ', '
+
+        query = "EXECUTE pkg_dehs ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            row = RowDictionaries(curs)[0]
+            # it might be that the new upstream goes to experimental - this should be ignored here
+            if row['experimental_status'] != 'uptodate':
+                self.outdated['release']       = 'upstream'
+                self.outdated['version']       = row['unstable_upstream']
+                self.outdated['architectures'] = ''
+
+        query = "EXECUTE popcon ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            row = RowDictionaries(curs)[0]
+            self.popcon['vote']   = row['vote']
+            self.popcon['recent'] = row['recent']
+
+        query = "EXECUTE debtags ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            tagdict = {}
+            taglist = []
+            for row in RowDictionaries(curs):
+                (tag,value) = row['tag'].split('::')
+                if tagdict.has_key(tag):
+                    tagdict[tag] += ', ' + value
+                else:
+                    tagdict[tag]  = value
+                    taglist.append(tag)
+            if taglist:
+                taglist.sort()
+                for tag in taglist:
+                    self.debtags.append({'tag':tag, 'value':tagdict[tag]})
+
+        query = "EXECUTE query_screenshots ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            rows = RowDictionaries(curs)
+            self.icon           = rows[0]['icon']
+            self.image          = rows[0]['image']
+            self.screenshot_url = rows[0]['screenshot_url']
+            # if a package has more than one screenshot, provide a list of these
+            if curs.rowcount > 1:
+                for row in rows:
+                    self.screenshots.append({'version':row['version'], 'url':row['image']})
+
+        if has_official == 1:
+            if self.component == 'main':
+                if self.dep_strength == 'Depends' or self.dep_strength == 'Recommends':
+                    self.pkgstatus = 'official_high'
+                else:
+                    self.pkgstatus = 'official_low'
+            else:
+                    self.pkgstatus = 'non-free'
+        else:
+            self.pkgstatus = 'experimental'
+
+        # link to packages.debian.org search page to see overview about all
+        # package versions in all releases
+        self.properties['pkg-url'] = PKGURLMASK % self.pkg
+
+        query = "EXECUTE query_pkg ('%s')" % self.pkg
+        curs.execute(query)
+
+        for row in RowDictionaries(curs):
+            for prop in PROPERTIES:
+                self.properties[prop] = row[prop]
+            self.desc['en']['short'] = MarkupString(to_unicode(row['description']), self.pkg, 'ShortDesc')
+            self.desc['en']['long']  = Markup(render_longdesc(to_unicode(row['long_description']).splitlines()))
+            (_name, _url) = email.Utils.parseaddr(row['maintainer'])
+            self.properties['maintainer'] = to_unicode(row['maintainer'])
+            self.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+
+        if self.properties.has_key('source'):
+            query = "EXECUTE src_vcs ('%s')" % (self.properties['source'])
+            curs.execute(query)
+            if curs.rowcount > 0:
+                # There is only one line returned by this query
+                row = RowDictionaries(curs)[0]
+                # If some information about Vcs is found in the database make sure it is ignored from tasks file
+                self.vcs_found = 1
+                for prop in row.keys():
+                    if row[prop]:
+                        self.properties[prop] = row[prop]
+                if not self.properties.has_key('vcs-browser') or self.properties['vcs-browser'] == HOMEPAGENONE:
+                    try:
+                        self.properties['vcs-browser'] = BrowserFromVcsURL(self.properties['vcs-type'], self.properties['vcs-url'])
+                    except KeyError, err:
+                        logger.warning("Vcs property missing in database: %s (%s)" % (self.properties, err))
+                if not self.properties.has_key('vcs-type') or not self.properties['vcs-type']:
+                    if self.properties.has_key('vcs-browser') and self.properties['vcs-browser'] != HOMEPAGENONE:
+                        self.properties['vcs-type']    = VcsTypeFromBrowserURL(self.properties['vcs-browser'])
+            # We are only interested in source packages (for instance for Bugs page)
+            if source == 1:
+                self.pkg = self.properties['source']
+            # Stop using the source package in self.pkg because we need the source package to obtain latest uploaders
+            # and bugs should be rendered in the same job - so we need the differentiation anyway
+            self.src = self.properties['source']
+        else:
+            logger.error("Failed to obtain source for package", self.pkg)
+            return
+
+        query = "EXECUTE query_get_latest_uploader ('%s')" % (self.src)
+        curs.execute(query)
+        try:
+            changed = to_unicode(curs.fetchone()[0])
+	except TypeError, err:
+            changed = None
+            logger.info("Query '%s' does not result in a valid changed entry (%s)" % (query, err))
+        if changed:
+            # self.properties['maintainer'] = to_unicode(self.properties['maintainer']) # .encode('utf-8')
+            try:
+                if not changed.startswith(self.properties['maintainer']):
+                    (_name, _url) = email.Utils.parseaddr(changed)
+                    changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
+                    self.properties['changed_by']    = MarkupString(changed, self.pkg, 'changed_by')
+                    self.properties['last_uploader'] = to_unicode(changed)
+                    self.properties['last_uploader_simple'] = to_unicode('%s <%s>' % (_name, _url))
+            except UnicodeDecodeError, err:
+                logger.error("Failed to compare changed with maintainer - assume both are the same for package %s (%s)", self.pkg, err)
+
+        if source == 0: # When querying for source packages to render BTS pages
+                # translations are irrelevant - so obtain DDTP translations
+                # only otherwise (i.e. when source == 0)
+            query = "EXECUTE ddtp_description ('%s')" % (self.pkg)
+            curs.execute(query)
+
+            for row in RowDictionaries(curs):
+                lang = row['language']
+                self.desc[lang] = {}
+                self.desc[lang]['short'] = MarkupString(to_unicode(row['description']), self.pkg, 'ShortDesc - ' + lang)
+                self.desc[lang]['long']  = Markup(render_longdesc(to_unicode(row['long_description']).splitlines()))
+
+        query = "EXECUTE query_check_enhances ('%"+self.pkg+"%')"
+        curs.execute(query)
+
+        if curs.rowcount > 0:
+            for row in RowDictionaries(curs):
+                enhancelist = row['enhances'].split(', ')
+                if self.pkg in enhancelist:
+                    if not row['package'] in self.properties['Enhances'].keys():
+                        self.properties['Enhances'][row['package']] = PKGURLMASK % row['package']
+            # if self.properties['Enhances'] != {}:
+            #    print "The following packages are enhancing %s: " % self.pkg,
+            #    for enh in self.properties['Enhances'].keys():
+            #        print enh,
+            #    print
+
+    # sort these objects according to the package name
+    def __cmp__(self, other):
+        # Comparing with None object has to return something reasonable
+        if other == None:
+            return -2
+        # Sort according to package name
+        return cmp(self.pkg, other.pkg)
+
+    def __str__(self):
+        ret = "Blend: " + self.blendname + ", " \
+              "Task:"   + self.taskname  + ", " \
+              "pkg:"    + self.pkg
+        if self.dep_strength:
+            ret += ", Dep_strength: " + self.dep_strength
+        if self.pkgstatus:
+            ret += ", Pkg_status: "   + str(self.pkgstatus)
+        if self.releases:
+            ret += ", releases: "     + str(self.releases)
+        if self.component:
+            ret += ", component: "    + self.component
+        if self.version:
+            ret += ", versions: "     + str(self.version)
+        if self.desc:
+            ret += ", desc: "         + str(self.desc)
+        for prop in self.properties.keys():
+            ret += ", %s: %s" % (prop, str(self.properties[prop]))
+        # if self.desc['en']:
+        #    ret += ", desc['en']:"   + str(self.desc['en'])
+        return ret
+
+
+class Tasks:
+    # Provide a list of dependencies defined in metapackages
+    # This class concerns _all_ tasks of a Blend and is the most
+    # complete source of information.  If only a single task
+    # should be handled by a tool that uses blendtasktools
+    # probably the class TaskDependencies (see below) is
+    # your friend
+
+    def __init__(self, blendname):
+
+        os.system("mkdir -p logs")
+        LOG_FILENAME = 'logs/'+blendname+'.log'
+        handler = logging.handlers.RotatingFileHandler(filename=LOG_FILENAME,mode='w')
+        formatter = logging.Formatter("%(levelname)s - %(filename)s (%(lineno)d): %(message)s")
+        handler.setFormatter(formatter)
+        logger.addHandler(handler)
+
+        # This instance of the Available class contains all
+        # information about packages that are available in Debian
+        # See below for more information about the Available class
+        self.data            = ReadConfig(blendname)
+        self.blendname       = self.data['Blend']
+        self.tasksdir        = FetchTasksFiles(self.data)
+        self._InitMetapackages()
+        self.tasks           = {} # Dictionary of TasksDependency objects
+        self.alldeps_in_main = [] # sorted string list of package names with all packages
+                                  # relevant for a Blend that are in main Debian (for use in DDTP)
+        self.alldeps_in_main_info = {} # complete dictionary with package information
+                                  # with all packages relevant for a Blend that are in
+                                  # main to easily feed DDTP translation into the structures
+                                  # -->
+                                  # self.alldeps_in_main = self.alldeps_in_main_info.keys().sort()
+
+    def _InitMetapackages(self):
+        # sorted list of metapackage names
+        self.metapackagekeys = []
+        for task in os.listdir(self.tasksdir):
+            if os.path.isfile("%s/%s" % (self.tasksdir, task)):
+                self.metapackagekeys.append(task)
+        self.metapackagekeys.sort()
+
+    def GetAllDependencies(self, source=0):
+        # If we want to subscribe ddpo we need the source package names.
+        # In this case set source=1
+
+        if self.metapackagekeys == []:
+            self._GetMetapackages()
+
+        # Obtain the prefix of the meta packages of the Blend using the blends-dev tool blend-get-names
+        if os.access('/usr/share/blends-dev/blend-get-names', os.X_OK):
+            blend_get_names = '/usr/share/blends-dev/blend-get-names'
+        elif os.access(os.getcwd() + '/blend-get-names', os.X_OK):
+            blend_get_names = os.getcwd() + '/blend-get-names'
+        else:
+            logger.critical("Please either install package Blends-dev or install a copy of devtools/blend-get-names in your working directory")
+            exit(-1)
+
+        # The prefix is used to build the meta package name that belongs to the task
+        cmd = "cd %s; %s metapackageprefix" % (self.data['datadir'], blend_get_names)
+        pipe = Popen(cmd, shell=True, stdout=PIPE).stdout
+        prefix = pipe.read().strip() + '-'
+        pipe.close()
+        for task in self.metapackagekeys:
+            td = TaskDependencies(self.blendname, task=task, tasksdir=self.tasksdir, prefix=prefix)
+            td.GetTaskDependencies(source)
+            self.tasks[task] = td
+
+        if source == 0:
+            # overall popcon submissions
+            query = "EXECUTE popcon_submissions"
+            curs.execute(query)
+            if curs.rowcount > 0:
+                self.popconsubmit = curs.fetchone()[0]
+            self.LinkEnhances()
+
+    def GetNamesOnlyDict(self, dependencystatus=[]):
+        # David Paleino needs for his web tools a dictionary
+        # { taskname : [list of dependencies]}
+        # This will be prepared here from the main
+        # datastructure
+        ret = {}
+        use_dependencystatus = GetDependencies2Use(dependencystatus, 'experimental')
+             
+        for task in self.metapackagekeys:
+            tdeps = self.tasks[task]
+            list = []
+            for dep in use_dependencystatus:
+                for tdep in tdeps.dependencies[dep]:
+                    list.append(tdep.pkg)
+            ret[task] = list
+        return ret
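+    # Illustrative sketch of the structure returned by GetNamesOnlyDict()
+    # (hypothetical task and package names):
+    #   { 'bio'     : ['foo', 'bar'],
+    #     'imaging' : ['baz'] }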
+
+    def GetUpdatablePackages(self, dependencystatus=[]):
+        # List of Updatable packages: sourcepkg, version, upstream version
+        ret = {}
+        use_dependencystatus = GetDependencies2Use(dependencystatus, 'experimental')
+
+        for task in self.metapackagekeys:
+            tdeps = self.tasks[task]
+            list = []
+            for dep in use_dependencystatus:
+                for tdep in tdeps.dependencies[dep]:
+            	    if tdep.outdated != {}:
+                        if tdep.properties.has_key('last_uploader_simple'):
+                            last_uploader = tdep.properties['last_uploader_simple']
+                        else:
+                            last_uploader = None
+            		# versions are ordered lists      ---v--- last one is needed
+                	list.append(((tdep.pkg, tdep.version[-1]['version'], tdep.outdated['version'], tdep.properties['maintainer']), last_uploader))
+            if list:
+        	ret[task] = list
+        return ret
+
+    def GetNamesAndSourceDict(self, dependencystatus=()):
+        # For the bugs pages we need not only the binary package names but the
+        # source package as well to be able to link to the QA page
+        # The list entries are dictionaries containing the binary package name, the source
+        # package name and further metadata; 'bugrelevantdependency' characterises the
+        # strength of the dependency (DEPENDENT or SUGGESTED)
+        ret = {}
+        if dependencystatus == ():
+            # see above in GetNamesOnlyDict() ... but when we are looking for bugs a
+            # reasonable default is to use only official dependencystatus
+            dependencystatus=('official_high', 'official_low', 'non-free', 'experimental')
+
+        for task in self.metapackagekeys:
+            tdeps = self.tasks[task]
+            list = []
+            for dep in dependencystatus:
+                if dep != 'official_high':
+                    bugrelevantdependency = SUGGESTED
+                else:
+                    bugrelevantdependency = DEPENDENT
+                for tdep in tdeps.dependencies[dep]:
+                    bugreldep = bugrelevantdependency
+                    # packages outside main can not be Depends but only Suggests
+                    if bugreldep == DEPENDENT and tdep.component != 'main':
+                        bugreldep = SUGGESTED
+                    list.append( { 'pkgname'              : tdep.pkg,
+                                   'source'               : tdep.properties['source'],
+                                   'homepage'             : tdep.properties['homepage'],
+                                   'vcs-browser'          : tdep.properties['vcs-browser'],
+                                   'maintainer'           : tdep.responsible,
+                                   'bugrelevantdependency': bugreldep
+                                   } )
+            ret[task] = list
+        return ret
+
+    def GetTaskDescDict(self):
+        # Return dictionary with description information of all tasks of a Blend
+        return self.tasks
+
+    def GetAllDependentPackagesOfBlend(self, dependencystatus=[]):
+        # David Paleino needs for his DDTP web tool a list of
+        # all available Dependencies.
+        # Here only those packages are returned that are in
+        # Debian main, because there are no DDTP translations
+        # for contrib and non-free available
+        if self.alldeps_in_main != []:
+            return self.alldeps_in_main
+
+        use_dependencystatus = GetDependencies2Use(dependencystatus, 'unknown')
+        for task in self.metapackagekeys:
+            tdeps = self.tasks[task]
+            for dep in use_dependencystatus:
+                for tdep in tdeps.dependencies[dep]:
+                    # add only packages in main, because others do not have DDTP translations
+                    if tdep.component == 'main':
+                        self.alldeps_in_main.append(tdep.pkg)
+                        self.alldeps_in_main_info[tdep.pkg] = tdep
+            # Also add meta package itself to make use of translated meta package description
+            self.alldeps_in_main.append(self.tasks[task].metapkg.pkg)
+            self.alldeps_in_main_info[self.tasks[task].metapkg.pkg] = self.tasks[task].metapkg
+        self.alldeps_in_main.sort()
+        return self.alldeps_in_main
+
+    def MarkupPreformatedStringsBlend(self):
+        # Genshi does not touch strings that are marked with "Markup()" - so just
+        # mark the strings that are already formatted for the whole Blend
+        for task in self.metapackagekeys:
+            tdeps = self.tasks[task]
+            tdeps.MarkupPreformatedStrings()
+
+    def CheckOrCreateOutputDir(self, subdir):
+        outputdir = self.data['outputdir'] + '/' + subdir
+        if not os.access(outputdir, os.W_OK):
+            try:
+		os.mkdir(outputdir)
+            except:
+		# if more than one dir in the tree has to be created just use mkdir -p ...
+		try:
+			os.system("mkdir -p %s" % outputdir)
+		except:
+			logger.error("Unable to create output dir " + outputdir)
+			return None
+        return outputdir
+
+    def LinkEnhances(self):
+        # To provide a connection between packages enhancing other packages a set of links
+        # will be provided.  The links should point to paragraphs on the tasks pages if the
+        # Enhancing package is listed in the metapackages of the blend and to packages.debian.org
+        # otherwise
+        for task in self.metapackagekeys:
+            tdeps = self.tasks[task]
+            for dependency in tdeps.dependencies.keys():
+                for dep in tdeps.dependencies[dependency]:
+                    if dep.properties['Enhances'] != {}:
+                        logger.debug("Package %s is enhanced by:" % dep.pkg)
+                        for enh in dep.properties['Enhances'].keys():
+                            # seek for Enhances on same page
+                            found = 0
+                            for seek_dependency in tdeps.dependencies.keys():
+                                for enhdep in tdeps.dependencies[seek_dependency]:
+                                    if enh == enhdep.pkg:
+                                        dep.properties['Enhances'][enh] = '#'+enh
+                                        found = 1 # found enhances in same task
+                                        break
+                            if found == 0: # If not found seek in other tasks
+                                for enhtask in self.metapackagekeys:
+                                    if enhtask == task:
+                                        continue
+                                    enhtdeps = self.tasks[enhtask]
+                                    for seek_dependency in enhtdeps.dependencies.keys():
+                                        for enhdep in enhtdeps.dependencies[seek_dependency]:
+                                            if enh == enhdep.pkg:
+                                                dep.properties['Enhances'][enh] = './' + enhtask + '#' + enh
+                                                found = 1 # found enhances in other task
+                                                break
+                                    if found == 1:
+                                        break
+                            logger.debug(" %s -> %s" % (enh, dep.properties['Enhances'][enh]))
+
+    def __str__(self):
+        ret = "Blendname: "       + self.blendname  + ", " \
+              "Metapackagekeys: " + str(self.metapackagekeys) + ", "
+        tab = "\nTasks: "
+        for task in self.metapackagekeys:
+            ret += tab
+            semikolon = ''
+            for pstatus in self.tasks[task].dependencies.keys():
+                if self.tasks[task].dependencies[pstatus] == []:
+                    continue
+                ret += semikolon + pstatus + ': ['
+                semikolon = '; '
+                komma = ''
+                for dep in self.tasks[task].dependencies[pstatus]:
+                    ret += komma + dep.pkg
+                    komma = ', '
+                ret += ']'
+            tab  = "\n       "
+        ret = ret + "\n" \
+              "All deps in main:" + str(self.alldeps_in_main) + ",\n" \
+              "All deps in main Info:" + str(self.alldeps_in_main_info)
+
+        return ret
+
+class TaskDependencies:
+    # List of depencencies defined in one metapackage
+    def __init__(self, blendname, task, tasksdir=None, prefix=''):
+
+        self.data     = ReadConfig(blendname)
+        self.blendname  = self.data['Blend']
+        if tasksdir != None:
+            self.tasksdir = tasksdir
+        else:
+            self.tasksdir = InitTasksFiles(self.data)
+        self.taskfile = self.tasksdir+'/'+task
+        if os.path.isfile(self.taskfile):
+            self.task = task
+        else:
+            logger.error("No such task file %s." % self.taskfile)
+            return None
+
+        # Dictionary with satus of dependencies as key and list of DependantPackage
+        # instances
+        self.dependencies = {}
+        for pkgstat in pkgstatus:
+            self.dependencies[pkgstat] = []
+
+        # Main information for a task
+        self.metapkg             = DependantPackage(self.blendname, self.task)
+        self.metapkg.pkg         = prefix + task
+
+        # If a Blend just bases on the meta package of an other Blend (this is the
+        # case in Debian Science which bases on med-bio for biology and gis-workstation
+        # for geography it makes no sense to build an own sentinel page but read
+        # meta package information of other meta packages and include the content
+        # of these while enabling to add further Dependencies as well
+        #
+        # metadepends should be a SVN URL
+        #
+        # This is NOT YET implemented
+        self.metadepends     = None
+
+    def _AppendDependency2List(self, dep, source):
+        # Append dependency which was found in the tasks file if not Ignore / Avoid and
+        # no dupplication in case of source depencencies
+        if dep == None:
+            return
+        if dep.dep_strength == 'Ignore' or dep.dep_strength == 'Avoid':
+            return
+        if source != 1:
+            # In general we can just add the dependency to the list
+            self.dependencies[dep.pkgstatus].append(dep)
+            return
+
+        # if we are seeking for ddpo source packages we have to make sure that
+        # no duplication occures
+        hasnot = 1
+        for hasdep in self.dependencies[dep.pkgstatus]:
+            if hasdep.pkg == dep.pkg:
+                hasnot = 0
+                break
+        if hasnot == 1:
+            self.dependencies[dep.pkgstatus].append(dep)
+
+    def GetTaskDependencies(self, source=0):
+        global dep_strength_keys
+
+        f = file(self.taskfile)
+        for stanza in deb822.Sources.iter_paragraphs(f):
+            # Why and Responsible can be valid for more than one dependency
+            # Store them in strings and use them for all Dependent Package objects
+            why               = None
+            responsible       = None
+            dep               = None
+            remark            = {}
+            tmp_dep_list      = []
+            fields_duplicated = None
+            fields_obsolete   = []
+            for key in stanza:
+                if key == 'Task':
+                    # also the task name might be utf-8 encoded
+                    self.metapkg.PrintedName = to_unicode(stanza['task'])
+                    continue
+                if key == 'Description':
+                    (short, long) = SplitDescription(stanza['description'])
+                    # Markup strings to enable verbatim output of preformatted text
+                    self.metapkg.desc['en']['short'] = MarkupString(short.encode('utf-8'), self.metapkg.PrintedName, 'taskShortDesc')
+                    self.metapkg.desc['en']['long']  = MarkupString(long.encode('utf-8'),  self.metapkg.PrintedName, 'taskLongDesc')
+
+                    # find maintainer and other metadata:
+                    query = "EXECUTE query_pkg ('%s')" % self.metapkg.pkg
+                    curs.execute(query)
+                    if curs.rowcount > 0:
+                        row = RowDictionaries(curs)[0]
+                        (_name, _url) = email.Utils.parseaddr(row['maintainer'])
+                        self.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+
+                    # if an English description was found chances are good that we are seeking for other languages
+                    # as well
+                    if source == 0: # If we are querying for source packages to render BTS pages
+                                    # tranlations are irrelevant - so only obtain ddtp translations
+                                    # otherwise
+                        query = "EXECUTE ddtp_description ('%s')" % (self.metapkg.pkg)
+                        curs.execute(query)
+
+                        for row in RowDictionaries(curs):
+                            lang = row['language']
+                            self.metapkg.desc[lang] = {}
+                            try:
+                                short = to_unicode(row['description'])
+                                self.metapkg.desc[lang]['short'] = MarkupString(to_unicode(short), \
+                                                  self.metapkg.PrintedName, 'taskShortDesc - ' + lang)
+                            except UnicodeEncodeError, err:
+                                logger.error("===> UnicodeDecodeError in metapackage %s (lang='%s'): '%s'; ErrTxt: %s" % \
+                                    (self.metapkg.pkg, lang, row['description'], err))
+                                short = to_unicode(row['description'],'latin1')
+                                self.metapkg.desc[lang]['short'] = MarkupString(short, \
+                                                  self.metapkg.PrintedName, 'taskShortDesc - ' + lang)
+
+                            try:
+                                self.metapkg.desc[lang]['long'] = Markup(render_longdesc(to_unicode(row['long_description']).splitlines()))
+                            except UnicodeDecodeError, err:
+                                logger.error("===> UnicodeDecodeError in metapackage long %s (lang='%s'): '%s'; ErrTxt: %s" % \
+                                    (self.metapkg.pkg, lang, row['long_description'], err))
+                                self.metapkg.desc[lang]['long'] = 'UnicodeDecodeError'
+
+                    continue
+                if key == 'Meta-Depends':
+                    self.metadepends = stanza['meta-depends']
+                if key == 'Meta-Suggests':
+                    self.metadepends = stanza['meta-suggests']
+                if key == 'Why':
+                    why = stanza['why']
+                    continue
+                if key == 'Responsible':
+                    responsible = stanza['responsible'].strip()
+            	    if not dep:
+            		# Make sure there is really enough information to deal with provided by the package
+            		logger.error("Missing package information for field %s = %s" % (key, responsible))
+            		continue
+                    if dep.responsible != None:
+                        # we are dealing with an official package that has a real maintainer who
+                        # is finally responsible
+                        # ... but do not issue a hint about this in the logs. Sometimes a responsible
+                        # person makes sense in the tasks field
+                        # fields_obsolete.append(key)
+                        continue
+                    if responsible != '':
+                        (_name, _url) = email.Utils.parseaddr(responsible)
+                        dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+                    continue
+
+                if key in dep_strength_keys:
+                    # Hack: Debian Edu tasks files are using '\' at EOL which is broken
+                    #       in RFC 822 files, but blend-gen-control from blends-dev relies
+                    #       on this.  So remove this stuff here for the Moment
+                    dependencies = re.sub('\\\\\n\s+', '', stanza[key])
+
+                    # Remove versions from versioned depends
+                    dependencies = re.sub(' *\([ ><=\.0-9]+\) *', '', dependencies)
+
+                    # turn alternatives ('|') into real depends for this purpose
+                    # because we are finally interested in all alternatives
+                    dependencylist = dependencies.replace('|',',').split(',')
+                    # Collect all dependencies in one line first,
+                    # create an object for each later
+                    deps_in_one_line = []
+                    for dependency in dependencylist:
+                        if dependency.strip() != '': # avoid confusion when ',' is at end of line
+                            deps_in_one_line.append(dependency.strip())
+
+                    for dep_in_line in deps_in_one_line:
+                        # If there are more than one dependencies in one line
+                        # just put the current one into the right list of dependencies
+                        # before initiating the next instance
+                        if dep != None:
+                            tmp_dep_list.append(dep)
+                        dep = DependantPackage(self.blendname, self.task)
+                        # Store the comments in case they might be usefull for later applications
+                        dep.why            = why
+                        dep.responsible    = responsible
+                        dep.dep_strength   = key
+                        if dep_in_line.islower():
+                            dep.pkg            = dep_in_line
+                        else:
+                            dep.pkg            = dep_in_line.lower()
+                            logger.warning("Package names may not contain upper case letters, so %s is an invalid package name which is turned into %s" \
+                                % (dep_in_line, dep.pkg))
+
+                        dep._QueryUDD4Package(source)
+
+                    continue
+
+                # sometimes the tasks file contains standalone comments or other RFC 822 entries.
+                # Just ignore this stuff
+                if dep == None:
+                    continue # ... with next stanza
+
+                # the following fields might be provided in the Blends tasks file for
+                # prospective packages.  This information should be ignored in case the
+                # package just exists at Debian mirrors
+                if pkgstatus[dep.pkgstatus]['order'] <= pkgstatus['experimental']['order']:
+                    # for packages not in Debian we use the information from the tasks file
+                    # if a package is in Debian we use the information from the Packages file (via UDD query)
+                    # and the fields may not be overriden.  The list collects duplicated fields and will
+                    # trigger a warning if the list is not empty
+                    # TODO: warn about possibly duplicated prospective package entries in tasks files
+                    fields_duplicated = []
+
+                # The following keys will be mostly used for programs that
+                # are not yet existing in Debian and will go to our todo list
+                if key == 'Homepage':
+                    if fields_duplicated != None:
+                        fields_duplicated.append(key)
+                    if dep != None:
+                        # set Homepage only if not just set via official package information
+                        if dep.properties['homepage'] == HOMEPAGENONE:
+                            dep.properties['homepage'] = to_unicode(stanza['homepage'])
+                        else:
+                            fields_obsolete.append(key)
+                    else:
+                        logger.error("Dep not initiated before Homepage %s -> something is wrong." \
+                              % stanza['homepage'])
+                elif key == 'Vcs-Svn' or key == 'vcs-svn': # strangely enough on alioth the later
+                                                           # spelling seems to be needed - no idea why
+                    if dep != None:
+                        if dep.vcs_found == 1:
+                            fields_obsolete.append(key)
+                            continue
+                        dep.properties['vcs-url']  = stanza['vcs-svn']
+                        dep.properties['vcs-type'] = 'SVN'
+                        # if Vcs-Svn is given we are able to obtain the Browser URL of wsvn
+                        if dep.properties['vcs-browser'] == HOMEPAGENONE:
+                            try:
+                                dep.properties['vcs-browser'] = BrowserFromVcsURL(dep.properties['vcs-type'], dep.properties['vcs-url'])
+                            except KeyError, err:
+                                logger.error("Vcs Property missing in packages file:", dep.properties, err)
+                    else:
+                        logger.error("Dep not initiated before Vcs-Svn %s -> something is wrong." \
+                              % stanza['vcs-svn'])
+                    if dep.pkgstatus == 'unknown':
+                        dep.pkgstatus = 'pkgvcs'
+                elif key == 'Vcs-Git' or key == 'vcs-git': # strangely enough on alioth the later
+                                                           # spelling seems to be needed - no idea why
+                    if dep != None:
+                        if dep.vcs_found == 1:
+                            fields_obsolete.append(key)
+                            continue
+                        dep.properties['vcs-url']  = stanza['vcs-git']
+                        dep.properties['vcs-type'] = 'Git'
+                        # if Vcs-Git is given we are able to obtain the Browser URL of wsvn
+                        if dep.properties['vcs-browser'] == HOMEPAGENONE:
+                            dep.properties['vcs-browser'] = BrowserFromVcsURL(dep.properties['vcs-type'], dep.properties['vcs-url'])
+                    else:
+                        logger.error("Dep not initiated before Vcs-Git %s -> something is wrong." \
+                              % stanza['vcs-git'])
+                elif key == 'Vcs-Browser' or key == 'vcs-browser': # strangely enough on alioth the later
+                                                           # spelling seems to be needed - no idea why
+                    if dep != None:
+                        if dep.vcs_found == 1:
+                            fields_obsolete.append(key)
+                            continue
+                        dep.properties['vcs-browser'] = stanza['vcs-browser']
+                        if re.compile("[/.]git\.").search(dep.properties['vcs-browser']):
+                    	    dep.properties['vcs-type'] = 'Git'
+                        elif re.compile("[/.]svn\.").search(dep.properties['vcs-browser']):
+                    	    dep.properties['vcs-type'] = 'SVN'
+			else:
+			    # no chance to guess Vcs type
+                    	    dep.properties['vcs-type'] = 'Vcs'
+                        # There is no need to specify the Vcs-{Git,SVN} field in the tasks file but property 'vcs-type' should be set in
+                        # any case - so set it here in case it was not set before.  If an apropriate field is set later it becomes
+                        # overriden anyway
+                        if not dep.properties.has_key('vcs-url'):
+                            dep.properties['vcs-url'] = dep.properties['vcs-browser']
+                    else:
+                        logger.error("Dep not initiated before Vcs-Browser %s -> something is wrong." \
+                              % stanza['vcs-browser'])
+                    if dep.pkgstatus == 'unknown':
+                        dep.pkgstatus = 'pkgvcs'
+                elif key == 'section':
+                    if dep != None:
+                        dep.properties[key.lower()]  = stanza[key.lower()]
+                    else:
+                        logger.error("Dep not initiated before %s %s -> something is wrong." \
+                              % (key, stanza[key.lower()]))
+                elif key == 'License':
+                    if dep != None:
+                        dep.properties[key.lower()]  = stanza[key.lower()]
+                    else:
+                        logger.error("Dep not initiated before %s %s -> something is wrong." \
+                              % (key, stanza[key.lower()]))
+                elif key == 'Language':
+                    if dep != None:
+                        dep.properties[key.lower()]  = stanza[key.lower()]
+                    else:
+                        logger.error("Dep not initiated before %s %s -> something is wrong." \
+                              % (key, stanza[key.lower()]))
+                elif key == 'Registration':
+                    if dep != None:
+                        dep.properties[key.lower()]  = stanza[key.lower()]
+                    else:
+                        logger.error("Dep not initiated before %s %s -> something is wrong." \
+                              % (key, stanza[key.lower()]))
+                elif key.startswith('Published-'):
+                    if dep != None:
+                        if not dep.properties.has_key('published'):
+                            dep.properties['published'] = {}
+                        ptype = key.replace('Published-','').lower()
+                        dep.properties['published'][ptype] = to_unicode(stanza[key.lower()])
+                    else:
+                        logger.error("Dep not initiated before %s %s -> something is wrong." \
+                              % (key, stanza[key.lower()]))
+                elif key == 'WNPP':
+                    if dep != None:
+                        # it makes sense to assume that a package where WNPP was recently closed
+                        # shows up in unstable first
+                        if dep.component != None:
+                            logger.info("WNPP for package %s just closed - extra information can be removed from task file %s." % (dep.pkg, dep.taskname))
+                        else:
+                    	    wnpp = stanza['wnpp'].strip()
+                            # in case somebody prepended a '#' sign before the bug number
+                            wnpp = re.sub('^#', '', wnpp)
+                    	    # if there is really a number given
+                    	    if re.compile("^\d+$").search(wnpp):
+                        	dep.properties['wnpp'] = wnpp
+                    else:
+                        logger.error("Dep not initiated before WNPP %s -> something is wrong." \
+                              % stanza['wnpp'])
+                elif key.lower() == 'pkg-url':
+                    if dep != None:
+                        if dep.properties['pkg-url'] == HOMEPAGENONE: # only if no official package is just available
+                            # Escape '&' in URLs with %26 (Trick stolen by pasting the URL into a search engine ;-))
+                            dep.properties['pkg-url'] = stanza['pkg-url'].replace("&", "%26")
+                    else:
+                        logger.error("Dep not initiated before Pkg-URL %s -> something is wrong." \
+                              % stanza['pkg-url'])
+                elif key == 'Pkg-Description':
+                    if dep == None:
+                        logger.error("Dep not initiated before Pkg-Description %s -> something is wrong." \
+                              % stanza['pkg-description'].splitlines()[0])
+                    else:
+                        # Only update use description from task file if not known from official package
+                        if dep.desc['en'] == {}:
+                            (short, long) = SplitDescription(to_unicode(stanza['pkg-description']))
+                            dep.desc['en']['short'] = short
+                            dep.desc['en']['long']  = long
+                        else:
+                            fields_obsolete.append(key)
+                            continue
+                elif key == 'Avoid' or key == 'Ignore':
+                    dep.pkgstatus = key.lower()
+                elif key == 'Remark':
+                    (short, long) = SplitDescription(stanza['remark'])
+                    if dep == None:
+                        _pkg = self.metapkg.PrintedName
+                    else:
+                        _pkg = dep.pkg
+                    remark['short'] = MarkupString(short.encode('utf-8'), _pkg, 'RemarkShort')
+                    remark['long']  = MarkupString(long.encode('utf-8'),  _pkg, 'RemarkLong')
+                    continue
+                else:
+            	    if key not in KEYSTOIGNORE:
+                        # Also ignore keys starting with X[A-Z]-
+                        if not re.compile("^X[A-Z]*-").search(key):
+                            try:
+                                logger.warning("Unknown key '%s': %s in file %s" % (key, stanza[key], self.metapkg.PrintedName))
+                            except:
+                                logger.error("Unknown key '%s' with problematic value in file %s." % (key, self.metapkg.PrintedName))
+
+            if dep == None:
+                continue # ... with next stanza
+            # seek for certain field set in the tasks file to move the dependency into different
+            # categories of development status of not yet included packages
+            if dep.pkgstatus == 'unknown':
+                flag = 0
+                # first check those packages where some work was just done
+                for status in ['pkgvcs', 'unofficial', 'wnpp', 'prospective']:
+                    for field in pkgstatus[status]['fields-set']:
+                        if dep.properties.has_key(field):
+                            if field in HOMEPAGENONEFIELDS and dep.properties[field] == HOMEPAGENONE :
+                                continue
+                            dep.pkgstatus = status
+                            flag = 1
+                            break
+                    if flag == 1:
+                        break
+                if flag == 0:
+                    # If there was no such package found query UDD whether any package provides this name
+                    # This is often the case for libraries with versions in the package name
+                    query = "EXECUTE query_provides ('%s')" % (dep.pkg)
+                    curs.execute(query)
+                    if curs.rowcount > 0:
+                        has_expilicite = 0
+                        VirtProvides = []
+                        for row in curs.fetchall():
+                            VirtProvides.append(row[0])
+                            for hasdeps in tmp_dep_list:
+                                if row[0] == hasdeps.pkg:
+                                    logger.error("    --> %s is mentioned explicitely in dependency list" % row[0])
+                                    has_expilicite = 1
+                        if has_expilicite == 1:
+                            logger.error("Do not keep a record of virtual package %s which has explicite package dependencies" % dep.pkg)
+                            # ATTENTION: THIS HAS TO BE CHANGED FOR blends-dev BY AN OPTIONAL parameter
+                            continue
+
+                        logger.error("Use real package %s instead of virtual package %s." % (VirtProvides[0], dep.pkg))
+                        dep.pkg = VirtProvides[0]
+                        dep._QueryUDD4Package(source)
+
+                        if len(VirtProvides) > 1:
+                            logger.error("Virtual package %s is provided by more than one package (%s).  Make sure you mention a real package in addition!" \
+                                % (dep.pkg, str(VirtProvides)))
+
+                            for virt_provides in VirtProvides[1:]:
+                                # Add all remaining packages which provide a virtual package to the list
+                                if dep != None:
+                                    # Add the first real package from the packages which provide the virtual package to the list
+                                    tmp_dep_list.append(dep)
+                                dep = DependantPackage(self.blendname, self.task)
+                                # Store the comments in case they might be usefull for later applications
+                                dep.why            = why
+                                dep.responsible    = responsible
+                                dep.dep_strength   = key
+                                dep.pkg            = virt_provides
+                                dep._QueryUDD4Package(source)
+                    else:
+                        logger.warning("Dependency with unknown status: %s (Task %s)" % (dep.pkg, dep.taskname))
+
+            tmp_dep_list.append(dep)
+            # remarks which are common to several dependencies in a list have to be added to all of the dependencies
+            for dep in tmp_dep_list:
+                if remark != {}:
+                    dep.remark = remark
+                if fields_obsolete != [] and dep.pkgstatus != 'new':
+                    logger.info("Package %s is an official package and has information in UDD. The following extra information can be removed from tasks file %s: %s" % \
+                                     (dep.pkg, dep.taskname, str(fields_obsolete)))
+                if dep.desc['en'] == {}:
+                    logger.error("Missing description for package %s in task %s.  This package will be ignored completely." % (dep.pkg, dep.taskname))
+                else:
+                    self._AppendDependency2List(dep, source)
+
+        f.close()
+
+        for dependency in self.dependencies.keys():
+            self.dependencies[dependency].sort()
+
+    def MarkupPreformatedStrings(self):
+        # Genshi does not touch strings that are marked with "Markup()" - so just
+        # mark the strings that are ready formatted
+
+        for dependency in self.dependencies.keys():
+            for dep in self.dependencies[dependency]:
+                dep.responsible         = MarkupString(dep.responsible, dep.pkg, 'responsible')
+                if dep.desc['en'] != {}:
+                    dep.desc['en']['short'] = MarkupString(dep.desc['en']['short'], dep.pkg, 'pkgShortDesc')
+                    dep.desc['en']['long']  = MarkupString(dep.desc['en']['long'],  dep.pkg, 'pkgLongDesc')
+
+    def __str__(self):
+        ret = "Blend: " + self.blendname + ", " \
+              "Task:"   + self.task      + ", " \
+              "Dependencies:" + str(self.dependencies)
+        return ret
+
+
+class Available:
+    # Information about available packages
+    #
+    # Usage example:
+    #    available = Available(                     # Initialize instance
+    #                          release='testing',      # (default='unstable')
+    #                          components=('main'), # Regard only main, default: main, contrib, non-free
+    #                          source=1             # Use source package names, default: use binaries
+    #                          arch='sparc'         # (default='i386')
+    #                         )
+    # 
+    #    available.GetPackageNames() # Actually parse the Packages files to obtain needed information
+    #                                # This has to be done at least once.  It is verified that the effort
+    #                                # to obtain package information is not done twice per run
+
+    def __init__(self, release=None, components=(), source=None, arch=None, method=None):
+        if method == 'UDD':
+            self.__init_UDD__(release=release, components=components, source=source, arch=arch)
+        else:
+            self.__init_Packages_gz__(release=release, components=components, source=source, arch=arch)
+
+
+    def __init_Packages_gz__(self, release=None, components=(), source=None, arch=None):
+        self.source = 'Packages.'+COMPRESSIONEXTENSION
+        if source != None:
+            self.source = 'Sources.'+COMPRESSIONEXTENSION
+        self.binary = 'source'
+        if source == None:
+            if arch == None:
+                # use arch=i386 as default because it contains most packages
+                self.binary = 'binary-i386'
+            else:
+                self.binary = 'binary-' + arch
+        self.release = 'unstable'
+        if release != None:
+            self.release = release
+        self.components = ('main', 'contrib', 'non-free')
+        if components != ():
+            self.components = components
+        # The dictionary packages contains the component as key
+        # The values are dictionaries holding package names as key
+        # and a DependantPackage object as values
+        self.packages = {}
+        for component in self.components:
+            self.packages[component] = {}
+
+
+##################################################################################################
+# bugs
+
+SEVERITIES = ('critical', 'grave', 'serious', 'important', 'normal', 'minor', 'wishlist')
+
+# Obtain description in foreign language from DDTP project if available
+# For the moment just query for the highest available version of the description
+query = """PREPARE bugs_query_source (text) AS
+           SELECT id, package, source, status, severity, done, title FROM bugs WHERE source = $1"""
+curs.execute(query)
+
+query = """PREPARE bugs_query_tags (int) AS SELECT tag FROM bugs_tags WHERE id = $1"""
+curs.execute(query)
+
+class BugEntry:
+    # Define a separate class for bug entries to be able to define a reasonably sorting mechanism
+    # It seems that Genshi templates are unable to handle bug objects and so the essential parts
+    # are taken over here in addition to a __cmp__ method to enable easy sorting of list of bugs
+    # Rationale: Calling methods inside the template does not
+    #            seem to work and has to be done in any case to
+    #            circumvent encoding problems
+
+    def __init__(self, bug):
+        self.bug      = bug['id']
+	self.summary  = to_unicode(bug['title'])
+	self.severity = bug['severity']
+
+        query = "EXECUTE bugs_query_tags (%i)" % self.bug
+        curs.execute(query)
+
+        self.tags     = ''
+        if curs.rowcount > 0:
+            komma = ''
+            for tag in curs.fetchall():
+		self.tags += komma + tag[0]
+		komma = ', '
+
+    # sort these objects according to bug number
+    def __cmp__(self, other):
+        # Comparing with None object has to return something reasonable
+        if other == None:
+            return -2
+        # Sort according to package name
+        return cmp(self.bug, other.bug)
+
+class PackageBugs:
+    # Store list of bugs (either open or done) of a package
+
+    def __init__(self, pkgdict):
+        self.pkgname    = pkgdict['pkgname']
+        self.source     = pkgdict['source']
+        self.homepage   = pkgdict['homepage']
+        self.vcsbrowser = pkgdict['vcs-browser']
+        self.maintainer = MarkupString(pkgdict['maintainer'], self.pkgname, 'maintainer')
+	self.bugs       = []      # open bugs
+	self.nbugs      = 0
+	self.severities = {}
+	for s in SEVERITIES:
+		self.severities[s] = 0
+
+    # sort these objects according to the package name
+    def __cmp__(self, other):
+        # Comparing with None object has to return something reasonable
+        if other == None:
+            return -2
+        # Sort according to package name
+        return cmp(self.pkgname, other.pkgname)
+
+    def __str__(self):
+        str = "---\npkgname: %s\nsource: %s\n" % (self.pkgname, self.source)
+	if self.homepage:
+            str += "homepage: %s\n" % (self.homepage)
+	if self.vcsbrowser:
+            str += "vcsbrowser: %s\n" % (self.vcsbrowser)
+        return str + "nbugs: %s\nnbugs = %i\nseverities = %s\n---" % (self.nbugs, self.severities)
+
+
+class PackageBugsOpenAndDone:
+    # Store list of bugs of a package
+
+    def __init__(self, pkgdict):
+	pkgname = pkgdict['pkgname']
+	source  = pkgdict['source']
+	self.open       = PackageBugs(pkgdict)  # open bugs
+	self.done       = PackageBugs(pkgdict)  # closed bugs
+
+	bugs = None
+        query = "EXECUTE bugs_query_source ('%s')" % source
+        curs.execute(query)
+
+        if curs.rowcount > 0:
+            for bug in RowDictionaries(curs):
+                bugentry = BugEntry(bug)
+                if bug['status'] == 'done':
+                    if self.done.pkgname == None:
+                        self.done.pkgname = pkgname
+                    self.done.bugs.append(bugentry)
+                    self.done.nbugs += 1
+                else:
+                    if self.open.pkgname == None:
+                        self.open.pkgname = pkgname
+                    self.open.bugs.append(bugentry)
+                    self.open.nbugs += 1
+                    self.open.severities[bugentry.severity] += 1
+            self.open.bugs.sort()
+            self.done.bugs.sort()
+            if source == None:
+                self.open.source = pkgname
+                self.done.source = pkgname
+            else:
+                self.open.source = source
+                self.done.source = source
+

Modified: blends/trunk/webtools/update-all-tasks
URL: http://svn.debian.org/wsvn/blends/blends/trunk/webtools/update-all-tasks?rev=2604&op=diff
==============================================================================
--- blends/trunk/webtools/update-all-tasks (original)
+++ blends/trunk/webtools/update-all-tasks Tue Jan 18 07:53:01 2011
@@ -58,7 +58,7 @@
 mkdir -p logs
 for blend in `ls webconf/*.conf | grep -v -e debug -e rest-test | sed 's?webconf/\(.*\)\.conf?\1?'` ; do
     starttime=`date "+%s"`
-    ./tasks.py $blend 2> logs/${blend}.err > logs/${blend}.out
+    time ./tasks.py $blend 2> logs/${blend}.err > logs/${blend}.out
     endtime=`date "+%s"`
     # echo "Rendering Blend '$blend' took $((endtime-starttime)) seconds"
 done




More information about the Blends-commit mailing list