[Blends-commit] [SCM] website branch, master, updated. 45ef7fe4b3df054de7fd75431ec888e9286b95ea

Andreas Tille tille at debian.org
Sun Dec 9 16:54:25 UTC 2012


The following commit has been merged in the master branch:
commit ef9b804dc42ae1663519fc459784a69015d689d9
Author: Andreas Tille <tille at debian.org>
Date:   Thu Nov 29 08:21:30 2012 +0100

    Re-add blendstasktoolsold.py, which is still used by bugs.py (even though it should have been rewritten two years ago :-()

diff --git a/webtools/blendstasktools.py b/webtools/blendstasktoolsold.py
similarity index 60%
copy from webtools/blendstasktools.py
copy to webtools/blendstasktoolsold.py
index a50e172..16c4e8b 100644
--- a/webtools/blendstasktools.py
+++ b/webtools/blendstasktoolsold.py
@@ -1,5 +1,5 @@
 #!/usr/bin/python
-# Copyright 2008-2012: Andreas Tille <tille at debian.org>
+# Copyright 2008: Andreas Tille <tille at debian.org>
 # License: GPL
 
 # Blends metapackages are listing a set of Dependencies
@@ -13,9 +13,7 @@
 # or if it is not contained it obtains information
 # from tasks file about home page, license, WNPP etc.
 
-#PORT=5441
-UDDPORT=5452
-PORT=UDDPORT
+PORT=5441
 DEFAULTPORT=5432
 
 from sys import stderr, exit
@@ -37,25 +35,27 @@ from genshi import Markup
 # from genshi.input import HTML
 from blendsmarkdown import SplitDescription, MarkupString, render_longdesc
 
-try:
-    from debian import deb822
-except:
-    from debian_bundle import deb822
+from debian import deb822
 from blendsunicode import to_unicode
-from blendslanguages import languages
 
 import logging
 import logging.handlers
 logger = logging.getLogger('blends')
 logger.setLevel(logging.INFO)
-# logger.setLevel(logging.DEBUG)
 
 # Seems to have problems on 17.04.2009
 # BASEURL  = 'http://ftp.debian.org/debian'
 BASEURL  = 'http://ftp.de.debian.org/debian'
+# SVNHOST  = 'svn+ssh://svn.debian.org'
+SVNHOST  = 'svn://svn.debian.org'
 KEYSTOIGNORE = ( 'Architecture', 'Comment', 'Leaf', 'NeedConfig', 'Note', 'Section',
                  'Needconfig', 'DontAvoid',
                  'Enhances', 'Test-always-lang', 'Metapackage')
+# DDTP now exports to official debian mirror
+#DDTPURL = "http://ddtp.debian.net/debian/dists/"
+DDTPURL = "http://ftp.de.debian.org/debian/dists/"
+DDTPDIR = "/i18n/Translation-"
+DDTPLISTS = ('etch', 'lenny', 'sid')
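
For illustration only (not part of this commit): the DDTP constants above would presumably be combined into per-release translation URLs roughly as sketched below; the exact concatenation and the hard-coded 'main' component are assumptions, not taken from this file.

    # minimal sketch, assuming the usual Debian mirror layout
    # dists/<release>/<component>/i18n/Translation-<lang>
    def ddtp_url(release, lang, component='main'):
        # e.g. http://ftp.de.debian.org/debian/dists/sid/main/i18n/Translation-de
        return DDTPURL + release + '/' + component + DDTPDIR + lang

    urls = [ddtp_url(release, 'de') for release in DDTPLISTS]
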
 
 CONFDIR = 'webconf'
 
@@ -196,49 +196,11 @@ for pkgstat in pkgstatus:
         if dep not in dep_strength_keys:
             dep_strength_keys.append(dep)
 
-rmpub = open('remove-publications-from-tasks-files.dat','w+')
-
 license_in_component = {'main'     : 'DFSG free',
                         'contrib'  : 'DFSG free, but needs non-free components',
                         'non-free' : 'non-free'
             }
 
-try:
-    import psutil
-    has_psutils=True
-except ImportError:
-    has_psutils=False
-
-LOCKFILE='/var/lock/blends.lock'
-def LockBlendsTools():
-    """Locking mechanism to make sure the scripts will not run in parallel
-       which happened because of IO problems on udd.debian.org"""
-    if not has_psutils:
-	logger.warning("Package python-psutil is missing.  No locking support available")
-	return
-    if os.path.exists(LOCKFILE):
-        try:
-            lf = open(LOCKFILE, 'r')
-            pid = int(lf.readline())
-            lf.close()
-            if pid in psutil.get_pid_list():
-                logger.error("Another process rebuilding web sentinel pages with PID %i is running. Exit." % pid)
-                exit()
-            else:
-                logger.warning("Process with PID %i is not running any more but lockfile remained.  Removing %s ..." % (pid, LOCKFILE))
-                os.unlink(LOCKFILE)
-        except IOError as e:
-            pass
-    pid = os.getpid()
-    lf = open(LOCKFILE, 'w')
-    print >>lf, pid
-    lf.close()
-
-def UnlockBlendsTools():
-    """Unlock previousely locked file"""
-    if os.path.exists(LOCKFILE):
-        os.unlink(LOCKFILE)
-
 def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
     # Create a list of status of dependencies out of pkgstatus dictionary
     use_dependencystatus = []
@@ -263,243 +225,146 @@ def GetDependencies2Use(dependencystatus=[], max_order='prospective'):
 ###########################################################################################
 # Define several prepared statements to query UDD
 try:
-  conn = psycopg2.connect(host="localhost",port=PORT,user="guest",database="udd")
-except psycopg2.OperationalError, err:
-  try:
-    conn = psycopg2.connect(host="udd.debian.org",port=UDDPORT,user="guest",database="udd")
-  except psycopg2.OperationalError, err:
-    # logger not known at this state: logger.warning
-    print >>stderr, "PostgreSQL does not seem to run on port %i .. trying default port %i.\n\tMessage: %s" % (PORT, DEFAULTPORT, str(err))
+    conn = psycopg2.connect(host="localhost",port=PORT,user="guest",database="udd")
+except psycopg2.OperationalError:
+    logger.debug("PostgreSQL does not seem to run on port %i .. trying default port %i." % (PORT, DEFAULTPORT))
     try:
         conn = psycopg2.connect(host="localhost",port=DEFAULTPORT,user="guest",database="udd")
     except psycopg2.OperationalError:
 	# Hmmm, I observed a really strange behaviour on one of my machines where connecting to
-	# localhost does not work but 127.0.0.1 works fine.  No idea why ... but this should
+	# localhost does not work but 127.0.0.1 works fine.  No odea why ... but this should
 	# do the trick for the moment
 	conn = psycopg2.connect(host="127.0.0.1",port=DEFAULTPORT,user="guest",database="udd")
 
 curs = conn.cursor()
-# uddlog = open('logs/uddquery.log', 'w')
-
-def _execute_udd_query(query):
-    try:
-        curs.execute(query)
-        logger.debug(query)
-    except psycopg2.ProgrammingError, err:
-        print >>stderr, "Problem with query\n%s" % (to_unicode(query))
-        print >>stderr, err
-        exit(-1)
-    except psycopg2.DataError, err:
-        print >>stderr, "%s; query was\n%s" % (err, query)
-
-query = """PREPARE query_pkgs (text[],text[]) AS
-        SELECT * FROM blends_query_packages($1,$2) AS (
-          package text, distribution text, release text, component text, version debversion,
-          maintainer text,
-          source text, section text, task text, homepage text,
-          maintainer_name text, maintainer_email text,
-          "vcs-type" text, "vcs-url" text, "vcs-browser" text,
-	  changed_by text,
-          enhanced text[],
-          releases text[], versions text[], architectures text[],
-	  unstable_upstream text, unstable_parsed_version text, unstable_status dehs_status, experimental_parsed_version text, experimental_status dehs_status,
-          vote int, recent int, -- popcon
-          debtags text[],
-          screenshot_versions text[], image text[], icon text[],
-          year    text,
-          title   text,
-          authors text,
-          doi     text,
-          pubmed  text,
-          url     text,
-          journal text,
-          volume  text,
-          number  text,
-          pages   text,
-          eprint  text,
-          description_en text, long_description_en text,
-          description_cs text, long_description_cs text,
-          description_da text, long_description_da text,
-          description_de text, long_description_de text,
-          description_es text, long_description_es text,
-          description_fi text, long_description_fi text,
-          description_fr text, long_description_fr text,
-          description_hu text, long_description_hu text,
-          description_it text, long_description_it text,
-          description_ja text, long_description_ja text,
-          description_ko text, long_description_ko text,
-          description_nl text, long_description_nl text,
-          description_pl text, long_description_pl text,
-          "description_pt_BR" text, "long_description_pt_BR" text,
-          description_ru text, long_description_ru text,
-          description_sk text, long_description_sk text,
-          description_sr text, long_description_sr text,
-          description_sv text, long_description_sv text,
-          description_uk text, long_description_uk text,
-          "description_zh_CN" text, "long_description_zh_CN" text,
-          "description_zh_TW" text, "long_description_zh_TW" text
-        )"""
-_execute_udd_query(query)
-
-query = """PREPARE query_new (text[]) AS SELECT
-                   p.package,
-                   p.distribution, p.component, p.version, p.architecture, p.maintainer,
-                   p.source, p.section, p.distribution, 'new' AS release, p.component, p.homepage,
-                   s.changed_by,
-                   description AS description_en, long_description AS long_description_en,
-         bibyear.value    AS "year",
-         bibtitle.value   AS "title",
-         bibauthor.value  AS "authors",
-         bibdoi.value     AS "doi",
-         bibpmid.value    AS "pubmed",
-         biburl.value     AS "url",
-         bibjournal.value AS "journal",
-         bibvolume.value  AS "volume",
-         bibnumber.value  AS "number",
-         bibpages.value   AS "pages",
-         bibeprint.value  AS "eprint"
-                   FROM new_packages p
-		   JOIN new_sources s ON p.source = s.source AND p.version = s.version
-    LEFT OUTER JOIN bibref bibyear    ON p.source = bibyear.source    AND bibyear.rank = 0    AND bibyear.key    = 'year'    AND bibyear.package = ''
-    LEFT OUTER JOIN bibref bibtitle   ON p.source = bibtitle.source   AND bibtitle.rank = 0   AND bibtitle.key   = 'title'   AND bibtitle.package = ''
-    LEFT OUTER JOIN bibref bibauthor  ON p.source = bibauthor.source  AND bibauthor.rank = 0  AND bibauthor.key  = 'author'  AND bibauthor.package = ''
-    LEFT OUTER JOIN bibref bibdoi     ON p.source = bibdoi.source     AND bibdoi.rank = 0     AND bibdoi.key     = 'doi'     AND bibdoi.package = ''
-    LEFT OUTER JOIN bibref bibpmid    ON p.source = bibpmid.source    AND bibpmid.rank = 0    AND bibpmid.key    = 'pmid'    AND bibpmid.package = ''
-    LEFT OUTER JOIN bibref biburl     ON p.source = biburl.source     AND biburl.rank = 0     AND biburl.key     = 'url'     AND biburl.package = ''
-    LEFT OUTER JOIN bibref bibjournal ON p.source = bibjournal.source AND bibjournal.rank = 0 AND bibjournal.key = 'journal' AND bibjournal.package = ''
-    LEFT OUTER JOIN bibref bibvolume  ON p.source = bibvolume.source  AND bibvolume.rank = 0  AND bibvolume.key  = 'volume'  AND bibvolume.package = ''
-    LEFT OUTER JOIN bibref bibnumber  ON p.source = bibnumber.source  AND bibnumber.rank = 0  AND bibnumber.key  = 'number'  AND bibnumber.package = ''
-    LEFT OUTER JOIN bibref bibpages   ON p.source = bibpages.source   AND bibpages.rank = 0   AND bibpages.key   = 'pages'   AND bibpages.package = ''
-    LEFT OUTER JOIN bibref bibeprint  ON p.source = bibeprint.source  AND bibeprint.rank = 0  AND bibeprint.key  = 'eprint'  AND bibeprint.package = ''
-                   WHERE (p.package, p.version) IN  
-                         (SELECT package, max(version) FROM
-                   new_packages WHERE package = ANY ($1) GROUP BY package)"""
-_execute_udd_query(query)
-
-query = """PREPARE query_vcs (text[]) AS SELECT
-                   p.package,
-                   p.component, p.maintainer,
-                   p.source, p.section, 'vcs' AS release, p.component, p.homepage,
-                   p.changed_by, p.chlog_version AS version,
-                   description AS description_en, long_description AS long_description_en,
-                   p.vcs_url     AS "vcs-url",
-                   p.vcs_type    AS "vcs-type",
-                   p.vcs_browser AS "vcs-browser",
-                   p.blend,
-                   p.license,
-                   p.wnpp,
-         bibyear.value    AS "year",
-         bibtitle.value   AS "title",
-         bibauthor.value  AS "authors",
-         bibdoi.value     AS "doi",
-         bibpmid.value    AS "pubmed",
-         biburl.value     AS "url",
-         bibjournal.value AS "journal",
-         bibvolume.value  AS "volume",
-         bibnumber.value  AS "number",
-         bibpages.value   AS "pages",
-         bibeprint.value  AS "eprint"
-                   FROM blends_prospectivepackages p
-    LEFT OUTER JOIN bibref bibyear    ON p.source = bibyear.source    AND bibyear.rank = 0    AND bibyear.key    = 'year'    AND bibyear.package = ''
-    LEFT OUTER JOIN bibref bibtitle   ON p.source = bibtitle.source   AND bibtitle.rank = 0   AND bibtitle.key   = 'title'   AND bibtitle.package = ''
-    LEFT OUTER JOIN bibref bibauthor  ON p.source = bibauthor.source  AND bibauthor.rank = 0  AND bibauthor.key  = 'author'  AND bibauthor.package = ''
-    LEFT OUTER JOIN bibref bibdoi     ON p.source = bibdoi.source     AND bibdoi.rank = 0     AND bibdoi.key     = 'doi'     AND bibdoi.package = ''
-    LEFT OUTER JOIN bibref bibpmid    ON p.source = bibpmid.source    AND bibpmid.rank = 0    AND bibpmid.key    = 'pmid'    AND bibpmid.package = ''
-    LEFT OUTER JOIN bibref biburl     ON p.source = biburl.source     AND biburl.rank = 0     AND biburl.key     = 'url'     AND biburl.package = ''
-    LEFT OUTER JOIN bibref bibjournal ON p.source = bibjournal.source AND bibjournal.rank = 0 AND bibjournal.key = 'journal' AND bibjournal.package = ''
-    LEFT OUTER JOIN bibref bibvolume  ON p.source = bibvolume.source  AND bibvolume.rank = 0  AND bibvolume.key  = 'volume'  AND bibvolume.package = ''
-    LEFT OUTER JOIN bibref bibnumber  ON p.source = bibnumber.source  AND bibnumber.rank = 0  AND bibnumber.key  = 'number'  AND bibnumber.package = ''
-    LEFT OUTER JOIN bibref bibpages   ON p.source = bibpages.source   AND bibpages.rank = 0   AND bibpages.key   = 'pages'   AND bibpages.package = ''
-    LEFT OUTER JOIN bibref bibeprint  ON p.source = bibeprint.source  AND bibeprint.rank = 0  AND bibeprint.key  = 'eprint'  AND bibeprint.package = ''
-                   WHERE p.package = ANY ($1)"""
-_execute_udd_query(query)
-
-# This prepared statement is called only once but it makes sense to mention it in the
-# header to keep all stuff in one place which needs to be changed in case a new language
-# will be added
-query = """PREPARE query_metapkg_trans (text[]) AS
-        SELECT * FROM blends_metapackage_translations($1) AS (
-          package text,
-          description_en text, long_description_en text,
-          description_cs text, long_description_cs text,
-          description_da text, long_description_da text,
-          description_de text, long_description_de text,
-          description_es text, long_description_es text,
-          description_fi text, long_description_fi text,
-          description_fr text, long_description_fr text,
-          description_hu text, long_description_hu text,
-          description_it text, long_description_it text,
-          description_ja text, long_description_ja text,
-          description_ko text, long_description_ko text,
-          description_nl text, long_description_nl text,
-          description_pl text, long_description_pl text,
-          "description_pt_BR" text, "long_description_pt_BR" text,
-          description_ru text, long_description_ru text,
-          description_sk text, long_description_sk text,
-          description_sr text, long_description_sr text,
-          description_sv text, long_description_sv text,
-          description_uk text, long_description_uk text,
-          "description_zh_CN" text, "long_description_zh_CN" text,
-          "description_zh_TW" text, "long_description_zh_TW" text
-        )"""
-_execute_udd_query(query)
+query = """PREPARE query_pkg (text) AS SELECT 
+                   distribution, component, version, architecture, maintainer,
+                   source, section, task, distribution, release, component, homepage,
+                   description FROM packages
+                   WHERE package = $1 ORDER BY version"""
+curs.execute(query)
+
+query = """PREPARE query_new (text) AS SELECT 
+                   distribution, component, version, architecture, maintainer,
+                   source, section, distribution, 'new' AS release, component, homepage,
+                   description, long_description FROM new_packages
+                   WHERE package = $1 ORDER BY version LIMIT 1"""
+curs.execute(query)
 
 # Sometimes the tasks file contains dependencies from virtual packages and we have to
 # obtain the real packages which provide this dependency.
 # First check whether there are such packages (only names)
-query = """PREPARE query_provides (text[]) AS 
-           SELECT DISTINCT package, provides FROM packages WHERE provides IS NOT NULL AND package = ANY($1) ;"""
-_execute_udd_query(query)
+query = """PREPARE query_provides (text) AS 
+           SELECT package FROM packages WHERE provides = $1 GROUP BY package;"""
+curs.execute(query)
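
Not visible in this hunk: how such a prepared statement would be used to resolve a virtual dependency. A minimal sketch, assuming the curs cursor and logger created above; the helper name resolve_virtual is illustrative, not taken from this file.

    def resolve_virtual(dep):
        # ask UDD which real packages Provide: the (possibly virtual) dependency
        curs.execute("EXECUTE query_provides ('%s')" % dep)
        providers = [row[0] for row in curs.fetchall()]
        if providers:
            logger.info("Virtual package %s is provided by %s" % (dep, ', '.join(providers)))
        return providers
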
 
 # Obtain more detailed information about packages that might provide a dependency
 #query = """PREPARE query_provides_version_release (text) AS 
 #           SELECT package, version, release FROM packages WHERE provides = $1
 #                  GROUP BY version, package, release ORDER BY version DESC;"""
-#_execute_udd_query(query)
+#curs.execute(query)
+
+# Obtain the component a certain package might be in
+query = "PREPARE pkg_component (text) AS SELECT component FROM packages WHERE package = $1 GROUP BY component "
+curs.execute(query)
 
 # Obtain the releases featuring a certain package.  In case a package shows up in different components when
 # considering different releases, we apply a preference for main over contrib over non-free.  If this is the case
 # we mention only the releases of the selected component
 query = "PREPARE pkg_releases (text, text) AS SELECT release FROM packages WHERE package = $1 AND component = $2 GROUP BY release "
-_execute_udd_query(query)
+curs.execute(query)
+
+# Obtain available versions including the architectures where this version is available
+#query = """PREPARE pkg_versions_arch (text, text) AS
+#   SELECT release, regexp_replace(version, '^[0-9]:', '') AS version, array_to_string(array_accum(architecture),',') AS architectures FROM
+#     ( SELECT architecture, version,
+#          release || CASE WHEN char_length(substring(distribution from '-.*')) > 0 THEN substring(distribution from '-.*') ELSE '' END AS release FROM packages
+#          WHERE package = $1 AND component = $2
+#          GROUP BY architecture, version, release, distribution
+#          ORDER BY architecture
+#     ) AS av
+#     GROUP BY version, release ORDER BY version DESC;"""
+query = """PREPARE pkg_versions_arch (text) AS
+   SELECT r as release, version, archs, component
+     FROM versions_archs_component($1) AS (r text, version text, archs text, component text)
+          -- you have to specify the column names because plain RECORD type is returned
+     JOIN releases ON releases.release = r
+          -- JOIN with release table to enable reasonable sorting
+    WHERE r NOT LIKE '%-%'
+          -- ignore releases like *-security etc.
+    ORDER BY releases.sort ASC, version DESC;"""
+curs.execute(query)
 
 query = """PREPARE pkg_versions_stable_testing (text) AS
    SELECT release,  regexp_replace(regexp_replace(debversion, '-.*', ''), '[.+~]dfsg.*', '') AS version, debversion
      FROM (SELECT r AS release, MAX(version) AS debversion
              FROM versions_archs_component($1) AS (r text, version text, archs text, component text)
-            WHERE r IN ('lenny', 'squeeze') GROUP BY r) AS zw;"""  # Change here releases once Squeeze is released
-_execute_udd_query(query)
-
-# Number of all submissions
+            WHERE r IN ('lenny', 'squeeze') GROUP BY r) AS zw;"""  # Change here releases onace Squeeze is released
+curs.execute(query)
+
+# Obtain upstream version in case it is newer than Debian version
+query = """PREPARE pkg_dehs (text) AS
+   SELECT DISTINCT d.source, unstable_upstream, unstable_parsed_version, unstable_status, experimental_parsed_version, experimental_status FROM dehs d
+      JOIN  packages p ON p.source = d.source
+      WHERE p.package = $1 AND unstable_status = 'outdated' ;"""
+curs.execute(query)
+
+# Obtain Vcs Information from source table - use only latest version because this is 
+query = """PREPARE src_vcs (text) AS
+           SELECT vcs_type AS "vcs-type", vcs_url AS "vcs-url", vcs_browser AS "vcs-browser" FROM sources
+            WHERE source = $1 AND (vcs_type IS NOT NULL OR vcs_url IS NOT NULL OR vcs_browser IS NOT NULL)
+            ORDER BY version desc LIMIT 1"""
+curs.execute(query)
+
+# Obtain popcon data:
+#   inst     : number of people who installed this package;
+# * vote     : number of people who use this package regularly;
+#   old      : number of people who installed, but don't use this package regularly;
+# * recent   : number of people who upgraded this package recently;
+#   no-files : number of people whose entry didn't contain enough information (atime and ctime were 0).
+query = "PREPARE popcon (text) AS SELECT vote, recent FROM popcon WHERE package = $1"
+curs.execute(query)
+
+# Number of submissions
 query = "PREPARE popcon_submissions AS SELECT vote FROM popcon WHERE package = '_submissions'"
-_execute_udd_query(query)
+curs.execute(query)
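
A minimal sketch of how the two popcon statements above fit together, assuming the curs cursor created above; the helper name popcon_numbers is illustrative, not taken from this file.

    def popcon_numbers(pkg):
        # regular users (vote) and recent upgraders of this package
        curs.execute("EXECUTE popcon ('%s')" % pkg)
        row = curs.fetchone()
        vote, recent = row if row else (0, 0)
        # total number of popcon submissions, to put the numbers in relation
        curs.execute("EXECUTE popcon_submissions")
        submissions = curs.fetchone()[0]
        return vote, recent, submissions
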
+
+# Debtags
+query = """PREPARE debtags (text) AS 
+            SELECT * FROM debtags WHERE
+                package = $1 AND
+                tag NOT LIKE 'implemented-in::%' AND
+                tag NOT LIKE 'protocol::%' AND
+                tag NOT LIKE '%::TODO' AND
+                tag NOT LIKE '%not-yet-tagged%';"""
+curs.execute(query)
+
+query = """PREPARE query_screenshots (text) AS
+           SELECT screenshot_url, large_image_url AS image, small_image_url AS icon, version
+                  FROM screenshots WHERE package = $1
+                  ORDER BY version DESC, image ;"""
+curs.execute(query)
+
+# This query might result in more packages than wanted, because when seeking for a
+# package 'foo'  it also matches an enhances field of 'bar, xfoo-xx, foo-doc, bazz'
+# and thus we get a false positive.  We have to preprocess the result set by splitting
+# it into single packages and check the whole word again for matching.  This is
+# implemented below in Python.
+######################################################################################
+# ATTENTION: Call this "EXECUTE query_check_enhances('%"+pkg+"%')"                   #
+#            I have no idea how to otherwise mask the '%' in the prepared statement. #
+######################################################################################
+query = """PREPARE query_check_enhances (text) AS
+            SELECT DISTINCT package, enhances FROM packages WHERE enhances LIKE $1"""
+curs.execute(query)
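
The ATTENTION note above refers to adding the '%' wildcards around the package name at EXECUTE time and filtering out false positives afterwards; the real code doing this sits further down in _QueryUDD4Package. A minimal standalone sketch of the same pattern, assuming curs and PKGURLMASK as used elsewhere in this file.

    def enhancing_packages(pkg):
        # the surrounding '%' has to be added here, not in the PREPARE
        curs.execute("EXECUTE query_check_enhances ('%" + pkg + "%')")
        enhancers = {}
        for package, enhances in curs.fetchall():
            # split the Enhances field and keep only exact whole-word matches,
            # so that 'foo' does not match 'xfoo-xx' or 'foo-doc'
            if pkg in enhances.split(', '):
                enhancers[package] = PKGURLMASK % package
        return enhancers
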
+
+# Obtain e-mail address of latest uploader of source package
+query = """PREPARE query_get_latest_uploader (text) AS
+           SELECT changed_by FROM upload_history WHERE source = $1 ORDER BY version DESC LIMIT 1;"""
+curs.execute(query)
 
 #########################################################################################
 
-def List2PgArray(list):
-    # turn a list of strings into the syntax for a PostgreSQL array:
-    # {"string1","string2",...,"stringN"}
-    if not list:
-        return '{}'
-    komma='{'
-    PgArray=''
-    for s in list:
-        PgArray=PgArray+komma+'"'+s+'"'
-        komma=','
-    return PgArray+'}'
-
-def List2PgSimilarArray(list):
-    # turn a list of strings surrounded by '%' into the syntax for a PostgreSQL array to enable LIKE conditions:
-    # {"%string1%","%string2%",...,"%stringN%"}
-    if not list:
-        return '{}'
-    komma='{'
-    PgSimArray=''
-    for s in list:
-        PgSimArray=PgSimArray+komma+'"%'+s+'%"'
-        komma=','
-    return PgSimArray+'}'
-
 def ReadConfig(blendname=''):
     # Try to read config file CONFDIR/<blendname>.conf
     conffile = CONFDIR + '/' + blendname + '.conf'
@@ -558,52 +423,27 @@ def FetchTasksFiles(data):
 
     # tasks directory to obtain dependencies and debian/control to obtain meta
     # information like the metapackage prefix
-    # Checkout/Update tasks from SVN
-    if data['vcsdir'].startswith('svn:'):
-        for dir in ('tasks', 'debian'):
-            tasksdir = data['datadir'] + '/' + dir
-            if not os.access(tasksdir, os.W_OK):
-                try:
-                    os.makedirs(tasksdir)
-                except:
-                    logger.error("Unable to create data directory", tasksdir)
-    	    svncommand = "svn %%s %s/%s %s >> /dev/null" % (data['vcsdir'], dir, tasksdir)
+    for dir in ('tasks', 'debian'):
+        tasksdir = data['datadir'] + '/' + dir
+        if not os.access(tasksdir, os.W_OK):
+            try:
+		os.makedirs(tasksdir)
+            except:
+		logger.error("Unable to create data directory", tasksdir)
+        # Checkout/Update tasks from SVN
+        svncommand = "svn %%s %s/%s/%s %s >> /dev/null" % (SVNHOST, data['vcsdir'], dir, tasksdir)
+        if os.path.isdir(tasksdir+'/.svn'):
+    	    svncommand = svncommand % 'up'
+        else:
+            os.system("mkdir -p %s" % (tasksdir))
+            svncommand = svncommand % 'co'
+        if os.system(svncommand):
+    	    logger.error("SVN command %s failed" % (svncommand))
     	    if os.path.isdir(tasksdir+'/.svn'):
-    	        svncommand = svncommand % ' --accept theirs-conflict up'
-            else:
-                os.system("mkdir -p %s" % (tasksdir))
-                svncommand = svncommand % 'co'
-            if os.system(svncommand):
-    	        logger.error("SVN command %s failed" % (svncommand))
-    	        if os.path.isdir(tasksdir+'/.svn'):
-    		    logger.error("Trying old files in %s ..." % tasksdir)
-    	        else:
-                    if os.listdir(tasksdir):
-                        logger.warning("No .svn directory found in %s but trying those random files there as tasks files." % tasksdir)
-                    else:
-                        logger.error("There are no old files in %s -> giving up" % tasksdir)
-                        exit(-1)
-    elif data['vcsdir'].startswith('git:') or data['vcsdir'].startswith('http:'):
-        githtml = data['vcsdir']
-        if githtml.startswith('git:'):
-            githtml=githtml.replace('git://','http://')
-    	if os.path.isdir(data['datadir']+'/.git'):
-	    gitcommand = "cd %s; git pull" % data['datadir']
-	else:
-	    gitcommand = "cd %s; git clone %s" % (re.sub('/[^/]+$', '', data['datadir']), githtml)
-        if os.system(gitcommand):
-    	    logger.error("Git command %s failed" % (gitcommand))
-    	    if os.path.isdir(data['datadir']+'/.git'):
-                logger.error("Trying old files in %s ..." % data['datadir'])
+    		logger.error("Trying old files in %s ..." % tasksdir)
     	    else:
-                if os.listdir(data['datadir']+'/'+dir):
-                    logger.warning("No .git directory found in %s but trying those random files there as tasks files." % data['datadir']+'/'+dir)
-                else:
-                    logger.error("There are no old files in %s -> giving up" % data['datadir'])
-                    exit(-1)
-    else:
-        logger.error("Don't know how to checkout tasks files from %s -> giving up" % data['vcsdir'])
-        exit(-1)
+    		logger.error("There are no old files in %s -> giving up" % tasksdir)
+    		exit(-1)
     return data['datadir'] + '/tasks'
 
 def RowDictionaries(cursor):
@@ -632,8 +472,6 @@ def RowDictionaries(cursor):
 
 def BrowserFromVcsURL(vcs_type, vcs_url):
     # Guess Vcs-Browser URL from VCS URL
-    if not vcs_type:
-        return HOMEPAGENONE
     if vcs_type.lower().startswith('svn'):
         ret_url = re.sub('^svn:', 'http:', vcs_url)
         ret_url = re.sub('/svn/', '/wsvn/', ret_url)
@@ -650,10 +488,7 @@ def BrowserFromVcsURL(vcs_type, vcs_url):
     else:
         logger.warning("Unknown VCS for " + vcs_url)
         return HOMEPAGENONE
-    if vcs_url.startswith('https://github.com') and vcs_url.endswith('.git'):
-        ret_url = re.sub('.git$', '', vcs_url)
-    elif vcs_url.startswith('http:'):
-        return vcs_url
+
     if ret_url == vcs_url:
         logger.warning("Unable to obtain Vcs-Browser from " + vcs_url)
         return HOMEPAGENONE
@@ -687,7 +522,6 @@ class DependantPackage:
         self.blendname      = blendname # Blend that includes the package in dependency list
         self.taskname       = taskname  # Task which includes the Dependency
         self.pkg            = None # Name of dependant package
-        self.source         = None # Name of source package of the dependency
         self.PrintedName    = None # Only for Meta package names - no use for a real dependant package
                                    # FIXME -> object model
         self.pkgstatus      = 'unknown' # global pkgstatus: characterizes status of dependency, release, packaging status
@@ -723,6 +557,221 @@ class DependantPackage:
         self.remark         = {}   # Optional remark for a package
         self.dep_strength   = 0    # Type of Dependency (Depends, Recommends, Suggests, Experimental, New, Avoid, Ignore, WNPP)
 
+    def _QueryUDD4Package(self, source):
+        # Query UDD for several properties of a package
+        query = "EXECUTE pkg_component ('%s')" % self.pkg
+        curs.execute(query)
+        if curs.rowcount == 0:
+            # the package does not exist in UDD table packages
+            # verify whether we have some information in NEW queue
+            query = "EXECUTE query_new ('%s')" % self.pkg
+            curs.execute(query)
+            if curs.rowcount == 0:
+                return
+
+            row = RowDictionaries(curs)[0]
+            self.component = row['component']
+            self.version   = [{'release':'new', 'version': row['version'], 'archs':row['architecture']} , ]
+            self.pkgstatus = 'new'
+            self.properties['pkg-url'] = 'http://ftp-master.debian.org/new/%s_%s.html' % (row['source'], row['version'])
+            for prop in PROPERTIES:
+                self.properties[prop] = row[prop]
+            self.desc['en']['short'] = MarkupString(to_unicode(row['description']), self.pkg, 'ShortDesc - New')
+            self.desc['en']['long']  = Markup(render_longdesc(row['long_description'].splitlines()))
+            (_name, _url) = email.Utils.parseaddr(row['maintainer'])
+            self.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+            logger.info("The package %s is not yet in Debian but it is just in the new queue. (Task %s)" % (self.pkg, self.taskname))
+
+            return
+        # = the package exists in UDD table packages
+        else:
+            # This might happen in cases where a package was in non-free and moved to main later or
+            # something like this.  If we find a certain package in more than one component we
+            # prefer main over contrib over non-free.
+            if curs.rowcount > 1:
+                in_main = 0
+                in_contrib = 0
+                for component in curs.fetchall():
+                    if component[0] == 'main':
+                        self.component = 'main'
+                        in_main       = 1
+                        break
+                    if component[0] == 'contrib':
+                        in_contrib    = 1
+                if in_main == 0:
+                    if in_contrib == 1:
+                        self.component = 'contrib'
+                    else:
+                        self.component = 'non-free'
+            else:
+                self.component = curs.fetchone()[0]
+        # Debian Edu contains packages from main/debian-installer
+        if self.component == 'main/debian-installer':
+                self.component = 'main'
+        self.properties['license'] = license_in_component[self.component]
+
+        query = "EXECUTE pkg_releases ('%s', '%s')" % (self.pkg, self.component)
+        curs.execute(query)
+        has_official = 0
+        for rel in curs.fetchall():
+            self.releases.append(rel[0])
+            if rel[0] != 'experimental':
+                has_official = 1
+
+        # query = "EXECUTE pkg_versions_arch ('%s', '%s')" % (self.pkg, self.component)
+        query = "EXECUTE pkg_versions_arch ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            for row in RowDictionaries(curs):
+                if self.component.startswith(row['component']):
+                    self.version.append({'release':row['release'], 'version': row['version'], 'archs':row['archs']})
+                else:
+                    self.version.append({'release':row['release'], 'version': row['version'] + ' (' + row['component'] + ')', 'archs':row['archs']})
+
+        # Version in stable / testing for long table
+        query = "EXECUTE pkg_versions_stable_testing ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            prefix = 'Versions: '
+            for row in RowDictionaries(curs):
+                self.properties['stable_testing_version'].append((row['release'], row['debversion'], row['version'], prefix))
+                prefix = ', '
+
+        query = "EXECUTE pkg_dehs ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            row = RowDictionaries(curs)[0]
+            # it might be that the new upstream goes to experimental - this should be ignored here
+            if row['experimental_status'] != 'uptodate':
+                self.outdated['release']       = 'upstream'
+                self.outdated['version']       = row['unstable_upstream']
+                self.outdated['architectures'] = ''
+
+        query = "EXECUTE popcon ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            row = RowDictionaries(curs)[0]
+            self.popcon['vote']   = row['vote']
+            self.popcon['recent'] = row['recent']
+
+        query = "EXECUTE debtags ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            tagdict = {}
+            taglist = []
+            for row in RowDictionaries(curs):
+                (tag,value) = row['tag'].split('::')
+                if tagdict.has_key(tag):
+                    tagdict[tag] += ', ' + value
+                else:
+                    tagdict[tag]  = value
+                    taglist.append(tag)
+            if taglist:
+                taglist.sort()
+                for tag in taglist:
+                    self.debtags.append({'tag':tag, 'value':tagdict[tag]})
+
+        query = "EXECUTE query_screenshots ('%s')" % (self.pkg)
+        curs.execute(query)
+        if curs.rowcount > 0:
+            rows = RowDictionaries(curs)
+            self.icon           = rows[0]['icon']
+            self.image          = rows[0]['image']
+            self.screenshot_url = rows[0]['screenshot_url']
+            # if a package has more than one screenshot, provide a list of these
+            if curs.rowcount > 1:
+                for row in rows:
+                    self.screenshots.append({'version':row['version'], 'url':row['image']})
+
+        if has_official == 1:
+            if self.component == 'main':
+                if self.dep_strength == 'Depends' or self.dep_strength == 'Recommends':
+                    self.pkgstatus = 'official_high'
+                else:
+                    self.pkgstatus = 'official_low'
+            else:
+                    self.pkgstatus = 'non-free'
+        else:
+            self.pkgstatus = 'experimental'
+
+        # link to packages.debian.org search page to see overview about all
+        # package versions in all releases
+        self.properties['pkg-url'] = PKGURLMASK % self.pkg
+
+        query = "EXECUTE query_pkg ('%s')" % self.pkg
+        curs.execute(query)
+
+        for row in RowDictionaries(curs):
+            for prop in PROPERTIES:
+                self.properties[prop] = row[prop]
+            self.desc['en']['short'] = MarkupString(to_unicode(row['description']), self.pkg, 'ShortDesc')
+            self.desc['en']['long']  = 'unused'
+            (_name, _url) = email.Utils.parseaddr(row['maintainer'])
+            self.properties['maintainer'] = to_unicode(row['maintainer'])
+            self.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+
+        if self.properties.has_key('source'):
+            query = "EXECUTE src_vcs ('%s')" % (self.properties['source'])
+            curs.execute(query)
+            if curs.rowcount > 0:
+                # There is only one line returned by this query
+                row = RowDictionaries(curs)[0]
+                # If some information about Vcs is found in the database make sure it is ignored from tasks file
+                self.vcs_found = 1
+                for prop in row.keys():
+                    if row[prop]:
+                        self.properties[prop] = row[prop]
+                if not self.properties.has_key('vcs-browser') or self.properties['vcs-browser'] == HOMEPAGENONE:
+                    try:
+                        self.properties['vcs-browser'] = BrowserFromVcsURL(self.properties['vcs-type'], self.properties['vcs-url'])
+                    except KeyError, err:
+                        logger.warning("Vcs Property missing in Database:", self.properties, err)
+                if not self.properties.has_key('vcs-type') or not self.properties['vcs-type']:
+                    if self.properties.has_key('vcs-browser') and self.properties['vcs-browser'] != HOMEPAGENONE:
+                        self.properties['vcs-type']    = VcsTypeFromBrowserURL(self.properties['vcs-browser'])
+            # We are only interested in source packages (for instance for Bugs page)
+            if source == 1:
+                self.pkg = self.properties['source']
+            # Stop using source package in self.pkg because we need the source package to obtain latest uploaders
+            # and bugs should be rendered in the same job - so we need the differentiation anyway
+            self.src = self.properties['source']
+        else:
+            logger.error("Failed to obtain source for package", self.pkg)
+            return
+
+        query = "EXECUTE query_get_latest_uploader ('%s')" % (self.src)
+        curs.execute(query)
+        try:
+            changed = to_unicode(curs.fetchone()[0])
+	except TypeError, err:
+            changed = None
+            logger.info("Query '%s' does not result in a valid changed entry (%s)" % (query, err))
+        if changed:
+            # self.properties['maintainer'] = to_unicode(self.properties['maintainer']) # .encode('utf-8')
+            try:
+                if not changed.startswith(self.properties['maintainer']):
+                    (_name, _url) = email.Utils.parseaddr(changed)
+                    changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
+                    self.properties['changed_by']    = MarkupString(changed, self.pkg, 'changed_by')
+                    self.properties['last_uploader'] = to_unicode(changed)
+                    self.properties['last_uploader_simple'] = to_unicode('%s <%s>' % (_name, _url))
+            except UnicodeDecodeError, err:
+                logger.error("Failed to compare changed with maintainer - assume both are the same for package %s (%s)", self.pkg, err)
+
+        query = "EXECUTE query_check_enhances ('%"+self.pkg+"%')"
+        curs.execute(query)
+
+        if curs.rowcount > 0:
+            for row in RowDictionaries(curs):
+                enhancelist = row['enhances'].split(', ')
+                if self.pkg in enhancelist:
+                    if not row['package'] in self.properties['Enhances'].keys():
+                        self.properties['Enhances'][row['package']] = PKGURLMASK % row['package']
+            # if self.properties['Enhances'] != {}:
+            #    print "The following packages are enhancing %s: " % self.pkg,
+            #    for enh in self.properties['Enhances'].keys():
+            #        print enh,
+            #    print
 
     # sort these objects according to the package name
     def __cmp__(self, other):
@@ -749,79 +798,11 @@ class DependantPackage:
         if self.desc:
             ret += ", desc: "         + str(self.desc)
         for prop in self.properties.keys():
-            try:
-                ret += ", %s: %s" % (prop, to_unicode(str(self.properties[prop])))
-            except UnicodeEncodeError:            
-                ret += ", %s: <UnicodeEncodeError>" % (prop)
-        try:
-            ret += ", popcon = %i (%i)" % (self.popcon['vote'], self.popcon['recent'])
-        except:
-            pass
-        ret += ", debtags = " + str(self.debtags)
-        if self.icon:
-            ret += ", screenshoticon = " + self.icon
-        if self.outdated:
-            ret += ", dehsstatus = " + str(self.outdated)
-
+            ret += ", %s: %s" % (prop, str(self.properties[prop]))
         # if self.desc['en']:
         #    ret += ", desc['en']:"   + str(self.desc['en'])
         return ret
 
-    def SetPublications(self, row):
-        for pub in ("year", "title", "authors", "doi", "pubmed", "url", "journal", "volume", "number", "pages", "eprint" ):
-            if row[pub]:
-                if pub == "pages":
-                    row[pub] = re.sub("--", "-", row[pub])
-                if (pub == "authors" and row[pub].count(" and ") or row[pub].count(" AND ")):
-                    # assume "authors" string is a regular BibTeX "and" separated list of authors
-                    row[pub] = re.sub("AND", "and", row[pub].strip())
-                    authors_list = row[pub].split(" and ")
-                    # normalize several BibTeX styles to "First Last, First Last and First Last":
-                    # 1. "First Last and First Last and First Last"
-                    # 2. "Last, First and Last, First and Last, First"
-                    # 3. "First Last, First Last and First Last"
-                    authors_string = ""
-                    while (authors_list):
-                        author = authors_list.pop(0)
-                        if (author.count(",") > 1):
-                            # 3. "First Last, First Last and First Last"
-                            # authors string is already in desired format, keep it
-                            authors_string = row[pub].strip()
-                            break
-                        elif (row[pub].count(",") == row[pub].count(" and ") + 1):
-                            # 2. "Last, First and Last, First and Last, First"
-                            # reverse last and first name
-                            (last, first) = author.split(", ")
-                            full_author = first + " " + last
-                        else:
-                            # 1. "First Last and First Last and First Last"
-                            full_author = author
-                        if (len(authors_list) > 1):
-                            authors_string += full_author + ", "
-                        elif (len(authors_list) > 0):
-                            authors_string += full_author + " and "
-                        else:
-                            authors_string += full_author
-                    if row[pub] != authors_string:
-                        # emergency brake if algorithm fails to detect non-names like '1000 Genome Project Data Processing Subgroup'
-                        if authors_string.count(',') > row[pub].count(' and '):
-                            logger.warning("Refuse to change Author string in %s: '%s'(%i) -> '%s'(%i)", \
-                                            self.pkg, to_unicode(row[pub]), row[pub].count(' and '), to_unicode(authors_string), authors_string.count(','))
-                        else:
-                            logger.debug("Author string changed in %s: '%s' -> '%s'", self.pkg, to_unicode(row[pub]), to_unicode(authors_string))
-                            row[pub] = authors_string
-                if not self.properties.has_key('published'):
-                    self.properties['published'] = {}
-                if self.properties['published'].has_key(pub):
-                    if self.properties['published'][pub] == to_unicode(row[pub]):
-                    	try:
-                           print >>rmpub, "%s: %s: Published-%s: %s" % (self.taskname, self.pkg, pub, to_unicode(row[pub]))
-            	        except UnicodeEncodeError:
-            	           print >>rmpub, "--- %s: %s: Published-%s: some duplicated value featuring encoding problems ---" % (self.taskname, self.pkg, pub)
-            		logger.info("%s/%s: Publication-%s = %s can be removed"  % (self.taskname, self.pkg, pub, to_unicode(row[pub])))
-            	    else:
-            	        logger.info("%s conflicting fields Publication-%s in tasks file with value '%s' and in UDD with value '%s'" % (self.pkg, pub, self.properties['published'][pub], to_unicode(row[pub])))
-                self.properties['published'][pub] = to_unicode(row[pub])
 
 class Tasks:
     # Provide a list of dependencies defined in metapackages
@@ -840,8 +821,6 @@ class Tasks:
         handler.setFormatter(formatter)
         logger.addHandler(handler)
 
-	LockBlendsTools()
-
         # This Instance of the Available class contains all
         # information about packages that are available in Debian
         # See below for more information about Available class
@@ -870,6 +849,9 @@ class Tasks:
         # If we want to subscribe ddpo we need the source package names.
         # In this case set source=1
 
+        if self.metapackagekeys == []:
+            self._GetMetapackages()
+
         # Obtain the prefix of the meta packages of the Blend using blends-dev tools blend_get_names
         if os.access('/usr/share/blends-dev/blend-get-names', os.X_OK):
             blend_get_names = '/usr/share/blends-dev/blend-get-names'
@@ -884,45 +866,17 @@ class Tasks:
         pipe = Popen(cmd, shell=True, stdout=PIPE).stdout
         prefix = pipe.read().strip() + '-'
         pipe.close()
-
-        metapackages = []
-        for task in self.metapackagekeys:
-            metapackages.append(prefix + task)
-
-        # Verify whether there are any translations of metapackage descriptions which on
-        # one hand is a sign that this Blend has uploaded metapackages at all and on the
-        # other hand gives a clue about whether it makes sense to query for description
-        # translations
-        query = "SELECT COUNT(*) FROM descriptions WHERE package = ANY ('%s')" % List2PgArray(metapackages)
-        _execute_udd_query(query)
-        if curs.rowcount > 0:
-            hastranslations = curs.fetchone()[0]
-
-        metapkg_translations = {}
-        if hastranslations > 0:
-            query = "EXECUTE query_metapkg_trans('%s')" % List2PgArray(metapackages)
-            _execute_udd_query(query)
-            if curs.rowcount > 0:
-                for row in RowDictionaries(curs):
-                    metapkg_translations[row['package']] = row
-
         for task in self.metapackagekeys:
-            td = TaskDependencies(self.blendname, task=task, tasksdir=self.tasksdir)
-            pkgname = prefix + task
-            translations = None
-            if metapkg_translations.has_key(pkgname):
-                translations = metapkg_translations[pkgname]
-            td.SetMetapackageInfo(pkgname, translations)
+            td = TaskDependencies(self.blendname, task=task, tasksdir=self.tasksdir, prefix=prefix)
             td.GetTaskDependencies(source)
             self.tasks[task] = td
 
         if source == 0:
-            # total number popcon submissions
+            # overall popcon submissions
             query = "EXECUTE popcon_submissions"
-            _execute_udd_query(query)
+            curs.execute(query)
             if curs.rowcount > 0:
                 self.popconsubmit = curs.fetchone()[0]
-            # Obtain packages that might enhance any of the packages in tasks list
             self.LinkEnhances()
 
     def GetNamesOnlyDict(self, dependencystatus=[]):
@@ -1108,7 +1062,7 @@ class Tasks:
 
 class TaskDependencies:
     # List of dependencies defined in one metapackage
-    def __init__(self, blendname, task, tasksdir=None):
+    def __init__(self, blendname, task, tasksdir=None, prefix=''):
 
         self.data     = ReadConfig(blendname)
         self.blendname  = self.data['Blend']
@@ -1123,12 +1077,16 @@ class TaskDependencies:
             logger.error("No such task file %s." % self.taskfile)
             return None
 
-        # Dictionary with status of dependencies as key and list of DependantPackage
+        # Dictionary with satus of dependencies as key and list of DependantPackage
         # instances
         self.dependencies = {}
         for pkgstat in pkgstatus:
             self.dependencies[pkgstat] = []
 
+        # Main information for a task
+        self.metapkg             = DependantPackage(self.blendname, self.task)
+        self.metapkg.pkg         = prefix + task
+
         # If a Blend is just based on the meta package of another Blend (this is the
         # case in Debian Science, which bases on med-bio for biology and gis-workstation
         # for geography), it makes no sense to build its own sentinel page but read
@@ -1140,38 +1098,6 @@ class TaskDependencies:
         # This is NOT YET implemented
         self.metadepends     = None
 
-
-    def SetMetapackageInfo(self, pkgname, ddtptranslations=None):
-        # Gather information (specifically description translations if exists) about metapackage itself
-        self.metapkg             = DependantPackage(self.blendname, self.task)
-        self.metapkg.pkg         = pkgname
-        self.metapkg.source      = self.blendname
-        if not ddtptranslations:
-            return
-        for lang in languages:
-            if ddtptranslations['description_'+lang]:
-                self.metapkg.desc[lang] = {}
-                try:
-                    short = to_unicode(ddtptranslations['description_'+lang])
-                    self.metapkg.desc[lang]['short'] = MarkupString(short, self.metapkg.pkg, 'taskShortDesc', lang)
-                except UnicodeEncodeError, err:
-                    logger.error("===> UnicodeDecodeError in metapackage %s (lang='%s'): '%s'; ErrTxt: %s" % \
-                                     (self.metapkg.pkg, lang, ddtptranslations['description_'+lang], err))
-                    short = to_unicode(ddtptranslations['description_'+lang],'latin1')
-                    self.metapkg.desc[lang]['short'] = MarkupString(short, self.metapkg.pkg, 'taskShortDesc' + lang)
-    
-                try:
-                    self.metapkg.desc[lang]['long'] = Markup(render_longdesc(ddtptranslations['long_description_'+lang].splitlines()))
-                except UnicodeDecodeError, err:
-                    logger.error("===> UnicodeDecodeError in metapackage long %s (lang='%s'): '%s'; ErrTxt: %s" % \
-                                     (self.metapkg.pkg, lang, ddtptranslations['long_description_'+lang], err))
-                    self.metapkg.desc[lang]['long'] = 'UnicodeDecodeError'
-                except AttributeError, err:
-                    logger.error("===> AttributeError in metapackage long %s (lang='%s'): '%s'; ErrTxt: %s" % \
-                                     (self.metapkg.pkg, lang, ddtptranslations['long_description_'+lang], err))
-                    self.metapkg.desc[lang]['long'] = 'Missing long description'
-
-
     def _AppendDependency2List(self, dep, source):
         # Append dependency which was found in the tasks file if not Ignore / Avoid and
         # no duplication in case of source dependencies
@@ -1198,7 +1124,6 @@ class TaskDependencies:
         global dep_strength_keys
 
         f = file(self.taskfile)
-        found_description=False
         for stanza in deb822.Sources.iter_paragraphs(f):
             # Why and Responsible can be valid for more than one dependency
             # Store them in strings and use them for all Dependent Package objects
@@ -1215,21 +1140,24 @@ class TaskDependencies:
                     self.metapkg.PrintedName = to_unicode(stanza['task'])
                     continue
                 if key == 'Description':
-                    if found_description:
-                        logger.error("Duplicate description entry in task %s; you probably want to use Pkg-Description field instead!" % self.task)
-                    else:
-                        (short, long) = SplitDescription(stanza['description'])
-                        # Markup strings to enable verbatim output of preformatted text
-                        self.metapkg.desc['en']['short'] = MarkupString(short.encode('utf-8'), self.metapkg.PrintedName, 'taskShortDesc')
-                        self.metapkg.desc['en']['long']  = MarkupString(long.encode('utf-8'),  self.metapkg.PrintedName, 'taskLongDesc')
-                        found_description = True
+                    (short, long) = SplitDescription(stanza['description'])
+                    # Markup strings to enable verbatim output of preformatted text
+                    self.metapkg.desc['en']['short'] = MarkupString(short.encode('utf-8'), self.metapkg.PrintedName, 'taskShortDesc')
+                    self.metapkg.desc['en']['long']  = MarkupString(long.encode('utf-8'),  self.metapkg.PrintedName, 'taskLongDesc')
+
+                    # find maintainer and other metadata:
+                    query = "EXECUTE query_pkg ('%s')" % self.metapkg.pkg
+                    curs.execute(query)
+                    if curs.rowcount > 0:
+                        row = RowDictionaries(curs)[0]
+                        (_name, _url) = email.Utils.parseaddr(row['maintainer'])
+                        self.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
+
                     continue
                 if key == 'Meta-Depends':
                     self.metadepends = stanza['meta-depends']
-                    continue
                 if key == 'Meta-Suggests':
                     self.metadepends = stanza['meta-suggests']
-                    continue
                 if key == 'Why':
                     why = stanza['why']
                     continue
@@ -1237,7 +1165,7 @@ class TaskDependencies:
                     responsible = stanza['responsible'].strip()
             	    if not dep:
             		# Make sure there is really enough information to deal with provided by the package
-            		logger.error("Missing package information for field %s = %s in task file %s" % (key, responsible, self.task))
+            		logger.error("Missing package information for field %s = %s" % (key, responsible))
             		continue
                     if dep.responsible != None:
                         # we are dealing with an official package that has a real maintainer who
@@ -1248,11 +1176,7 @@ class TaskDependencies:
                         continue
                     if responsible != '':
                         (_name, _url) = email.Utils.parseaddr(responsible)
-                        _name = to_unicode(_name)
-                        try:
-                            dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, _name)
-                        except UnicodeDecodeError, err:
-                            logger.error("Unicode problem when decoding name of maintainer with mail address <%s> in task %s (%s)" % (_url, self.task, err))
+                        dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
                     continue
 
                 if key in dep_strength_keys:
@@ -1292,6 +1216,8 @@ class TaskDependencies:
                             logger.warning("Package names may not contain upper case letters, so %s is an invalid package name which is turned into %s" \
                                 % (dep_in_line, dep.pkg))
 
+                        dep._QueryUDD4Package(source)
+
                     continue
 
                 # sometimes the tasks file contains standalone comments or other RFC 822 entries.
@@ -1389,9 +1315,6 @@ class TaskDependencies:
                               % (key, stanza[key.lower()]))
                 elif key == 'License':
                     if dep != None:
-                        if dep.vcs_found == 1 and dep.properties.has_key(key.lower()):
-                            fields_obsolete.append(key)
-                            continue
                         dep.properties[key.lower()]  = stanza[key.lower()]
                     else:
                         logger.error("Dep not initiated before %s %s -> something is wrong." \
@@ -1419,15 +1342,17 @@ class TaskDependencies:
                               % (key, stanza[key.lower()]))
                 elif key == 'WNPP':
                     if dep != None:
-                        if dep.vcs_found == 1 and dep.properties.has_key(key.lower()):
-                            fields_obsolete.append(key)
-                            continue
-                        wnpp = stanza['wnpp'].strip()
-                        # in case somebody prepended a '#' sign before the bug number
-                        wnpp = re.sub('^#', '', wnpp)
-                    	# if there is really a number given
-                        if re.compile("^\d+$").search(wnpp):
-                            dep.properties['wnpp'] = wnpp
+                        # it makes sense to assume that a package where WNPP was recently closed
+                        # shows up in unstable first
+                        if dep.component != None:
+                            logger.info("WNPP for package %s just closed - extra information can be removed from task file %s." % (dep.pkg, dep.taskname))
+                        else:
+                    	    wnpp = stanza['wnpp'].strip()
+                            # in case somebody prepended a '#' sign before the bug number
+                            wnpp = re.sub('^#', '', wnpp)
+                    	    # if there is really a number given
+                    	    if re.compile("^\d+$").search(wnpp):
+                        	dep.properties['wnpp'] = wnpp
                     else:
                         logger.error("Dep not initiated before WNPP %s -> something is wrong." \
                               % stanza['wnpp'])
@@ -1475,9 +1400,8 @@ class TaskDependencies:
             if dep == None:
                 continue # ... with next stanza
             # look for certain fields set in the tasks file to move the dependency into different
-            # categories of development status of not yet included packages provided that there
-            # is at least a package description given
-            if dep.pkgstatus == 'unknown' and dep.desc['en'] != {}:
+            # categories of development status of not yet included packages
+            if dep.pkgstatus == 'unknown':
                 flag = 0
                 # first check those packages where some work was just done
                 for status in ['pkgvcs', 'unofficial', 'wnpp', 'prospective']:
@@ -1490,405 +1414,11 @@ class TaskDependencies:
                             break
                     if flag == 1:
                         break
-
-            tmp_dep_list.append(dep)
-            # remarks which are common to several dependencies in a list have to be added to all of the dependencies
-            for dep in tmp_dep_list:
-                if remark != {}:
-                    dep.remark = remark
-                if fields_obsolete != [] and dep.pkgstatus != 'new':
-                    logger.info("Package %s is an official package and has information in UDD. The following extra information can be removed from tasks file %s: %s" % \
-                                     (dep.pkg, dep.taskname, str(fields_obsolete)))
-                self._AppendDependency2List(dep, source)
-
-        f.close()
-
-        alldepends=[]
-        for status in self.dependencies.keys():
-            for dep in self.dependencies[status]:
-                alldepends.append(dep.pkg)
-
-        if not alldepends:
-            logger.warning("No dependencies defined in taskfile %s" % self.task)
-            return
-        query = "EXECUTE query_pkgs ('%s', '%s')" % (List2PgArray(alldepends), List2PgSimilarArray(alldepends))
-        _execute_udd_query(query)
-        pkgs_in_pool = []
-        enhancing_pkgs = []
-        if curs.rowcount > 0:
-            for row in RowDictionaries(curs):
-                # look for the package name in the list of packages mentioned in the tasks file
-                found = False
-                for status in self.dependencies.keys():
-                    for dep in self.dependencies[status]:
-                        if dep.pkg == row['package']:
-                            found = True
-                            break
-                    if found:
-                        break
-                if not found:
-                    # this should not happen ...
-                    logger.info("The package %s was found in package pool but never mentioned in task %s." % (row['package'], self.task))
-                    continue
-
-                # Now set the information for the package found in the database
-                # Debian Edu contains packages from main/debian-installer - that's why we use startswith here
-                if row['component'].startswith('main'):
-                    dep.component = 'main'
-                    if dep.dep_strength == 'Depends' or dep.dep_strength == 'Recommends':
-                        dep.pkgstatus = 'official_high'
-                    elif dep.dep_strength == 'Suggests':
-                        dep.pkgstatus = 'official_low'
-                else:
-                    dep.component = row['component']
-                    # If a package is not found in main, its status can be at most non-free
-                    dep.pkgstatus = 'non-free'
-                # if a package is released *only* in experimental decrease package status
-                if row['release'] == 'experimental':
-                    dep.pkgstatus = 'experimental'
-
-                # move the dependent package to a different status list if needed because a status change was detected
-                if dep.pkgstatus != status:
-                    self.dependencies[status].remove(dep)
-                    self.dependencies[dep.pkgstatus].append(dep)
-
-                # Warn about remaining information of prospective package
-                if ( dep.desc['en'] and dep.desc['en']['short'] ) and \
-                    not dep.debtags: # prevent informing about packages which are just duplicated because of a broken query
-                    logger.info("WNPP for package %s just closed - extra information can be removed from task file %s." % (dep.pkg, dep.taskname))
-
-                dep.properties['license'] = license_in_component[dep.component]
-                for prop in PROPERTIES:
-                    dep.properties[prop] = row[prop]
-                for prop in ('vcs-type', 'vcs-url'):
-                    dep.properties[prop] = row[prop]
-                if row['vcs-browser']:
-                    dep.properties['vcs-browser'] = row['vcs-browser']
-                else:
-                    if dep.properties['vcs-browser'] == HOMEPAGENONE:
-                        dep.properties['vcs-browser'] = BrowserFromVcsURL(dep.properties['vcs-type'], dep.properties['vcs-url'])
-
-                if row['enhanced']:
-                    for pkg in row['enhanced']:
-                        dep.properties['Enhances'][pkg] = PKGURLMASK % pkg
-                        enhancing_pkgs.append(pkg)
-
-                for i in range(len(row['releases'])):
-                    dep.version.append({'release':row['releases'][i], 'version': row['versions'][i], 'archs':row['architectures'][i]})
-
-                dep.popcon['vote']   = row['vote']
-                dep.popcon['recent'] = row['recent']
-
-                # Debtags as sorted list of dict fields
-                if row['debtags']:
-                    if dep.debtags: # there is no reasonable way that debtags was set before - so something is wrong here and a warning should be issued
-                        logger.warning("Debtags for package '%s' was just set.  A duplicated result from database query is suspected.  Please check the result!" % dep.pkg)
-                    tagdict = {}
-                    taglist = []
-                    for debtag in row['debtags']:
-                        (tag,value) = debtag.split('::')
-                        if tagdict.has_key(tag):
-                            tagdict[tag] += ', ' + value
-                        else:
-                            tagdict[tag]  = value
-                            taglist.append(tag)
-                        if taglist:
-                            taglist.sort()
-                    for tag in taglist:
-                        dep.debtags.append({'tag':tag, 'value':tagdict[tag]})
-
-                # screenshots
-                if row['icon']:
-                    dep.icon           = row['icon'][0]
-                    dep.image          = row['image'][0]
-                    dep.screenshot_url = 'http://screenshots.debian.net/package/' + dep.pkg
-                    for i in range(1,len(row['image'])):
-                        dep.screenshots.append({'version':row['screenshot_versions'][i], 'url':row['image'][i]})
-
-                # it might be that the new upstream goes to experimental - this should be ignored here
-                if row['experimental_status'] != 'uptodate' and row['unstable_parsed_version']:
-                    dep.outdated['release']       = 'upstream'
-                    dep.outdated['version']       = row['unstable_upstream']
-                    dep.outdated['architectures'] = ''
-
-                if row['changed_by']:
-                    try:
-                        changed = to_unicode(row['changed_by'])
-                    except TypeError, err:
-                        changed = None
-                        logger.warning("Encoding problem for last uploader of package '%s' in task %s (%s)" % (dep.pkg, dep.taskname, err))
-                    if changed:
-                        try:
-                            (_name, _url) = email.Utils.parseaddr(changed)
-                            changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
-                            dep.properties['changed_by']    = MarkupString(changed, dep.pkg, 'changed_by')
-                            dep.properties['last_uploader'] = to_unicode(changed)
-                            dep.properties['last_uploader_simple'] = to_unicode('%s <%s>' % (_name, _url))
-                        except UnicodeDecodeError, err:
-                            logger.error("Encoding problem for last uploader - assume same as maintainer for package %s (%s)", dep.pkg, err)
-
-                # link to packages.debian.org search page to see overview about all
-                # package versions in all releases
-                dep.properties['pkg-url'] = PKGURLMASK % dep.pkg
-                dep.SetPublications(row)
-                for l in languages:
-                    if row['description_'+l]:
-                        dep.desc[l] = {}
-                        dep.desc[l]['short'] = MarkupString(to_unicode(row['description_'+l]), dep.pkg, 'ShortDesc')
-                        if row['long_description_'+l]:
-                            dep.desc[l]['long']  = Markup(render_longdesc(row['long_description_'+l].splitlines()))
-                if not dep.desc['en'].has_key('short'):
-                    logger.error("Dep has no English short description: %s", dep.pkg)
-                    dep.desc['en']['short'] = "??? missing short description for package %s :-(" % dep.pkg
-                (_name, _url) = email.Utils.parseaddr(row['maintainer'])
-                dep.properties['maintainer'] = to_unicode(row['maintainer'])
-                dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
-
-                pkgs_in_pool.append(dep.pkg)
-                # DEBUG
-                # print dep
-
-        pkgs_not_in_pool = []
-        for status in self.dependencies.keys():
-            for dep in self.dependencies[status]:
-                if dep.pkg not in pkgs_in_pool:
-                    pkgs_not_in_pool.append(dep.pkg)
-
-        # Gather information about packages in NEW
-        query = "EXECUTE query_new('%s')" % List2PgArray(pkgs_not_in_pool)
-        _execute_udd_query(query)
-        pkgs_in_new = []
-        if curs.rowcount > 0:
-            for row in RowDictionaries(curs):
-                pkgs_in_new.append(row['package'])
-                # look for the package name in the list of packages mentioned in the tasks file
-                found = False
-                for status in self.dependencies.keys():
-                    for dep in self.dependencies[status]:
-                        if dep.pkg == row['package']:
-                            found = True
-                            break
-                    if found:
-                        break
-                if not found:
-                    # this should not happen ...
-                    logger.info("The package %s was found in new but never mentioned in task %s." % (row['package'], self.task))
-                    continue
-                # Check for correct status 'new'
-                if status != 'new':
-                    self.dependencies[status].remove(dep)
-                    self.dependencies['new'].append(dep)
-                dep.pkgstatus = 'new'
-                dep.component = row['component']
-                dep.version   = [ row['version'], ]
-                dep.properties['pkg-url'] = 'http://ftp-master.debian.org/new/%s_%s.html' % (row['source'], row['version'])
-                # Warn about remaining information of prospective package
-                if ( dep.desc['en'] and dep.desc['en']['short'] ) or dep.properties['homepage'] != HOMEPAGENONE:
-                    logger.info("The package %s is not yet in Debian but it is just in the new queue. (Task %s)" % (dep.pkg, dep.taskname))
-                for prop in PROPERTIES:
-                    dep.properties[prop] = row[prop]
-                dep.desc['en']['short'] = MarkupString(to_unicode(row['description_en']), dep.pkg, 'ShortDesc - New')
-                dep.desc['en']['long']  = Markup(render_longdesc(row['long_description_en'].splitlines()))
-                (_name, _url) = email.Utils.parseaddr(row['maintainer'])
-                dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
-                dep.SetPublications(row)
-                if row['changed_by']:
-                    try:
-                        changed = to_unicode(row['changed_by'])
-                    except TypeError, err:
-                        changed = None
-                        logger.warning("Encoding problem for uploader to ftpnew of package '%s' in task %s (%s)" % (dep.pkg, dep.taskname, err))
-                    if changed:
-                        try:
-                            (_name, _url) = email.Utils.parseaddr(changed)
-                            changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
-                            dep.properties['changed_by']    = MarkupString(changed, dep.pkg, 'changed_by')
-                            dep.properties['last_uploader'] = to_unicode(changed)
-                            dep.properties['last_uploader_simple'] = to_unicode('%s <%s>' % (_name, _url))
-                        except UnicodeDecodeError, err:
-                            logger.error("Encoding problem for last uploader - assume same as maintainer for package %s (%s)", dep.pkg, err)
-
-        # Verify whether there are virtual packages which are provided by some other packages in the list of dependencies
-        query = "EXECUTE query_provides('%s')" % List2PgArray(pkgs_in_pool)
-        _execute_udd_query(query)
-        pkgs_virtual = []
-        if curs.rowcount > 0:
-            virtual_pkgs = RowDictionaries(curs)
-            for status in self.dependencies.keys():
-                for dep in self.dependencies[status]:
-                    if dep.pkg not in pkgs_in_pool and dep.pkg not in pkgs_in_new:
-                        found = False
-                        for vp in virtual_pkgs:
-                            for pr in vp['provides'].split(','):
-                                prs=pr.strip()
-                                if dep.pkg == prs:
-                                    pkgs_virtual.append(prs)
-                                    logger.info("Virtual package %s is provided by package %s for task %s" % (dep.pkg, vp['package'], dep.taskname))
-                                    found = True
-                                    break
-                            if found:
-                                break
-
-        pkgs_not_in_pool = []
-        for status in self.dependencies.keys():
-            for dep in self.dependencies[status]:
-                if dep.pkg not in pkgs_in_pool and dep.pkg not in pkgs_in_new and dep.pkg not in pkgs_virtual:
-                    pkgs_not_in_pool.append(dep.pkg)
-        # Gather information about packages in Vcs
-        query = "EXECUTE query_vcs('%s')" % List2PgArray(pkgs_not_in_pool)
-        _execute_udd_query(query)
-        pkgs_in_vcs = []
-        if curs.rowcount > 0:
-            for row in RowDictionaries(curs):
-		# print row
-                pkgs_in_vcs.append(row['package'])
-                # look for the package name in the list of packages mentioned in the tasks file
-                found = False
-                for status in self.dependencies.keys():
-                    for dep in self.dependencies[status]:
-                        if dep.pkg == row['package']:
-                            found = True
-                            break
-                    if found:
-                        break
-                if not found:
-                    # this should not happen ...
-                    logger.info("The package %s was found in vcs but never mentioned in task %s." % (row['package'], self.task))
-                    continue
-                # Check for correct status 'pkgvcs'
-                if status != 'pkgvcs':
-                    self.dependencies[status].remove(dep)
-                    self.dependencies['pkgvcs'].append(dep)
-                dep.pkgstatus = 'pkgvcs'
-                dep.component = row['component']
-                dep.version   = [ row['version'], ]
-                dep.debtags   = [] # prevent trying to print debtags (should be the default setting but does not work in the template; FIXME)
-                # Warn about remaining information of prospective package
-                if ( dep.desc['en'] and dep.desc['en']['short'] ) or dep.properties['homepage'] != HOMEPAGENONE:
-                    logger.info("The package %s is not yet in Debian but it is just in Blends %s Vcs. (Task %s)" % (dep.pkg, row['blend'], dep.taskname))
-                for prop in PROPERTIES:
-                    dep.properties[prop] = row[prop]
-                for prop in ('vcs-url', 'vcs-browser', 'vcs-type', 'license'):
-                    dep.properties[prop] = row[prop]
-                if int(row['wnpp']) > 0:
-                    dep.properties['wnpp'] = row['wnpp']
-                dep.SetPublications(row)
-                dep.desc['en']['short'] = MarkupString(to_unicode(row['description_en']), dep.pkg, 'ShortDesc - New')
-                dep.desc['en']['long']  = Markup(render_longdesc(row['long_description_en'].splitlines()))
-                (_name, _url) = email.Utils.parseaddr(row['maintainer'])
-                dep.responsible = '<a href="mailto:%s">%s</a>' % (_url, to_unicode(_name))
-                if row['changed_by']:
-                    try:
-                        changed = to_unicode(row['changed_by'])
-                    except TypeError, err:
-                        changed = None
-                        logger.warning("Encoding problem for changelog author in Vcs of package '%s' in task %s (%s)" % (dep.pkg, dep.taskname, err))
-                    if changed:
-                        try:
-                            (_name, _url) = email.Utils.parseaddr(changed)
-                            changed = '<a href="mailto:%s">%s</a>' % (_url, _name)
-                            dep.properties['changed_by']    = MarkupString(changed, dep.pkg, 'changed_by')
-                            dep.properties['last_uploader'] = to_unicode(changed)
-                            dep.properties['last_uploader_simple'] = to_unicode('%s <%s>' % (_name, _url))
-                        except UnicodeDecodeError, err:
-                            logger.error("Encoding problem for changer - assume same as maintainer for package %s (%s)", dep.pkg, err)
-
-        # Verify whether packages which are neither in pool, new, vcs nor virtual have sufficient information in task file
-        for status in self.dependencies.keys():
-            for dep in self.dependencies[status]:
-                if dep.pkg not in pkgs_in_pool and dep.pkg not in pkgs_in_new and dep.pkg not in pkgs_virtual and (dep.pkgstatus == 'unknown' or dep.pkgstatus == 'pkgvcs'):
-                    # If only Vcs fields are given then we currently do not know enough to print package information
-                    if dep.pkgstatus == 'pkgvcs' and (dep.properties['homepage'] == HOMEPAGENONE or dep.desc['en'] == {}):
-                        logger.error("Package %s in task %s has only Vcs information - please provide more information" % (dep.pkg, dep.taskname))
-                        self.dependencies[status].remove(dep)
-                        # dep.pkgstatus == 'unknown'
-                    if dep.properties['homepage'] == HOMEPAGENONE:
-                        if dep.desc['en'] == {}:
-                            logger.error("Package %s in task %s neither in pool nor new and is lacking homepage and description - ignored" % (dep.pkg, dep.taskname))
-                        else:
-                            logger.error("Package %s in task %s neither in pool nor new and has no homepage information - ignored (%s)" % (dep.pkg, dep.taskname, dep.pkgstatus))
-                    else:
-                        if dep.desc['en'] == {}:
-                            logger.error("Package %s neither in pool nor new and has no description - ignored" % dep.pkg)
-                else:
-                    # prevent printing WNPP of packages inside Debian
-                    if dep.properties.has_key('wnpp') and dep.pkgstatus != 'wnpp':
-                        del dep.properties['wnpp']
-
-        for dependency in self.dependencies.keys():
-            self.dependencies[dependency].sort()
-
-    def _QueryUDD4Package(self, source):
-
-        query = "EXECUTE pkg_releases ('%s', '%s')" % (self.pkg, self.component)
-        _execute_udd_query(query)
-        has_official = 0
-        for rel in curs.fetchall():
-            self.releases.append(rel[0])
-            if rel[0] != 'experimental':
-                has_official = 1
-
-        # Version in stable / testing for long table
-        query = "EXECUTE pkg_versions_stable_testing ('%s')" % (self.pkg)
-        _execute_udd_query(query)
-        if curs.rowcount > 0:
-            prefix = 'Versions: '
-            for row in RowDictionaries(curs):
-                self.properties['stable_testing_version'].append((row['release'], row['debversion'], row['version'], prefix))
-                prefix = ', '
-
-
-        if has_official == 1:
-            if self.component == 'main':
-                if self.dep_strength == 'Depends' or self.dep_strength == 'Recommends':
-                    self.pkgstatus = 'official_high'
-                else:
-                    self.pkgstatus = 'official_low'
-            else:
-                    self.pkgstatus = 'non-free'
-        else:
-            self.pkgstatus = 'experimental'
-
-
-        if self.properties.has_key('source'):
-            query = "EXECUTE src_vcs ('%s')" % (self.properties['source'])
-            _execute_udd_query(query)
-            if curs.rowcount > 0:
-                # There is only one line returned by this query
-                row = RowDictionaries(curs)[0]
-                # If some information about Vcs is found in the database, make sure the values from the tasks file are ignored
-                self.vcs_found = 1
-                for prop in row.keys():
-                    if row[prop]:
-                        self.properties[prop] = row[prop]
-                if not self.properties.has_key('vcs-browser') or self.properties['vcs-browser'] == HOMEPAGENONE:
-                    try:
-                        self.properties['vcs-browser'] = BrowserFromVcsURL(self.properties['vcs-type'], self.properties['vcs-url'])
-                    except KeyError, err:
-                        logger.warning("Vcs Property missing in Database:", self.properties, err)
-                if not self.properties.has_key('vcs-type') or not self.properties['vcs-type']:
-                    if self.properties.has_key('vcs-browser') and self.properties['vcs-browser'] != HOMEPAGENONE:
-                        self.properties['vcs-type']    = VcsTypeFromBrowserURL(self.properties['vcs-browser'])
-            # We are only interested in source packages (for instance for Bugs page)
-            if source == 1:
-                self.pkg = self.properties['source']
-            # Stop using the source package in self.pkg because we need the source package to obtain latest uploaders,
-            # and bugs should be rendered in the same job - so we need the differentiation anyway
-            self.src = self.properties['source']
-        else:
-            logger.error("Failed to obtain source for package", self.pkg)
-            return
-
-        if source == 0: # If we are querying for source packages to render BTS pages
-                # translations are irrelevant - so only obtain ddtp translations
-                # otherwise
-
-######################
                 if flag == 0:
                     # If there was no such package found query UDD whether any package provides this name
                     # This is often the case for libraries with versions in the package name
                     query = "EXECUTE query_provides ('%s')" % (dep.pkg)
-                    _execute_udd_query(query)
+                    curs.execute(query)
                     if curs.rowcount > 0:
                         has_expilicite = 0
                         VirtProvides = []
@@ -1901,7 +1431,7 @@ class TaskDependencies:
                         if has_expilicite == 1:
                             logger.error("Do not keep a record of virtual package %s which has explicite package dependencies" % dep.pkg)
                             # ATTENTION: THIS HAS TO BE CHANGED FOR blends-dev BY AN OPTIONAL parameter
-#                            continue
+                            continue
 
                         logger.error("Use real package %s instead of virtual package %s." % (VirtProvides[0], dep.pkg))
                         dep.pkg = VirtProvides[0]
@@ -1926,6 +1456,20 @@ class TaskDependencies:
                     else:
                         logger.warning("Dependency with unknown status: %s (Task %s)" % (dep.pkg, dep.taskname))
 
+            tmp_dep_list.append(dep)
+            # remarks which are common to several dependencies in a list have to be added to all of the dependencies
+            for dep in tmp_dep_list:
+                if remark != {}:
+                    dep.remark = remark
+                if fields_obsolete != [] and dep.pkgstatus != 'new':
+                    logger.info("Package %s is an official package and has information in UDD. The following extra information can be removed from tasks file %s: %s" % \
+                                     (dep.pkg, dep.taskname, str(fields_obsolete)))
+                if dep.desc['en'] == {}:
+                    logger.error("Missing description for package %s in task %s.  This package will be ignored completely." % (dep.pkg, dep.taskname))
+                else:
+                    self._AppendDependency2List(dep, source)
+
+        f.close()
 
         for dependency in self.dependencies.keys():
             self.dependencies[dependency].sort()
@@ -1939,11 +1483,7 @@ class TaskDependencies:
                 dep.responsible         = MarkupString(dep.responsible, dep.pkg, 'responsible')
                 if dep.desc['en'] != {}:
                     dep.desc['en']['short'] = MarkupString(dep.desc['en']['short'], dep.pkg, 'pkgShortDesc')
-                    try:
-                        dep.desc['en']['long']  = MarkupString(dep.desc['en']['long'],  dep.pkg, 'pkgLongDesc')
-                    except KeyError:
-                        logger.error("Dep has no English long description: %s", dep.pkg)
-                        dep.desc['en']['long'] = "??? Missing long description for package %s" % dep.pkg
+                    dep.desc['en']['long']  = MarkupString(dep.desc['en']['long'],  dep.pkg, 'pkgLongDesc')
 
     def __str__(self):
         ret = "Blend: " + self.blendname + ", " \
@@ -2008,10 +1548,10 @@ SEVERITIES = ('critical', 'grave', 'serious', 'important', 'normal', 'minor', 'w
 # For the moment just query for the highest available version of the description
 query = """PREPARE bugs_query_source (text) AS
            SELECT id, package, source, status, severity, done, title FROM bugs WHERE source = $1"""
-_execute_udd_query(query)
+curs.execute(query)
 
 query = """PREPARE bugs_query_tags (int) AS SELECT tag FROM bugs_tags WHERE id = $1"""
-_execute_udd_query(query)
+curs.execute(query)
 
 class BugEntry:
     # Define a separate class for bug entries to be able to define a reasonably sorting mechanism
@@ -2027,7 +1567,7 @@ class BugEntry:
 	self.severity = bug['severity']
 
         query = "EXECUTE bugs_query_tags (%i)" % self.bug
-        _execute_udd_query(query)
+        curs.execute(query)
 
         self.tags     = ''
         if curs.rowcount > 0:
@@ -2087,7 +1627,7 @@ class PackageBugsOpenAndDone:
 
 	bugs = None
         query = "EXECUTE bugs_query_source ('%s')" % source
-        _execute_udd_query(query)
+        curs.execute(query)
 
         if curs.rowcount > 0:
             for bug in RowDictionaries(curs):

-- 
Static and dynamic websites for Debian Pure Blends


