[Git][security-tracker-team/security-tracker][helmutg/precompute-json] precompute /data/json and store it inside security.db

Helmut Grohne (@helmutg) helmutg at debian.org
Wed Feb 26 09:03:33 GMT 2025



Helmut Grohne pushed to branch helmutg/precompute-json at Debian Security Tracker / security-tracker


Commits:
f1fa755e by Helmut Grohne at 2025-02-26T10:02:50+01:00
precompute /data/json and store it inside security.db

The computation of the json export can take a significant amount of time,
which is why the web service caches it. Unfortunately, generation can
take more than a minute and since the request handler holds a global
mutex, no other requests can be served concurrently.

Note that the debsecan data suffers from similar problems in principle,
but it is precomputed as part of bin/update-db. This change does the
same to the json data. The debsecan_data SQL table is generalized to an
export_data SQL table that holds both debsecan and json. Another
semantic change is skipping the base64 encoding. SQLite and apsw are
capable of handling BLOBs and we already use BLOBs, so save a bit of
space and computation and avoid the round trip through base64. As a
result, the json data is never computed inside the web server and always
up to date.

- - - - -


4 changed files:

- bin/show-debsecan
- bin/tracker_service.py
- bin/update-db
- lib/python/security_db.py


Changes:

=====================================
bin/show-debsecan
=====================================
@@ -2,7 +2,6 @@
 
 import sys
 import apsw
-import base64
 import zlib
 from io import StringIO
 
@@ -14,14 +13,13 @@ db = apsw.Connection(sys.argv[1])
 c = db.cursor()
 
 if len(sys.argv) == 2:
-    for (name,) in c.execute("SELECT name FROM debsecan_data ORDER BY name"):
-        print(name)
+    for (name,) in c.execute("SELECT path FROM export_data WHERE path LIKE 'debsecan/%' ORDER BY path"):
+        print(name.removeprefix("debsecan/"))
 else: # len(sys.argv) == 3
     name = sys.argv[2]
-    for (data,) in c.execute("SELECT data FROM debsecan_data WHERE name = ?",
-                             (name,)):
+    for (data,) in c.execute("SELECT data FROM export_data WHERE path = ?",
+                             ("debsecan/" + name,)):
         pass
-    data = base64.b64decode(data)
     data = zlib.decompress(data)
 
     # The following has been taken from a debsecan test case.


=====================================
bin/tracker_service.py
=====================================
@@ -1,7 +1,5 @@
 #!/usr/bin/python3
 
-import email.utils
-import json
 import os.path
 import sys
 import time
@@ -32,20 +30,6 @@ if __name__ == "__main__":
 else:
     webservice_base_class = WebServiceHTTP
 
-def clean_dict(d):
-    """ taken from http://w3facility.org/question/exclude-emptynull-values-from-json-serialization/
-    Delete keys with the value ``None`` in a dictionary, recursively.
-
-    This alters the input so you may wish to ``copy`` the dict first.
-    """
-    # d.iteritems isn't used as you can't del or the iterator breaks.
-    for key, value in list(d.items()):
-        if value is None:
-            del d[key]
-        elif isinstance(value, dict):
-            clean_dict(value)
-    return d  # For convenience
-
 class BugFilter:
     default_action_list = [('high_urgency', 'high', 'urgency'),
                            ('medium_urgency', 'medium', 'urgency'),
@@ -133,14 +117,9 @@ class TrackerService(webservice_base_class):
         determining the values of these factors, but the rating itself comes
         from a fully automated formula.''')
 
-    json_generation_interval = 5 * 60 # in seconds
-
     def __init__(self, socket_name, db_name):
         webservice_base_class.__init__(self, socket_name)
         self.db = security_db.DB(db_name)
-        self.json_data = None # the JSON dump itself
-        self.json_timestamp = None # timestamp of JSON generation
-        self.json_last_modified = None
 
         self.stable_releases = config.get_supported_releases()
         self.stable_releases.remove(config.get_release_codename('testing'))
@@ -1281,200 +1260,25 @@ Debian bug number.'''),
             data.append('\n')
         return BinaryResult(''.join(data),'application/octet-stream')
 
-    def _get_json(self):
-        """Helper method handling basic caching of the JSON data, to avoid
-           overloading security-tracker.d.o. It'll return the cached
-           version of this data unless it's been generated more than
-           self.json_generation_interval seconds ago """
-
-        if self.json_timestamp: # we've generated the JSON at least once
-            delta = time.time() - self.json_timestamp
-            if delta <= self.json_generation_interval:
-                # not expired yet, serve the cached data
-                return self.json_data
-
-        # if we reached here, the data has expired; let's regenerate it
-        from collections import defaultdict
-        packages = []
-        issues = defaultdict(list)
-        descriptions = {}
-        debianbugs = defaultdict(dict)
-        remote = defaultdict(dict)
-        releases = defaultdict(lambda: defaultdict(list))
-        subreleases = defaultdict(lambda: defaultdict(list))
-        repositories = defaultdict(lambda: defaultdict(list))
-        version = defaultdict(lambda: defaultdict(dict))
-        fixed_version = defaultdict(lambda: defaultdict(dict))
-        status = defaultdict(lambda: defaultdict(dict))
-        urgency = defaultdict(lambda: defaultdict(dict))
-        nodsa = defaultdict(lambda: defaultdict(dict))
-        nodsa_reason = defaultdict(lambda: defaultdict(dict))
-        next_point_update = defaultdict(lambda: defaultdict(set))
-        supported_releases = config.get_supported_releases()
-        for (pkg, issue, desc, debianbug, release, subrelease, db_version, db_fixed_version, db_status, db_urgency, db_remote, db_nodsa, db_nodsa_reason, db_next_point_update) in self.db.cursor().execute(
-                """SELECT sp.name, st.bug_name,
-                (SELECT cve_desc FROM nvd_data
-                WHERE cve_name = st.bug_name),
-                (SELECT MIN(debian_cve.bug) FROM debian_cve
-                WHERE debian_cve.bug_name = st.bug_name),
-                sp.release, sp.subrelease,
-                sp.version,
-                (SELECT pn.fixed_version FROM package_notes AS pn
-                WHERE pn.bug_name = st.bug_name
-                AND pn.package = sp.name AND
-                (pn.release = sp.release OR (pn.release = '' AND fixed_version != ''))),
-                st.vulnerable, st.urgency,
-                (SELECT range_remote FROM nvd_data
-                WHERE cve_name = st.bug_name),
-                (SELECT comment FROM package_notes_nodsa AS nd
-                WHERE nd.package = sp.name AND nd.release = sp.release
-                AND nd.bug_name = st.bug_name) AS nodsa,
-                (SELECT reason FROM package_notes_nodsa AS nd
-                WHERE nd.package = sp.name AND nd.release = sp.release
-                AND nd.bug_name = st.bug_name) AS nodsa_reason,
-                (SELECT next_point_update.release as next_point_update_release FROM next_point_update
-                WHERE st.bug_name=next_point_update.cve_name) AS next_point_update_release
-                FROM source_package_status AS st, source_packages AS sp, bugs
-                WHERE sp.rowid = st.package AND st.bug_name = bugs.name
-                AND ( st.bug_name LIKE 'CVE-%' OR st.bug_name LIKE 'TEMP-%' )
-                AND sp.release IN (""" + ",".join("?" * len(supported_releases)) + """)
-                ORDER BY sp.name, st.bug_name, sp.release, sp.subrelease""" , supported_releases):
-
-            ### to ease debugging...:
-            #if issue in ('CVE-2012-6656','CVE-2014-8738','CVE-2013-6673') :
-            #    print pkg, issue, release, subrelease, db_version, db_fixed_version, db_status
-            if pkg not in packages:
-                packages.append(pkg)
-            if issue not in issues[pkg]:
-                issues[pkg].append(issue)
-                descriptions[issue] = desc
-                debianbugs[pkg][issue] = debianbug
-                remote[pkg][issue] = db_remote
-            if release not in releases[pkg][issue]:
-                releases[pkg][issue].append(release)
-            subreleases[pkg][issue].append(subrelease)
-            if subrelease == '':
-                repository = release
-            else:
-                repository = release+'-'+subrelease
-            if repository not in repositories[pkg][issue]:
-                repositories[pkg][issue].append(repository)
-            version[pkg][issue][repository] = db_version
-            fixed_version[pkg][issue][repository] = db_fixed_version
-            status[pkg][issue][repository] = db_status
-            urgency[pkg][issue][repository] = db_urgency
-            if db_next_point_update:
-                next_point_update[pkg][issue].add(db_next_point_update)
-            if str(db_nodsa) != 'None':
-                nodsa[pkg][issue][repository] = db_nodsa
-            if str(db_nodsa_reason) != 'None':
-                nodsa_reason[pkg][issue][repository] = db_nodsa_reason
-
-        data = {}
-        for pkg in packages:
-            data[pkg] = {}
-            for issue in issues[pkg]:
-                description = None
-                debianbug = None
-                scope = None
-                suites = {}
-
-                if descriptions[issue]:
-                    description = descriptions[issue]
-                if debianbugs[pkg][issue] != None:
-                    debianbug = debianbugs[pkg][issue]
-                if str(remote[pkg][issue]) == 'None':
-                    pass
-                elif remote[pkg][issue] == 1:
-                    scope = "remote"
-                else:
-                    scope = "local"
-                for release in releases[pkg][issue]:
-                    state = None
-                    suite_fixed_version = None
-                    suite_urgency = None
-                    suite_nodsa = None
-                    suite_nodsa_reason = None
-                    suite_repositories = {}
-                    winner=''
-                    for suffix in ('','-security','-lts'):
-                        subrelease=release+suffix
-                        if subrelease in status[pkg][issue]:
-                            if status[pkg][issue][subrelease] == 0:
-                                # the issue is fixed, let's pick this subrelease and be done
-                                winner=suffix
-                                break
-                            elif status[pkg][issue][subrelease] > 0 and winner == '':
-                                # the issue ain't fixed, but at least exists.
-                                # keep looking for a real winner...
-                                winner=suffix
-                    repository=release+winner
-                    if status[pkg][issue][repository] == 0:
-                        # 1 = vulnerable, 2 = undetermined
-                        state = "resolved"
-                        suite_fixed_version = fixed_version[pkg][issue][repository]
-                    elif status[pkg][issue][repository] == 2:
-                        state = "undetermined"
-                    else:
-                        state = "open"
-                    suite_urgency = urgency[pkg][issue][repository]
-                    if repository in nodsa[pkg][issue]:
-                        suite_nodsa = nodsa[pkg][issue][repository]
-                    if repository in nodsa_reason[pkg][issue]:
-                        suite_nodsa_reason = nodsa_reason[pkg][issue][repository]
-                    if pkg in next_point_update and \
-                            issue in next_point_update[pkg] and \
-                            release in next_point_update[pkg][issue]:
-                        suite_next_point_update = True
-                    else:
-                        suite_next_point_update = None
-                    for repository in repositories[pkg][issue]:
-                        for suffix in ('','-security','-lts'):
-                            subrelease=release+suffix
-                            if subrelease in version[pkg][issue]:
-                                suite_repositories[subrelease] = version[pkg][issue][subrelease]
-                    suites[release] = { "status": state,
-                                        "repositories": suite_repositories,
-                                        "fixed_version" : suite_fixed_version,
-                                        "urgency": suite_urgency,
-                                        "nodsa": suite_nodsa,
-                                        "nodsa_reason": suite_nodsa_reason,
-                                        "next_point_update": suite_next_point_update
-                                        }
-                    clean_dict(suites[release])
-                pkg_issue = { "description": description,
-                              "debianbug": debianbug,
-                              "scope": scope,
-                              "releases": suites }
-                clean_dict(pkg_issue)
-
-                data[pkg][issue]=pkg_issue
-
-        # store the JSON dump in memory, and update the generation
-        # timestamp before returning
-        new_data = json.dumps(data, separators=(',', ':'))
-        self.json_timestamp = time.time()
-        if new_data != self.json_data:
-            self.json_data = new_data
-            self.json_last_modified = self.json_timestamp
-        return self.json_data
-
-    def page_json(self, path, params, url):
-        result = BinaryResult(self._get_json(),'application/json')
-        result.headers['Last-Modified'] = email.utils.formatdate(self.json_last_modified, usegmt=True)
-        return result
-
-    def page_debsecan(self, path, params, url):
-        obj = '/'.join(path)
-        data = self.db.getDebsecan(obj)
+    def exported_result(self, path, url):
+        data = self.db.getExported(path)
         if data:
-            return BinaryResult(data,'application/octet-stream')
+            result = BinaryResult(data[0], data[1])
+            if data[2]:
+                result.headers['Last-Modified'] = data[2]
+            return result
         else:
             return self.create_page(
                 url, "Object not found",
-                [P("The requested debsecan object has not been found.")],
+                [P("The requested object has not been found.")],
                 status=404)
 
+    def page_json(self, path, params, url):
+        return self.exported_result('data/json', url)
+
+    def page_debsecan(self, path, params, url):
+        return self.exported_result("debsecan/" + '/'.join(path), url)
+
     def create_page(self, url, title, body, search_in_page=False, status=200):
         append = body.append
         append(HR())


=====================================
bin/update-db
=====================================
@@ -75,6 +75,10 @@ if warnings:
 
 db.calculateDebsecan()
 
+# json data
+
+db.calculateJson()
+
 # Everything worked well.
 
 db.commit(cursor)


=====================================
lib/python/security_db.py
=====================================
@@ -29,9 +29,10 @@ FIXME: Document the database schema once it is finished.
 
 from apt_pkg import version_compare
 import apsw
-import base64
 import bugs
-from collections import namedtuple
+from collections import defaultdict, namedtuple
+import email.utils
+import json
 import pickle
 import glob
 import itertools
@@ -39,6 +40,7 @@ import os
 import os.path
 import re
 import sys
+import time
 import zlib
 
 import config
@@ -226,6 +228,20 @@ DSAsForSourcePackage = namedtuple(
     "DSAsForSourcePackage",
     "bug description")
 
+def clean_dict(d):
+    """ taken from http://w3facility.org/question/exclude-emptynull-values-from-json-serialization/
+    Delete keys with the value ``None`` in a dictionary, recursively.
+
+    This alters the input so you may wish to ``copy`` the dict first.
+    """
+    # d.iteritems isn't used as you can't del or the iterator breaks.
+    for key, value in list(d.items()):
+        if value is None:
+            del d[key]
+        elif isinstance(value, dict):
+            clean_dict(value)
+    return d  # For convenience
+
 class DB:
     """Access to the security database.
 
@@ -251,7 +267,7 @@ class DB:
         # Enable WAL.  This means that updates will not block readers.
         c.execute("PRAGMA journal_mode = WAL")
 
-        self.schema_version = 23
+        self.schema_version = 24
         self._initFunctions()
 
         for (v,) in c.execute("PRAGMA user_version"):
@@ -270,6 +286,8 @@ class DB:
                 c.execute("PRAGMA user_version = 22")
             elif v == 22:
                 self._initSchema22()
+            elif v == 23:
+                self._initSchema23()
             elif v != self.schema_version:
                 if self.verbose:
                     print("DB: schema version mismatch: expected %d, got %d"
@@ -441,15 +459,12 @@ class DB:
             loss_sec_prot_admin INTEGER NOT NULL,
             loss_sec_prot_other INTEGER NOT NULL)""")
 
-        cursor.execute(
-            """CREATE TABLE debsecan_data
-            (name TEXT NOT NULL PRIMARY KEY,
-            data TEXT NOT NULL)""")
-
         self._initNoDSA(cursor)
 
         self._initNextPointRelease(cursor)
 
+        self._initExporters(cursor)
+
         cursor.execute("PRAGMA user_version = %d" % self.schema_version)
 
     def _initSchema20(self):
@@ -477,6 +492,7 @@ class DB:
 
         cursor.execute("PRAGMA user_version = 1")
         self._initNextPointRelease(cursor)
+        self._initExporters(cursor)
         cursor.execute("PRAGMA user_version = %d" % self.schema_version)
 
     def _initNextPointRelease(self, cursor):
@@ -487,6 +503,21 @@ class DB:
             PRIMARY KEY (cve_name, release))
             """)
 
+    def _initSchema23(self):
+        cursor = self.db.cursor()
+        cursor.execute("PRAGMA user_version = 1")
+        self._initExporters(cursor)
+        cursor.execute("PRAGMA user_version = %d" % self.schema_version)
+
+    def _initExporters(self, cursor):
+        cursor.execute("DROP TABLE IF EXISTS debsecan_data")
+        cursor.execute(
+            """CREATE TABLE export_data
+            (path TEXT NOT NULL PRIMARY KEY,
+            content_type TEXT NOT NULL,
+            last_modified TEXT,
+            data TEXT NOT NULL)""")
+
     def _initViews(self, cursor):
         testing = config.get_release_codename('testing')
         cursor.execute(
@@ -1523,6 +1554,14 @@ class DB:
         VALUES (?, ?, ?, ?)""",
               (bug_name, suite, result, pkgs))
 
+    def storeExport(self, name, content_type, data, last_modified=None):
+        """Store a pre-computed data export in the export_data table."""
+        if last_modified is None:
+            last_modified = email.utils.formatdate(time.time(), usegmt=True)
+        self.cursor().execute(
+            "INSERT OR REPLACE INTO export_data (path, content_type, last_modified, data) VALUES (?, ?, ?, ?)",
+            (name, content_type, last_modified, data))
+
     def calculateDebsecan0(self, release):
         """Create data for the debsecan tool (VERSION 0 format)."""
 
@@ -1618,11 +1657,9 @@ class DB:
                              kind, urgency_to_flag[urgency], remote,
                              fix_available,
                              package, fixed_version, description))
-        result = base64.encodebytes(zlib.compress(''.join(result).encode('utf-8'), 9))
+        result = zlib.compress(''.join(result).encode('utf-8'), 9)
 
-        c.execute(
-            "INSERT OR REPLACE INTO debsecan_data (name, data) VALUES (?, ?)",
-            ('release/' + release, result))
+        self.storeExport('debsecan/release/' + release, 'application/octet-stream', result)
 
         c.execute("DROP TABLE vulnlist")
 
@@ -1789,9 +1826,8 @@ class DB:
         source_packages.sort()
 
         def store_value(name, value):
-            value = base64.encodebytes(zlib.compress(value.encode('utf-8'), 9))
-            c.execute("""INSERT OR REPLACE INTO debsecan_data
-            VALUES (?, ?)""", (name, value))
+            value = zlib.compress(value.encode('utf-8'), 9)
+            self.storeExport('debsecan/' + name, 'application/octet-stream', value)
 
         def gen_release(release):
             result = result_start[:]
@@ -1836,13 +1872,174 @@ class DB:
             self.calculateDebsecan0(release)
         self.calculateDebsecan1()
 
-    def getDebsecan(self, name):
-        """Returns the debsecan data item NAME."""
-        for (data,) in self.cursor().execute(
-            "SELECT data FROM debsecan_data WHERE name = ?", (name,)):
-            return base64.decodebytes(data)
-        else:
-            return None
+    def getExported(self, name):
+        """Returns pre-computed export data by item name."""
+        for (data, content_type, last_modified) in self.cursor().execute(
+            "SELECT data, content_type, last_modified FROM export_data WHERE path = ?",
+            (name,)):
+            return (data, content_type, last_modified)
+        return None
+
+    def calculateJson(self):
+        """Calculate the /data/json export format."""
+        packages = []
+        issues = defaultdict(list)
+        descriptions = {}
+        debianbugs = defaultdict(dict)
+        remote = defaultdict(dict)
+        releases = defaultdict(lambda: defaultdict(list))
+        subreleases = defaultdict(lambda: defaultdict(list))
+        repositories = defaultdict(lambda: defaultdict(list))
+        version = defaultdict(lambda: defaultdict(dict))
+        fixed_version = defaultdict(lambda: defaultdict(dict))
+        status = defaultdict(lambda: defaultdict(dict))
+        urgency = defaultdict(lambda: defaultdict(dict))
+        nodsa = defaultdict(lambda: defaultdict(dict))
+        nodsa_reason = defaultdict(lambda: defaultdict(dict))
+        next_point_update = defaultdict(lambda: defaultdict(set))
+        supported_releases = config.get_supported_releases()
+        for (pkg, issue, desc, debianbug, release, subrelease, db_version, db_fixed_version, db_status, db_urgency, db_remote, db_nodsa, db_nodsa_reason, db_next_point_update) in self.cursor().execute(
+                """SELECT sp.name, st.bug_name,
+                (SELECT cve_desc FROM nvd_data
+                WHERE cve_name = st.bug_name),
+                (SELECT MIN(debian_cve.bug) FROM debian_cve
+                WHERE debian_cve.bug_name = st.bug_name),
+                sp.release, sp.subrelease,
+                sp.version,
+                (SELECT pn.fixed_version FROM package_notes AS pn
+                WHERE pn.bug_name = st.bug_name
+                AND pn.package = sp.name AND
+                (pn.release = sp.release OR (pn.release = '' AND fixed_version != ''))),
+                st.vulnerable, st.urgency,
+                (SELECT range_remote FROM nvd_data
+                WHERE cve_name = st.bug_name),
+                (SELECT comment FROM package_notes_nodsa AS nd
+                WHERE nd.package = sp.name AND nd.release = sp.release
+                AND nd.bug_name = st.bug_name) AS nodsa,
+                (SELECT reason FROM package_notes_nodsa AS nd
+                WHERE nd.package = sp.name AND nd.release = sp.release
+                AND nd.bug_name = st.bug_name) AS nodsa_reason,
+                (SELECT next_point_update.release as next_point_update_release FROM next_point_update
+                WHERE st.bug_name=next_point_update.cve_name) AS next_point_update_release
+                FROM source_package_status AS st, source_packages AS sp, bugs
+                WHERE sp.rowid = st.package AND st.bug_name = bugs.name
+                AND ( st.bug_name LIKE 'CVE-%' OR st.bug_name LIKE 'TEMP-%' )
+                AND sp.release IN (""" + ",".join("?" * len(supported_releases)) + """)
+                ORDER BY sp.name, st.bug_name, sp.release, sp.subrelease""" , supported_releases):
+
+            ### to ease debugging...:
+            #if issue in ('CVE-2012-6656','CVE-2014-8738','CVE-2013-6673') :
+            #    print pkg, issue, release, subrelease, db_version, db_fixed_version, db_status
+            if pkg not in packages:
+                packages.append(pkg)
+            if issue not in issues[pkg]:
+                issues[pkg].append(issue)
+                descriptions[issue] = desc
+                debianbugs[pkg][issue] = debianbug
+                remote[pkg][issue] = db_remote
+            if release not in releases[pkg][issue]:
+                releases[pkg][issue].append(release)
+            subreleases[pkg][issue].append(subrelease)
+            if subrelease == '':
+                repository = release
+            else:
+                repository = release+'-'+subrelease
+            if repository not in repositories[pkg][issue]:
+                repositories[pkg][issue].append(repository)
+            version[pkg][issue][repository] = db_version
+            fixed_version[pkg][issue][repository] = db_fixed_version
+            status[pkg][issue][repository] = db_status
+            urgency[pkg][issue][repository] = db_urgency
+            if db_next_point_update:
+                next_point_update[pkg][issue].add(db_next_point_update)
+            if str(db_nodsa) != 'None':
+                nodsa[pkg][issue][repository] = db_nodsa
+            if str(db_nodsa_reason) != 'None':
+                nodsa_reason[pkg][issue][repository] = db_nodsa_reason
+
+        data = {}
+        for pkg in packages:
+            data[pkg] = {}
+            for issue in issues[pkg]:
+                description = None
+                debianbug = None
+                scope = None
+                suites = {}
+
+                if descriptions[issue]:
+                    description = descriptions[issue]
+                if debianbugs[pkg][issue] != None:
+                    debianbug = debianbugs[pkg][issue]
+                if str(remote[pkg][issue]) == 'None':
+                    pass
+                elif remote[pkg][issue] == 1:
+                    scope = "remote"
+                else:
+                    scope = "local"
+                for release in releases[pkg][issue]:
+                    state = None
+                    suite_fixed_version = None
+                    suite_urgency = None
+                    suite_nodsa = None
+                    suite_nodsa_reason = None
+                    suite_repositories = {}
+                    winner=''
+                    for suffix in ('','-security','-lts'):
+                        subrelease=release+suffix
+                        if subrelease in status[pkg][issue]:
+                            if status[pkg][issue][subrelease] == 0:
+                                # the issue is fixed, let's pick this subrelease and be done
+                                winner=suffix
+                                break
+                            elif status[pkg][issue][subrelease] > 0 and winner == '':
+                                # the issue ain't fixed, but at least exists.
+                                # keep looking for a real winner...
+                                winner=suffix
+                    repository=release+winner
+                    if status[pkg][issue][repository] == 0:
+                        # 1 = vulnerable, 2 = undetermined
+                        state = "resolved"
+                        suite_fixed_version = fixed_version[pkg][issue][repository]
+                    elif status[pkg][issue][repository] == 2:
+                        state = "undetermined"
+                    else:
+                        state = "open"
+                    suite_urgency = urgency[pkg][issue][repository]
+                    if repository in nodsa[pkg][issue]:
+                        suite_nodsa = nodsa[pkg][issue][repository]
+                    if repository in nodsa_reason[pkg][issue]:
+                        suite_nodsa_reason = nodsa_reason[pkg][issue][repository]
+                    if pkg in next_point_update and \
+                            issue in next_point_update[pkg] and \
+                            release in next_point_update[pkg][issue]:
+                        suite_next_point_update = True
+                    else:
+                        suite_next_point_update = None
+                    for repository in repositories[pkg][issue]:
+                        for suffix in ('','-security','-lts'):
+                            subrelease=release+suffix
+                            if subrelease in version[pkg][issue]:
+                                suite_repositories[subrelease] = version[pkg][issue][subrelease]
+                    suites[release] = { "status": state,
+                                        "repositories": suite_repositories,
+                                        "fixed_version" : suite_fixed_version,
+                                        "urgency": suite_urgency,
+                                        "nodsa": suite_nodsa,
+                                        "nodsa_reason": suite_nodsa_reason,
+                                        "next_point_update": suite_next_point_update
+                                        }
+                    clean_dict(suites[release])
+                pkg_issue = { "description": description,
+                              "debianbug": debianbug,
+                              "scope": scope,
+                              "releases": suites }
+                clean_dict(pkg_issue)
+
+                data[pkg][issue]=pkg_issue
+
+        self.storeExport('data/json',
+                         'application/json',
+                         json.dumps(data, separators=(',', ':')).encode("utf8"))
 
     def updateNVD(self, cursor, data, incremental):
         """Adds (and overwrites) NVD data stored in the database.  This



View it on GitLab: https://salsa.debian.org/security-tracker-team/security-tracker/-/commit/f1fa755e82177d66fcba7b40ed14bb25df236ff0

-- 
View it on GitLab: https://salsa.debian.org/security-tracker-team/security-tracker/-/commit/f1fa755e82177d66fcba7b40ed14bb25df236ff0
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-security-tracker-commits/attachments/20250226/89c9d69a/attachment-0001.htm>


More information about the debian-security-tracker-commits mailing list