[Git][qa/jenkins.debian.net][master] reproducible debian: sqla 2.0 positional columns in select()

Mattia Rizzolo (@mattia) gitlab at salsa.debian.org
Mon Aug 11 14:53:10 BST 2025



Mattia Rizzolo pushed to branch master at Debian QA / jenkins.debian.net


Commits:
48482419 by Mattia Rizzolo at 2025-08-11T15:52:45+02:00
reproducible debian: sqla 2.0 positional columns in select()

Signed-off-by: Mattia Rizzolo <mattia at debian.org>

- - - - -
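
For context, the change applied throughout this commit is the SQLAlchemy 1.4/2.0 calling style for select(), which takes the selected columns as individual positional arguments rather than as a single list. A minimal, self-contained sketch of the before/after (the table here is hypothetical, only so the example runs on its own):

    from sqlalchemy import Column, Integer, MetaData, String, Table, select

    metadata = MetaData()
    # Hypothetical table, purely for illustration.
    sources = Table('sources', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('name', String))

    # Legacy 1.x form (columns wrapped in a list); removed in SQLAlchemy 2.0:
    #   query = select([sources.c.id, sources.c.name])

    # 1.4+/2.0 form (columns passed positionally):
    query = select(sources.c.id, sources.c.name)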


8 changed files:

- bin/reproducible_html_arch_oldies.py
- bin/reproducible_html_indexes.py
- bin/reproducible_html_live_status.py
- bin/reproducible_html_notes.py
- bin/reproducible_notes.py
- bin/reproducible_opensuse_import_json.py
- bin/reproducible_openwrt_package_parser.py
- bin/reproducible_remote_scheduler.py


Changes:

=====================================
bin/reproducible_html_arch_oldies.py
=====================================
@@ -31,13 +31,13 @@ stats_build = db_table('stats_build')
 
 def gen_suitearch_oldies_table(suite, arch):
     html = ''
-    query = select([
+    query = select(
         sources.c.suite,
         sources.c.architecture,
         sources.c.name,
         results.c.status,
         results.c.build_date
-    ]).select_from(
+    ).select_from(
         results.join(sources)
     ).where(
         and_(
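
For readers less familiar with SQLAlchemy Core: in queries like the one above, select_from(results.join(sources)) supplies the joined FROM clause, with the ON condition derived from the foreign key between the two tables (presumably results.package_id referencing sources.id, judging from the other scripts in this commit). A rough sketch of the pattern, reusing the sources and results table objects from the script, with the WHERE condition shortened:

    from sqlalchemy import and_, select

    query = select(
        sources.c.name,
        results.c.status,
    ).select_from(
        results.join(sources)          # join condition autodetected from the foreign key
    ).where(
        and_(
            sources.c.suite == suite,  # suite/arch as passed to gen_suitearch_oldies_table()
            sources.c.architecture == arch,
        )
    )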


=====================================
bin/reproducible_html_indexes.py
=====================================
@@ -84,10 +84,10 @@ for issue in filtered_issues:
 if not filtered_issues:
     filter_issues_list = [None]
 
-distro_id = query_db(select([distributions.c.id]).where(distributions.c.name == DISTRO))[0][0]
+distro_id = query_db(select(distributions.c.id).where(distributions.c.name == DISTRO))[0][0]
 
 count_results = select(
-    [func.count(results.c.id)]
+    func.count(results.c.id)
 ).select_from(
     results.join(sources)
 ).where(
@@ -99,7 +99,7 @@ count_results = select(
 )
 
 select_sources = select(
-    [sources.c.name]
+    sources.c.name
 ).select_from(
     results.join(sources)
 ).where(
@@ -212,7 +212,7 @@ queries = {
                 results.c.status == Status.FTBFS.value.name,
                 sources.c.id.notin_(
                     select(
-                        [notes.c.package_id]
+                        notes.c.package_id
                     ).select_from(
                         notes
                     ).where(
@@ -229,7 +229,7 @@ queries = {
                 results.c.status == Status.FTBFS.value.name,
                 sources.c.id.in_(
                     select(
-                        [notes.c.package_id]
+                        notes.c.package_id
                     ).select_from(
                         notes
                     ).where(
@@ -307,7 +307,7 @@ queries = {
         ),
     "notes":
         select(
-            [sources.c.name]
+            sources.c.name
         ).select_from(
             sources.join(results).join(notes)
         ).where(
@@ -324,7 +324,7 @@ queries = {
         select_sources.where(
             and_(
                 results.c.status == bindparam('status'),
-                sources.c.id.notin_(select([notes.c.package_id]).select_from(notes))
+                sources.c.id.notin_(select(notes.c.package_id).select_from(notes))
             )
         ).order_by(
             desc(results.c.build_date)


=====================================
bin/reproducible_html_live_status.py
=====================================
@@ -38,9 +38,9 @@ def generate_schedule(arch):
     # 'AND h.name=s.name AND h.suite=s.suite AND h.architecture=s.architecture'
     # in this query and the query below is needed due to not using package_id
     # in the stats_build table, which should be fixed...
-    averagesql = select([
+    averagesql = select(
         func.coalesce(func.avg(cast(stats_build.c.build_duration, Integer)), 0)
-    ]).where(
+    ).where(
         and_(
             stats_build.c.status.in_(('reproducible', 'FTBR')),
             stats_build.c.name == sources.c.name,
@@ -49,7 +49,7 @@ def generate_schedule(arch):
         )
     ).as_scalar()
 
-    query = select([
+    query = select(
         schedule.c.date_scheduled,
         sources.c.suite,
         sources.c.architecture,
@@ -57,7 +57,7 @@ def generate_schedule(arch):
         results.c.status,
         results.c.build_duration,
         averagesql
-    ]).select_from(
+    ).select_from(
         sources.join(schedule).join(results, isouter=True)
     ).where(
         and_(
@@ -97,9 +97,9 @@ def generate_schedule(arch):
 
 
 def generate_live_status_table(arch):
-    averagesql = select([
+    averagesql = select(
         func.coalesce(func.avg(cast(stats_build.c.build_duration, Integer)), 0)
-    ]).where(
+    ).where(
         and_(
             stats_build.c.status.in_(('reproducible', 'FTBR')),
             stats_build.c.name == sources.c.name,
@@ -108,7 +108,7 @@ def generate_live_status_table(arch):
         )
     ).as_scalar()
 
-    query = select([
+    query = select(
         sources.c.id,
         sources.c.suite,
         sources.c.architecture,
@@ -119,7 +119,7 @@ def generate_live_status_table(arch):
         results.c.build_duration,
         averagesql,
         schedule.c.job,
-    ]).select_from(
+    ).select_from(
         sources.join(schedule).join(results, isouter=True)
     ).where(
         and_(
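
One side note on the averagesql expression built above: it is a correlated scalar subquery, and the as_scalar() call is itself deprecated since SQLAlchemy 1.4 in favour of scalar_subquery() (this commit does not touch that). A minimal sketch of the newer spelling, using the same table objects as the script, with the condition shortened:

    from sqlalchemy import Integer, and_, cast, func, select

    averagesql = select(
        func.coalesce(func.avg(cast(stats_build.c.build_duration, Integer)), 0)
    ).where(
        and_(
            stats_build.c.status.in_(('reproducible', 'FTBR')),
            stats_build.c.name == sources.c.name,
        )
    ).scalar_subquery()   # 1.4+/2.0 replacement for .as_scalar()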


=====================================
bin/reproducible_html_notes.py
=====================================
@@ -300,7 +300,7 @@ def gen_html_issue(issue, suite):
     results = db_table('results')
     sources = db_table('sources')
     sql = select(
-        [sources.c.name]
+        sources.c.name
     ).select_from(
         results.join(sources)
     ).where(


=====================================
bin/reproducible_notes.py
=====================================
@@ -116,7 +116,7 @@ def load_issues():
 def store_issues():
     issues_table = db_table('issues')
     # Get existing issues
-    results = conn_db.execute(sql.select([issues_table.c.name]))
+    results = conn_db.execute(sql.select(issues_table.c.name))
     existing_issues = set([row[0] for row in results])
     to_insert = []
     to_update = []


=====================================
bin/reproducible_opensuse_import_json.py
=====================================
@@ -27,7 +27,7 @@ sources = db_table('sources')
 results = db_table('results')
 
 distro_id = query_db(
-    select([distributions.c.id]).where(distributions.c.name == 'opensuse')
+    select(distributions.c.id).where(distributions.c.name == 'opensuse')
 )[0][0]
 
 pkgs = []
@@ -55,7 +55,7 @@ for pkg in ostatus:
 log.info('Dropping old data…')
 transaction = conn_db.begin()
 d = results.delete(results.c.package_id.in_(
-    select([sources.c.id]).select_from(sources).where(sources.c.distribution == distro_id)
+    select(sources.c.id).select_from(sources).where(sources.c.distribution == distro_id)
 ))
 query_db(d)
 d = sources.delete(sources.c.distribution == distro_id)
@@ -71,8 +71,8 @@ transaction.commit()
 
 log.info('Injecting build results…')
 cur_pkgs = select(
-    [sources.c.id, sources.c.name, sources.c.version,
-     sources.c.suite, sources.c.architecture]
+    sources.c.id, sources.c.name, sources.c.version,
+    sources.c.suite, sources.c.architecture
 ).select_from(
     sources.join(distributions)
 ).where(
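
A similar side note on the unchanged context earlier in this file: the legacy Table.delete(whereclause) call form, as in results.delete(results.c.package_id.in_(...)), is also deprecated in the 2.0 API, where the condition is attached with .where() instead. A minimal sketch of the newer spelling, using the tables and distro_id from this script:

    from sqlalchemy import delete, select

    d = delete(results).where(
        results.c.package_id.in_(
            select(sources.c.id).where(sources.c.distribution == distro_id)
        )
    )
    query_db(d)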


=====================================
bin/reproducible_openwrt_package_parser.py
=====================================
@@ -83,7 +83,7 @@ def insert_into_db(result, suite='trunk'):
     sources_tbl = db_table('sources')
 
     distro_id = query_db(
-        select([distributions.c.id]).where(distributions.c.name == 'openwrt')
+        select(distributions.c.id).where(distributions.c.name == 'openwrt')
         )[0][0]
 
     src_pkgs = []
@@ -92,8 +92,8 @@ def insert_into_db(result, suite='trunk'):
 
     # query for a source package with name, version
     query_src_pkg = select(
-        [sources_tbl.c.id, sources_tbl.c.name, sources_tbl.c.version,
-         sources_tbl.c.suite, sources_tbl.c.architecture]
+        sources_tbl.c.id, sources_tbl.c.name, sources_tbl.c.version,
+        sources_tbl.c.suite, sources_tbl.c.architecture
     ).select_from(
         sources_tbl.join(distributions)
     ).where(
@@ -105,7 +105,7 @@ def insert_into_db(result, suite='trunk'):
     )
 
     query_results_pkg = select(
-        [results_tbl.c.id]
+        results_tbl.c.id
     ).where(results_tbl.c.package_id == bindparam('package_id'))
 
     def insert_pkg_list(pkg_list, state, timestamp):
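
The query_results_pkg statement above uses bindparam('package_id'), a named placeholder whose value is supplied only when the statement is executed. A rough sketch of how such a statement is typically run (the connection object and the concrete id are illustrative, not taken from this function):

    from sqlalchemy import bindparam, select

    stmt = select(results_tbl.c.id).where(
        results_tbl.c.package_id == bindparam('package_id')
    )
    # The placeholder is bound per execution, e.g. once per package:
    rows = conn_db.execute(stmt, {'package_id': 42}).fetchall()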


=====================================
bin/reproducible_remote_scheduler.py
=====================================
@@ -302,10 +302,10 @@ def rest(scheduling_args, requester, local, suite, arch):
 
     schedule_table = db_table('schedule')
     if ids:
-        existing_pkg_ids = dict(query_db(sql.select([
+        existing_pkg_ids = dict(query_db(sql.select(
             schedule_table.c.package_id,
             schedule_table.c.id,
-        ]).where(schedule_table.c.package_id.in_(ids))))
+        ).where(schedule_table.c.package_id.in_(ids))))
 
     for id in ids:
         if id in existing_pkg_ids:



View it on GitLab: https://salsa.debian.org/qa/jenkins.debian.net/-/commit/48482419ea500c018e7ea1e78c972df7b7d40559
