[med-svn] [Git][med-team/gnumed-server][upstream] New upstream version 22.18
Andreas Tille (@tille)
gitlab at salsa.debian.org
Wed Jan 4 13:21:55 GMT 2023
Andreas Tille pushed to branch upstream at Debian Med / gnumed-server
Commits:
c1271f1d by Andreas Tille at 2023-01-04T11:05:24+01:00
New upstream version 22.18
- - - - -
15 changed files:
- server/bootstrap/bootstrap_gm_db_system.py
- server/bootstrap/fixup_db-v22.conf
- server/bootstrap/update_db-v21_v22.conf
- server/doc/schema/gnumed-entire_schema-no_audit.dot
- server/doc/schema/gnumed-entire_schema.html
- server/gm-zip+sign_backups.sh
- server/pycommon/gmCfg.py
- server/pycommon/gmDateTime.py
- server/pycommon/gmDispatcher.py
- server/pycommon/gmExceptions.py
- server/pycommon/gmMimeLib.py
- server/pycommon/gmPG2.py
- server/pycommon/gmTools.py
- + server/sql/v21-v22/fixups/v22-gm-concat_table_structure_v19_and_up-fixup.sql
- server/sql/v21-v22/fixups/v22-release_notes-fixup.sql
Changes:
=====================================
server/bootstrap/bootstrap_gm_db_system.py
=====================================
@@ -759,6 +759,8 @@ class database:
curs.execute("alter database %s set check_function_bodies to on" % self.name)
# we want checking of data checksums if available
curs.execute("alter database %s set ignore_checksum_failure to off" % self.name)
+ # tighten permissions on schema public
+ curs.execute("revoke create on schema public from public")
curs.close()
self.conn.commit()
@@ -780,9 +782,19 @@ class database:
curs.execute("alter database %s set track_commit_timestamp to on" % self.name)
except:
_log.exception(u'PostgreSQL version < 9.5 does not support <track_commit_timestamp> OR <track_commit_timestamp> cannot be set at runtime')
+ curs.close()
+ self.conn.commit()
+ # set owner of schema public to new role "pg_database_owner",
+ # as suggested by PG 15 release notes
+ curs = self.conn.cursor()
+ try:
+ curs.execute("alter schema public owner to pg_database_owner")
+ except:
+ _log.exception(u'PostgreSQL versions < 15 do not yet support role <pg_database_owner>')
curs.close()
self.conn.commit()
+
curs = self.conn.cursor()
gmConnectionPool.log_pg_settings(curs = curs)
curs.close()
@@ -833,7 +845,7 @@ class database:
#--------------------------------------------------------------
def __create_db(self):
-
+ _log.info('creating database')
# verify template database hash
template_version = cfg_get(self.section, 'template version')
if template_version is None:
@@ -843,6 +855,7 @@ class database:
if not converted:
_log.error(u'invalid template database definition: %s', template_version)
return False
+
if not gmPG2.database_schema_compatible(link_obj = self.conn, version = version):
_log.error(u'invalid [%s] schema structure in GNUmed template database [%s]', template_version, self.template_db)
return False
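
For context: PostgreSQL 15 no longer grants CREATE on schema "public" to every role and makes the pseudo-role pg_database_owner own that schema; the two new statements above apply the same policy from the bootstrapper and simply ignore the ownership change on servers that lack the role. A minimal stand-alone sketch of that pattern, assuming psycopg2 and a superuser connection named conn (the helper name is hypothetical, not the bootstrapper's API):

import psycopg2

def tighten_public_schema(conn):
    # hypothetical helper; the two statements are the ones added above
    curs = conn.cursor()
    # make pre-15 servers behave like PG 15: no CREATE on "public" for PUBLIC
    curs.execute("revoke create on schema public from public")
    curs.close()
    conn.commit()
    curs = conn.cursor()
    try:
        # pg_database_owner only exists on recent servers, so a failure
        # here is expected on older ones and simply rolled back
        curs.execute("alter schema public owner to pg_database_owner")
        conn.commit()
    except psycopg2.Error:
        conn.rollback()
    finally:
        curs.close()
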
=====================================
server/bootstrap/fixup_db-v22.conf
=====================================
@@ -34,6 +34,7 @@ v22-clin-v_candidate_diagnoses-fixup.sql
v22-dem-v_message_inbox-fixup.sql
v22-add_generic_covid_vaccine.sql
v22-clin-remove_old_empty_encounters-fixup.sql
+v22-gm-concat_table_structure_v19_and_up-fixup.sql
v22-release_notes-fixup.sql
$schema$
=====================================
server/bootstrap/update_db-v21_v22.conf
=====================================
@@ -166,6 +166,7 @@ v22-clin-v_candidate_diagnoses-fixup.sql
v22-dem-v_message_inbox-fixup.sql
v22-add_generic_covid_vaccine.sql
v22-clin-remove_old_empty_encounters-fixup.sql
+v22-gm-concat_table_structure_v19_and_up-fixup.sql
v22-release_notes-fixup.sql
$schema$
=====================================
server/doc/schema/gnumed-entire_schema-no_audit.dot
=====================================
The diff for this file was not included because it is too large.
=====================================
server/doc/schema/gnumed-entire_schema.html
=====================================
The diff for this file was not included because it is too large.
=====================================
server/gm-zip+sign_backups.sh
=====================================
@@ -20,9 +20,6 @@
#
# 47 12,19 * * * * /usr/bin/gm-zip+sign_backups.sh
#
-#
-# It is useful to have a PROCMAIL rule for the GNotary server replies
-# piping them into the stoarage area where the backups are kept.
#==============================================================
CONF="/etc/gnumed/gnumed-backup.conf"
@@ -60,10 +57,9 @@ shopt -s -q nullglob # no glob matches -> ""
AGGREGATE_EXIT_CODE=0
-# find any leftover, untested tar files
+# find any leftover untested tar files
# and test them so they can be compressed
for TAR_UNTESTED in "${BACKUP_BASENAME}"-*.tar.untested ; do
-
# test
tar --extract --to-stdout --file="${TAR_UNTESTED}" > /dev/null
RESULT="$?"
@@ -72,7 +68,6 @@ for TAR_UNTESTED in "${BACKUP_BASENAME}"-*.tar.untested ; do
AGGREGATE_EXIT_CODE=${RESULT}
continue
fi
-
# rename to final archive name
TAR_FINAL=$(basename "${TAR_UNTESTED}" .untested)
mv --force "${TAR_UNTESTED}" "${TAR_FINAL}"
@@ -84,7 +79,34 @@ for TAR_UNTESTED in "${BACKUP_BASENAME}"-*.tar.untested ; do
continue
fi
chown "${BACKUP_OWNER}" "${TAR_FINAL}"
+done
+
+# find any leftover untested bz2 files and test
+# them so they are not re-compressed unnecessarily
+for BZ2_UNTESTED in "${BACKUP_BASENAME}"-*.tar.bz2.untested ; do
+ # verify compressed archive
+ bzip2 --quiet --test "${BZ2_UNTESTED}"
+ RESULT="$?"
+ if test "${RESULT}" != "0" ; then
+ echo "Verifying compressed archive [${BZ2_UNTESTED}] failed (${RESULT}). Removing."
+ AGGREGATE_EXIT_CODE=${RESULT}
+ rm --force "${BZ2_UNTESTED}"
+ continue
+ fi
+ # rename to final archive name
+ BZ2_FINAL=$(basename "${BZ2_UNTESTED}" .untested)
+ mv --force "${BZ2_UNTESTED}" "${BZ2_FINAL}"
+ RESULT="$?"
+ if test "${RESULT}" != "0" ; then
+ echo "Renaming tested compressed archive [${BZ2_UNTESTED}] to [${BZ2_FINAL}] failed (${RESULT}). Skipping."
+ AGGREGATE_EXIT_CODE=${RESULT}
+ continue
+ fi
+ TAR_FINAL=$(basename "${BZ2_UNTESTED}" .bz2.untested)
+ rm --force "${TAR_FINAL}"
+ chmod "${BACKUP_MASK}" "${BZ2_FINAL}"
+ chown "${BACKUP_OWNER}" "${BZ2_FINAL}"
done
@@ -133,37 +155,9 @@ for TAR_FINAL in "${BACKUP_BASENAME}"-*.tar ; do
AGGREGATE_EXIT_CODE=${RESULT}
continue
fi
-
rm --force "${TAR_FINAL}"
chmod "${BACKUP_MASK}" "${BZ2_FINAL}"
chown "${BACKUP_OWNER}" "${BZ2_FINAL}"
-
- # GNotary support
- if test -n "${GNOTARY_TAN}" ; then
- LOCAL_MAILER=$(which mail)
-
- #SHA512="SHA 512:"`sha512sum -b ${BACKUP_FILENAME}.tar.bz2`
- SHA512=$(openssl dgst -sha512 -hex "${BZ2_FINAL}")
- RMD160=$(openssl dgst -ripemd160 -hex "${BZ2_FINAL}")
-
- export REPLYTO=${SIG_RECEIVER}
-
- # send mail
- (
- echo " "
- echo "<?xml version=\"1.0\" encoding=\"iso-8859-1\" ?>"
- echo "<message>"
- echo " <tan>$GNOTARY_TAN</tan>"
- echo " <action>notarize</action>"
- echo " <hashes number=\"2\">"
- echo " <hash file=\"${BZ2_FINAL}\" modified=\"${TS}\" algorithm=\"SHA-512\">${SHA512}</hash>"
- echo " <hash file=\"${BZ2_FINAL}\" modified=\"${TS}\" algorithm=\"RIPE-MD-160\">${RMD160}</hash>"
- echo " </hashes>"
- echo "</message>"
- echo " "
- ) | $LOCAL_MAILER -s "gnotarize" "$GNOTARY_SERVER"
- fi
-
done
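
The new loop above verifies any leftover "*.tar.bz2.untested" archive once, promotes it to its final name and removes the matching uncompressed tar, so a backup that already made it through compression is never re-compressed. Purely as an illustration of that promotion logic (the authoritative version is the shell loop above; the file-name conventions below are taken from it, everything else is a sketch):

import bz2, glob, os

def promote_untested_bz2(backup_basename):
    # illustrative sketch only; the real logic lives in the shell script
    for untested in glob.glob(backup_basename + '-*.tar.bz2.untested'):
        try:
            # decompress-test the archive, like "bzip2 --quiet --test"
            with bz2.open(untested, 'rb') as bz2_file:
                while bz2_file.read(1 << 20):
                    pass
        except OSError:
            os.remove(untested)                 # corrupt archive: discard
            continue
        final = untested[:-len('.untested')]    # -> *.tar.bz2
        os.replace(untested, final)             # promote to final name
        leftover_tar = final[:-len('.bz2')]     # matching uncompressed tar
        if os.path.exists(leftover_tar):
            os.remove(leftover_tar)             # nothing left to re-compress
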
=====================================
server/pycommon/gmCfg.py
=====================================
@@ -343,7 +343,7 @@ limit 1""" % where_clause
elif isinstance(value, list):
# there can be different syntaxes for list types so don't try to cast them
pass
- elif isinstance(value, buffer):
+ elif isinstance(value, (bytes, memoryview)):
# can go directly into bytea
pass
else:
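
The change above replaces Python 2's buffer type, which no longer exists under Python 3, with the two types psycopg2 can hand to a bytea column without casting. A self-contained sketch of just that check (the function name is hypothetical, not part of gmCfg.py):

def can_go_into_bytea(value):
    # bytes and memoryview replace Python 2's "buffer" for binary payloads
    return isinstance(value, (bytes, memoryview))

print(can_go_into_bytea(b'\x00\x01'))           # True
print(can_go_into_bytea(memoryview(b'abc')))    # True
print(can_go_into_bytea('plain text'))          # False
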
=====================================
server/pycommon/gmDateTime.py
=====================================
@@ -711,7 +711,7 @@ def format_pregnancy_weeks(age):
return '%s%s%s%s' % (
int(weeks),
_('interval_format_tag::weeks::w')[-1:],
- interval.days,
+ days,
_('interval_format_tag::days::d')[-1:]
)
@@ -1700,11 +1700,12 @@ def __numbers_only(str2parse):
if not regex.match("^(\s|\t)*\d{1,4}(\s|\t)*$", str2parse, flags = regex.UNICODE):
return []
- now = pydt_now_here()
val = int(regex.findall('\d{1,4}', str2parse, flags = regex.UNICODE)[0])
+ if val == 0:
+ return []
+ now = pydt_now_here()
matches = []
-
# today in that year
if (1850 < val) and (val < 2100):
target_date = cFuzzyTimestamp (
@@ -2385,7 +2386,7 @@ if __name__ == '__main__':
#-------------------------------------------------
def test_get_date_of_weekday_in_week_of_date():
dt = pydt_now_here()
- print('weekday', base_dt.isoweekday(), '(2day):', dt)
+ print('weekday', dt.isoweekday(), '(2day):', dt)
for weekday in range(8):
dt = get_date_of_weekday_in_week_of_date(weekday)
print('weekday', weekday, '(same):', dt)
@@ -2400,6 +2401,15 @@ if __name__ == '__main__':
except ValueError as exc:
print(exc)
+ #-------------------------------------------------
+ def test__numbers_only():
+ for val in range(-1, 35):
+ matches = __numbers_only(str(val))
+ print(val, ':')
+ for m in matches:
+ print(' ', m)
+ input()
+
#-------------------------------------------------
# GNUmed libs
gmI18N.activate_locale()
@@ -2408,7 +2418,7 @@ if __name__ == '__main__':
init()
#test_date_time()
- test_str2fuzzy_timestamp_matches()
+ #test_str2fuzzy_timestamp_matches()
#test_get_date_of_weekday_in_week_of_date()
#test_cFuzzyTimeStamp()
#test_get_pydt()
@@ -2419,5 +2429,6 @@ if __name__ == '__main__':
#test_pydt_strftime()
#test_calculate_apparent_age()
#test_is_leap_year()
+ test__numbers_only()
#===========================================================================
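
The __numbers_only() change rejects "0" before any fuzzy-timestamp candidates are built (the release notes below record the exception this used to cause), and the new test__numbers_only() exercises exactly that path. A trimmed-down, stand-alone sketch of the guard, using the stdlib re module rather than the regex module and returning plain ints instead of cFuzzyTimestamp matches:

import re

def numbers_only(str2parse):
    # only 1-4 digits, optionally surrounded by whitespace
    if not re.match(r'^\s*\d{1,4}\s*$', str2parse):
        return []
    val = int(re.findall(r'\d{1,4}', str2parse)[0])
    if val == 0:
        # "0" is not a usable day/month/year/hour candidate
        return []
    return [val]

print(numbers_only('7'))    # [7]
print(numbers_only('0'))    # []
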
=====================================
server/pycommon/gmDispatcher.py
=====================================
@@ -316,7 +316,7 @@ def _call(receiver, **kwds):
acceptable_args = func_code_def.co_varnames[0:func_code_def.co_argcount]
else:
_log.error('<%s> must be instance, method or function, but is [%s]', str(receiver), type(receiver))
- raise TypeError('DISPATCHER ERROR: _call(): <%s> must be instance, method or function, but is []' % (str(receiver), type(receiver)))
+ raise TypeError('DISPATCHER ERROR: _call(): <%s> must be instance, method or function, but is [%s]' % (str(receiver), type(receiver)))
# 0x08: bit for whether func uses **kwds syntax
if not (func_code_def.co_flags & 0x08):
=====================================
server/pycommon/gmExceptions.py
=====================================
@@ -30,7 +30,7 @@ class AccessDenied(Exception):
def __repr__(self):
txt = self.errmsg
if self.source is not None:
- txt += '\nSource: %s' % source
+ txt += '\nSource: %s' % self.source
if self.code is not None:
txt += '\nCode: %s' % self.code
if self.details is not None:
=====================================
server/pycommon/gmMimeLib.py
=====================================
@@ -158,7 +158,7 @@ def guess_ext_by_mimetype(mimetype=''):
)
if ext is not None:
- _log.debug('<%s>: %s (%s)' % (mimetype, ext, candidate))
+ _log.debug('<%s>: %s' % (mimetype, ext))
return ext
_log.error("<%s>: no suitable file extension found in config files" % mimetype)
=====================================
server/pycommon/gmPG2.py
=====================================
@@ -373,33 +373,153 @@ def request_login_params(setup_pool=False):
# =======================================================================
# netadata API
# =======================================================================
+SQL__concat_table_structure_v19_and_up = """
+create or replace function gm.concat_table_structure_v19_and_up()
+ returns text
+ language 'plpgsql'
+ security definer
+ as '
+declare
+ _table_desc record;
+ _pk_desc record;
+ _column_desc record;
+ _constraint_def record;
+ _total text;
+begin
+ _total := '''';
+
+ -- find relevant tables
+ for _table_desc in
+ select * from information_schema.tables tabs where
+ tabs.table_schema in (''dem'', ''clin'', ''blobs'', ''cfg'', ''ref'', ''i18n'', ''bill'')
+ and
+ tabs.table_type = ''BASE TABLE''
+ order by
+ decode(md5(tabs.table_schema || tabs.table_name), ''hex'')
+
+ -- loop over tables
+ loop
+ -- where are we at ?
+ _total := _total || ''TABLE:'' || _table_desc.table_schema || ''.'' || _table_desc.table_name || E''\\n'';
+
+ -- find PKs of that table
+ for _pk_desc in
+ select * from (
+ select
+ pg_class.oid::regclass || ''.'' || pg_attribute.attname || ''::'' || format_type(pg_attribute.atttypid, pg_attribute.atttypmod) AS primary_key_column
+ from
+ pg_index, pg_class, pg_attribute
+ where
+ --pg_class.oid = ''TABLENAME''::regclass
+ pg_class.oid = (_table_desc.table_schema || ''.'' || _table_desc.table_name)::regclass
+ AND
+ indrelid = pg_class.oid
+ AND
+ pg_attribute.attrelid = pg_class.oid
+ AND
+ pg_attribute.attnum = any(pg_index.indkey)
+ AND
+ indisprimary
+ ) AS PKs
+ order by
+ decode(md5(PKs.primary_key_column), ''hex'')
+ -- and loop over those PK columns
+ loop
+ _total := _total || ''PK:'' || _pk_desc.primary_key_column || E''\\n'';
+ end loop;
+
+ -- find columns of that table
+ for _column_desc in
+ select *
+ from information_schema.columns cols
+ where
+ cols.table_name = _table_desc.table_name
+ and
+ cols.table_schema = _table_desc.table_schema
+ order by
+ decode(md5(cols.column_name || cols.data_type), ''hex'')
+ -- and loop over those columns
+ loop
+ -- add columns in the format "schema.table.column::data_type"
+ _total := _total || ''COL:''
+ || _column_desc.table_schema || ''.''
+ || _column_desc.table_name || ''.''
+ || _column_desc.column_name || ''::''
+ || _column_desc.udt_name || E''\\n'';
+
+ end loop;
+
+ -- find and loop over CONSTRAINTs of that table
+ for _constraint_def in
+ select * from
+ (select
+ tbl.contype,
+ ''CONSTRAINT:type=''
+ || tbl.contype::TEXT || '':''
+ || replace(pg_catalog.pg_get_constraintdef(tbl.oid, true), '' '', ''_'')
+ || ''::active=''
+ || tbl.convalidated::TEXT
+ as condef
+ from pg_catalog.pg_constraint tbl
+ where
+ tbl.conrelid = (_table_desc.table_schema || ''.'' || _table_desc.table_name)::regclass
+ -- include FKs only because we may have to add/remove
+ -- other (say, check) constraints in a minor release
+ -- for valid reasons which we do not want to affect
+ -- the hash, if however we need to modify a foreign
+ -- key that would, indeed, warrant a hash change
+ AND
+ tbl.contype = ''f''
+ ) as CONSTRAINTs
+ order by
+ CONSTRAINTs.contype,
+ decode(md5(CONSTRAINTs.condef), ''hex'')
+ loop
+ _total := _total || _constraint_def.condef || E''\\n'';
+ end loop;
+
+ end loop; -- over tables
+
+ return _total;
+end;';
+
+select md5(gm.concat_table_structure(%(ver)s::integer)) AS md5;
+"""
+
def database_schema_compatible(link_obj=None, version=None, verbose=True):
expected_hash = known_schema_hashes[version]
if version == 0:
args = {'ver': 9999}
else:
args = {'ver': version}
- rows, idx = run_ro_queries (
- link_obj = link_obj,
- queries = [{
- 'cmd': 'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5',
- 'args': args
- }]
- )
- if rows[0]['md5'] != expected_hash:
- _log.error('database schema version mismatch')
- _log.error('expected: %s (%s)' % (version, expected_hash))
- _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
- if verbose:
- _log.debug('schema dump follows:')
- for line in get_schema_structure(link_obj = link_obj).split():
- _log.debug(line)
- _log.debug('schema revision history dump follows:')
- for line in get_schema_revision_history(link_obj = link_obj):
- _log.debug(' - '.join(line))
- return False
- _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
- return True
+ SQL = 'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5'
+ try:
+ rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': SQL, 'args': args}])
+ except dbapi.errors.AmbiguousFunction as exc:
+ gmConnectionPool.log_pg_exception_details(exc)
+ if not hasattr(exc, 'diag'):
+ raise
+ if 'gm.concat_table_structure_v19_and_up()' not in exc.diag.context:
+ raise
+ rows = None
+ if rows is None:
+ _log.error('gm.concat_table_structure_v19_and_up() failed, retrying with updated function')
+ rows, idx = run_ro_queries(link_obj = link_obj, queries = [{'cmd': SQL__concat_table_structure_v19_and_up, 'args': args}])
+ if rows[0]['md5'] == expected_hash:
+ _log.info('detected schema version [%s], hash [%s]' % (map_schema_hash2version[rows[0]['md5']], rows[0]['md5']))
+ return True
+
+ _log.error('database schema version mismatch')
+ _log.error('expected: %s (%s)' % (version, expected_hash))
+ _log.error('detected: %s (%s)' % (get_schema_version(link_obj=link_obj), rows[0]['md5']))
+ if verbose:
+ _log.debug('schema dump follows:')
+ for line in get_schema_structure(link_obj = link_obj).split():
+ _log.debug(line)
+ _log.debug('schema revision history dump follows:')
+ for line in get_schema_revision_history(link_obj = link_obj):
+ _log.debug(' - '.join(line))
+ return False
#------------------------------------------------------------------------
def get_schema_version(link_obj=None):
@@ -2620,9 +2740,9 @@ SELECT to_timestamp (foofoo,'YYMMDD.HH24MI') FROM (
#test_run_query()
#test_schema_exists()
#test_get_foreign_key_names()
- test_row_locks()
+ #test_row_locks()
#test_faulty_SQL()
#test_log_settings()
- #test_get_db_fingerprint()
+ test_get_db_fingerprint()
# ======================================================================
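
database_schema_compatible() now catches the "ambiguous function" error raised while hashing the schema, inspects the PL/pgSQL error context to confirm gm.concat_table_structure_v19_and_up() is the culprit, and retries with the SQL__concat_table_structure_v19_and_up string above, which first re-creates that helper and then re-runs the md5() query. The same catch-inspect-retry pattern reduced to a stand-alone sketch, assuming psycopg2 >= 2.8 (for the errors module) and a live connection conn rather than GNUmed's run_ro_queries() wrapper:

import psycopg2
import psycopg2.errors

HASH_SQL = 'select md5(gm.concat_table_structure(%(ver)s::integer)) as md5'

def schema_hash(conn, version, fallback_sql):
    curs = conn.cursor()
    try:
        curs.execute(HASH_SQL, {'ver': version})
        return curs.fetchone()[0]
    except psycopg2.errors.AmbiguousFunction as exc:
        # diag.context names the failing PL/pgSQL function, so only the
        # known-broken helper triggers the re-create-and-retry path
        if 'gm.concat_table_structure_v19_and_up()' not in (exc.diag.context or ''):
            raise
        conn.rollback()
    curs = conn.cursor()
    # fallback_sql re-creates the helper, then re-runs the md5() query
    curs.execute(fallback_sql, {'ver': version})
    return curs.fetchone()[0]
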
=====================================
server/pycommon/gmTools.py
=====================================
@@ -690,10 +690,10 @@ def old_unicode_csv_reader(unicode_csv_data, dialect=csv.excel, encoding='utf-8'
if is_dict_reader is not True:
raise KeyError
kwargs['restkey'] = default_csv_reader_rest_key
- csv_reader = csv.DictReader(unicode2charset_encoder(unicode_csv_data), dialect=dialect, **kwargs)
+ csv_reader = csv.DictReader(old_unicode2charset_encoder(unicode_csv_data), dialect=dialect, **kwargs)
except KeyError:
is_dict_reader = False
- csv_reader = csv.reader(unicode2charset_encoder(unicode_csv_data), dialect=dialect, **kwargs)
+ csv_reader = csv.reader(old_unicode2charset_encoder(unicode_csv_data), dialect=dialect, **kwargs)
for row in csv_reader:
# decode ENCODING back to Unicode, cell by cell:
@@ -826,7 +826,7 @@ def get_unique_filename(prefix=None, suffix=None, tmp_dir=None, include_timestam
def __make_symlink_on_windows(physical_name, link_name):
import ctypes
#windows_create_symlink = ctypes.windll.kernel32.CreateSymbolicLinkW
- kernel32 = ctype.WinDLL('kernel32', use_last_error = True)
+ kernel32 = ctypes.WinDLL('kernel32', use_last_error = True)
windows_create_symlink = kernel32.CreateSymbolicLinkW
windows_create_symlink.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
windows_create_symlink.restype = ctypes.c_ubyte
@@ -1873,6 +1873,7 @@ def get_icon(wx=None):
if found_as is None:
_log.warning('no icon file found, falling back to builtin (ugly) icon')
icon_bmp_data = wx.BitmapFromXPMData(pickle.loads(zlib.decompress(__icon_serpent)))
+ icon = wx.Icon()
icon.CopyFromBitmap(icon_bmp_data)
else:
_log.debug('icon found in [%s]', found_as)
=====================================
server/sql/v21-v22/fixups/v22-gm-concat_table_structure_v19_and_up-fixup.sql
=====================================
@@ -0,0 +1,132 @@
+-- ==============================================================
+-- GNUmed database schema change script
+--
+-- License: GPL v2 or later
+-- Author: karsten.hilbert at gmx.net
+--
+-- ==============================================================
+-- force terminate + exit(3) on errors if non-interactive
+\set ON_ERROR_STOP 1
+
+--set default_transaction_read_only to off;
+set check_function_bodies to on;
+
+-- --------------------------------------------------------------
+create or replace function gm.concat_table_structure_v19_and_up()
+ returns text
+ language 'plpgsql'
+ security definer
+ as '
+declare
+ _table_desc record;
+ _pk_desc record;
+ _column_desc record;
+ _constraint_def record;
+ _total text;
+begin
+ _total := '''';
+
+ -- find relevant tables
+ for _table_desc in
+ select * from information_schema.tables tabs where
+ tabs.table_schema in (''dem'', ''clin'', ''blobs'', ''cfg'', ''ref'', ''i18n'', ''bill'')
+ and
+ tabs.table_type = ''BASE TABLE''
+ order by
+ decode(md5(tabs.table_schema || tabs.table_name), ''hex'')
+
+ -- loop over tables
+ loop
+ -- where are we at ?
+ _total := _total || ''TABLE:'' || _table_desc.table_schema || ''.'' || _table_desc.table_name || E''\n'';
+
+ -- find PKs of that table
+ for _pk_desc in
+ select * from (
+ select
+ pg_class.oid::regclass || ''.'' || pg_attribute.attname || ''::'' || format_type(pg_attribute.atttypid, pg_attribute.atttypmod) AS primary_key_column
+ from
+ pg_index, pg_class, pg_attribute
+ where
+ --pg_class.oid = ''TABLENAME''::regclass
+ pg_class.oid = (_table_desc.table_schema || ''.'' || _table_desc.table_name)::regclass
+ AND
+ indrelid = pg_class.oid
+ AND
+ pg_attribute.attrelid = pg_class.oid
+ AND
+ pg_attribute.attnum = any(pg_index.indkey)
+ AND
+ indisprimary
+ ) AS PKs
+ order by
+ decode(md5(PKs.primary_key_column), ''hex'')
+ -- and loop over those PK columns
+ loop
+ _total := _total || ''PK:'' || _pk_desc.primary_key_column || E''\n'';
+ end loop;
+
+ -- find columns of that table
+ for _column_desc in
+ select *
+ from information_schema.columns cols
+ where
+ cols.table_name = _table_desc.table_name
+ and
+ cols.table_schema = _table_desc.table_schema
+ order by
+ decode(md5(cols.column_name || cols.data_type), ''hex'')
+ -- and loop over those columns
+ loop
+ -- add columns in the format "schema.table.column::data_type"
+ _total := _total || ''COL:''
+ || _column_desc.table_schema || ''.''
+ || _column_desc.table_name || ''.''
+ || _column_desc.column_name || ''::''
+ || _column_desc.udt_name || E''\n'';
+
+ end loop;
+
+ -- find and loop over CONSTRAINTs of that table
+ for _constraint_def in
+ select * from
+ (select
+ tbl.contype,
+ ''CONSTRAINT:type=''
+ || tbl.contype::TEXT || '':''
+ || replace(pg_catalog.pg_get_constraintdef(tbl.oid, true), '' '', ''_'')
+ || ''::active=''
+ || tbl.convalidated::TEXT
+ as condef
+ from pg_catalog.pg_constraint tbl
+ where
+ tbl.conrelid = (_table_desc.table_schema || ''.'' || _table_desc.table_name)::regclass
+ -- include FKs only because we may have to add/remove
+ -- other (say, check) constraints in a minor release
+ -- for valid reasons which we do not want to affect
+ -- the hash, if however we need to modify a foreign
+ -- key that would, indeed, warrant a hash change
+ AND
+ tbl.contype = ''f''
+ ) as CONSTRAINTs
+ order by
+ CONSTRAINTs.contype,
+ decode(md5(CONSTRAINTs.condef), ''hex'')
+ loop
+ _total := _total || _constraint_def.condef || E''\n'';
+ end loop;
+
+ end loop; -- over tables
+
+ return _total;
+end;
+';
+
+comment on function gm.concat_table_structure_v19_and_up() is
+ 'new concat_table_structure() starting with gnumed_v19,
+ works on dem, clin, blobs, cfg, ref, i18n, bill,
+ includes primary keys and constraints,
+ sorts properly by bytea';
+
+-- ==============================================================
+select gm.log_script_insertion('v22-gm-concat_table_structure_v19_and_up-fixup.sql', '22.18');
=====================================
server/sql/v21-v22/fixups/v22-release_notes-fixup.sql
=====================================
@@ -17,19 +17,26 @@ INSERT INTO dem.message_inbox (
) VALUES (
(select pk from dem.staff where db_user = 'any-doc'),
(select pk_type from dem.v_inbox_item_type where type = 'memo' and category = 'administrative'),
- 'Release Notes for GNUmed 1.8.7 (database v22.17)',
- 'GNUmed 1.8.7 Release Notes:
+ 'Release Notes for GNUmed 1.8.8 (database v22.18)',
+ 'GNUmed 1.8.8 Release Notes:
- 1.8.7
+ 1.8.8
-FIX: export area: dumping encrypted/PDFed image to disk
-FIX: top panel: heart rate display
-FIX: paperwork: recalls list LaTeX template
+IMPROVED: PACS: better image/image buttons placement
- 22.17
+FIX: py3.10+ *requires* ints for rescaling images [thanks henrique]
+FIX: patient tags: do not crash when rescaling image fails [thanks henrique]
+FIX: fix a number of errors found by pyflakes3
+FIX: lists: no more wx.LIST_HITTEST_ONITEMRIGHT in wxPython 4.2 [thanks jonas]
+FIX: date/time input: exception on entering "0"
-FIX: CREATE FUNCTION ... RETURNS OPAQUE -> TRIGGER [thanks SantyCW at es_AR]
+ 22.18
+
+IMPROVED: bootstrapper: schema "public" permissions and ownership as per PG 15
+IMPROVED: backup: avoid unnecessary recompression
+
+FIX: bootstrapper: schema hash function in v19+ databases
');
-- --------------------------------------------------------------
-select gm.log_script_insertion('v22-release_notes-fixup.sql', '22.17 at 1.8.7');
+select gm.log_script_insertion('v22-release_notes-fixup.sql', '22.18 at 1.8.8');
--
View it on GitLab: https://salsa.debian.org/med-team/gnumed-server/-/commit/c1271f1d42cb3d601741c8d7b048589d7984e93a
You're receiving this email because of your account on salsa.debian.org.