[med-svn] [gnumed-server] 01/03: New upstream version 21.14
Andreas Tille
tille at debian.org
Tue Sep 12 14:44:44 UTC 2017
This is an automated email from the git hooks/post-receive script.
tille pushed a commit to branch master
in repository gnumed-server.
commit c270004c84fc413676f6ed3a5bf3f1560cd0ab63
Author: Andreas Tille <tille at debian.org>
Date: Tue Sep 12 16:31:37 2017 +0200
New upstream version 21.14
---
server/bootstrap/bootstrap_gm_db_system.py | 151 ++++++----
server/doc/schema/gnumed-entire_schema.html | 2 +-
server/pycommon/gmPG2.py | 330 +++++++++++++++------
server/pycommon/gmPsql.py | 61 ++--
server/pycommon/x-test-default_ro.py | 65 ++++
server/pycommon/x-test-psy.py | 203 +++++++++++++
.../v20-v21/dynamic/v21-release_notes-dynamic.sql | 32 +-
7 files changed, 654 insertions(+), 190 deletions(-)
diff --git a/server/bootstrap/bootstrap_gm_db_system.py b/server/bootstrap/bootstrap_gm_db_system.py
index ac3beed..a67a1c0 100755
--- a/server/bootstrap/bootstrap_gm_db_system.py
+++ b/server/bootstrap/bootstrap_gm_db_system.py
@@ -24,6 +24,8 @@ further details.
--quiet
--log-file=
--conf-file=
+
+Requires psycopg 2.7 !
"""
#==================================================================
# TODO
@@ -36,7 +38,14 @@ __author__ = "Karsten.Hilbert at gmx.net"
__license__ = "GPL v2 or later"
# standard library
-import sys, string, os.path, fileinput, os, time, getpass, glob, re as regex, tempfile
+import sys
+import os.path
+import fileinput
+import os
+import getpass
+import glob
+import re as regex
+import tempfile
import io
import logging
@@ -239,7 +248,7 @@ def create_db_group(cursor=None, group=None):
return True
#==================================================================
-def connect(host, port, db, user, passwd):
+def connect(host, port, db, user, passwd, conn_name=None):
"""
This is a wrapper to the database connect function.
Will try to recover gracefully from connection errors where possible
@@ -259,7 +268,7 @@ def connect(host, port, db, user, passwd):
dsn = gmPG2.make_psycopg2_dsn(database=db, host=host, port=port, user=user, password=passwd)
_log.info("trying DB connection to %s on %s as %s", db, host or 'localhost', user)
try:
- conn = gmPG2.get_connection(dsn=dsn, readonly=False, pooled=False, verbose=True)
+ conn = gmPG2.get_connection(dsn=dsn, readonly=False, pooled=False, verbose=True, connection_name = conn_name)
except:
_log.exception(u'connection failed')
raise
@@ -270,6 +279,7 @@ def connect(host, port, db, user, passwd):
_log.info('successfully connected')
return conn
+
#==================================================================
class user:
def __init__(self, anAlias = None, aPassword = None):
@@ -372,7 +382,7 @@ class db_server:
if self.conn.closed == 0:
self.conn.close()
- self.conn = connect(self.name, self.port, self.template_db, self.superuser.name, self.superuser.password)
+ self.conn = connect(self.name, self.port, self.template_db, self.superuser.name, self.superuser.password, conn_name = u'root at template.server')
if self.conn is None:
_log.error('Cannot connect.')
return None
@@ -578,6 +588,7 @@ class database:
return None
#--------------------------------------------------------------
def __bootstrap(self):
+
global _dbowner
# get owner
@@ -654,6 +665,7 @@ class database:
#self.conn.close()
return True
+
#--------------------------------------------------------------
def __connect_superuser_to_template(self):
if self.conn is not None:
@@ -665,7 +677,8 @@ class database:
self.server.port,
self.template_db,
self.server.superuser.name,
- self.server.superuser.password
+ self.server.superuser.password,
+ conn_name = u'postgres at template.db'
)
self.conn.cookie = 'database.__connect_superuser_to_template'
@@ -675,6 +688,7 @@ class database:
curs.close()
return self.conn and 1
+
#--------------------------------------------------------------
def __connect_superuser_to_db(self):
if self.conn is not None:
@@ -686,7 +700,8 @@ class database:
self.server.port,
self.name,
self.server.superuser.name,
- self.server.superuser.password
+ self.server.superuser.password,
+ conn_name = u'postgres at gnumed_vX'
)
self.conn.cookie = 'database.__connect_superuser_to_db'
@@ -749,7 +764,8 @@ class database:
return self.conn and 1
#--------------------------------------------------------------
def __db_exists(self):
- cmd = "BEGIN; SELECT datname FROM pg_database WHERE datname='%s'" % self.name
+ #cmd = "BEGIN; SELECT datname FROM pg_database WHERE datname='%s'" % self.name
+ cmd = "SELECT datname FROM pg_database WHERE datname='%s'" % self.name
aCursor = self.conn.cursor()
try:
@@ -790,16 +806,23 @@ class database:
print_msg("==> dropping pre-existing target database [%s] ..." % self.name)
_log.info('trying to drop target database')
cmd = 'DROP DATABASE "%s"' % self.name
- self.conn.set_isolation_level(0)
+ _log.debug('committing existing connection before setting autocommit')
+ self.conn.commit()
+ _log.debug('setting autocommit to TRUE')
+ self.conn.autocommit = True
+ self.conn.readonly = False
cursor = self.conn.cursor()
try:
+ cursor.execute(u'SET default_transaction_read_only TO OFF')
+ _log.debug('running SQL: %s', cmd)
cursor.execute(cmd)
except:
_log.exception(">>>[%s]<<< failed" % cmd)
- cursor.close()
+ _log.debug(u'conn state after failed DROP: %s', gmPG2.capture_conn_state(self.conn))
return False
- cursor.close()
- self.conn.commit()
+ finally:
+ cursor.close()
+ self.conn.set_session(readonly = False, autocommit = False)
else:
use_existing = bool(int(cfg_get(self.section, 'use existing target database')))
if use_existing:
@@ -828,35 +851,36 @@ class database:
tablespace = '%s'
;""" % (self.name, self.owner.name, self.template_db, tablespace)
- # create DB must be run outside transactions
- old_iso = self.conn.isolation_level
- self.conn.set_isolation_level(0)
- cursor = self.conn.cursor()
-
# get size
+ cursor = self.conn.cursor()
size_cmd = "SELECT pg_size_pretty(pg_database_size('%s'))" % self.template_db
cursor.execute(size_cmd)
size = cursor.fetchone()[0]
+ cursor.close()
# create database by cloning
print_msg("==> cloning [%s] (%s) as target database [%s] ..." % (self.template_db, size, self.name))
+ # create DB must be run outside transactions
+ self.conn.commit()
+ self.conn.autocommit = True
+ self.conn.readonly = False
+ cursor = self.conn.cursor()
try:
+ cursor.execute(u'SET default_transaction_read_only TO OFF')
cursor.execute(create_db_cmd)
except:
_log.exception(">>>[%s]<<< failed" % create_db_cmd)
- cursor.close()
- self.conn.set_isolation_level(old_iso)
return False
- cursor.close()
-
- self.conn.commit()
- self.conn.set_isolation_level(old_iso)
+ finally:
+ cursor.close()
+ self.conn.set_session(readonly = False, autocommit = False)
if not self.__db_exists():
return None
_log.info("Successfully created GNUmed database [%s]." % self.name)
return True
+
#--------------------------------------------------------------
def check_data_plausibility(self):
@@ -905,8 +929,8 @@ class database:
try:
tag, old_query = check_def.split('::::')
except:
- _log.exception('error in plausibility check, aborting')
- _log.error('check definition: %s', check_def)
+ _log.exception(u'error in plausibility check, aborting')
+ _log.error(u'check definition: %s', check_def)
print_msg(" ... failed (check definition error)")
all_tests_successful = False
continue
@@ -919,8 +943,8 @@ class database:
)
old_val = rows[0][0]
except:
- _log.exception('error in plausibility check [%s] (old), aborting' % tag)
- _log.error('SQL: %s', old_query)
+ _log.exception(u'error in plausibility check [%s] (old), aborting' % tag)
+ _log.error(u'SQL: %s', old_query)
print_msg(" ... failed (SQL error)")
all_tests_successful = False
continue
@@ -932,21 +956,21 @@ class database:
)
new_val = rows[0][0]
except:
- _log.exception('error in plausibility check [%s] (new), aborting' % tag)
- _log.error('SQL: %s', new_query)
+ _log.exception(u'error in plausibility check [%s] (new), aborting' % tag)
+ _log.error(u'SQL: %s', new_query)
print_msg(" ... failed (SQL error)")
all_tests_successful = False
continue
if new_val != old_val:
- _log.error('plausibility check [%s] failed, expected [%s], found [%s]' % (tag, old_val, new_val))
- _log.error('SQL (old DB): %s', old_query)
- _log.error('SQL (new DB): %s', new_query)
+ _log.error(u'plausibility check [%s] failed, expected: %s (in old DB), found: %s (in new DB)' % (tag, old_val, new_val))
+ _log.error(u'SQL (old DB): %s', old_query)
+ _log.error(u'SQL (new DB): %s', new_query)
print_msg(" ... failed (data error, check [%s])" % tag)
all_tests_successful = False
continue
- _log.info('plausibility check [%s] succeeded' % tag)
+ _log.info(u'plausibility check [%s] succeeded' % tag)
template_conn.close()
target_conn.close()
@@ -1028,8 +1052,10 @@ class database:
print_msg('')
print_msg(' http://wiki.gnumed.de/bin/view/Gnumed/ConfigurePostgreSQL')
print_msg('')
+
#--------------------------------------------------------------
def import_data(self):
+
print_msg("==> upgrading reference data sets ...")
import_scripts = cfg_get(self.section, "data import scripts")
@@ -1073,7 +1099,9 @@ class database:
#--------------------------------------------------------------
def verify_result_hash(self):
+
print_msg("==> verifying target database schema ...")
+
target_version = cfg_get(self.section, 'target version')
if target_version == 'devel':
print_msg(" ... skipped (devel version)")
@@ -1096,32 +1124,46 @@ class database:
def reindex_all(self):
print_msg("==> reindexing target database (can take a while) ...")
+
+ do_reindex = cfg_get(self.section, 'reindex')
+ if do_reindex is None:
+ do_reindex = True
+ else:
+ do_reindex = (int(do_reindex) == 1)
+ if not do_reindex:
+ _log.warning('skipping REINDEXing')
+ print_msg(" ... skipped")
+ return True
+
_log.info('REINDEXing cloned target database so upgrade does not fail in case of a broken index')
_log.info('this may potentially take "quite a long time" depending on how much data there is in the database')
_log.info('you may want to monitor the PostgreSQL log for signs of progress')
- old_iso = self.conn.isolation_level
- self.conn.set_isolation_level(0)
- curs = self.conn.cursor()
+ self.conn.commit()
+ self.conn.set_session(readonly = False, autocommit = True)
+ curs_outer = self.conn.cursor()
+ curs_outer.execute(u'SET default_transaction_read_only TO OFF')
cmd = 'REINDEX (VERBOSE) DATABASE %s' % self.name
try:
- curs.execute(cmd)
+ curs_outer.execute(cmd)
except:
_log.exception(">>>[%s]<<< failed" % cmd)
- curs.close()
# re-attempt w/o VERBOSE
_log.info('attempting REINDEXing without VERBOSE')
- curs = self.conn.cursor()
+ curs_inner = self.conn.cursor()
cmd = 'REINDEX DATABASE %s' % self.name
try:
- curs.execute(cmd)
+ curs_inner.execute(cmd)
except:
_log.exception(">>>[%s]<<< failed" % cmd)
- curs.close()
- self.conn.set_isolation_level(old_iso)
return False
- curs.close()
- self.conn.set_isolation_level(old_iso)
+ finally:
+ curs_inner.close()
+ self.conn.set_session(readonly = False, autocommit = False)
+ finally:
+ curs_outer.close()
+ self.conn.set_session(readonly = False, autocommit = False)
+
return True
#--------------------------------------------------------------
@@ -1335,6 +1377,7 @@ class database:
self.conn.commit()
return True
+
#==================================================================
class gmBundle:
def __init__(self, aBundleAlias = None):
@@ -1494,6 +1537,7 @@ def ask_for_confirmation():
else:
return None
return True
+
#--------------------------------------------------------------
def _import_schema (group=None, schema_opt="schema", conn=None):
# load schema
@@ -1517,14 +1561,21 @@ def _import_schema (group=None, schema_opt="schema", conn=None):
# and import them
psql = gmPsql.Psql(conn)
- for file in schema_files:
- the_file = os.path.join(schema_base_dir, file)
- if psql.run(the_file) == 0:
- _log.info('successfully imported [%s]' % the_file)
- else:
- _log.error('failed to import [%s]' % the_file)
- return None
+ for filename in schema_files:
+ if filename.strip() == u'':
+ continue # skip empty line
+ if filename.startswith(u'# '):
+ _log.info(filename) # log as comment
+ continue
+ full_path = os.path.join(schema_base_dir, filename)
+ if psql.run(full_path) == 0:
+ #_log.info('success')
+ continue
+ _log.error(u'failure')
+ return None
+
return True
+
#------------------------------------------------------------------
def exit_with_msg(aMsg = None):
if aMsg is not None:
diff --git a/server/doc/schema/gnumed-entire_schema.html b/server/doc/schema/gnumed-entire_schema.html
index 581eaa2..6a174d8 100644
--- a/server/doc/schema/gnumed-entire_schema.html
+++ b/server/doc/schema/gnumed-entire_schema.html
@@ -112,7 +112,7 @@
<body>
<!-- Primary Index -->
- <p><br><br>Dumped on 2017-05-14</p>
+ <p><br><br>Dumped on 2017-08-31</p>
<h1><a name="index">Index of database - gnumed_v21</a></h1>
<ul>
diff --git a/server/pycommon/gmPG2.py b/server/pycommon/gmPG2.py
index 4eb2bd6..36d2f87 100644
--- a/server/pycommon/gmPG2.py
+++ b/server/pycommon/gmPG2.py
@@ -35,7 +35,7 @@ from Gnumed.pycommon import gmDateTime
from Gnumed.pycommon import gmBorg
from Gnumed.pycommon import gmI18N
from Gnumed.pycommon import gmLog2
-from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character
+from Gnumed.pycommon.gmTools import prompted_input, u_replacement_character, format_dict_like
_log = logging.getLogger('gm.db')
@@ -169,6 +169,30 @@ map_client_branch2required_db_version = {
u'1.6': 21
}
+map_psyco_tx_status2str = [
+ u'TRANSACTION_STATUS_IDLE',
+ u'TRANSACTION_STATUS_ACTIVE',
+ u'TRANSACTION_STATUS_INTRANS',
+ u'TRANSACTION_STATUS_INERROR',
+ u'TRANSACTION_STATUS_UNKNOWN'
+]
+
+map_psyco_conn_status2str = [
+ u'0 - ?',
+ u'STATUS_READY',
+ u'STATUS_BEGIN_ALIAS_IN_TRANSACTION',
+ u'STATUS_PREPARED'
+]
+
+map_psyco_iso_level2str = {
+ None: u'ISOLATION_LEVEL_DEFAULT (configured on server)',
+ 0: u'ISOLATION_LEVEL_AUTOCOMMIT',
+ 1: u'ISOLATION_LEVEL_READ_UNCOMMITTED',
+ 2: u'ISOLATION_LEVEL_REPEATABLE_READ',
+ 3: u'ISOLATION_LEVEL_SERIALIZABLE',
+ 4: u'ISOLATION_LEVEL_READ_UNCOMMITTED'
+}
+
# get columns and data types for a given table
query_table_col_defs = u"""select
cols.column_name,
@@ -442,6 +466,7 @@ def __request_login_params_tui():
raise gmExceptions.ConnectionError(_("Cannot connect to database without login information!"))
return login
+
#---------------------------------------------------
def __request_login_params_gui_wx():
"""GUI (wx) input request for database login parameters.
@@ -511,6 +536,7 @@ def make_psycopg2_dsn(database=None, host=None, port=5432, user=None, password=N
dsn_parts.append('password=%s' % password)
dsn_parts.append('sslmode=prefer')
+ dsn_parts.append('fallback_application_name=GNUmed')
return ' '.join(dsn_parts)
@@ -878,10 +904,13 @@ def delete_translation_from_database(link_obj=None, language=None, original=None
return True
#------------------------------------------------------------------------
-def update_translation_in_database(language=None, original=None, translation=None):
- cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
+def update_translation_in_database(language=None, original=None, translation=None, link_obj=None):
+ if language is None:
+ cmd = u'SELECT i18n.upd_tx(%(orig)s, %(trans)s)'
+ else:
+ cmd = u'SELECT i18n.upd_tx(%(lang)s, %(orig)s, %(trans)s)'
args = {'lang': language, 'orig': original, 'trans': translation}
- run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False)
+ run_rw_queries(queries = [{'cmd': cmd, 'args': args}], return_data = False, link_obj = link_obj)
return args
#------------------------------------------------------------------------
@@ -1022,10 +1051,12 @@ def force_user_language(language=None):
def send_maintenance_notification():
cmd = u'notify "db_maintenance_warning"'
run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
+
#------------------------------------------------------------------------
def send_maintenance_shutdown():
cmd = u'notify "db_maintenance_disconnect"'
run_rw_queries(queries = [{'cmd': cmd}], return_data = False)
+
#------------------------------------------------------------------------
def is_pg_interval(candidate=None):
cmd = u'SELECT %(candidate)s::interval'
@@ -1161,15 +1192,6 @@ def bytea2file_object(data_query=None, file_obj=None, chunk_size=0, data_size=No
needed_chunks, remainder = divmod(data_size, chunk_size)
_log.debug('# of chunks: %s; remainder: %s bytes', needed_chunks, remainder)
-# # since we now require PG 9.1 we can disable this workaround:
-# # try setting "bytea_output"
-# # - fails if not necessary
-# # - succeeds if necessary
-# try:
-# run_ro_queries(link_obj = conn, queries = [{'cmd': u"set bytea_output to 'escape'"}])
-# except dbapi.ProgrammingError:
-# _log.debug('failed to set bytea_output to "escape", not necessary')
-
# retrieve chunks, skipped if data size < chunk size,
# does this not carry the danger of cutting up multi-byte escape sequences ?
# no, since bytea is binary,
@@ -1468,6 +1490,27 @@ def file2bytea_overlay(query=None, args=None, filename=None, conn=None, md5_quer
_log.error('MD5 sums of data file and database BYTEA field do not match: [file::%s] <> [DB::%s]', file_md5, db_md5)
return False
+#---------------------------------------------------------------------------
+def run_sql_script(sql_script, conn=None):
+
+ if conn is None:
+ conn = get_connection(readonly = False)
+
+ from Gnumed.pycommon import gmPsql
+ psql = gmPsql.Psql(conn)
+
+ if psql.run(sql_script) == 0:
+ query = {
+ 'cmd': u'select gm.log_script_insertion(%(name)s, %(ver)s)',
+ 'args': {'name': sql_script, 'ver': u'current'}
+ }
+ run_rw_queries(link_obj = conn, queries = [query])
+ conn.commit()
+ return True
+
+ _log.error('error running sql script: %s', sql_script)
+ return False
+
#------------------------------------------------------------------------
def sanitize_pg_regex(expression=None, escape_all=False):
"""Escape input for use in a PostgreSQL regular expression.
@@ -1496,6 +1539,56 @@ def sanitize_pg_regex(expression=None, escape_all=False):
#']', '\]', # not needed
#------------------------------------------------------------------------
+def capture_conn_state(conn=None):
+
+ tx_status = conn.get_transaction_status()
+ if tx_status in [ psycopg2.extensions.TRANSACTION_STATUS_INERROR, psycopg2.extensions.TRANSACTION_STATUS_UNKNOWN ]:
+ isolation_level = u'%s (tx aborted or unknown, cannot retrieve)' % conn.isolation_level
+ else:
+ isolation_level = u'%s (%s)' % (conn.isolation_level, map_psyco_iso_level2str[conn.isolation_level])
+ conn_status = u'%s (%s)' % (conn.status, map_psyco_conn_status2str[conn.status])
+ if conn.closed != 0:
+ conn_status = u'undefined (%s)' % conn_status
+
+ d = {
+ u'identity': id(conn),
+ u'backend PID': conn.get_backend_pid(),
+ u'protocol version': conn.protocol_version,
+ u'encoding': conn.encoding,
+ u'closed': conn.closed,
+ u'readonly': conn.readonly,
+ u'autocommit': conn.autocommit,
+ u'isolation level (psyco)': isolation_level,
+ u'async': conn.async,
+ u'deferrable': conn.deferrable,
+ u'transaction status': u'%s (%s)' % (tx_status, map_psyco_tx_status2str[tx_status]),
+ u'connection status': conn_status,
+ u'executing async op': conn.isexecuting(),
+ u'type': type(conn)
+ }
+ return u'%s\n' % conn + format_dict_like (
+ d,
+ relevant_keys = [
+ u'type',
+ u'identity',
+ u'backend PID',
+ u'protocol version',
+ u'encoding',
+ u'isolation level (psyco)',
+ u'readonly',
+ u'autocommit',
+ u'closed',
+ u'connection status',
+ u'transaction status',
+ u'deferrable',
+ u'async',
+ u'executing async op'
+ ],
+ tabular = True,
+ value_delimiters = None
+ )
+
+#------------------------------------------------------------------------
def capture_cursor_state(cursor=None):
conn = cursor.connection
@@ -1505,6 +1598,11 @@ def capture_cursor_state(cursor=None):
else:
isolation_level = conn.isolation_level
+ if cursor.query is None:
+ query = u'<no query>'
+ else:
+ query = unicode(cursor.query, 'utf8', 'replace')
+
txt = u"""Link state:
Cursor
identity: %s; name: %s
@@ -1514,7 +1612,7 @@ Cursor
statusmessage: %s
Connection
identity: %s; backend pid: %s; protocol version: %s;
- closed: %s; autocommit: %s; isolation level: %s; encoding: %s; async: %s;
+ closed: %s; autocommit: %s; isolation level: %s; encoding: %s; async: %s; deferrable: %s; readonly: %s;
TX status: %s; CX status: %s; executing async op: %s;
Query
%s
@@ -1540,11 +1638,13 @@ Query
isolation_level,
conn.encoding,
conn.async,
- tx_status,
- conn.status,
+ conn.deferrable,
+ conn.readonly,
+ map_psyco_tx_status2str[tx_status],
+ map_psyco_conn_status2str[conn.status],
conn.isexecuting(),
- unicode(cursor.query, 'utf8', 'replace'),
+ query
)
return txt
@@ -1602,6 +1702,14 @@ def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True,
except dbapi.Error as pg_exc:
_log.error('query failed in RO connection')
_log.error(capture_cursor_state(curs))
+ if hasattr(pg_exc, 'diag'):
+ for prop in dir(pg_exc.diag):
+ if prop.startswith(u'__'):
+ continue
+ val = getattr(pg_exc.diag, prop)
+ if val is None:
+ continue
+ _log.error(u'PG diags %s: %s', prop, val)
pg_exc = make_pg_exception_fields_unicode(pg_exc)
_log.error('PG error code: %s', pg_exc.pgcode)
if pg_exc.pgerror is not None:
@@ -1650,6 +1758,9 @@ def run_ro_queries(link_obj=None, queries=None, verbose=False, return_data=True,
col_idx = get_col_indices(curs)
curs_close()
+ # so we can see data committed meanwhile if the
+ # link object had been passed in and thusly might
+ # be part of a long-running read-only transaction
readonly_rollback_just_in_case()
return (data, col_idx)
@@ -1748,6 +1859,14 @@ def run_rw_queries(link_obj=None, queries=None, end_tx=False, return_data=None,
except dbapi.Error as pg_exc:
_log.error('query failed in RW connection')
_log.error(capture_cursor_state(curs))
+ if hasattr(pg_exc, 'diag'):
+ for prop in dir(pg_exc.diag):
+ if prop.startswith(u'__'):
+ continue
+ val = getattr(pg_exc.diag, prop)
+ if val is None:
+ continue
+ _log.error(u'PG diags %s: %s', prop, val)
for notice in notices_accessor.notices:
_log.error(notice.strip(u'\n').strip(u'\r'))
del notices_accessor.notices[:]
@@ -1906,7 +2025,7 @@ class cConnectionPool(psycopg2.pool.PersistentConnectionPool):
self._used[conn_key].original_close()
# -----------------------------------------------------------------------
-def get_raw_connection(dsn=None, verbose=False, readonly=True):
+def get_raw_connection(dsn=None, verbose=False, readonly=True, connection_name=None, autocommit=False):
"""Get a raw, unadorned connection.
- this will not set any parameters such as encoding, timezone, datestyle
@@ -1921,9 +2040,21 @@ def get_raw_connection(dsn=None, verbose=False, readonly=True):
if u'host=salaam.homeunix' in dsn:
raise ValueError('The public database is not hosted by <salaam.homeunix.com> anymore.\n\nPlease point your configuration files to <publicdb.gnumed.de>.')
+ # try to enforce a useful encoding early on so that we
+ # have a good chance of decoding authentication errors
+ # containing foreign language characters
+ if u' client_encoding=' not in dsn:
+ dsn += u' client_encoding=utf8'
+
+ if connection_name is None:
+ if u' application_name' not in dsn:
+ dsn += u" application_name=GNUmed"
+ else:
+ if u' application_name' not in dsn:
+ dsn += u" application_name=%s" % connection_name
+
try:
conn = dbapi.connect(dsn=dsn, connection_factory=psycopg2.extras.DictConnection)
- #conn = dbapi.connect(dsn=dsn, cursor_factory=psycopg2.extras.RealDictCursor)
except dbapi.OperationalError, e:
t, v, tb = sys.exc_info()
@@ -1932,22 +2063,27 @@ def get_raw_connection(dsn=None, verbose=False, readonly=True):
except (AttributeError, IndexError, TypeError):
raise
- msg = unicode(msg, gmI18N.get_encoding(), 'replace')
+ #msg = unicode(msg, gmI18N.get_encoding(), 'replace')
+ msg = unicode(msg, u'utf8', 'replace')
- if msg.find('fe_sendauth') != -1:
+ if u'fe_sendauth' in msg:
raise cAuthenticationError, (dsn, msg), tb
if regex.search('user ".*" does not exist', msg) is not None:
raise cAuthenticationError, (dsn, msg), tb
- if msg.find('uthenti') != -1:
+ if u'uthenti' in msg:
raise cAuthenticationError, (dsn, msg), tb
raise
- _log.debug('new database connection, backend PID: %s, readonly: %s', conn.get_backend_pid(), readonly)
+ if connection_name is None:
+ _log.debug('established anonymous database connection, backend PID: %s', conn.get_backend_pid())
+ else:
+ _log.debug('established database connection "%s", backend PID: %s', connection_name, conn.get_backend_pid())
- # do first-time stuff
+ # do first-connection-only stuff
+ # - verify PG version
global postgresql_version
if postgresql_version is None:
curs = conn.cursor()
@@ -1967,44 +2103,30 @@ def get_raw_connection(dsn=None, verbose=False, readonly=True):
except:
pass
if verbose:
- _log_PG_settings(curs=curs)
+ _log_PG_settings(curs = curs)
curs.close()
conn.commit()
+ # - verify PG understands client time zone
if _default_client_timezone is None:
__detect_client_timezone(conn = conn)
- curs = conn.cursor()
-
- # set access mode
- conn.set_session(readonly = readonly)
- conn.set_session(autocommit = readonly)
- if readonly:
- _log.debug('access mode [READ ONLY]')
- #conn.set_session(readonly = True)
- _log.debug('readonly: autocommit=True to avoid <IDLE IN TRANSACTION>')
-# conn.autocommit = True
-# cmd = 'set session characteristics as transaction READ ONLY'
-# curs.execute(cmd)
-# cmd = 'set default_transaction_read_only to on'
-# curs.execute(cmd)
+ # - set access mode
+ if readonly is True:
+ _log.debug('readonly: forcing autocommit=True to avoid <IDLE IN TRANSACTION>')
+ autocommit = True
else:
- _log.debug('access mode [READ WRITE]')
-# conn.set_session(readonly = False)
- _log.debug('readwrite: autocommit=False')
-# cmd = 'set session characteristics as transaction READ WRITE'
-# curs.execute(cmd)
-# cmd = 'set default_transaction_read_only to off'
-# curs.execute(cmd)
-
- curs.close()
+ _log.debug('autocommit is desired to be: %s', autocommit)
conn.commit()
+ conn.autocommit = autocommit
+ conn.readonly = readonly
conn.is_decorated = False
return conn
+
# =======================================================================
-def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True):
+def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled=True, connection_name=None, autocommit=False):
"""Get a new connection.
This assumes the locale system has been initialized
@@ -2015,15 +2137,20 @@ def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled
if pooled and readonly and (dsn is None):
global __ro_conn_pool
if __ro_conn_pool is None:
+ log_ro_conn = True
__ro_conn_pool = cConnectionPool (
minconn = 1,
maxconn = 2,
dsn = dsn,
verbose = verbose
)
+ else:
+ log_ro_conn = False
conn = __ro_conn_pool.getconn()
+ if log_ro_conn:
+ [ _log.debug(line) for line in capture_conn_state(conn = conn).split(u'\n') ]
else:
- conn = get_raw_connection(dsn=dsn, verbose=verbose, readonly=False)
+ conn = get_raw_connection(dsn = dsn, verbose = verbose, readonly = readonly, connection_name = connection_name, autocommit = autocommit)
if conn.is_decorated:
return conn
@@ -2048,57 +2175,42 @@ def get_connection(dsn=None, readonly=True, encoding=None, verbose=False, pooled
# - transaction isolation level
if readonly:
- # alter-database default, checked at connect, no need to set now
- iso_level = u'read committed'
+ # alter-database default, checked at connect, no need to set here
+ pass
else:
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_SERIALIZABLE)
- iso_level = u'serializable'
-
- _log.debug('client string encoding [%s], isolation level [%s], time zone [%s]', encoding, iso_level, _default_client_timezone)
- curs = conn.cursor()
+ _log.debug('client time zone [%s]', _default_client_timezone)
# - client time zone
+ curs = conn.cursor()
curs.execute(_sql_set_timezone, [_default_client_timezone])
-
- conn.commit()
-
-# # FIXME: remove this whole affair once either 9.0 is standard (Ubuntu 10 LTS is
-# # FIXME: PG 8.4, however!) or else when psycopg2 supports a workaround
-# #
-# # - bytea data format
-# # PG 9.0 switched to - by default - using "hex" rather than "escape",
-# # however, psycopg2's linked with a pre-9.0 libpq do assume "escape"
-# # as the transmission mode for bytea output,
-# # so try to set this setting back to "escape",
-# # if that's not possible the reason will be that PG < 9.0 does not support
-# # that setting - which also means we don't need it and can ignore the
-# # failure
-# cmd = "set bytea_output to 'escape'"
-# try:
-# curs.execute(cmd)
-# except dbapi.ProgrammingError:
-# _log.error('cannot set bytea_output format')
-
curs.close()
conn.commit()
conn.is_decorated = True
+ if verbose:
+ [ _log.debug(line) for line in capture_conn_state(conn = conn).split(u'\n') ]
+
return conn
+
#-----------------------------------------------------------------------
def shutdown():
if __ro_conn_pool is None:
return
__ro_conn_pool.shutdown()
+
# ======================================================================
# internal helpers
#-----------------------------------------------------------------------
def __noop():
pass
+
#-----------------------------------------------------------------------
def _raise_exception_on_ro_conn_close():
raise TypeError(u'close() called on read-only connection')
+
#-----------------------------------------------------------------------
def log_database_access(action=None):
run_insert (
@@ -2107,6 +2219,7 @@ def log_database_access(action=None):
values = {u'user_action': action},
end_tx = True
)
+
#-----------------------------------------------------------------------
def sanity_check_time_skew(tolerance=60):
"""Check server time and local time to be within
@@ -2150,6 +2263,7 @@ def sanity_check_time_skew(tolerance=60):
return False
return True
+
#-----------------------------------------------------------------------
def sanity_check_database_settings():
"""Checks database settings.
@@ -2237,22 +2351,43 @@ def sanity_check_database_settings():
return 1, u'\n'.join(msg)
return 0, u''
+
#------------------------------------------------------------------------
def _log_PG_settings(curs=None):
- # don't use any of the run_*()s since that might
- # create a loop if we fail here
- # FIXME: use pg_settings
+ # don't use any of the run_*()s helper functions
+ # since that might create a loop if we fail here
try:
- curs.execute(u'show all')
+ # .pending_restart does not exist in PG 9.4 yet
+ #curs.execute(u'SELECT name, setting, unit, source, reset_val, sourcefile, sourceline, pending_restart FROM pg_settings')
+ curs.execute(u'SELECT name, setting, unit, source, reset_val, sourcefile, sourceline FROM pg_settings')
except:
- _log.exception(u'cannot log PG settings (>>>show all<<< failed)')
+ _log.exception(u'cannot log PG settings ("SELECT ... FROM pg_settings" failed)')
return False
settings = curs.fetchall()
- if settings is None:
- _log.error(u'cannot log PG settings (>>>show all<<< did not return rows)')
- return False
for setting in settings:
- _log.debug(u'PG option [%s]: %s', setting['name'], setting['setting'])
+ if setting['unit'] is None:
+ unit = u''
+ else:
+ unit = u' %s' % setting['unit']
+ if setting['sourcefile'] is None:
+ sfile = u''
+ else:
+ sfile = u'// %s @ %s' % (setting['sourcefile'], setting['sourceline'])
+# # .pending_restart does not exist in PG 9.4 yet
+# if setting['pending_restart'] is False:
+# pending_restart = u''
+# else:
+# pending_restart = u'// needs restart'
+# _log.debug(u'%s: %s%s (set from: [%s] // sess RESET will set to: [%s]%s%s)',
+ _log.debug(u'%s: %s%s (set from: [%s] // sess RESET will set to: [%s]%s)',
+ setting['name'],
+ setting['setting'],
+ unit,
+ setting['source'],
+ setting['reset_val'],
+# pending_restart,
+ sfile
+ )
try:
curs.execute(u'select pg_available_extensions()')
@@ -2266,7 +2401,19 @@ def _log_PG_settings(curs=None):
for ext in extensions:
_log.debug(u'PG extension: %s', ext['pg_available_extensions'])
+ # not really that useful because:
+ # - clusterwide
+ # - not retained across server restart (fixed in 9.6.1 - really ?)
+# try:
+# curs.execute(u'SELECT pg_last_committed_xact()')
+# except:
+# _log.exception(u'cannot retrieve last committed xact')
+# xact = curs.fetchall()
+# if xact is not None:
+# _log.debug(u'last committed transaction in cluster: %s', xact[0])
+
return True
+
#========================================================================
def make_pg_exception_fields_unicode(exc):
@@ -2286,6 +2433,7 @@ def make_pg_exception_fields_unicode(exc):
exc.u_pgerror = unicode(exc.pgerror, gmI18N.get_encoding(), 'replace').strip().strip(u'\n').strip().strip(u'\n')
return exc
+
#------------------------------------------------------------------------
def extract_msg_from_pg_exception(exc=None):
@@ -2296,6 +2444,7 @@ def extract_msg_from_pg_exception(exc=None):
# assumption
return unicode(msg, gmI18N.get_encoding(), 'replace')
+
# =======================================================================
class cAuthenticationError(dbapi.OperationalError):
@@ -2719,6 +2868,7 @@ if __name__ == "__main__":
#--------------------------------------------------------------------
def test_sanity_check_time_skew():
sanity_check_time_skew()
+
#--------------------------------------------------------------------
def test_get_foreign_key_names():
print get_foreign_key_names (
@@ -2729,6 +2879,7 @@ if __name__ == "__main__":
target_table = u'episode',
target_column = u'pk'
)
+
#--------------------------------------------------------------------
def test_get_foreign_key_details():
for row in get_foreign_keys2column (
@@ -2743,6 +2894,7 @@ if __name__ == "__main__":
row['referenced_table'],
row['referenced_column']
)
+
#--------------------------------------------------------------------
def test_set_user_language():
# (user, language, result, exception type)
@@ -2772,10 +2924,12 @@ if __name__ == "__main__":
print "test:", test
print "expected exception"
print "result:", e
+
#--------------------------------------------------------------------
def test_get_schema_revision_history():
for line in get_schema_revision_history():
print u' - '.join(line)
+
#--------------------------------------------------------------------
def test_run_query():
gmDateTime.init()
@@ -2856,6 +3010,11 @@ SELECT to_timestamp (foofoo,'YYMMDD.HH24MI') FROM (
run_rw_queries(queries = [{'cmd': u'SELEC 1'}])
#--------------------------------------------------------------------
+ def test_log_settings():
+		conn = get_connection()
+ _log_PG_settings(curs = conn.cursor())
+
+ #--------------------------------------------------------------------
# run tests
#test_get_connection()
#test_exceptions()
@@ -2881,5 +3040,6 @@ SELECT to_timestamp (foofoo,'YYMMDD.HH24MI') FROM (
#test_file2bytea_copy_from()
#test_file2bytea_lo()
test_faulty_SQL()
+ #test_log_settings()
# ======================================================================
diff --git a/server/pycommon/gmPsql.py b/server/pycommon/gmPsql.py
index 46df8c1..ad1b963 100644
--- a/server/pycommon/gmPsql.py
+++ b/server/pycommon/gmPsql.py
@@ -41,6 +41,7 @@ class Psql:
"""
self.conn = conn
self.vars = {'ON_ERROR_STOP':None}
+
#---------------------------------------------------------------
def match (self, str):
match = re.match (str, self.line)
@@ -50,6 +51,7 @@ class Psql:
ret = 1
self.groups = match.groups ()
return ret
+
#---------------------------------------------------------------
def fmt_msg(self, aMsg):
try:
@@ -65,6 +67,7 @@ class Psql:
except: pass
unformattable_error_id += 1
return tmp
+
#---------------------------------------------------------------
def run (self, filename):
"""
@@ -89,8 +92,8 @@ class Psql:
in_string = False
bracketlevel = 0
curr_cmd = ''
- curs = self.conn.cursor ()
-## transaction_started = False
+ curs = self.conn.cursor()
+
for self.line in self.file.readlines():
self.lineno += 1
if len(self.line.strip()) == 0:
@@ -100,20 +103,24 @@ class Psql:
if self.match (r"^\\echo (.*)"):
_log.info(self.fmt_msg(shell(self.groups[0])))
continue
+
# \qecho
if self.match (r"^\\qecho (.*)"):
_log.info(self.fmt_msg(shell (self.groups[0])))
continue
+
# \q
if self.match (r"^\\q"):
_log.warning(self.fmt_msg(u"script terminated by \\q"))
return 0
+
# \set
if self.match (r"^\\set (\S+) (\S+)"):
self.vars[self.groups[0]] = shell (self.groups[1])
if self.groups[0] == 'ON_ERROR_STOP':
self.vars['ON_ERROR_STOP'] = int (self.vars['ON_ERROR_STOP'])
continue
+
# \unset
if self.match (r"^\\unset (\S+)"):
self.vars[self.groups[0]] = None
@@ -150,46 +157,26 @@ class Psql:
curr_cmd += this_char
else:
try:
-# if curr_cmd.strip ().upper () == 'COMMIT':
-# if transaction_started:
-# self.conn.commit ()
-# curs.close ()
-# curs = self.conn.cursor ()
-# _log.debug(self.fmt_msg ("transaction committed"))
-# else:
-# _log.warning(self.fmt_msg ("COMMIT without BEGIN: no actual transaction happened!"))
-# transaction_started = False
-
-# elif curr_cmd.strip ().upper () == 'BEGIN':
-# if transaction_started:
-# _log.warning(self.fmt_msg ("BEGIN inside transaction"))
-# else:
-# transaction_started = True
-# _log.debug(self.fmt_msg ("starting transaction"))
-
-# else:
if curr_cmd.strip() != '':
- if curr_cmd.find('vacuum'):
- self.conn.commit();
- curs.close()
- old_iso_level = self.conn.isolation_level
- self.conn.set_isolation_level(0)
- curs = self.conn.cursor()
- curs.execute (curr_cmd)
- self.conn.set_isolation_level(old_iso_level)
- else:
- curs.execute (curr_cmd)
-# if not transaction_started:
- except Exception, error:
- _log.debug(curr_cmd)
+ curs.execute (curr_cmd)
+ except Exception as error:
+ _log.exception(curr_cmd)
if re.match (r"^NOTICE:.*", str(error)):
_log.warning(self.fmt_msg(error))
else:
+ _log.error(self.fmt_msg(error))
+ if hasattr(error, 'diag'):
+ for prop in dir(error.diag):
+ if prop.startswith(u'__'):
+ continue
+ val = getattr(error.diag, prop)
+ if val is None:
+ continue
+ _log.error(u'PG diags %s: %s', prop, val)
if self.vars['ON_ERROR_STOP']:
- _log.error(self.fmt_msg(error))
+ self.conn.commit()
+ curs.close()
return 1
- else:
- _log.debug(self.fmt_msg(error))
self.conn.commit()
curs.close()
@@ -204,6 +191,7 @@ class Psql:
self.conn.commit()
curs.close()
return 0
+
#===================================================================
# testing code
if __name__ == '__main__':
@@ -219,4 +207,3 @@ if __name__ == '__main__':
psql = Psql (conn)
psql.run (sys.argv[1])
conn.close ()
-#===================================================================
diff --git a/server/pycommon/x-test-default_ro.py b/server/pycommon/x-test-default_ro.py
new file mode 100644
index 0000000..ef3df0c
--- /dev/null
+++ b/server/pycommon/x-test-default_ro.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+#
+# please run this script against a database which is configured to be readonly by:
+#
+# alter database <NAME> set default_transaction_read_only to on
+#
+# if cmd line argument is "show_problem" -> exhibit the problem
+
+db = u'gnumed_v20' # a database configured "alter database %s set default_transaction_read_only to on"
+user = 'gm-dbo' # a user with CREATE DATABASE powers
+
+#--------------------------------------------------------------------------------
+import sys
+import psycopg2
+
+
+cmd_def_tx_ro = "SELECT upper(source), name, upper(setting) FROM pg_settings WHERE name = 'default_transaction_read_only'"
+cmd_create_db = "create database %s_copy template %s" % (db, db)
+cmd_drop_db = "drop database %s_copy" % db
+
+show_problem = False
+if len(sys.argv) > 1:
+ if sys.argv[1] == 'show_problem':
+ show_problem = True
+
+conn = psycopg2.connect(dbname = db, user = user)
+print 'conn:', conn
+print 'readonly:', conn.readonly
+print 'autocommit:', conn.autocommit
+print 'setting autocommit to False'
+conn.autocommit = False
+print 'autocommit now:', conn.autocommit
+if show_problem:
+ print 'vvvvv this creates the problem vvvvv'
+ print ' setting readonly to False'
+ conn.readonly = False
+ print ' readonly now:', conn.readonly
+ print '^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^'
+print 'setting autocommit to True'
+conn.autocommit = True
+print 'autocommit now:', conn.autocommit
+print 'setting readonly to False'
+conn.readonly = False
+print 'readonly now:', conn.readonly
+curs = conn.cursor()
+curs.execute(cmd_def_tx_ro)
+print 'querying DEFAULT_TRANSACTION_READ_ONLY state (should show "ON")'
+print curs.fetchall()
+curs.close()
+conn.commit()
+print 'the following SQL will fail:', cmd_create_db
+print '(note that the transaction being talked about is implicit to PostgreSQL, due to autocommit mode)'
+curs = conn.cursor()
+try:
+ curs.execute(cmd_create_db)
+ curs.execute(cmd_drop_db)
+except psycopg2.InternalError as ex:
+ print 'SQL failed:'
+ print ex
+
+print 'shutting down'
+
+curs.close()
+conn.rollback()
+conn.close()
diff --git a/server/pycommon/x-test-psy.py b/server/pycommon/x-test-psy.py
new file mode 100644
index 0000000..f030df2
--- /dev/null
+++ b/server/pycommon/x-test-psy.py
@@ -0,0 +1,203 @@
+
+db = u'gnumed_v20' # a database configured "alter database %s set default_transaction_read_only to on"
+user = 'gm-dbo' # a user with CREATE DATABASE powers
+
+
+
+cmd_def_tx_ro = "SELECT upper(source), name, upper(setting) FROM pg_settings WHERE name = 'default_transaction_read_only'"
+cmd_create_db = "create database %s_copy template %s" % (db, db)
+
+
+import sys
+import psycopg2
+
+
+conn = psycopg2.connect(dbname = db, user = user)
+print 'readonly:', conn.readonly
+print 'autocommit:', conn.autocommit
+conn.readonly = False
+print 'readonly now:', conn.readonly
+#curs = conn.cursor()
+#curs.execute(cmd_def_tx_ro)
+#print 'should show DEFAULT_TRANSACTION_READ_ONLY set to ON'
+#print curs.fetchall()
+#curs.close()
+#conn.commit()
+conn.autocommit = True
+print 'readonly:', conn.readonly
+print 'autocommit:', conn.autocommit
+print 'the following CREATE DATABASE should fail'
+curs = conn.cursor()
+curs.execute(cmd_create_db)
+curs.close()
+conn.rollback()
+conn.close()
+
+sys.exit()
+
+
+
+
+
+curs = conn.cursor()
+#cmd_def_tx_ro = u'show default_transaction_read_only;'
+cmd_def_tx_ro = "SELECT upper(source), name, upper(setting) FROM pg_settings WHERE name = 'default_transaction_read_only'"
+cmd_tx_ro = u'show transaction_read_only;'
+cmd_DEL = u'DELETE FROM dem.identity where pk is NULL'
+
+print conn
+print 'initial RO state:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+
+#print ''
+print 'setting <conn.readonly = False> ...'
+conn.readonly = False
+print 'RO state in same TX:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+print 'RO state in next TX:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+
+print ''
+print 'setting <conn.autocommit = True> (conn.readonly still False) ...'
+print '-> means exiting psyco TX handling, needed for some DDL such as CREATE DATABASE ...'
+conn.autocommit = True
+print 'RO state in same TX:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+print 'RO state in next TX:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+
+print ''
+print 'setting <conn.autocommit = False> (conn.readonly still False) ...'
+print '-> means re-entering psyco TX handling (autocommit off restores transaction management) ...'
+conn.autocommit = False
+print 'RO state in same TX:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+print 'RO state in next TX:'
+print ' psyco (conn.readonly):', conn.readonly
+print ' psyco (conn.autocommit):', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_def_tx_ro
+curs.execute(cmd_tx_ro)
+print ' PG:', curs.fetchall(), u'- %s' % cmd_tx_ro
+print ' running DELETE:', cmd_DEL
+try:
+ curs.execute(cmd_DEL)
+ print ' success'
+except Exception as e:
+ print ' failed:', e
+conn.commit()
+
+
+
+sys.exit()
+
+
+
+
+
+
+print 'RO state in same TX:'
+print ' psyco - conn.readonly:', conn.readonly
+print ' psyco - conn.autocommit:', conn.autocommit
+curs.execute(cmd_def_tx_ro)
+print ' PG - default_transaction_read_only:', curs.fetchall()
+curs.execute(cmd_tx_ro)
+print ' PG - transaction_read_only:', curs.fetchall()
+conn.commit()
+print 'RO state in next TX:'
+print ' psyco - conn.readonly:', conn.readonly
+curs.execute(cmd_def_tx_ro)
+print ' PG - default_transaction_read_only:', curs.fetchall()
+curs.execute(cmd_tx_ro)
+print ' PG - transaction_read_only:', curs.fetchall()
+conn.commit()
+
+print ''
+
+print 'PG/psyco split brain because of:'
+cmd = "SELECT upper(source), name, upper(setting) FROM pg_settings WHERE name = 'default_transaction_read_only'"
+print ' SQL:', cmd
+curs.execute(cmd)
+print ' PG:', curs.fetchall()
+
+
+
+conn.commit()
+curs.execute(u'DELETE FROM dem.identity where pk is NULL')
+
+
+curs.close()
+conn.commit()
+conn.close()
diff --git a/server/sql/v20-v21/dynamic/v21-release_notes-dynamic.sql b/server/sql/v20-v21/dynamic/v21-release_notes-dynamic.sql
index cce6ee1..253a1d8 100644
--- a/server/sql/v20-v21/dynamic/v21-release_notes-dynamic.sql
+++ b/server/sql/v20-v21/dynamic/v21-release_notes-dynamic.sql
@@ -17,26 +17,24 @@ INSERT INTO dem.message_inbox (
) VALUES (
(select pk from dem.staff where db_user = 'any-doc'),
(select pk_type from dem.v_inbox_item_type where type = 'memo' and category = 'administrative'),
- 'Release Notes for GNUmed 1.6.13 (database v21.13)',
- 'GNUmed 1.6.13 Release Notes:
+ 'Release Notes for GNUmed 1.6.14 (database v21.14)',
+ 'GNUmed 1.6.14 Release Notes:
- 1.6.13
+ 1.6.14
-FIX: editing of drug products
-FIX: formatting of intervals with seconds [thanks Rickard]
-FIX: robustify backend listener against change notification trigger errors
-FIX: backport once-only detection of unicode char selector
-FIX: improper handling of notebook page change events
-FIX: error handling on uploading DICOM to Orthanc
+FIX: exception when having issues with calculating eGFR in medication plugin
+FIX: exception on disabling identity [thanks Marc]
+FIX: exception on adding archived documents to export area
+FIX: Orthanc DICOM patient ID modification
+FIX: faulty file drop target declarations
-IMPROVED: more fully prevent logfile based password leaks
-IMPROVED: add listing of latest vaccination per indication
-IMPROVED: export area change listening and sortability
-IMPROVED: episode edit area behaviour
-IMPROVED: add measurement by clicking empty cell in grid
-
-NEW: add Constans algorithm for upper extremity DVT
+IMPROVED: saving of export area items
+IMPROVED: patient display in provider inbox
+IMPROVED: copy document to export area from document plugin
+IMPROVED: Orthanc modification dialog title
+IMPROVED: imported documents deletion confirmation
+IMPROVED: patient media metadata
');
-- --------------------------------------------------------------
-select gm.log_script_insertion('v21-release_notes-dynamic.sql', '21.13');
+select gm.log_script_insertion('v21-release_notes-dynamic.sql', '21.14');
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/gnumed-server.git
More information about the debian-med-commit
mailing list