[Python-modules-commits] [asyncpg] 01/04: Import asyncpg_0.11.0.orig.tar.gz
Piotr Ożarowski
piotr at moszumanska.debian.org
Tue Jun 20 15:05:39 UTC 2017
This is an automated email from the git hooks/post-receive script.
piotr pushed a commit to branch master
in repository asyncpg.
commit c057c9e082417f1f799e7a7667c8c78e5a33a441
Author: Piotr Ożarowski <piotr at debian.org>
Date: Tue Jun 20 16:37:07 2017 +0200
Import asyncpg_0.11.0.orig.tar.gz
---
Makefile | 16 +-
PKG-INFO | 4 +-
asyncpg.egg-info/PKG-INFO | 4 +-
asyncpg.egg-info/SOURCES.txt | 14 +-
asyncpg/__init__.py | 8 +-
asyncpg/_testbase.py | 84 +-
asyncpg/cluster.py | 19 +-
asyncpg/compat.py | 29 +-
asyncpg/connect_utils.py | 387 +
asyncpg/connection.py | 1238 +-
asyncpg/cursor.py | 10 +-
asyncpg/exceptions/__init__.py | 30 +-
asyncpg/exceptions/_base.py | 54 +-
asyncpg/introspection.py | 2 +-
asyncpg/pool.py | 569 +-
asyncpg/prepared_stmt.py | 36 +-
asyncpg/protocol/__init__.py | 4 +-
asyncpg/protocol/buffer.pxd | 9 +-
asyncpg/protocol/buffer.pyx | 89 +-
asyncpg/protocol/codecs/array.pyx | 310 +-
asyncpg/protocol/codecs/base.pxd | 6 +-
asyncpg/protocol/codecs/base.pyx | 142 +-
asyncpg/protocol/codecs/bits.pyx | 2 +-
asyncpg/protocol/codecs/bytea.pyx | 2 +-
asyncpg/protocol/codecs/datetime.pyx | 2 +-
asyncpg/protocol/codecs/float.pyx | 2 +-
asyncpg/protocol/codecs/geometry.pyx | 2 +-
asyncpg/protocol/codecs/hstore.pyx | 2 +-
asyncpg/protocol/codecs/int.pyx | 2 +-
asyncpg/protocol/codecs/json.pyx | 2 +-
asyncpg/protocol/codecs/misc.pyx | 2 +-
asyncpg/protocol/codecs/money.pyx | 2 +-
asyncpg/protocol/codecs/network.pyx | 2 +-
asyncpg/protocol/codecs/numeric.pyx | 2 +-
asyncpg/protocol/codecs/range.pyx | 2 +-
asyncpg/protocol/codecs/record.pyx | 2 +-
asyncpg/protocol/codecs/text.pyx | 8 +-
asyncpg/protocol/codecs/textutils.pyx | 39 +-
asyncpg/protocol/codecs/tsearch.pyx | 2 +-
asyncpg/protocol/codecs/txid.pyx | 2 +-
asyncpg/protocol/codecs/uuid.pyx | 2 +-
asyncpg/protocol/consts.pxi | 4 +-
asyncpg/protocol/coreproto.pxd | 24 +-
asyncpg/protocol/coreproto.pyx | 213 +-
asyncpg/protocol/encodings.pyx | 2 +-
asyncpg/protocol/hton.pxd | 2 +-
asyncpg/protocol/pgtypes.pxi | 2 +-
asyncpg/protocol/prepared_stmt.pxd | 2 +-
asyncpg/protocol/prepared_stmt.pyx | 5 +-
asyncpg/protocol/protocol.c | 48107 ++++++++++++++++++++------------
asyncpg/protocol/protocol.pxd | 13 +-
asyncpg/protocol/protocol.pyx | 323 +-
asyncpg/protocol/python.pxd | 7 +-
asyncpg/protocol/record/__init__.pxd | 7 +-
asyncpg/protocol/record/recordobj.c | 15 +-
asyncpg/protocol/record/recordobj.h | 6 +-
asyncpg/protocol/settings.pxd | 2 +-
asyncpg/protocol/settings.pyx | 5 +-
asyncpg/serverversion.py | 21 +-
asyncpg/transaction.py | 8 +-
asyncpg/types.py | 4 +-
asyncpg/utils.py | 45 +
docs/api/index.rst | 26 +-
docs/conf.py | 7 +-
docs/examples.rst | 47 -
docs/faq.rst | 41 +
docs/index.rst | 8 +-
docs/usage.rst | 281 +
setup.cfg | 1 -
setup.py | 27 +-
tests/__init__.py | 7 +
tests/certs/ca.cert.pem | 35 +
tests/certs/server.cert.pem | 40 +
tests/certs/server.key.pem | 51 +
tests/test_cache_invalidation.py | 85 +
tests/test_cancellation.py | 2 +-
tests/test_codecs.py | 202 +-
tests/test_connect.py | 329 +-
tests/test_copy.py | 597 +
tests/test_cursor.py | 2 +-
tests/test_exceptions.py | 36 +-
tests/test_execute.py | 24 +-
tests/test_listeners.py | 2 +-
tests/test_pool.py | 435 +-
tests/test_prepare.py | 191 +-
tests/test_record.py | 14 +-
tests/test_test.py | 13 +-
tests/test_timeout.py | 45 +-
tests/test_transaction.py | 20 +-
tests/test_utils.py | 37 +
90 files changed, 35554 insertions(+), 19013 deletions(-)
diff --git a/Makefile b/Makefile
index 2f310e4..c3c5ce0 100644
--- a/Makefile
+++ b/Makefile
@@ -28,21 +28,13 @@ debug:
test:
- PYTHONASYNCIODEBUG=1 $(PYTHON) -m unittest discover -s tests
- $(PYTHON) -m unittest discover -s tests
- USE_UVLOOP=1 $(PYTHON) -m unittest discover -s tests
+ PYTHONASYNCIODEBUG=1 $(PYTHON) setup.py test
+ $(PYTHON) setup.py test
+ USE_UVLOOP=1 $(PYTHON) setup.py test
quicktest:
- $(PYTHON) -m unittest discover -s tests
-
-
-sdist: clean compile test
- $(PYTHON) setup.py sdist
-
-
-release: clean compile test
- $(PYTHON) setup.py sdist upload
+ $(PYTHON) setup.py test
htmldocs: compile
diff --git a/PKG-INFO b/PKG-INFO
index eef00cf..88de2a5 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: asyncpg
-Version: 0.8.4
+Version: 0.11.0
Summary: An asyncio PosgtreSQL driver
Home-page: https://github.com/MagicStack/asyncpg
Author: MagicStack Inc
@@ -106,5 +106,5 @@ Classifier: Programming Language :: Python :: 3.6
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
Provides: asyncpg
diff --git a/asyncpg.egg-info/PKG-INFO b/asyncpg.egg-info/PKG-INFO
index eef00cf..88de2a5 100644
--- a/asyncpg.egg-info/PKG-INFO
+++ b/asyncpg.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: asyncpg
-Version: 0.8.4
+Version: 0.11.0
Summary: An asyncio PosgtreSQL driver
Home-page: https://github.com/MagicStack/asyncpg
Author: MagicStack Inc
@@ -106,5 +106,5 @@ Classifier: Programming Language :: Python :: 3.6
Classifier: Operating System :: POSIX
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 5 - Production/Stable
Provides: asyncpg
diff --git a/asyncpg.egg-info/SOURCES.txt b/asyncpg.egg-info/SOURCES.txt
index 1a18c5c..e8bca63 100644
--- a/asyncpg.egg-info/SOURCES.txt
+++ b/asyncpg.egg-info/SOURCES.txt
@@ -8,6 +8,7 @@ asyncpg/__init__.py
asyncpg/_testbase.py
asyncpg/cluster.py
asyncpg/compat.py
+asyncpg/connect_utils.py
asyncpg/connection.py
asyncpg/cursor.py
asyncpg/introspection.py
@@ -16,6 +17,7 @@ asyncpg/prepared_stmt.py
asyncpg/serverversion.py
asyncpg/transaction.py
asyncpg/types.py
+asyncpg/utils.py
asyncpg.egg-info/PKG-INFO
asyncpg.egg-info/SOURCES.txt
asyncpg.egg-info/dependency_links.txt
@@ -68,13 +70,17 @@ asyncpg/protocol/record/__init__.pxd
asyncpg/protocol/record/recordobj.c
asyncpg/protocol/record/recordobj.h
docs/conf.py
-docs/examples.rst
+docs/faq.rst
docs/index.rst
docs/installation.rst
+docs/usage.rst
docs/api/index.rst
+tests/__init__.py
+tests/test_cache_invalidation.py
tests/test_cancellation.py
tests/test_codecs.py
tests/test_connect.py
+tests/test_copy.py
tests/test_cursor.py
tests/test_exceptions.py
tests/test_execute.py
@@ -84,4 +90,8 @@ tests/test_prepare.py
tests/test_record.py
tests/test_test.py
tests/test_timeout.py
-tests/test_transaction.py
\ No newline at end of file
+tests/test_transaction.py
+tests/test_utils.py
+tests/certs/ca.cert.pem
+tests/certs/server.cert.pem
+tests/certs/server.key.pem
\ No newline at end of file
diff --git a/asyncpg/__init__.py b/asyncpg/__init__.py
index 5488318..380dc26 100644
--- a/asyncpg/__init__.py
+++ b/asyncpg/__init__.py
@@ -1,14 +1,16 @@
-# Copyright (C) 2016-present the ayncpg authors and contributors
+# Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
-from .connection import connect # NOQA
+from .connection import connect, Connection # NOQA
from .exceptions import * # NOQA
from .pool import create_pool # NOQA
+from .protocol import Record # NOQA
from .types import * # NOQA
-__all__ = ('connect', 'create_pool') + exceptions.__all__ # NOQA
+__all__ = ('connect', 'create_pool', 'Record', 'Connection') + \
+ exceptions.__all__ # NOQA
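The __init__.py change above re-exports Connection and Record alongside connect() and create_pool(), so the usual entry points are importable straight from the asyncpg package. A minimal usage sketch (the DSN and query are illustrative and assume a locally reachable PostgreSQL server):

    import asyncio
    import asyncpg

    async def main():
        con = await asyncpg.connect('postgresql://postgres@localhost/postgres')
        try:
            row = await con.fetchrow('SELECT 42 AS answer')
            # Rows come back as asyncpg.Record objects, now importable directly,
            # and the connection satisfies the newly exported Connection type.
            assert isinstance(row, asyncpg.Record)
            assert isinstance(con, asyncpg.Connection)
            print(row['answer'])
        finally:
            await con.close()

    asyncio.get_event_loop().run_until_complete(main())
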
diff --git a/asyncpg/_testbase.py b/asyncpg/_testbase.py
index daf9cdf..ed0566e 100644
--- a/asyncpg/_testbase.py
+++ b/asyncpg/_testbase.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2016-present the ayncpg authors and contributors
+# Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
@@ -12,11 +12,13 @@ import functools
import inspect
import logging
import os
+import re
import time
import unittest
from asyncpg import cluster as pg_cluster
+from asyncpg import connection as pg_connection
from asyncpg import pool as pg_pool
@@ -95,6 +97,30 @@ class TestCase(unittest.TestCase, metaclass=TestCaseMeta):
raise AssertionError(
'running block took longer than {}'.format(delta))
+ @contextlib.contextmanager
+ def assertLoopErrorHandlerCalled(self, msg_re: str):
+ contexts = []
+
+ def handler(loop, ctx):
+ contexts.append(ctx)
+
+ old_handler = self.loop.get_exception_handler()
+ self.loop.set_exception_handler(handler)
+ try:
+ yield
+
+ for ctx in contexts:
+ msg = ctx.get('message')
+ if msg and re.search(msg_re, msg):
+ return
+
+ raise AssertionError(
+ 'no message matching {!r} was logged with '
+ 'loop.call_exception_handler()'.format(msg_re))
+
+ finally:
+ self.loop.set_exception_handler(old_handler)
+
_default_cluster = None
@@ -128,18 +154,46 @@ def _shutdown_cluster(cluster):
cluster.destroy()
+def create_pool(dsn=None, *,
+ min_size=10,
+ max_size=10,
+ max_queries=50000,
+ max_inactive_connection_lifetime=60.0,
+ setup=None,
+ init=None,
+ loop=None,
+ pool_class=pg_pool.Pool,
+ connection_class=pg_connection.Connection,
+ **connect_kwargs):
+ return pool_class(
+ dsn,
+ min_size=min_size, max_size=max_size,
+ max_queries=max_queries, loop=loop, setup=setup, init=init,
+ max_inactive_connection_lifetime=max_inactive_connection_lifetime,
+ connection_class=connection_class,
+ **connect_kwargs)
+
+
class ClusterTestCase(TestCase):
@classmethod
+ def get_server_settings(cls):
+ return {
+ 'log_connections': 'on'
+ }
+
+ @classmethod
+ def setup_cluster(cls):
+ cls.cluster = _start_default_cluster(cls.get_server_settings())
+
+ @classmethod
def setUpClass(cls):
super().setUpClass()
- cls.cluster = _start_default_cluster({
- 'log_connections': 'on'
- })
+ cls.setup_cluster()
- def create_pool(self, **kwargs):
+ def create_pool(self, pool_class=pg_pool.Pool, **kwargs):
conn_spec = self.cluster.get_connection_spec()
conn_spec.update(kwargs)
- return pg_pool.create_pool(loop=self.loop, **conn_spec)
+ return create_pool(loop=self.loop, pool_class=pool_class, **conn_spec)
@classmethod
def start_cluster(cls, ClusterCls, *,
@@ -147,6 +201,17 @@ class ClusterTestCase(TestCase):
return _start_cluster(ClusterCls, cluster_kwargs, server_settings)
+def with_connection_options(**options):
+ if not options:
+ raise ValueError('no connection options were specified')
+
+ def wrap(func):
+ func.__connect_options__ = options
+ return func
+
+ return wrap
+
+
class ConnectedTestCase(ClusterTestCase):
def getExtraConnectOptions(self):
@@ -154,9 +219,14 @@ class ConnectedTestCase(ClusterTestCase):
def setUp(self):
super().setUp()
- opts = self.getExtraConnectOptions()
+
+ # Extract options set up with `with_connection_options`.
+ test_func = getattr(self, self._testMethodName).__func__
+ opts = getattr(test_func, '__connect_options__', {})
+
self.con = self.loop.run_until_complete(
self.cluster.connect(database='postgres', loop=self.loop, **opts))
+
self.server_version = self.con.get_server_version()
def tearDown(self):
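The new with_connection_options() decorator stores its keyword arguments on the test method as __connect_options__, and ConnectedTestCase.setUp() now reads them back and forwards them to cluster.connect(). A hypothetical test using it could look like the sketch below; the class name and option are illustrative, and _testbase is internal test infrastructure rather than public API:

    from asyncpg import _testbase as tb

    class TestCacheOptions(tb.ConnectedTestCase):

        @tb.with_connection_options(statement_cache_size=0)
        async def test_cache_disabled(self):
            # setUp() picked __connect_options__ off this method, so
            # self.con was opened with statement_cache_size=0.
            self.assertEqual(await self.con.fetchval('SELECT 1'), 1)
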
diff --git a/asyncpg/cluster.py b/asyncpg/cluster.py
index b9601b5..afb8e6c 100644
--- a/asyncpg/cluster.py
+++ b/asyncpg/cluster.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2016-present the ayncpg authors and contributors
+# Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
@@ -15,6 +15,7 @@ import re
import shutil
import socket
import subprocess
+import sys
import tempfile
import textwrap
import time
@@ -178,6 +179,15 @@ class Cluster:
if sockdir is None:
sockdir = '/tmp'
+ ssl_key = server_settings.get('ssl_key_file')
+ if ssl_key:
+ # Make sure server certificate key file has correct permissions.
+ keyfile = os.path.join(self._data_dir, 'srvkey.pem')
+ shutil.copy(ssl_key, keyfile)
+ os.chmod(keyfile, 0o400)
+ server_settings = server_settings.copy()
+ server_settings['ssl_key_file'] = keyfile
+
if self._pg_version < (9, 3):
sockdir_opt = 'unix_socket_directory'
else:
@@ -204,10 +214,15 @@ class Cluster:
'pg_ctl start exited with status {:d}: {}'.format(
process.returncode, stderr.decode()))
else:
+ if os.getenv('ASYNCPG_DEBUG_SERVER'):
+ stdout = sys.stdout
+ else:
+ stdout = subprocess.DEVNULL
+
self._daemon_process = \
subprocess.Popen(
[self._postgres, '-D', self._data_dir, *extra_args],
- stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
+ stdout=stdout, stderr=subprocess.STDOUT,
preexec_fn=ensure_dead_with_parent)
self._daemon_pid = self._daemon_process.pid
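The chmod above matters because PostgreSQL refuses to start when the server key file is readable by group or others; copying the key into the data directory and tightening it to 0400 keeps the test cluster bootable no matter how the source key file was checked out. A quick standalone check of that constraint (the file name is illustrative):

    import os
    import stat

    mode = stat.S_IMODE(os.stat('srvkey.pem').st_mode)
    # PostgreSQL requires the server key to be 0600 or stricter
    # when it is owned by the server user.
    assert mode & (stat.S_IRWXG | stat.S_IRWXO) == 0
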
diff --git a/asyncpg/compat.py b/asyncpg/compat.py
index 1d330a4..20213aa 100644
--- a/asyncpg/compat.py
+++ b/asyncpg/compat.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2016-present the ayncpg authors and contributors
+# Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
@@ -6,9 +6,13 @@
import functools
+import os
import sys
+PY_36 = sys.version_info >= (3, 6)
+
+
if sys.version_info < (3, 5, 2):
def aiter_compat(func):
@functools.wraps(func)
@@ -18,3 +22,26 @@ if sys.version_info < (3, 5, 2):
else:
def aiter_compat(func):
return func
+
+
+if PY_36:
+ fspath = os.fspath
+else:
+ def fspath(path):
+ fsp = getattr(path, '__fspath__', None)
+ if fsp is not None and callable(fsp):
+ path = fsp()
+ if not isinstance(path, (str, bytes)):
+ raise TypeError(
+ 'expected {}() to return str or bytes, not {}'.format(
+ fsp.__qualname__, type(path).__name__
+ ))
+ return path
+ elif isinstance(path, (str, bytes)):
+ return path
+ else:
+ raise TypeError(
+ 'expected str, bytes or path-like object, not {}'.format(
+ type(path).__name__
+ )
+ )
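compat.fspath() aliases os.fspath() on Python 3.6 and re-implements the same protocol on 3.5: str and bytes pass through, objects exposing __fspath__() are converted, anything else raises TypeError. A tiny illustration (the SocketDir class is made up for the example):

    from asyncpg import compat

    class SocketDir:
        def __fspath__(self):
            return '/run/postgresql'

    assert compat.fspath('/tmp/.s.PGSQL.5432') == '/tmp/.s.PGSQL.5432'
    assert compat.fspath(SocketDir()) == '/run/postgresql'
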
diff --git a/asyncpg/connect_utils.py b/asyncpg/connect_utils.py
new file mode 100644
index 0000000..8a3979b
--- /dev/null
+++ b/asyncpg/connect_utils.py
@@ -0,0 +1,387 @@
+# Copyright (C) 2016-present the asyncpg authors and contributors
+# <see AUTHORS file>
+#
+# This module is part of asyncpg and is released under
+# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
+
+
+import asyncio
+import collections
+import getpass
+import os
+import socket
+import struct
+import time
+import urllib.parse
+
+from . import exceptions
+from . import protocol
+
+
+_ConnectionParameters = collections.namedtuple(
+ 'ConnectionParameters',
+ [
+ 'user',
+ 'password',
+ 'database',
+ 'ssl',
+ 'connect_timeout',
+ 'server_settings',
+ ])
+
+
+_ClientConfiguration = collections.namedtuple(
+ 'ConnectionConfiguration',
+ [
+ 'command_timeout',
+ 'statement_cache_size',
+ 'max_cached_statement_lifetime',
+ 'max_cacheable_statement_size',
+ ])
+
+
+def _parse_connect_dsn_and_args(*, dsn, host, port, user,
+ password, database, ssl, connect_timeout,
+ server_settings):
+ if host is not None and not isinstance(host, str):
+ raise TypeError(
+ 'host argument is expected to be str, got {!r}'.format(
+ type(host)))
+
+ if dsn:
+ parsed = urllib.parse.urlparse(dsn)
+
+ if parsed.scheme not in {'postgresql', 'postgres'}:
+ raise ValueError(
+ 'invalid DSN: scheme is expected to be either of '
+ '"postgresql" or "postgres", got {!r}'.format(parsed.scheme))
+
+ if parsed.port and port is None:
+ port = int(parsed.port)
+
+ if parsed.hostname and host is None:
+ host = parsed.hostname
+
+ if parsed.path and database is None:
+ database = parsed.path
+ if database.startswith('/'):
+ database = database[1:]
+
+ if parsed.username and user is None:
+ user = parsed.username
+
+ if parsed.password and password is None:
+ password = parsed.password
+
+ if parsed.query:
+ query = urllib.parse.parse_qs(parsed.query, strict_parsing=True)
+ for key, val in query.items():
+ if isinstance(val, list):
+ query[key] = val[-1]
+
+ if 'host' in query:
+ val = query.pop('host')
+ if host is None:
+ host = val
+
+ if 'port' in query:
+ val = int(query.pop('port'))
+ if port is None:
+ port = val
+
+ if 'dbname' in query:
+ val = query.pop('dbname')
+ if database is None:
+ database = val
+
+ if 'database' in query:
+ val = query.pop('database')
+ if database is None:
+ database = val
+
+ if 'user' in query:
+ val = query.pop('user')
+ if user is None:
+ user = val
+
+ if 'password' in query:
+ val = query.pop('password')
+ if password is None:
+ password = val
+
+ if query:
+ if server_settings is None:
+ server_settings = query
+ else:
+ server_settings = {**query, **server_settings}
+
+ # On env-var -> connection parameter conversion read here:
+ # https://www.postgresql.org/docs/current/static/libpq-envars.html
+ # Note that env values may be an empty string in cases when
+ # the variable is "unset" by setting it to an empty value
+ #
+ if host is None:
+ host = os.getenv('PGHOST')
+ if not host:
+ host = ['/tmp', '/private/tmp',
+ '/var/pgsql_socket', '/run/postgresql',
+ 'localhost']
+ if not isinstance(host, list):
+ host = [host]
+
+ if port is None:
+ port = os.getenv('PGPORT')
+ if port:
+ port = int(port)
+ else:
+ port = 5432
+ else:
+ port = int(port)
+
+ if user is None:
+ user = os.getenv('PGUSER')
+ if not user:
+ user = getpass.getuser()
+
+ if password is None:
+ password = os.getenv('PGPASSWORD')
+
+ if database is None:
+ database = os.getenv('PGDATABASE')
+
+ if database is None:
+ database = user
+
+ if user is None:
+ raise exceptions.InterfaceError(
+ 'could not determine user name to connect with')
+
+ if database is None:
+ raise exceptions.InterfaceError(
+ 'could not determine database name to connect to')
+
+ addrs = []
+ for h in host:
+ if h.startswith('/'):
+ # UNIX socket name
+ if '.s.PGSQL.' not in h:
+ h = os.path.join(h, '.s.PGSQL.{}'.format(port))
+ addrs.append(h)
+ else:
+ # TCP host/port
+ addrs.append((h, port))
+
+ if not addrs:
+ raise ValueError(
+ 'could not determine the database address to connect to')
+
+ if ssl:
+ for addr in addrs:
+ if isinstance(addr, str):
+ # UNIX socket
+ raise exceptions.InterfaceError(
+ '`ssl` parameter can only be enabled for TCP addresses, '
+ 'got a UNIX socket path: {!r}'.format(addr))
+
+ if server_settings is not None and (
+ not isinstance(server_settings, dict) or
+ not all(isinstance(k, str) for k in server_settings) or
+ not all(isinstance(v, str) for v in server_settings.values())):
+ raise ValueError(
+ 'server_settings is expected to be None or '
+ 'a Dict[str, str]')
+
+ params = _ConnectionParameters(
+ user=user, password=password, database=database, ssl=ssl,
+ connect_timeout=connect_timeout, server_settings=server_settings)
+
+ return addrs, params
+
+
+def _parse_connect_arguments(*, dsn, host, port, user, password, database,
+ timeout, command_timeout, statement_cache_size,
+ max_cached_statement_lifetime,
+ max_cacheable_statement_size,
+ ssl, server_settings):
+
+ local_vars = locals()
+ for var_name in {'max_cacheable_statement_size',
+ 'max_cached_statement_lifetime',
+ 'statement_cache_size'}:
+ var_val = local_vars[var_name]
+ if var_val is None or isinstance(var_val, bool) or var_val < 0:
+ raise ValueError(
+ '{} is expected to be greater '
+ 'or equal to 0, got {!r}'.format(var_name, var_val))
+
+ if command_timeout is not None:
+ try:
+ if isinstance(command_timeout, bool):
+ raise ValueError
+ command_timeout = float(command_timeout)
+ if command_timeout <= 0:
+ raise ValueError
+ except ValueError:
+ raise ValueError(
+ 'invalid command_timeout value: '
+ 'expected greater than 0 float (got {!r})'.format(
+ command_timeout)) from None
+
+ addrs, params = _parse_connect_dsn_and_args(
+ dsn=dsn, host=host, port=port, user=user,
+ password=password, ssl=ssl,
+ database=database, connect_timeout=timeout,
+ server_settings=server_settings)
+
+ config = _ClientConfiguration(
+ command_timeout=command_timeout,
+ statement_cache_size=statement_cache_size,
+ max_cached_statement_lifetime=max_cached_statement_lifetime,
+ max_cacheable_statement_size=max_cacheable_statement_size,)
+
+ return addrs, params, config
+
+
+async def _connect_addr(*, addr, loop, timeout, params, config,
+ connection_class):
+ assert loop is not None
+
+ if timeout <= 0:
+ raise asyncio.TimeoutError
+
+ connected = _create_future(loop)
+ proto_factory = lambda: protocol.Protocol(
+ addr, connected, params, loop)
+
+ if isinstance(addr, str):
+ # UNIX socket
+ assert params.ssl is None
+ connector = loop.create_unix_connection(proto_factory, addr)
+ elif params.ssl:
+ connector = _create_ssl_connection(
+ proto_factory, *addr, loop=loop, ssl_context=params.ssl)
+ else:
+ connector = loop.create_connection(proto_factory, *addr)
+
+ before = time.monotonic()
+ tr, pr = await asyncio.wait_for(
+ connector, timeout=timeout, loop=loop)
+ timeout -= time.monotonic() - before
+
+ try:
+ if timeout <= 0:
+ raise asyncio.TimeoutError
+ await asyncio.wait_for(connected, loop=loop, timeout=timeout)
+ except Exception:
+ tr.close()
+ raise
+
+ con = connection_class(pr, tr, loop, addr, config, params)
+ pr.set_connection(con)
+ return con
+
+
+async def _connect(*, loop, timeout, connection_class, **kwargs):
+ if loop is None:
+ loop = asyncio.get_event_loop()
+
+ addrs, params, config = _parse_connect_arguments(timeout=timeout, **kwargs)
+
+ last_error = None
+ addr = None
+ for addr in addrs:
+ before = time.monotonic()
+ try:
+ con = await _connect_addr(
+ addr=addr, loop=loop, timeout=timeout,
+ params=params, config=config,
+ connection_class=connection_class)
+ except (OSError, asyncio.TimeoutError, ConnectionError) as ex:
+ last_error = ex
+ else:
+ return con
+ finally:
+ timeout -= time.monotonic() - before
+
+ raise last_error
+
+
+async def _get_ssl_ready_socket(host, port, *, loop):
+ reader, writer = await asyncio.open_connection(host, port, loop=loop)
+
+ tr = writer.transport
+ try:
+ sock = _get_socket(tr)
+ _set_nodelay(sock)
+
+ writer.write(struct.pack('!ll', 8, 80877103)) # SSLRequest message.
+ await writer.drain()
+ resp = await reader.readexactly(1)
+
+ if resp == b'S':
+ return sock.dup()
+ else:
+ raise ConnectionError(
+ 'PostgreSQL server at "{}:{}" rejected SSL upgrade'.format(
+ host, port))
+ finally:
+ tr.close()
+
+
+async def _create_ssl_connection(protocol_factory, host, port, *,
+ loop, ssl_context):
+ sock = await _get_ssl_ready_socket(host, port, loop=loop)
+ try:
+ return await loop.create_connection(
+ protocol_factory, sock=sock, ssl=ssl_context,
+ server_hostname=host)
+ except Exception:
+ sock.close()
+ raise
+
+
+async def _open_connection(*, loop, addr, params: _ConnectionParameters):
+ if isinstance(addr, str):
+ r, w = await asyncio.open_unix_connection(addr, loop=loop)
+ else:
+ if params.ssl:
+ sock = await _get_ssl_ready_socket(*addr, loop=loop)
+
+ try:
+ r, w = await asyncio.open_connection(
+ sock=sock,
+ loop=loop,
+ ssl=params.ssl,
+ server_hostname=addr[0])
+ except Exception:
+ sock.close()
+ raise
+
+ else:
+ r, w = await asyncio.open_connection(*addr, loop=loop)
+ _set_nodelay(_get_socket(w.transport))
+
+ return r, w
+
+
+def _get_socket(transport):
+ sock = transport.get_extra_info('socket')
+ if sock is None:
+ # Shouldn't happen with any asyncio-complaint event loop.
+ raise ConnectionError(
+ 'could not get the socket for transport {!r}'.format(transport))
+ return sock
+
+
+def _set_nodelay(sock):
+ if not hasattr(socket, 'AF_UNIX') or sock.family != socket.AF_UNIX:
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+
+
+def _create_future(loop):
+ try:
+ create_future = loop.create_future
+ except AttributeError:
+ return asyncio.Future(loop=loop)
+ else:
+ return create_future()
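The new connect_utils module centralises address and parameter resolution: explicit keyword arguments win over DSN components, the DSN wins over PG* environment variables, and any query parameters not recognised as connection options are folded into server_settings. The helper below is internal, but calling it directly makes the precedence concrete (host, database and the query parameter are illustrative values):

    from asyncpg import connect_utils

    addrs, params = connect_utils._parse_connect_dsn_and_args(
        dsn='postgresql://app@db.example.com:6432/orders?application_name=worker',
        host=None, port=None, user=None, password=None, database=None,
        ssl=None, connect_timeout=None, server_settings=None)

    assert addrs == [('db.example.com', 6432)]
    assert params.user == 'app'
    assert params.database == 'orders'
    assert params.server_settings == {'application_name': 'worker'}
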
diff --git a/asyncpg/connection.py b/asyncpg/connection.py
index 9445a9b..9e94de4 100644
--- a/asyncpg/connection.py
+++ b/asyncpg/connection.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2016-present the ayncpg authors and contributors
+# Copyright (C) 2016-present the asyncpg authors and contributors
# <see AUTHORS file>
#
# This module is part of asyncpg and is released under
@@ -7,21 +7,30 @@
import asyncio
import collections
-import getpass
-import os
-import socket
+import collections.abc
import struct
-import urllib.parse
+import time
+from . import compat
+from . import connect_utils
from . import cursor
+from . import exceptions
from . import introspection
from . import prepared_stmt
from . import protocol
from . import serverversion
from . import transaction
+from . import utils
-class Connection:
+class ConnectionMeta(type):
+
+ def __instancecheck__(cls, instance):
+ mro = type(instance).__mro__
+ return Connection in mro or _ConnectionProxy in mro
+
+
+class Connection(metaclass=ConnectionMeta):
"""A representation of a database session.
Connections are created by calling :func:`~asyncpg.connection.connect`.
@@ -29,12 +38,15 @@ class Connection:
__slots__ = ('_protocol', '_transport', '_loop', '_types_stmt',
'_type_by_name_stmt', '_top_xact', '_uid', '_aborted',
- '_stmt_cache_max_size', '_stmt_cache', '_stmts_to_close',
- '_addr', '_opts', '_command_timeout', '_listeners',
- '_server_version', '_intro_query')
-
- def __init__(self, protocol, transport, loop, addr, opts, *,
- statement_cache_size, command_timeout):
+ '_stmt_cache', '_stmts_to_close', '_listeners',
+ '_server_version', '_server_caps', '_intro_query',
+ '_reset_query', '_proxy', '_stmt_exclusive_section',
+ '_config', '_params', '_addr')
+
+ def __init__(self, protocol, transport, loop,
+ addr: (str, int) or str,
+ config: connect_utils._ClientConfiguration,
+ params: connect_utils._ConnectionParameters):
self._protocol = protocol
self._transport = transport
self._loop = loop
@@ -45,36 +57,57 @@ class Connection:
self._aborted = False
self._addr = addr
- self._opts = opts
+ self._config = config
+ self._params = params
- self._stmt_cache_max_size = statement_cache_size
- self._stmt_cache = collections.OrderedDict()
- self._stmts_to_close = set()
+ self._stmt_cache = _StatementCache(
+ loop=loop,
+ max_size=config.statement_cache_size,
+ on_remove=self._maybe_gc_stmt,
+ max_lifetime=config.max_cached_statement_lifetime)
- self._command_timeout = command_timeout
+ self._stmts_to_close = set()
self._listeners = {}
- ver_string = self._protocol.get_settings().server_version
+ settings = self._protocol.get_settings()
+ ver_string = settings.server_version
self._server_version = \
serverversion.split_server_version_string(ver_string)
+ self._server_caps = _detect_server_capabilities(
+ self._server_version, settings)
+
if self._server_version < (9, 2):
self._intro_query = introspection.INTRO_LOOKUP_TYPES_91
else:
self._intro_query = introspection.INTRO_LOOKUP_TYPES
+ self._reset_query = None
+ self._proxy = None
+
+ # Used to serialize operations that might involve anonymous
+ # statements. Specifically, we want to make the following
+ # operation atomic:
+ # ("prepare an anonymous statement", "use the statement")
+ #
+ # Used for `con.fetchval()`, `con.fetch()`, `con.fetchrow()`,
+ # `con.execute()`, and `con.executemany()`.
+ self._stmt_exclusive_section = _Atomic()
+
async def add_listener(self, channel, callback):
"""Add a listener for Postgres notifications.
:param str channel: Channel to listen on.
+
:param callable callback:
- A callable receiving the following arguments:
... 83686 lines suppressed ...
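Among the connection.py changes shown above, ConnectionMeta.__instancecheck__ makes isinstance(obj, Connection) succeed not only for real Connection objects but also for the pool's connection proxies, so application code that type-checks connections keeps working when it is handed a pooled one. A sketch of the effect (assumes a reachable server; the DSN is illustrative):

    import asyncio
    import asyncpg

    async def main():
        pool = await asyncpg.create_pool('postgresql://postgres@localhost/postgres')
        con = await pool.acquire()
        try:
            # `con` may be a pool proxy rather than a Connection subclass,
            # but the metaclass makes this check pass either way.
            assert isinstance(con, asyncpg.Connection)
            print(await con.fetchval('SELECT version()'))
        finally:
            await pool.release(con)
            await pool.close()

    asyncio.get_event_loop().run_until_complete(main())
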
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/asyncpg.git