[Python-modules-commits] [sqlalchemy] 02/14: Import sqlalchemy_1.0.15+ds1.orig.tar.gz

Piotr Ożarowski piotr at moszumanska.debian.org
Tue Oct 11 15:02:42 UTC 2016


This is an automated email from the git hooks/post-receive script.

piotr pushed a commit to branch master
in repository sqlalchemy.

commit 366de0ad86cd6332f7f59cad39250aac58275290
Author: Piotr Ożarowski <piotr at debian.org>
Date:   Tue Sep 20 21:44:23 2016 +0200

    Import sqlalchemy_1.0.15+ds1.orig.tar.gz
---
 PKG-INFO                                           |   2 +-
 doc/build/changelog/changelog_10.rst               |  46 +++++
 doc/build/conf.py                                  |   4 +-
 doc/build/core/custom_types.rst                    | 107 ++++++++--
 doc/build/dialects/index.rst                       |   1 +
 doc/build/faq/sessions.rst                         |   2 +-
 doc/build/orm/basic_relationships.rst              |   4 +-
 doc/build/orm/extensions/declarative/basic_use.rst |   1 +
 doc/build/orm/inheritance.rst                      |  21 +-
 doc/build/orm/loading_relationships.rst            |   8 +
 doc/build/orm/persistence_techniques.rst           |   8 +
 lib/sqlalchemy/__init__.py                         |   2 +-
 lib/sqlalchemy/dialects/mssql/base.py              |  19 ++
 lib/sqlalchemy/dialects/mysql/mysqlconnector.py    |  14 ++
 lib/sqlalchemy/dialects/postgresql/base.py         |   2 +-
 lib/sqlalchemy/ext/baked.py                        |   3 +-
 lib/sqlalchemy/ext/declarative/api.py              |   2 +-
 lib/sqlalchemy/orm/mapper.py                       |   2 -
 lib/sqlalchemy/orm/strategies.py                   |  90 +++++----
 lib/sqlalchemy/orm/util.py                         |  10 +-
 lib/sqlalchemy/sql/schema.py                       |   3 +
 lib/sqlalchemy/sql/type_api.py                     |   2 +-
 test/dialect/mysql/test_reflection.py              |   1 -
 test/dialect/mysql/test_types.py                   |   3 +-
 test/engine/test_transaction.py                    |   7 +-
 test/ext/test_baked.py                             |  33 ++--
 test/orm/inheritance/test_abc_inheritance.py       |   3 +-
 test/orm/inheritance/test_polymorphic_rel.py       |   4 +-
 test/orm/test_of_type.py                           | 216 +++++++++++++++++++--
 test/requirements.py                               |  28 +++
 test/sql/test_metadata.py                          |  15 ++
 test/sql/test_update.py                            |  14 +-
 32 files changed, 553 insertions(+), 124 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 2ba9b83..240f27a 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: SQLAlchemy
-Version: 1.0.14
+Version: 1.0.15
 Summary: Database Abstraction Library
 Home-page: http://www.sqlalchemy.org
 Author: Mike Bayer
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index 1b27a1d..b5f6dfa 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -16,6 +16,52 @@
         :start-line: 5
 
 .. changelog::
+    :version: 1.0.15
+    :released: September 1, 2016
+
+    .. change::
+        :tags: bug, mysql
+        :tickets: 3787
+        :versions: 1.1.0
+
+        Added support for parsing MySQL/Connector boolean and integer
+        arguments within the URL query string: connection_timeout,
+        connect_timeout, pool_size, get_warnings,
+        raise_on_warnings, raw, consume_results, ssl_verify_cert, force_ipv6,
+        pool_reset_session, compress, allow_local_infile, use_pure.
+
+    .. change::
+        :tags: bug, orm
+        :tickets: 3773, 3774
+        :versions: 1.1.0
+
+        Fixed bug in subquery eager loading where a subqueryload
+        of an "of_type()" object linked to a second subqueryload of a plain
+        mapped class, or a longer chain of several "of_type()" attributes,
+        would fail to link the joins correctly.
+
+    .. change::
+        :tags: bug, sql
+        :tickets: 3755
+        :versions: 1.1.0
+
+        Fixed bug in :class:`.Table` where the internal method
+        ``_reset_exported()`` would corrupt the state of the object.  This
+        method is intended for selectable objects and is called by the ORM
+        in some cases; an erroneous mapper configuration could lead the
+        ORM to call this on a :class:`.Table` object.
+
+    .. change::
+        :tags: bug, ext
+        :tickets: 3743
+        :versions: 1.1.0b3
+
+        Fixed bug in ``sqlalchemy.ext.baked`` where the unbaking of a
+        subquery eager loader query would fail due to a variable scoping
+        issue, when multiple subquery loaders were involved.  Pull request
+        courtesy Mark Hahnenberg.
+
+.. changelog::
     :version: 1.0.14
     :released: July 6, 2016
 
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 7653827..5d9cf94 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -107,9 +107,9 @@ copyright = u'2007-2016, the SQLAlchemy authors and contributors'
 # The short X.Y version.
 version = "1.0"
 # The full version, including alpha/beta/rc tags.
-release = "1.0.14"
+release = "1.0.15"
 
-release_date = "July 6, 2016"
+release_date = "September 1, 2016"
 
 site_base = os.environ.get("RTD_SITE_BASE", "http://www.sqlalchemy.org")
 site_adapter_template = "docs_adapter.mako"
diff --git a/doc/build/core/custom_types.rst b/doc/build/core/custom_types.rst
index 3290e4d..2c3784b 100644
--- a/doc/build/core/custom_types.rst
+++ b/doc/build/core/custom_types.rst
@@ -3,13 +3,13 @@
 .. _types_custom:
 
 Custom Types
-------------
+============
 
 A variety of methods exist to redefine the behavior of existing types
 as well as to provide new ones.
 
 Overriding Type Compilation
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+---------------------------
 
 A frequent need is to force the "string" version of a type, that is
 the one rendered in a CREATE TABLE statement or other SQL function
@@ -38,7 +38,7 @@ See the section :ref:`type_compilation_extension`, a subsection of
 .. _types_typedecorator:
 
 Augmenting Existing Types
-~~~~~~~~~~~~~~~~~~~~~~~~~
+-------------------------
 
 The :class:`.TypeDecorator` allows the creation of custom types which
 add bind-parameter and result-processing behavior to an existing
@@ -59,7 +59,8 @@ to and from the database is required.
 
 
 TypeDecorator Recipes
-~~~~~~~~~~~~~~~~~~~~~
+---------------------
+
 A few key :class:`.TypeDecorator` recipes follow.
 
 .. _coerce_to_unicode:
@@ -195,19 +196,95 @@ to/from JSON.   Can be modified to use Python's builtin json encoder::
                 value = json.loads(value)
             return value
 
-Note that the ORM by default will not detect "mutability" on such a type -
+Adding Mutability
+~~~~~~~~~~~~~~~~~
+
+The ORM by default will not detect "mutability" on such a type as above -
 meaning, in-place changes to values will not be detected and will not be
-flushed. Without further steps, you instead would need to replace the existing
-value with a new one on each parent object to detect changes. Note that
-there's nothing wrong with this, as many applications may not require that the
-values are ever mutated once created.  For those which do have this requirement,
-support for mutability is best applied using the ``sqlalchemy.ext.mutable``
-extension - see the example in :ref:`mutable_toplevel`.
+flushed.   Without further steps, you instead would need to replace the existing
+value with a new one on each parent object to detect changes::
+
+    obj.json_value["key"] = "value"  # will *not* be detected by the ORM
+
+    obj.json_value = {"key": "value"}  # *will* be detected by the ORM
+
+The above limitation may be
+fine, as many applications may not require that the values are ever mutated
+once created.  For those which do have this requirement, support for mutability
+is best applied using the ``sqlalchemy.ext.mutable`` extension.  For a
+dictionary-oriented JSON structure, we can apply this as::
+
+    from sqlalchemy.ext.mutable import MutableDict
+
+    json_type = MutableDict.as_mutable(JSONEncodedDict)
+
+    class MyClass(Base):
+        #  ...
+
+        json_data = Column(json_type)
+
+
+.. seealso::
+
+    :ref:`mutable_toplevel`
+
+Dealing with Comparison Operations
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The default behavior of :class:`.TypeDecorator` is to coerce the "right hand side"
+of any expression into the same type.  For a type like JSON, this means that
+any operator used must make sense in terms of JSON.    For some cases,
+users may wish for the type to behave like JSON in some circumstances, and
+as plain text in others.  One example is if one wanted to handle the
+LIKE operator for the JSON type.  LIKE makes no sense against a JSON structure,
+but it does make sense against the underlying textual representation.  To
+get at this with a type like ``JSONEncodedDict``, we need to
+**coerce** the column to a textual form using :func:`.cast` or
+:func:`.type_coerce` before attempting to use this operator::
+
+    from sqlalchemy import type_coerce, String
+
+    stmt = select([my_table]).where(
+        type_coerce(my_table.c.json_data, String).like('%foo%'))
+
+:class:`.TypeDecorator` provides a built-in system for working up type
+translations like these based on operators.  If we wanted to frequently use the
+LIKE operator with our JSON object interpreted as a string, we can build it
+into the type by overriding the :meth:`.TypeDecorator.coerce_compared_value`
+method::
+
+    from sqlalchemy.sql import operators
+    from sqlalchemy import String
+
+    class JSONEncodedDict(TypeDecorator):
+
+        impl = VARCHAR
+
+        def coerce_compared_value(self, op, value):
+            if op in (operators.like_op, operators.notlike_op):
+                return String()
+            else:
+                return self
+
+        def process_bind_param(self, value, dialect):
+            if value is not None:
+                value = json.dumps(value)
+
+            return value
+
+        def process_result_value(self, value, dialect):
+            if value is not None:
+                value = json.loads(value)
+            return value
+
+Above is just one approach to handling an operator like "LIKE".  Other
+applications may wish to raise ``NotImplementedError`` for operators that
+have no meaning with a JSON object such as "LIKE", rather than automatically
+coercing to text.
+
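+A minimal sketch of that stricter variant, assuming the same
+``JSONEncodedDict`` as above (the subclass name here is only illustrative)::
+
+    from sqlalchemy.sql import operators
+
+    class StrictJSONEncodedDict(JSONEncodedDict):
+        """Refuse text-oriented comparisons instead of coercing to String."""
+
+        def coerce_compared_value(self, op, value):
+            if op in (operators.like_op, operators.notlike_op):
+                raise NotImplementedError(
+                    "LIKE has no meaning against a JSON structure")
+            return self
+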
 
 .. _replacing_processors:
 
 Replacing the Bind/Result Processing of Existing Types
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+------------------------------------------------------
 
 Most augmentation of type behavior at the bind/result level
 is achieved using :class:`.TypeDecorator`.   For the rare scenario
@@ -251,7 +328,7 @@ cursor directly::
 .. _types_sql_value_processing:
 
 Applying SQL-level Bind/Result Processing
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+-----------------------------------------
 
 As seen in the sections :ref:`types_typedecorator` and :ref:`replacing_processors`,
 SQLAlchemy allows Python functions to be invoked both when parameters are sent
@@ -390,7 +467,7 @@ See also:
 .. _types_operators:
 
 Redefining and Creating New Operators
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+-------------------------------------
 
 SQLAlchemy Core defines a fixed set of expression operators available to all column expressions.
 Some of these operations have the effect of overloading Python's built in operators;
@@ -487,7 +564,7 @@ See also:
 
 
 Creating New Types
-~~~~~~~~~~~~~~~~~~
+------------------
 
 The :class:`.UserDefinedType` class is provided as a simple base class
 for defining entirely new database types.   Use this to represent native
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index 0cc42bc..f569cbd 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -54,6 +54,7 @@ Production Ready
 * `sqlalchemy-sqlany <https://github.com/sqlanywhere/sqlalchemy-sqlany>`_ - driver for SAP Sybase SQL
   Anywhere, developed by SAP.
 * `sqlalchemy-monetdb <https://github.com/gijzelaerr/sqlalchemy-monetdb>`_ - driver for MonetDB.
+* `snowflake-sqlalchemy <https://github.com/snowflakedb/snowflake-sqlalchemy>`_ - driver for `Snowflake <https://www.snowflake.net/>`_.
 
 Experimental / Incomplete
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/doc/build/faq/sessions.rst b/doc/build/faq/sessions.rst
index ee280ae..f7247aa 100644
--- a/doc/build/faq/sessions.rst
+++ b/doc/build/faq/sessions.rst
@@ -321,7 +321,7 @@ how do I use ON DELETE CASCADE with SA's ORM?
 SQLAlchemy will always issue UPDATE or DELETE statements for dependent
 rows which are currently loaded in the :class:`.Session`.  For rows which
 are not loaded, it will by default issue SELECT statements to load
-those rows and udpate/delete those as well; in other words it assumes
+those rows and update/delete those as well; in other words it assumes
 there is no ON DELETE CASCADE configured.
 To configure SQLAlchemy to cooperate with ON DELETE CASCADE, see
 :ref:`passive_deletes`.
diff --git a/doc/build/orm/basic_relationships.rst b/doc/build/orm/basic_relationships.rst
index 069f8e7..cd6389a 100644
--- a/doc/build/orm/basic_relationships.rst
+++ b/doc/build/orm/basic_relationships.rst
@@ -388,8 +388,8 @@ associated object, and a second to a target attribute.
             right_id = Column(Integer, ForeignKey('right.id'), primary_key=True)
             extra_data = Column(String(50))
 
-            child = relationship("Child", back_populates="parent_associations")
-            parent = relationship("Parent", back_populates="child_associations")
+            child = relationship("Child", backref="parent_associations")
+            parent = relationship("Parent", backref="child_associations")
 
         class Parent(Base):
             __tablename__ = 'left'
diff --git a/doc/build/orm/extensions/declarative/basic_use.rst b/doc/build/orm/extensions/declarative/basic_use.rst
index 10b79e5..bc9f913 100644
--- a/doc/build/orm/extensions/declarative/basic_use.rst
+++ b/doc/build/orm/extensions/declarative/basic_use.rst
@@ -13,6 +13,7 @@ usage and declarative remain highly similar.
 
 As a simple example::
 
+    from sqlalchemy import Column, Integer, String
     from sqlalchemy.ext.declarative import declarative_base
 
     Base = declarative_base()
diff --git a/doc/build/orm/inheritance.rst b/doc/build/orm/inheritance.rst
index f640973..5b3e2c3 100644
--- a/doc/build/orm/inheritance.rst
+++ b/doc/build/orm/inheritance.rst
@@ -500,7 +500,7 @@ can be loaded::
 
     manager_and_engineer = with_polymorphic(
                                 Employee, [Manager, Engineer],
-                                aliased=True)
+                                flat=True)
 
     session.query(Company).\
         options(
@@ -508,12 +508,19 @@ can be loaded::
             )
         )
 
-.. versionadded:: 0.8
-    :func:`.joinedload`, :func:`.subqueryload`, :func:`.contains_eager`
-    and related loader options support
-    paths that are qualified with
-    :func:`~sqlalchemy.orm.interfaces.PropComparator.of_type`, supporting
-    single target types as well as :func:`.orm.with_polymorphic` targets.
+Note that once :meth:`~PropComparator.of_type` is the target of the eager load,
+that's the entity we would use for subsequent chaining, not the original class
+or derived class.  If we wanted to further eager load a collection on the
+eager-loaded ``Engineer`` class, we access this class from the namespace of the
+:func:`.orm.with_polymorphic` object::
+
+    session.query(Company).\
+        options(
+            joinedload(Company.employees.of_type(manager_and_engineer)).\
+            subqueryload(manager_and_engineer.Engineer.computers)
+        )
+
 
 Another option for the above query is to state the two subtypes separately;
 the :func:`.joinedload` directive should detect this and create the
diff --git a/doc/build/orm/loading_relationships.rst b/doc/build/orm/loading_relationships.rst
index 3a0026b..00e8c73 100644
--- a/doc/build/orm/loading_relationships.rst
+++ b/doc/build/orm/loading_relationships.rst
@@ -178,6 +178,14 @@ of a particular attribute, the :func:`.defaultload` method/function may be used:
     as well as the ``_all()`` functions will remain available for backwards-
     compatibility indefinitely.
 
+Polymorphic Eager Loading
+-------------------------
+
+Specification of polymorphic options on a per-eager-load basis is supported.
+See the section :ref:`eagerloading_polymorphic_subtypes` for examples
+of the :meth:`.PropComparator.of_type` method in conjunction with the
+:func:`.orm.with_polymorphic` function.
+
 Default Loading Strategies
 --------------------------
 
diff --git a/doc/build/orm/persistence_techniques.rst b/doc/build/orm/persistence_techniques.rst
index aee4812..f38708d 100644
--- a/doc/build/orm/persistence_techniques.rst
+++ b/doc/build/orm/persistence_techniques.rst
@@ -201,6 +201,14 @@ to this approach is strictly one of reduced Python overhead:
   objects and assigning state to them, which normally is also subject to
   expensive tracking of history on a per-attribute basis.
 
+* The objects passed to all bulk methods are processed
+  in the order they are received.   In the case of
+  :meth:`.Session.bulk_save_objects`, when objects of different types are passed,
+  the INSERT and UPDATE statements are necessarily broken up into per-type
+  groups.  In order to reduce the number of batch INSERT or UPDATE statements
+  passed to the DBAPI, ensure that the incoming list of objects
+  is grouped by type (see the sketch below).
+
 * The process of fetching primary keys after an INSERT also is disabled by
   default.   When performed correctly, INSERT statements can now more readily
   be batched by the unit of work process into ``executemany()`` blocks, which
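
For the grouping advice in the first bullet above, a minimal sketch, assuming
an open ``session`` and two hypothetical mapped classes ``A`` and ``B``:

    # sort a mixed list by class so that bulk_save_objects() can emit one
    # INSERT/UPDATE batch per type instead of alternating between types
    objects = [A(x=1), B(y=2), A(x=3), B(y=4)]
    objects.sort(key=lambda obj: type(obj).__name__)
    session.bulk_save_objects(objects)
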
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 06ceb49..0858ea0 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -121,7 +121,7 @@ from .schema import (
 from .inspection import inspect
 from .engine import create_engine, engine_from_config
 
-__version__ = '1.0.14'
+__version__ = '1.0.15'
 
 
 def __go(lcls):
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 927dcef..af93cc1 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -216,6 +216,25 @@ to either True or False.
    the SQL Server dialect's legacy behavior with schema-qualified table
    names.  This flag will default to False in version 1.1.
 
+MAX on VARCHAR / NVARCHAR
+-------------------------
+
+SQL Server supports the special string "MAX" within the
+:class:`.sqltypes.VARCHAR` and :class:`.sqltypes.NVARCHAR` datatypes,
+to indicate "maximum length possible".   The dialect currently handles this as
+a length of "None" in the base type, rather than supplying a
+dialect-specific version of these types, so that a base type
+specified such as ``VARCHAR(None)`` can assume "unlengthed" behavior on
+more than one backend without using dialect-specific types.
+
+To build a SQL Server VARCHAR or NVARCHAR with MAX length, use None::
+
+    my_table = Table(
+        'my_table', metadata,
+        Column('my_data', VARCHAR(None)),
+        Column('my_n_data', NVARCHAR(None))
+    )
+
 Collation Support
 -----------------
 
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index a3a3f2b..5ec259c 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -112,8 +112,22 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
 
         opts.update(url.query)
 
+        util.coerce_kw_type(opts, 'allow_local_infile', bool)
+        util.coerce_kw_type(opts, 'autocommit', bool)
         util.coerce_kw_type(opts, 'buffered', bool)
+        util.coerce_kw_type(opts, 'compress', bool)
+        util.coerce_kw_type(opts, 'connection_timeout', int)
+        util.coerce_kw_type(opts, 'connect_timeout', int)
+        util.coerce_kw_type(opts, 'consume_results', bool)
+        util.coerce_kw_type(opts, 'force_ipv6', bool)
+        util.coerce_kw_type(opts, 'get_warnings', bool)
+        util.coerce_kw_type(opts, 'pool_reset_session', bool)
+        util.coerce_kw_type(opts, 'pool_size', int)
         util.coerce_kw_type(opts, 'raise_on_warnings', bool)
+        util.coerce_kw_type(opts, 'raw', bool)
+        util.coerce_kw_type(opts, 'ssl_verify_cert', bool)
+        util.coerce_kw_type(opts, 'use_pure', bool)
+        util.coerce_kw_type(opts, 'use_unicode', bool)
 
         # unfortunately, MySQL/connector python refuses to release a
         # cursor without reading fully, so non-buffered isn't an option
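
Everything arriving in a URL query string is a string, which is why these
coercions are needed; a minimal sketch of the effect (the URL and its values
are hypothetical):

    from sqlalchemy import util
    from sqlalchemy.engine.url import make_url

    url = make_url(
        "mysql+mysqlconnector://scott:tiger@localhost/test"
        "?connection_timeout=10&raise_on_warnings=true&compress=false")
    opts = dict(url.query)                      # all values are strings here
    util.coerce_kw_type(opts, 'connection_timeout', int)
    util.coerce_kw_type(opts, 'raise_on_warnings', bool)
    util.coerce_kw_type(opts, 'compress', bool)
    # opts now carries 10, True and False as native Python types
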
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 766847c..3266677 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -360,7 +360,7 @@ Partial indexes add criterion to the index definition so that the index is
 applied to a subset of rows.   These can be specified on :class:`.Index`
 using the ``postgresql_where`` keyword argument::
 
-  Index('my_index', my_table.c.id, postgresql_where=tbl.c.value > 10)
+  Index('my_index', my_table.c.id, postgresql_where=my_table.c.value > 10)
 
 Operator Classes
 ^^^^^^^^^^^^^^^^^
diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py
index 2504be9..e435bd6 100644
--- a/lib/sqlalchemy/ext/baked.py
+++ b/lib/sqlalchemy/ext/baked.py
@@ -194,7 +194,8 @@ class BakedQuery(object):
 
         """
         for k, cache_key, query in context.attributes["baked_queries"]:
-            bk = BakedQuery(self._bakery, lambda sess: query.with_session(sess))
+            bk = BakedQuery(self._bakery,
+                            lambda sess, q=query: q.with_session(sess))
             bk._cache_key = cache_key
             context.attributes[k] = bk.for_session(session).params(**params)
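
The one-line change above is the usual default-argument workaround for
Python's late-binding closures; a standalone sketch of the pitfall, independent
of SQLAlchemy:

    # each lambda closes over the variable q itself, so without the default
    # argument they all see whatever q held once the loop has finished
    fns = [lambda: q for q in ("a", "b", "c")]
    print([f() for f in fns])        # ['c', 'c', 'c']

    # binding q as a default argument captures each value at definition time
    fns = [lambda q=q: q for q in ("a", "b", "c")]
    print([f() for f in fns])        # ['a', 'b', 'c']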
 
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 54e78ee..e67e79d 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -283,7 +283,7 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
 
     :param constructor:
       Defaults to
-      :func:`~sqlalchemy.ext.declarative._declarative_constructor`, an
+      :func:`~sqlalchemy.ext.declarative.base._declarative_constructor`, an
       __init__ implementation that assigns \**kwargs for declared
       fields and relationships to an instance.  If ``None`` is supplied,
       no __init__ will be provided and construction will fall back to
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 2a1b9e6..534e5e4 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1180,7 +1180,6 @@ class Mapper(InspectionAttr):
             instrumentation.unregister_class(self.class_)
 
     def _configure_pks(self):
-
         self.tables = sql_util.find_tables(self.mapped_table)
 
         self._pks_by_table = {}
@@ -1266,7 +1265,6 @@ class Mapper(InspectionAttr):
                 col.table not in self._cols_by_table))
 
     def _configure_properties(self):
-
         # Column and other ClauseElement objects which are mapped
         self.columns = self.c = util.OrderedProperties()
 
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index b4389d0..48b2b02 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -888,28 +888,21 @@ class SubqueryLoader(AbstractRelationshipLoader):
 
         # determine the immediate parent class we are joining from,
         # which needs to be aliased.
-        if len(to_join) > 1:
-            info = inspect(to_join[-1][0])
 
         if len(to_join) < 2:
             # in the case of a one level eager load, this is the
             # leftmost "left_alias".
             parent_alias = left_alias
-        elif info.mapper.isa(self.parent):
-            # In the case of multiple levels, retrieve
-            # it from subq_path[-2]. This is the same as self.parent
-            # in the vast majority of cases, and [ticket:2014]
-            # illustrates a case where sub_path[-2] is a subclass
-            # of self.parent
-            parent_alias = orm_util.AliasedClass(
-                to_join[-1][0],
-                use_mapper_path=True)
         else:
-            # if of_type() were used leading to this relationship,
-            # self.parent is more specific than subq_path[-2]
-            parent_alias = orm_util.AliasedClass(
-                self.parent,
-                use_mapper_path=True)
+            info = inspect(to_join[-1][0])
+            if info.is_aliased_class:
+                parent_alias = info.entity
+            else:
+                # alias a plain mapper as we may be
+                # joining multiple times
+                parent_alias = orm_util.AliasedClass(
+                    info.entity,
+                    use_mapper_path=True)
 
         local_cols = self.parent_property.local_columns
 
@@ -922,35 +915,46 @@ class SubqueryLoader(AbstractRelationshipLoader):
     def _apply_joins(
             self, q, to_join, left_alias, parent_alias,
             effective_entity):
-        for i, (mapper, key) in enumerate(to_join):
-
-            # we need to use query.join() as opposed to
-            # orm.join() here because of the
-            # rich behavior it brings when dealing with
-            # "with_polymorphic" mappers.  "aliased"
-            # and "from_joinpoint" take care of most of
-            # the chaining and aliasing for us.
-
-            first = i == 0
-            middle = i < len(to_join) - 1
-            second_to_last = i == len(to_join) - 2
-            last = i == len(to_join) - 1
-
-            if first:
-                attr = getattr(left_alias, key)
-                if last and effective_entity is not self.mapper:
-                    attr = attr.of_type(effective_entity)
-            else:
-                if last and effective_entity is not self.mapper:
-                    attr = getattr(parent_alias, key).\
-                        of_type(effective_entity)
+
+        ltj = len(to_join)
+        if ltj == 1:
+            to_join = [
+                getattr(left_alias, to_join[0][1]).of_type(effective_entity)
+            ]
+        elif ltj == 2:
+            to_join = [
+                getattr(left_alias, to_join[0][1]).of_type(parent_alias),
+                getattr(parent_alias, to_join[-1][1]).of_type(effective_entity)
+            ]
+        elif ltj > 2:
+            middle = [
+                (
+                    orm_util.AliasedClass(item[0])
+                    if not inspect(item[0]).is_aliased_class
+                    else item[0].entity,
+                    item[1]
+                ) for item in to_join[1:-1]
+            ]
+            inner = []
+
+            while middle:
+                item = middle.pop(0)
+                attr = getattr(item[0], item[1])
+                if middle:
+                    attr = attr.of_type(middle[0][0])
                 else:
-                    attr = getattr(mapper.entity, key)
+                    attr = attr.of_type(parent_alias)
 
-            if second_to_last:
-                q = q.join(parent_alias, attr, from_joinpoint=True)
-            else:
-                q = q.join(attr, aliased=middle, from_joinpoint=True)
+                inner.append(attr)
+
+            to_join = [
+                getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)
+            ] + inner + [
+                getattr(parent_alias, to_join[-1][1]).of_type(effective_entity)
+            ]
+
+        for attr in to_join:
+            q = q.join(attr, from_joinpoint=True)
         return q
 
     def _setup_options(self, q, subq_path, orig_query, effective_entity):
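
For context, the kind of loader chain this rewrite is meant to link correctly
(per the changelog entry for tickets 3773/3774 above), reusing the entities
from the inheritance documentation example; a sketch rather than a test from
this commit:

    from sqlalchemy.orm import subqueryload, with_polymorphic

    manager_and_engineer = with_polymorphic(
        Employee, [Manager, Engineer], flat=True)

    query = session.query(Company).options(
        subqueryload(Company.employees.of_type(manager_and_engineer)).
        subqueryload(manager_and_engineer.Engineer.computers))
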
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 42fadca..9ea2554 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -692,11 +692,11 @@ def with_polymorphic(base, classes, selectable=False,
         versions of MySQL.
 
     :param flat: Boolean, will be passed through to the
-        :meth:`.FromClause.alias` call so that aliases of :class:`.Join`
-        objects don't include an enclosing SELECT.  This can lead to more
-        efficient queries in many circumstances.  A JOIN against a nested JOIN
-        will be rewritten as a JOIN against an aliased SELECT subquery on
-        backends that don't support this syntax.
+     :meth:`.FromClause.alias` call so that aliases of :class:`.Join`
+     objects don't include an enclosing SELECT.  This can lead to more
+     efficient queries in many circumstances.  A JOIN against a nested JOIN
+     will be rewritten as a JOIN against an aliased SELECT subquery on
+     backends that don't support this syntax.
 
      Setting ``flat`` to ``True`` implies the ``aliased`` flag is
      also ``True``.
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index f9c6d05..25f1311 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -598,6 +598,9 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
     def _init_collections(self):
         pass
 
+    def _reset_exported(self):
+        pass
+
     @util.memoized_property
     def _autoincrement_column(self):
         for col in self.primary_key:
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index 7b1bfcb..b27436d 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -720,7 +720,7 @@ class TypeDecorator(TypeEngine):
     return an empty tuple, in which case no values will be coerced to
     constants.
 
-    ..versionadded:: 0.8.2
+    .. versionadded:: 0.8.2
         Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
         control of ``__eq__()`` ``__ne__()`` operations.
 
diff --git a/test/dialect/mysql/test_reflection.py b/test/dialect/mysql/test_reflection.py
index b3173db..ba02f72 100644
--- a/test/dialect/mysql/test_reflection.py
+++ b/test/dialect/mysql/test_reflection.py
@@ -97,7 +97,6 @@ class TypeReflectionTest(fixtures.TestBase):
     def test_year_types(self):
         specs = [
             (mysql.YEAR(), mysql.YEAR(display_width=4)),
-            (mysql.YEAR(display_width=2), mysql.YEAR(display_width=2)),
             (mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
         ]
 
diff --git a/test/dialect/mysql/test_types.py b/test/dialect/mysql/test_types.py
index 7c279ff..a5850eb 100644
--- a/test/dialect/mysql/test_types.py
+++ b/test/dialect/mysql/test_types.py
@@ -484,6 +484,7 @@ class TypesTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL):
 
             )
 
+    @testing.requires.mysql_zero_date
     @testing.provide_metadata
     def test_timestamp_nullable(self):
         ts_table = Table(
@@ -733,7 +734,7 @@ class EnumSetTest(
             exc.StatementError, set_table.insert().execute,
             e1='c', e2='c', e3='c', e4='c')
 
-    @testing.fails_on("+oursql", "oursql raises on the truncate warning")
+    @testing.requires.mysql_non_strict
     @testing.provide_metadata
     def test_empty_set_no_empty_string(self):
         t = Table(
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index c81a758..5b84280 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -433,12 +433,7 @@ class TransactionTest(fixtures.TestBase):
         connection.close()
 
     @testing.requires.two_phase_transactions
-    @testing.crashes('mysql+oursql',
-                     'Times out in full test runs only, causing '
-                     'subsequent tests to fail')
-    @testing.crashes('mysql+zxjdbc',
-                     'Deadlocks, causing subsequent tests to fail')
-    @testing.fails_on('mysql', 'FIXME: unknown')
+    @testing.requires.two_phase_recovery
     def test_two_phase_recover(self):
 
         # MySQL recovery doesn't currently seem to work correctly
diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py
index 4d69381..cc03b24 100644
--- a/test/ext/test_baked.py
+++ b/test/ext/test_baked.py
@@ -304,12 +304,16 @@ class ResultTest(BakedTest):
     def setup_mappers(cls):
         User = cls.classes.User
         Address = cls.classes.Address
+        Order = cls.classes.Order
 
         mapper(User, cls.tables.users, properties={
             "addresses": relationship(
-                Address, order_by=cls.tables.addresses.c.id)
+                Address, order_by=cls.tables.addresses.c.id),
+            "orders": relationship(
+                Order, order_by=cls.tables.orders.c.id)
         })
         mapper(Address, cls.tables.addresses)
+        mapper(Order, cls.tables.orders)
 
     def test_cachekeys_on_constructor(self):
         User = self.classes.User
@@ -550,24 +554,29 @@ class ResultTest(BakedTest):
     def test_subquery_eagerloading(self):
         User = self.classes.User
         Address = self.classes.Address
+        Order = self.classes.Order
 
-        base_bq = self.bakery(
-            lambda s: s.query(User))
+        # Override the default bakery for one with a smaller size. This used to
+        # trigger a bug when unbaking subqueries.
+        self.bakery = baked.bakery(size=3)
+        base_bq = self.bakery(lambda s: s.query(User))
 
-        base_bq += lambda q: q.options(subqueryload(User.addresses))
+        base_bq += lambda q: q.options(subqueryload(User.addresses),
+                                       subqueryload(User.orders))
         base_bq += lambda q: q.order_by(User.id)
 
         assert_result = [
-            User(id=7, addresses=[
-                Address(id=1, email_address='jack at bean.com')]),
+            User(id=7,
+                addresses=[Address(id=1, email_address='jack at bean.com')],
+                orders=[Order(id=1), Order(id=3), Order(id=5)]),
             User(id=8, addresses=[
                 Address(id=2, email_address='ed at wood.com'),
                 Address(id=3, email_address='ed at bettyboop.com'),
                 Address(id=4, email_address='ed at lala.com'),
             ]),
-            User(id=9, addresses=[
-                Address(id=5)
-            ]),
+            User(id=9,
+                addresses=[Address(id=5)], 
+                orders=[Order(id=2), Order(id=4)]),
             User(id=10, addresses=[])
         ]
 
@@ -602,18 +611,18 @@ class ResultTest(BakedTest):
                         def go():
                             result = bq(sess).all()
                             eq_(assert_result[1:2], result)
-                        self.assert_sql_count(testing.db, go, 2)
+                        self.assert_sql_count(testing.db, go, 3)
                 else:
                     if cond1:
                         def go():
                             result = bq(sess).all()
                             eq_(assert_result[0:1], result)
-                        self.assert_sql_count(testing.db, go, 2)
+                        self.assert_sql_count(testing.db, go, 3)
                     else:
                         def go():
                             result = bq(sess).all()
                             eq_(assert_result[1:3], result)
-                        self.assert_sql_count(testing.db, go, 2)
+                        self.assert_sql_count(testing.db, go, 3)
 
                 sess.close()
 
diff --git a/test/orm/inheritance/test_abc_inheritance.py b/test/orm/inheritance/test_abc_inheritance.py
index 757f886..3fc5d52 100644
--- a/test/orm/inheritance/test_abc_inheritance.py
+++ b/test/orm/inheritance/test_abc_inheritance.py
@@ -114,6 +114,7 @@ def produce_test(parent, child, direction):
                                 relationship(child_mapper,
                                             primaryjoin=relationshipjoin,
                                             foreign_keys=foreign_keys,
+                                            order_by=child_mapper.c.id,
                                             remote_side=remote_side, uselist=True))
 
             sess = create_session()
@@ -177,4 +178,4 @@ for parent in ["a", "b", "c"]:
             exec("%s = testclass" % testclass.__name__)
             del testclass
 
-del produce_test
\ No newline at end of file
+del produce_test
diff --git a/test/orm/inheritance/test_polymorphic_rel.py b/test/orm/inheritance/test_polymorphic_rel.py
index 29fbcff..a3cb664 100644
--- a/test/orm/inheritance/test_polymorphic_rel.py
+++ b/test/orm/inheritance/test_polymorphic_rel.py
@@ -549,7 +549,8 @@ class _PolymorphicTestBase(object):
         sess = create_session()
         def go():
             eq_(sess.query(Person)
-                    .with_polymorphic(Engineer).all(),
+                    .with_polymorphic(Engineer).
+                    order_by(Person.person_id).all(),
                 self._emps_wo_relationships_fixture())
         self.assert_sql_count(testing.db, go, 3)
 
@@ -560,6 +561,7 @@ class _PolymorphicTestBase(object):
                     .with_polymorphic(
                         Engineer,
                         people.outerjoin(engineers))
+                    .order_by(Person.person_id)
                     .all(),
                 self._emps_wo_relationships_fixture())
         self.assert_sql_count(testing.db, go, 3)
diff --git a/test/orm/test_of_type.py b/test/orm/test_of_type.py
index b9ebc2d..0b6ef16 100644
--- a/test/orm/test_of_type.py
+++ b/test/orm/test_of_type.py
@@ -13,6 +13,7 @@ from .inheritance._poly_fixtures import Company, Person, Engineer, Manager, Boss
     Machine, Paperwork, _PolymorphicFixtureBase, _Polymorphic,\
     _PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
     _PolymorphicAliasedJoins
+from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
 
 
 class _PolymorphicTestBase(object):
@@ -313,6 +314,8 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
             id = Column(Integer, primary_key=True,
                                         test_needs_autoincrement=True)
             type = Column(String(10))
+            widget_id = Column(ForeignKey('widget.id'))
+            widget = relationship("Widget")
             container_id = Column(Integer, ForeignKey('data_container.id'))
             __mapper_args__ = {"polymorphic_on": type}
 
@@ -337,6 +340,13 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
             name = Column(String(10))
             jobs = relationship(Job, order_by=Job.id)
 
+        class Widget(ComparableEntity, Base):
+            __tablename__ = "widget"
+
+            id = Column(Integer, primary_key=True,
+                                        test_needs_autoincrement=True)
+            name = Column(String(10))
+
     @classmethod
     def insert_data(cls):
         s = Session(testing.db)
@@ -346,23 +356,24 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
 
     @classmethod
     def _fixture(cls):
-        ParentThing, DataContainer, SubJob = \
+        ParentThing, DataContainer, SubJob, Widget = \
             cls.classes.ParentThing,\
             cls.classes.DataContainer,\
-            cls.classes.SubJob
+            cls.classes.SubJob,\
+            cls.classes.Widget
         return [
             ParentThing(
                 container=DataContainer(name="d1",
                     jobs=[
-                        SubJob(attr="s1"),
-                        SubJob(attr="s2")
+                        SubJob(attr="s1", widget=Widget(name='w1')),
+                        SubJob(attr="s2", widget=Widget(name='w2'))
                     ])
             ),
             ParentThing(
                 container=DataContainer(name="d2",
                     jobs=[
-                        SubJob(attr="s3"),
-                        SubJob(attr="s4")
+                        SubJob(attr="s3", widget=Widget(name='w3')),
+                        SubJob(attr="s4", widget=Widget(name='w4'))
                     ])
             ),
         ]
@@ -389,7 +400,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
                 q.all(),
                 self._dc_fixture()
             )
-        self.assert_sql_count(testing.db, go, 1)
+        self.assert_sql_count(testing.db, go, 5)
 
     def test_joinedload_wpoly(self):
         ParentThing, DataContainer, Job, SubJob = \
@@ -408,7 +419,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
                 q.all(),
                 self._dc_fixture()
             )
-        self.assert_sql_count(testing.db, go, 1)
+        self.assert_sql_count(testing.db, go, 5)
 
     def test_joinedload_wsubclass(self):
         ParentThing, DataContainer, Job, SubJob = \
@@ -424,7 +435,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
                 q.all(),
                 self._dc_fixture()
             )
-        self.assert_sql_count(testing.db, go, 1)
+        self.assert_sql_count(testing.db, go, 5)
 
     def test_lazyload(self):
         DataContainer = self.classes.DataContainer
@@ -438,7 +449,8 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
         # SELECT data container
         # SELECT job * 2 container rows
         # SELECT subjob * 4 rows
-        self.assert_sql_count(testing.db, go, 7)
+        # SELECT widget * 4 rows
+        self.assert_sql_count(testing.db, go, 11)
 
     def test_subquery_wsubclass(self):
         ParentThing, DataContainer, Job, SubJob = \
@@ -454,7 +466,7 @@ class SubclassRelationshipTest(testing.AssertsCompiledSQL, fixtures.DeclarativeM
                 q.all(),
                 self._dc_fixture()
             )
-        self.assert_sql_count(testing.db, go, 2)
... 342 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/sqlalchemy.git


