author     Mike Bayer <mike_mp@zzzcomputing.com>  2010-03-30 18:15:02 -0400
committer  Mike Bayer <mike_mp@zzzcomputing.com>  2010-03-30 18:15:02 -0400
commit     00738b252c280111dafc8a034eade1507c1dddd8 (patch)
tree       84250759b0e653e7b72278b649ccc00ce3d074a7
parent     62d6bf4cc33171ac21cd9b4d52701d6af39cfb42 (diff)
parent     4cbe117eb2feb7cff28c66d849d3a0613448fdce (diff)
download   sqlalchemy-00738b252c280111dafc8a034eade1507c1dddd8.tar.gz
merge trunk. Re-instating topological._find_cycles for the moment
-rw-r--r--  .hgtags | 2
-rw-r--r--  CHANGES | 175
-rw-r--r--  MANIFEST.in | 2
-rw-r--r--  doc/build/builder/util.py | 2
-rw-r--r--  doc/build/dbengine.rst | 3
-rw-r--r--  doc/build/examples.rst | 2
-rw-r--r--  doc/build/mappers.rst | 132
-rw-r--r--  doc/build/ormtutorial.rst | 14
-rw-r--r--  doc/build/reference/dialects/index.rst | 2
-rw-r--r--  doc/build/reference/dialects/mssql.rst | 11
-rw-r--r--  doc/build/reference/dialects/mysql.rst | 2
-rw-r--r--  doc/build/reference/ext/horizontal_shard.rst | 14
-rw-r--r--  doc/build/reference/ext/index.rst | 1
-rw-r--r--  doc/build/reference/ext/orderinglist.rst | 87
-rw-r--r--  doc/build/reference/orm/query.rst | 10
-rw-r--r--  doc/build/session.rst | 2
-rw-r--r--  doc/build/sqlexpression.rst | 24
-rw-r--r--  doc/build/static/docs.css | 18
-rw-r--r--  examples/adjacency_list/adjacency_list.py | 4
-rw-r--r--  examples/association/basic_association.py | 4
-rw-r--r--  examples/association/proxied_association.py | 6
-rw-r--r--  examples/beaker_caching/advanced.py | 10
-rw-r--r--  examples/beaker_caching/relation_caching.py | 4
-rw-r--r--  examples/custom_attributes/custom_management.py | 6
-rw-r--r--  examples/elementtree/adjacency_list.py | 4
-rw-r--r--  examples/elementtree/optimized_al.py | 4
-rw-r--r--  examples/inheritance/polymorph.py | 2
-rw-r--r--  examples/sharding/attribute_shard.py | 2
-rw-r--r--  lib/sqlalchemy/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/mxodbc.py | 45
-rw-r--r--  lib/sqlalchemy/connectors/pyodbc.py | 43
-rw-r--r--  lib/sqlalchemy/dialects/access/base.py | 5
-rw-r--r--  lib/sqlalchemy/dialects/firebird/base.py | 7
-rw-r--r--  lib/sqlalchemy/dialects/informix/base.py | 3
-rw-r--r--  lib/sqlalchemy/dialects/maxdb/base.py | 5
-rw-r--r--  lib/sqlalchemy/dialects/mssql/adodbapi.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 127
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/mssql/mxodbc.py | 36
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py | 85
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pyodbc.py | 126
-rw-r--r--  lib/sqlalchemy/dialects/mysql/__init__.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 111
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 11
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqldb.py | 28
-rw-r--r--  lib/sqlalchemy/dialects/mysql/oursql.py | 15
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pyodbc.py | 19
-rw-r--r--  lib/sqlalchemy/dialects/mysql/zxjdbc.py | 7
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py | 81
-rw-r--r--  lib/sqlalchemy/dialects/oracle/cx_oracle.py | 59
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 105
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 32
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 19
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlite.py | 9
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 17
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pyodbc.py | 32
-rw-r--r--  lib/sqlalchemy/engine/__init__.py | 51
-rw-r--r--  lib/sqlalchemy/engine/base.py | 5
-rw-r--r--  lib/sqlalchemy/engine/default.py | 5
-rw-r--r--  lib/sqlalchemy/ext/compiler.py | 2
-rw-r--r--  lib/sqlalchemy/ext/declarative.py | 44
-rw-r--r--  lib/sqlalchemy/ext/horizontal_shard.py | 125
-rw-r--r--  lib/sqlalchemy/ext/orderinglist.py | 153
-rw-r--r--  lib/sqlalchemy/orm/__init__.py | 221
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py | 168
-rw-r--r--  lib/sqlalchemy/orm/properties.py | 192
-rw-r--r--  lib/sqlalchemy/orm/query.py | 126
-rw-r--r--  lib/sqlalchemy/orm/session.py | 27
-rw-r--r--  lib/sqlalchemy/orm/shard.py | 112
-rw-r--r--  lib/sqlalchemy/orm/strategies.py | 574
-rw-r--r--  lib/sqlalchemy/pool.py | 39
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 78
-rw-r--r--  lib/sqlalchemy/sql/expression.py | 30
-rw-r--r--  lib/sqlalchemy/sql/util.py | 10
-rw-r--r--  lib/sqlalchemy/sql/visitors.py | 21
-rw-r--r--  lib/sqlalchemy/test/requires.py | 13
-rw-r--r--  lib/sqlalchemy/topological.py | 33
-rw-r--r--  setup.cfg | 1
-rw-r--r--  test/aaa_profiling/test_zoomark.py | 2
-rw-r--r--  test/aaa_profiling/test_zoomark_orm.py | 4
-rw-r--r--  test/dialect/test_mssql.py | 5
-rw-r--r--  test/dialect/test_mxodbc.py | 69
-rw-r--r--  test/dialect/test_mysql.py | 11
-rw-r--r--  test/dialect/test_oracle.py | 343
-rw-r--r--  test/dialect/test_postgresql.py | 106
-rw-r--r--  test/dialect/test_sqlite.py | 8
-rw-r--r--  test/engine/test_pool.py | 19
-rw-r--r--  test/ext/test_associationproxy.py | 14
-rw-r--r--  test/ext/test_compiler.py | 57
-rw-r--r--  test/ext/test_declarative.py | 67
-rw-r--r--  test/ext/test_horizontal_shard.py (renamed from test/orm/sharding/test_shard.py) | 2
-rw-r--r--  test/ext/test_orderinglist.py | 2
-rw-r--r--  test/ext/test_serializer.py | 4
-rw-r--r--  test/orm/_fixtures.py | 43
-rw-r--r--  test/orm/inheritance/test_basic.py | 10
-rw-r--r--  test/orm/inheritance/test_concrete.py | 4
-rw-r--r--  test/orm/inheritance/test_magazine.py | 2
-rw-r--r--  test/orm/inheritance/test_manytomany.py | 10
-rw-r--r--  test/orm/inheritance/test_poly_linked_list.py | 4
-rw-r--r--  test/orm/inheritance/test_polymorph2.py | 14
-rw-r--r--  test/orm/inheritance/test_productspec.py | 20
-rw-r--r--  test/orm/inheritance/test_query.py | 136
-rw-r--r--  test/orm/inheritance/test_single.py | 2
-rw-r--r--  test/orm/sharding/__init__.py | 0
-rw-r--r--  test/orm/test_association.py | 2
-rw-r--r--  test/orm/test_assorted_eager.py | 58
-rw-r--r--  test/orm/test_cascade.py | 2
-rw-r--r--  test/orm/test_compile.py | 10
-rw-r--r--  test/orm/test_cycles.py | 8
-rw-r--r--  test/orm/test_eager_relations.py | 365
-rw-r--r--  test/orm/test_expire.py | 86
-rw-r--r--  test/orm/test_lazy_relations.py | 36
-rw-r--r--  test/orm/test_manytomany.py | 20
-rw-r--r--  test/orm/test_mapper.py | 104
-rw-r--r--  test/orm/test_merge.py | 2
-rw-r--r--  test/orm/test_naturalpks.py | 4
-rw-r--r--  test/orm/test_pickled.py | 6
-rw-r--r--  test/orm/test_query.py | 546
-rw-r--r--  test/orm/test_relationships.py | 4
-rw-r--r--  test/orm/test_session.py | 14
-rw-r--r--  test/orm/test_subquery_relations.py | 784
-rw-r--r--  test/orm/test_unitofwork.py | 42
-rw-r--r--  test/orm/test_versioning.py | 42
-rw-r--r--  test/perf/masseagerload.py | 8
-rw-r--r--  test/perf/objupdatespeed.py | 2
-rw-r--r--  test/perf/ormsession.py | 10
-rw-r--r--  test/sql/test_compiler.py | 236
-rw-r--r--  test/sql/test_generative.py | 25
-rw-r--r--  test/sql/test_types.py | 53
-rw-r--r--  test/zblog/mappers.py | 18
130 files changed, 5221 insertions, 1844 deletions
diff --git a/.hgtags b/.hgtags
index 10bb65555..a7447f3e4 100644
--- a/.hgtags
+++ b/.hgtags
@@ -68,3 +68,5 @@ f7dc5c00726e372077e7d1b27de14a9611621ecf rel_0_3_2
f8e1d5a092499f3c1b15c1e567699b804399da5c rel_0_4_3
fa0ca1ddb96d099f91ad6d4b451a86a3622ed97c rel_0_4beta1
feb03a8ae2232756c4af5f70c2964b99ee815d4f rel_0_4_7
+610e98d5c6dac3766b621a48d130b415cc651cd5 rel_0_6beta2
+6e862c4332dd6d751c87539727c4f4ba337e92db rel_0_6beta3
diff --git a/CHANGES b/CHANGES
index abf9e775f..cca3298f0 100644
--- a/CHANGES
+++ b/CHANGES
@@ -3,7 +3,157 @@
=======
CHANGES
=======
+
+0.6.0
+=====
+
+- orm
+ - A collection lazy load will switch off default
+ eagerloading on the reverse many-to-one side, since
+ that loading is by definition unnecessary. [ticket:1495]
+
+ - Session.refresh() now does an equivalent expire()
+ on the given instance first, so that the "refresh-expire"
+ cascade is propagated. Previously, refresh() was
+ not affected in any way by the presence of "refresh-expire"
+ cascade. This is a change in behavior versus that
+ of 0.6beta2, where the "lockmode" flag passed to refresh()
+ would cause a version check to occur. Since the instance
+ is first expired, refresh() always upgrades the object
+ to the most recent version.
+
+ - The 'refresh-expire' cascade, when reaching a pending object,
+ will expunge the object if the cascade also includes
+ "delete-orphan", or will simply detach it otherwise.
+ [ticket:1754]
+
+- ext
+ - the compiler extension now allows @compiles decorators
+ on base classes that extend to child classes, @compiles
+ decorators on child classes that aren't broken by a
+ @compiles decorator on the base class.
+
+0.6beta3
+========
+
+- orm
+ - Major feature: Added new "subquery" loading capability to
+ relationship(). This is an eager loading option which
+ generates a second SELECT for each collection represented
+ in a query, across all parents at once. The query
+ re-issues the original end-user query wrapped in a subquery,
+ applies joins out to the target collection, and loads
+ all those collections fully in one result, similar to
+ "joined" eager loading but using all inner joins and not
+ re-fetching full parent rows repeatedly (as most DBAPIs seem
+ to do, even if columns are skipped). Subquery loading is
+ available at mapper config level using "lazy='subquery'" and
+ at the query options level using "subqueryload(props..)",
+ "subqueryload_all(props...)". [ticket:1675]
+
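A minimal sketch of the new option in use, assuming hypothetical mapped classes ``User`` and ``Address`` (not part of the changelog itself)::

    from sqlalchemy.orm import subqueryload, subqueryload_all

    # one SELECT for the User rows, plus one SELECT that loads the
    # 'addresses' collections for all of those users at once
    users = session.query(User).options(subqueryload('addresses')).all()

    # dotted paths are covered by subqueryload_all()
    users = session.query(User).options(subqueryload_all('orders.items')).all()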
+ - To accommodate the fact that there are now two kinds of eager
+ loading available, the new names for eagerload() and
+ eagerload_all() are joinedload() and joinedload_all(). The
+ old names will remain as synonyms for the foreseeable future.
+
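A sketch of the renaming; the old names continue to work as synonyms::

    from sqlalchemy.orm import joinedload, joinedload_all

    # formerly eagerload('addresses')
    session.query(User).options(joinedload('addresses')).all()

    # formerly eagerload_all('orders.items')
    session.query(User).options(joinedload_all('orders.items')).all()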
+ - The "lazy" flag on the relationship() function now accepts
+ a string argument for all kinds of loading: "select", "joined",
+ "subquery", "noload" and "dynamic", where the default is now
+ "select". The old values of True/
+ False/None still retain their usual meanings and will remain
+ as synonyms for the foreseeable future.
+
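The string values, sketched against a hypothetical Parent/Child mapping::

    from sqlalchemy.orm import relationship

    relationship(Child, lazy='select')    # lazy loading; was lazy=True
    relationship(Child, lazy='joined')    # joined eager loading; was lazy=False
    relationship(Child, lazy='subquery')  # subquery eager loading; new in 0.6beta3
    relationship(Child, lazy='noload')    # no loading; was lazy=None
    relationship(Child, lazy='dynamic')   # dynamic query-returning attribute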
+ - Added with_hint() method to Query() construct. This calls
+ directly down to select().with_hint() and also accepts
+ entities as well as tables and aliases. See with_hint() in the
+ SQL section below. [ticket:921]
+
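A sketch of the Query form; the entity and the Oracle index name here are hypothetical::

    # '%(name)s' is replaced with the table or alias name at compile time
    session.query(User).\
        with_hint(User, 'index(%(name)s ix_users_name)', 'oracle').\
        filter(User.name == 'jack').all()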
+ - Fixed bug in Query whereby calling q.join(prop).from_self(...).
+ join(prop) would fail to render the second join outside the
+ subquery, when joining on the same criterion as was on the
+ inside.
+
+ - Fixed bug in Query whereby the usage of aliased() constructs
+ would fail if the underlying table (but not the actual alias)
+ were referenced inside the subquery generated by
+ q.from_self() or q.select_from().
+
+ - Fixed bug which affected all eagerload() and similar options
+ such that "remote" eager loads, i.e. eagerloads off of a lazy
+ load such as query(A).options(eagerload(A.b, B.c))
+ wouldn't eagerload anything, but using eagerload("b.c") would
+ work fine.
+
+ - Query gains an add_columns(*columns) method which is a multi-
+ version of add_column(col). add_column(col) is future
+ deprecated.
+
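A sketch of the two forms, assuming a hypothetical ``User`` mapping::

    # new multi-column form
    session.query(User).add_columns(User.name, User.fullname).all()

    # older form, one column at a time; to be deprecated
    session.query(User).add_column(User.name).all()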
+ - Query.join() will detect if the end result will be
+ "FROM A JOIN A", and will raise an error if so.
+
+ - Query.join(Cls.propname, from_joinpoint=True) will check more
+ carefully that "Cls" is compatible with the current joinpoint,
+ and act the same way as Query.join("propname", from_joinpoint=True)
+ in that regard.
+
+- sql
+ - Added with_hint() method to select() construct. Specify
+ a table/alias, hint text, and optional dialect name, and
+ "hints" will be rendered in the appropriate place in the
+ statement. Works for Oracle, Sybase, MySQL. [ticket:921]
+
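A self-contained sketch; the table and index name are hypothetical::

    from sqlalchemy import MetaData, Table, Column, Integer, select

    metadata = MetaData()
    mytable = Table('mytable', metadata, Column('id', Integer, primary_key=True))

    # renders SELECT /*+ index(mytable ix_mytable) */ ... on Oracle;
    # '%(name)s' is replaced with the table or alias name
    stmt = select([mytable]).with_hint(mytable, 'index(%(name)s ix_mytable)', 'oracle')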
+ - Fixed bug introduced in 0.6beta2 where column labels would
+ render inside of column expressions already assigned a label.
+ [ticket:1747]
+
+- postgresql
+ - The psycopg2 dialect will log NOTICE messages via the
+ "sqlalchemy.dialects.postgresql" logger name.
+ [ticket:877]
+
+ - the TIME and TIMESTAMP types are now available from the
+ postgresql dialect directly, which add the PG-specific
+ argument 'precision' to both. 'precision' and
+ 'timezone' are correctly reflected for both TIME and
+ TIMESTAMP types. [ticket:997]
+
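A sketch of the dialect-specific types; the table and column names are hypothetical::

    from sqlalchemy import Column, MetaData, Table
    from sqlalchemy.dialects.postgresql import TIME, TIMESTAMP

    events = Table('events', MetaData(),
        Column('created_at', TIMESTAMP(timezone=True, precision=6)),
        Column('alarm', TIME(precision=0)),
    )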
+- mysql
+ - No longer guessing that TINYINT(1) should be BOOLEAN
+ when reflecting - TINYINT(1) is returned. Use Boolean/
+ BOOLEAN in table definition to get boolean conversion
+ behavior. [ticket:1752]
+
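A sketch of the distinction, with hypothetical column names::

    from sqlalchemy import Boolean, Column, MetaData, Table
    from sqlalchemy.dialects.mysql import TINYINT

    t = Table('t', MetaData(),
        Column('flag', Boolean),     # boolean coercion behavior
        Column('tiny', TINYINT(1)),  # plain integer; reflected back as TINYINT(1)
    )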
+- oracle
+ - The Oracle dialect will issue VARCHAR type definitions
+ using character counts, i.e. VARCHAR2(50 CHAR), so that
+ the column is sized in terms of characters and not bytes.
+ Column reflection of character types will also use
+ ALL_TAB_COLUMNS.CHAR_LENGTH instead of
+ ALL_TAB_COLUMNS.DATA_LENGTH. Both of these behaviors take
+ effect when the server version is 9 or higher - for
+ version 8, the old behaviors are used. [ticket:1744]
+
+- declarative
+ - Using a mixin won't break if the mixin implements an
+ unpredictable __getattribute__(), i.e. Zope interfaces.
+ [ticket:1746]
+
+ - Using @classdecorator and similar on mixins to define
+ __tablename__, __table_args__, etc. now works if
+ the method references attributes on the ultimate
+ subclass. [ticket:1749]
+
+ - relationships and columns with foreign keys aren't
+ allowed on declarative mixins, sorry. [ticket:1751]
+
+- ext
+ - The sqlalchemy.orm.shard module now becomes an extension,
+ sqlalchemy.ext.horizontal_shard. The old import
+ works with a deprecation warning.
+
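A sketch of the import change::

    # new location
    from sqlalchemy.ext.horizontal_shard import ShardedSession

    # old location; still importable, but emits a deprecation warning
    from sqlalchemy.orm.shard import ShardedSession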
0.6beta2
+========
+
- py3k
- Improved the installation/test setup regarding Python 3,
now that Distribute runs on Py3k. distribute_setup.py
@@ -25,7 +175,11 @@ CHANGES
pass through the string value to Query the same as
in with_lockmode(), will also do version check for a
version_id_col-enabled mapping.
-
+
+ - Fixed bug whereby calling query(A).join(A.bs).add_entity(B)
+ in a joined inheritance scenario would double-add B as a
+ target and produce an invalid query. [ticket:1188]
+
- Fixed bug in session.rollback() which involved not removing
formerly "pending" objects from the session before
    re-integrating "deleted" objects, typically occurred with
@@ -308,7 +462,12 @@ CHANGES
is assembled into a first_connect/connect pool listener by the
connection strategy if non-None. Provides a simpler interface
for dialects.
-
+
+ - StaticPool now initializes, disposes and recreates without
+ opening a new connection - the connection is only opened when
+ first requested. dispose() also works on AssertionPool now.
+ [ticket:1728]
+
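A sketch; the in-memory SQLite URL is just a convenient single-connection example::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import StaticPool

    engine = create_engine('sqlite://', poolclass=StaticPool)
    # no connection has been opened yet; dispose() is safe here
    engine.dispose()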
- metadata
- Added the ability to strip schema information when using
"tometadata" by passing "schema=None" as an argument. If schema
@@ -359,10 +518,12 @@ CHANGES
- Added reflection/create table support for a wide range
of MySQL keywords. [ticket:1634]
+
+ - Fixed import error which could occur reflecting tables on
+ a Windows host [ticket:1580]
- mssql
- - Re-established initial support for pymssql (not functional
- yet, though)
+ - Re-established support for the pymssql dialect.
- Various fixes for implicit returning, reflection,
etc. - the MS-SQL dialects aren't quite complete
@@ -396,6 +557,12 @@ CHANGES
which is more or less equivalent on that platform.
[ticket:1712]
+ - Added support for rendering and reflecting
+ TIMESTAMP WITH TIME ZONE, i.e. TIMESTAMP(timezone=True).
+ [ticket:651]
+
+ - Oracle INTERVAL type can now be reflected.
+
- sqlite
- Added "native_datetime=True" flag to create_engine().
This will cause the DATE and TIMESTAMP types to skip
diff --git a/MANIFEST.in b/MANIFEST.in
index b582c4c66..d520be179 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,7 +1,7 @@
# any kind of "*" pulls in __init__.pyc files,
# so all extensions are explicit.
-recursive-include doc *.html *.css *.txt *.js *.jpg *.py Makefile *.rst *.mako *.sty
+recursive-include doc *.html *.css *.txt *.js *.jpg *.png *.py Makefile *.rst *.mako *.sty
recursive-include examples *.py *.xml
recursive-include test *.py *.dat
diff --git a/doc/build/builder/util.py b/doc/build/builder/util.py
index 3694d87e4..a7c4d6b5a 100644
--- a/doc/build/builder/util.py
+++ b/doc/build/builder/util.py
@@ -4,5 +4,5 @@ def striptags(text):
return re.compile(r'<[^>]*>').sub('', text)
def strip_toplevel_anchors(text):
- return re.compile(r'\.html#.*-toplevel').sub('.html', text)
+ return re.compile(r'\.html#\w+-toplevel').sub('.html', text)
diff --git a/doc/build/dbengine.rst b/doc/build/dbengine.rst
index 5fdd61434..246dc9f7f 100644
--- a/doc/build/dbengine.rst
+++ b/doc/build/dbengine.rst
@@ -88,7 +88,7 @@ adodbapi_ ``mssql+adodbapi`` development development
`jTDS JDBC Driver`_ ``mssql+zxjdbc`` no no development yes yes
mxodbc_ ``mssql+mxodbc`` yes development no yes with FreeTDS_ yes
pyodbc_ ``mssql+pyodbc``\* yes development no yes with FreeTDS_ yes
-pymssql_ ``mssql+pymssql`` development development no yes yes
+pymssql_ ``mssql+pymssql`` yes development no yes yes
**MySQL**
`MySQL Connector/J`_ ``mysql+zxjdbc`` no no yes yes yes
`MySQL Connector/Python`_ ``mysql+mysqlconnector`` yes partial no yes yes
@@ -487,6 +487,7 @@ Python's standard `logging <http://www.python.org/doc/lib/module-logging.html>`_
This section assumes familiarity with the above linked logging module. All logging performed by SQLAlchemy exists underneath the ``sqlalchemy`` namespace, as used by ``logging.getLogger('sqlalchemy')``. When logging has been configured (i.e. such as via ``logging.basicConfig()``), the general namespace of SA loggers that can be turned on is as follows:
* ``sqlalchemy.engine`` - controls SQL echoing. set to ``logging.INFO`` for SQL query output, ``logging.DEBUG`` for query + result set output.
+* ``sqlalchemy.dialects`` - controls custom logging for SQL dialects. See the documentation of individual dialects for details.
* ``sqlalchemy.pool`` - controls connection pool logging. set to ``logging.INFO`` or lower to log connection pool checkouts/checkins.
* ``sqlalchemy.orm`` - controls logging of various ORM functions. set to ``logging.INFO`` for configurational logging as well as unit of work dumps, ``logging.DEBUG`` for extensive logging during query and flush() operations. Subcategories of ``sqlalchemy.orm`` include:
* ``sqlalchemy.orm.attributes`` - logs certain instrumented attribute operations, such as triggered callables
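A minimal configuration sketch for the loggers described above::

    import logging

    logging.basicConfig()
    # SQL statements and parameters
    logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    # dialect-specific messages, e.g. psycopg2 NOTICE output
    logging.getLogger('sqlalchemy.dialects').setLevel(logging.INFO)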
diff --git a/doc/build/examples.rst b/doc/build/examples.rst
index ae84b6806..b62b808fa 100644
--- a/doc/build/examples.rst
+++ b/doc/build/examples.rst
@@ -59,6 +59,8 @@ Location: /examples/dynamic_dict/
.. automodule:: dynamic_dict
+.. _examples_sharding:
+
Horizontal Sharding
-------------------
diff --git a/doc/build/mappers.rst b/doc/build/mappers.rst
index c6ae0c85d..7e320c26a 100644
--- a/doc/build/mappers.rst
+++ b/doc/build/mappers.rst
@@ -323,7 +323,7 @@ The "default" ordering for a collection, which applies to list-based collections
'addresses': relationship(Address, order_by=addresses_table.c.address_id)
})
-Note that when using eager loaders with relationships, the tables used by the eager load's join are anonymously aliased. You can only order by these columns if you specify it at the :func:`~sqlalchemy.orm.relationship` level. To control ordering at the query level based on a related table, you ``join()`` to that relationship, then order by it::
+Note that when using joined eager loaders with relationships, the tables used by the eager load's join are anonymously aliased. You can only order by these columns if you specify it at the :func:`~sqlalchemy.orm.relationship` level. To control ordering at the query level based on a related table, you ``join()`` to that relationship, then order by it::
session.query(User).join('addresses').order_by(Address.street)
@@ -1198,12 +1198,12 @@ To add criterion to multiple points along a longer join, use ``from_joinpoint=Tr
Configuring Eager Loading
~~~~~~~~~~~~~~~~~~~~~~~~~~
-Eager loading of relationships occurs using joins or outerjoins from parent to child table during a normal query operation, such that the parent and its child collection can be populated from a single SQL statement. SQLAlchemy's eager loading uses aliased tables in all cases when joining to related items, so it is compatible with self-referential joining. However, to use eager loading with a self-referential relationship, SQLAlchemy needs to be told how many levels deep it should join; otherwise the eager load will not take place. This depth setting is configured via ``join_depth``:
+Eager loading of relationships occurs using joins or outerjoins from parent to child table during a normal query operation, such that the parent and its child collection can be populated from a single SQL statement, or a second statement for all collections at once. SQLAlchemy's joined and subquery eager loading uses aliased tables in all cases when joining to related items, so it is compatible with self-referential joining. However, to use eager loading with a self-referential relationship, SQLAlchemy needs to be told how many levels deep it should join; otherwise the eager load will not take place. This depth setting is configured via ``join_depth``:
.. sourcecode:: python+sql
mapper(Node, nodes, properties={
- 'children': relationship(Node, lazy=False, join_depth=2)
+ 'children': relationship(Node, lazy='joined', join_depth=2)
})
{sql}session.query(Node).all()
@@ -1531,14 +1531,18 @@ The ORM uses this approach for built-ins, quietly substituting a trivial subclas
The collections package provides additional decorators and support for authoring custom types. See the :mod:`sqlalchemy.orm.collections` package for more information and discussion of advanced usage and Python 2.3-compatible decoration options.
+.. _mapper_loader_strategies:
+
Configuring Loader Strategies: Lazy Loading, Eager Loading
-----------------------------------------------------------
+.. note:: SQLAlchemy version 0.6beta3 introduces the :func:`~sqlalchemy.orm.joinedload`, :func:`~sqlalchemy.orm.joinedload_all`, :func:`~sqlalchemy.orm.subqueryload` and :func:`~sqlalchemy.orm.subqueryload_all` functions described in this section. In previous versions, including 0.5 and 0.4, use :func:`~sqlalchemy.orm.eagerload` and :func:`~sqlalchemy.orm.eagerload_all`. Additionally, the ``lazy`` keyword argument on :func:`~sqlalchemy.orm.relationship` accepts the values ``True``, ``False`` and ``None`` in previous versions, whereas in the latest 0.6 it also accepts the arguments ``select``, ``joined``, ``noload``, and ``subquery``.
+
In the :ref:`ormtutorial_toplevel`, we introduced the concept of **Eager Loading**. We used an ``option`` in conjunction with the :class:`~sqlalchemy.orm.query.Query` object in order to indicate that a relationship should be loaded at the same time as the parent, within a single SQL query:
.. sourcecode:: python+sql
- {sql}>>> jack = session.query(User).options(eagerload('addresses')).filter_by(name='jack').all() #doctest: +NORMALIZE_WHITESPACE
+ {sql}>>> jack = session.query(User).options(joinedload('addresses')).filter_by(name='jack').all() #doctest: +NORMALIZE_WHITESPACE
SELECT addresses_1.id AS addresses_1_id, addresses_1.email_address AS addresses_1_email_address,
addresses_1.user_id AS addresses_1_user_id, users.id AS users_id, users.name AS users_name,
users.fullname AS users_fullname, users.password AS users_password
@@ -1551,65 +1555,149 @@ By default, all inter-object relationships are **lazy loading**. The scalar or
.. sourcecode:: python+sql
{sql}>>> jack.addresses
- SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address, addresses.user_id AS addresses_user_id
+ SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address,
+ addresses.user_id AS addresses_user_id
FROM addresses
WHERE ? = addresses.user_id
[5]
{stop}[<Address(u'jack@google.com')>, <Address(u'j25@yahoo.com')>]
-The default **loader strategy** for any :func:`~sqlalchemy.orm.relationship` is configured by the ``lazy`` keyword argument, which defaults to ``True``. Below we set it as ``False`` so that the ``children`` relationship is eager loading:
+A second option for eager loading exists, called "subquery" loading. This kind of eager loading emits an additional SQL statement for each collection requested, aggregated across all parent objects:
+
+.. sourcecode:: python+sql
+
+ {sql}>>> jack = session.query(User).options(subqueryload('addresses')).filter_by(name='jack').all()
+ SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname,
+ users.password AS users_password
+ FROM users
+ WHERE users.name = ?
+ ('jack',)
+ SELECT addresses.id AS addresses_id, addresses.email_address AS addresses_email_address,
+ addresses.user_id AS addresses_user_id, anon_1.users_id AS anon_1_users_id
+ FROM (SELECT users.id AS users_id
+ FROM users
+ WHERE users.name = ?) AS anon_1 JOIN addresses ON anon_1.users_id = addresses.user_id
+ ORDER BY anon_1.users_id, addresses.id
+ ('jack',)
+
+The default **loader strategy** for any :func:`~sqlalchemy.orm.relationship` is configured by the ``lazy`` keyword argument, which defaults to ``select``. Below we set it as ``joined`` so that the ``children`` relationship is eager loading, using a join:
+
+.. sourcecode:: python+sql
+
+ # load the 'children' collection using LEFT OUTER JOIN
+ mapper(Parent, parent_table, properties={
+ 'children': relationship(Child, lazy='joined')
+ })
+
+We can also set it to eagerly load using a second query for all collections, using ``subquery``:
.. sourcecode:: python+sql
- # eager load 'children' attribute
+ # load the 'children' attribute using a join to a subquery
mapper(Parent, parent_table, properties={
- 'children': relationship(Child, lazy=False)
+ 'children': relationship(Child, lazy='subquery')
})
-The loader strategy can be changed from lazy to eager as well as eager to lazy using the :func:`~sqlalchemy.orm.eagerload` and :func:`~sqlalchemy.orm.lazyload` query options:
+When querying, all three choices of loader strategy are available on a per-query basis, using the :func:`~sqlalchemy.orm.joinedload`, :func:`~sqlalchemy.orm.subqueryload` and :func:`~sqlalchemy.orm.lazyload` query options:
.. sourcecode:: python+sql
# set children to load lazily
session.query(Parent).options(lazyload('children')).all()
- # set children to load eagerly
- session.query(Parent).options(eagerload('children')).all()
+ # set children to load eagerly with a join
+ session.query(Parent).options(joinedload('children')).all()
+
+ # set children to load eagerly with a second statement
+ session.query(Parent).options(subqueryload('children')).all()
To reference a relationship that is deeper than one level, separate the names by periods:
.. sourcecode:: python+sql
- session.query(Parent).options(eagerload('foo.bar.bat')).all()
+ session.query(Parent).options(joinedload('foo.bar.bat')).all()
-When using dot-separated names with :func:`~sqlalchemy.orm.eagerload`, option applies **only** to the actual attribute named, and **not** its ancestors. For example, suppose a mapping from ``A`` to ``B`` to ``C``, where the relationships, named ``atob`` and ``btoc``, are both lazy-loading. A statement like the following:
+When using dot-separated names with :func:`~sqlalchemy.orm.joinedload` or :func:`~sqlalchemy.orm.subqueryload`, the option applies **only** to the actual attribute named, and **not** its ancestors. For example, suppose a mapping from ``A`` to ``B`` to ``C``, where the relationships, named ``atob`` and ``btoc``, are both lazy-loading. A statement like the following:
.. sourcecode:: python+sql
- session.query(A).options(eagerload('atob.btoc')).all()
+ session.query(A).options(joinedload('atob.btoc')).all()
will load only ``A`` objects to start. When the ``atob`` attribute on each ``A`` is accessed, the returned ``B`` objects will *eagerly* load their ``C`` objects.
-Therefore, to modify the eager load to load both ``atob`` as well as ``btoc``, place eagerloads for both:
+Therefore, to modify the eager load to load both ``atob`` as well as ``btoc``, place joinedloads for both:
.. sourcecode:: python+sql
- session.query(A).options(eagerload('atob'), eagerload('atob.btoc')).all()
+ session.query(A).options(joinedload('atob'), joinedload('atob.btoc')).all()
-or more simply just use :func:`~sqlalchemy.orm.eagerload_all`:
+or more simply just use :func:`~sqlalchemy.orm.joinedload_all` or :func:`~sqlalchemy.orm.subqueryload_all`:
.. sourcecode:: python+sql
- session.query(A).options(eagerload_all('atob.btoc')).all()
+ session.query(A).options(joinedload_all('atob.btoc')).all()
There are two other loader strategies available, **dynamic loading** and **no loading**; these are described in :ref:`largecollections`.
+What Kind of Loading to Use?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Which type of loading to use typically comes down to optimizing the tradeoff between the number of SQL executions, the complexity of the SQL emitted, and the amount of data fetched. Let's take two examples: a :func:`~sqlalchemy.orm.relationship` which references a collection, and a :func:`~sqlalchemy.orm.relationship` that references a scalar many-to-one reference.
+
+* One to Many Collection
+
+ * When using the default lazy loading, if you load 100 objects, and then access a collection on each of
+ them, a total of 101 SQL statements will be emitted, although each statement will typically be a
+ simple SELECT without any joins.
+
+ * When using joined loading, the load of 100 objects and their collections will emit only one SQL
+ statement. However, the
+ total number of rows fetched will be equal to the sum of the sizes of all the collections, plus one
+ extra row for each parent object that has an empty collection. Each row will also contain the full
+ set of columns represented by the parents, repeated for each collection item - SQLAlchemy does not
+ re-fetch these columns other than those of the primary key; however, most DBAPIs (with some
+ exceptions) will transmit the full data of each parent over the wire to the client connection in
+ any case. Therefore joined eager loading only makes sense when the collections are
+ relatively small. The LEFT OUTER JOIN can also be performance intensive compared to an INNER JOIN.
+
+ * When using subquery loading, the load of 100 objects will emit two SQL statements. The second
+ statement will fetch a total number of rows equal to the sum of the size of all collections. An
+ INNER JOIN is used, and a minimum of parent columns are requested, only the primary keys. So a
+ subquery load makes sense when the collections are larger.
+
+ * When multiple levels of depth are used with joined or subquery loading, loading collections-within-
+ collections will multiply the total number of rows fetched in a cartesian fashion. Both forms
+ of eager loading always join from the original parent class.
+
+* Many to One Reference
+
+ * When using the default lazy loading, a load of 100 objects will, as in the case of the collection,
+ emit as many as 101 SQL statements. However, there is a significant exception to this: if the
+ many-to-one reference is a simple foreign key reference to the target's primary key, each
+ reference will be checked first in the current identity map using ``query.get()``. So here,
+ if the collection of objects references a relatively small set of target objects, or if the full set
+ of possible target objects has already been loaded into the session and is strongly referenced,
+ using the default of ``lazy='select'`` is by far the most efficient way to go.
+
+ * When using joined loading, the load of 100 objects will emit only one SQL statement. The join
+ will be a LEFT OUTER JOIN, and the total number of rows will be equal to 100 in all cases.
+ If you know that each parent definitely has a child (i.e. the foreign
+ key reference is NOT NULL), the joined load can be configured with ``innerjoin=True``, which is
+ usually specified within the :func:`~sqlalchemy.orm.relationship` (see the sketch after this list). For a load of objects where
+ there are many possible target references which may have not been loaded already, joined loading
+ with an INNER JOIN is extremely efficient.
+
+ * Subquery loading will issue a second load for all the child objects, so for a load of 100 objects
+ there would be two SQL statements emitted. There's probably not much advantage here over
+ joined loading, however, except perhaps that subquery loading can use an INNER JOIN in all cases
+ whereas joined loading requires that the foreign key is NOT NULL.
+
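The sketch referenced in the list above - configuring a many-to-one joined load with an INNER JOIN; the ``Address``/``User`` mapping is hypothetical::

    # every Address row definitely has a User (NOT NULL foreign key),
    # so the joined load can use an INNER JOIN
    mapper(Address, addresses_table, properties={
        'user': relationship(User, lazy='joined', innerjoin=True)
    })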
Routing Explicit Joins/Statements into Eagerly Loaded Collections
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The behavior of :func:`~sqlalchemy.orm.eagerload()` is such that joins are created automatically, the results of which are routed into collections and scalar references on loaded objects. It is often the case that a query already includes the necessary joins which represent a particular collection or scalar reference, and the joins added by the eagerload feature are redundant - yet you'd still like the collections/references to be populated.
+The behavior of :func:`~sqlalchemy.orm.joinedload()` is such that joins are created automatically, the results of which are routed into collections and scalar references on loaded objects. It is often the case that a query already includes the necessary joins which represent a particular collection or scalar reference, and the joins added by the joinedload feature are redundant - yet you'd still like the collections/references to be populated.
-For this SQLAlchemy supplies the :func:`~sqlalchemy.orm.contains_eager()` option. This option is used in the same manner as the :func:`~sqlalchemy.orm.eagerload()` option except it is assumed that the :class:`~sqlalchemy.orm.query.Query` will specify the appropriate joins explicitly. Below it's used with a ``from_statement`` load::
+For this SQLAlchemy supplies the :func:`~sqlalchemy.orm.contains_eager()` option. This option is used in the same manner as the :func:`~sqlalchemy.orm.joinedload()` option except it is assumed that the :class:`~sqlalchemy.orm.query.Query` will specify the appropriate joins explicitly. Below it's used with a ``from_statement`` load::
# mapping is the users->addresses mapping
mapper(User, users_table, properties={
@@ -1735,12 +1823,12 @@ Note that eager/lazy loading options cannot be used in conjunction dynamic relat
Setting Noload
~~~~~~~~~~~~~~~
-The opposite of the dynamic relationship is simply "noload", specified using ``lazy=None``:
+The opposite of the dynamic relationship is simply "noload", specified using ``lazy='noload'``:
.. sourcecode:: python+sql
mapper(MyClass, table, properties={
- 'children': relationship(MyOtherClass, lazy=None)
+ 'children': relationship(MyOtherClass, lazy='noload')
})
Above, the ``children`` collection is fully writeable, and changes to it will be persisted to the database as well as locally available for reading at the time they are added. However when instances of ``MyClass`` are freshly loaded from the database, the ``children`` collection stays empty.
diff --git a/doc/build/ormtutorial.rst b/doc/build/ormtutorial.rst
index 616adae00..2ae760e99 100644
--- a/doc/build/ormtutorial.rst
+++ b/doc/build/ormtutorial.rst
@@ -809,14 +809,14 @@ Let's look at the ``addresses`` collection. Watch the SQL:
When we accessed the ``addresses`` collection, SQL was suddenly issued. This is an example of a **lazy loading relationship**. The ``addresses`` collection is now loaded and behaves just like an ordinary list.
-If you want to reduce the number of queries (dramatically, in many cases), we can apply an **eager load** to the query operation, using the :func:`~sqlalchemy.orm.eagerload` function. This function is a **query option** that gives additional instructions to the query on how we would like it to load, in this case we'd like to indicate that we'd like ``addresses`` to load "eagerly". SQLAlchemy then constructs an outer join between the ``users`` and ``addresses`` tables, and loads them at once, populating the ``addresses`` collection on each ``User`` object if it's not already populated:
+If you want to reduce the number of queries (dramatically, in many cases), we can apply an **eager load** to the query operation, using the :func:`~sqlalchemy.orm.joinedload` function. This function is a **query option** that gives additional instructions to the query on how we would like it to load, in this case we'd like to indicate that we'd like ``addresses`` to load "eagerly". SQLAlchemy then constructs an outer join between the ``users`` and ``addresses`` tables, and loads them at once, populating the ``addresses`` collection on each ``User`` object if it's not already populated:
.. sourcecode:: python+sql
- >>> from sqlalchemy.orm import eagerload
+ >>> from sqlalchemy.orm import joinedload
{sql}>>> jack = session.query(User).\
- ... options(eagerload('addresses')).\
+ ... options(joinedload('addresses')).\
... filter_by(name='jack').one() #doctest: +NORMALIZE_WHITESPACE
SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname,
users.password AS users_password, addresses_1.id AS addresses_1_id, addresses_1.email_address
@@ -831,12 +831,12 @@ If you want to reduce the number of queries (dramatically, in many cases), we ca
>>> jack.addresses
[<Address('jack@google.com')>, <Address('j25@yahoo.com')>]
-See :func:`~sqlalchemy.orm.eagerload` for further detail. We'll also see another way to "eagerly" load in the next section.
+See :ref:`mapper_loader_strategies` for information on :func:`~sqlalchemy.orm.joinedload` and its new brother, :func:`~sqlalchemy.orm.subqueryload`. We'll also see another way to "eagerly" load in the next section.
Querying with Joins
====================
-While :func:`~sqlalchemy.orm.eagerload` created a JOIN specifically to populate a collection, we can also work explicitly with joins in many ways. For example, to construct a simple inner join between ``User`` and ``Address``, we can just :meth:`~sqlalchemy.orm.query.Query.filter()` their related columns together. Below we load the ``User`` and ``Address`` entities at once using this method:
+While :func:`~sqlalchemy.orm.joinedload` created a JOIN specifically to populate a collection, we can also work explicitly with joins in many ways. For example, to construct a simple inner join between ``User`` and ``Address``, we can just :meth:`~sqlalchemy.orm.query.Query.filter()` their related columns together. Below we load the ``User`` and ``Address`` entities at once using this method:
.. sourcecode:: python+sql
@@ -898,10 +898,10 @@ the :meth:`~sqlalchemy.orm.query.Query.select_from` method to set an explicit FR
Using join() to Eagerly Load Collections/Attributes
-------------------------------------------------------
-The "eager loading" capabilities of the :func:`~sqlalchemy.orm.eagerload` function and the join-construction capabilities of :meth:`~sqlalchemy.orm.query.Query.join()` or an equivalent can be combined together using the :func:`~sqlalchemy.orm.contains_eager` option. This is typically used
+The "eager loading" capabilities of the :func:`~sqlalchemy.orm.joinedload` function and the join-construction capabilities of :meth:`~sqlalchemy.orm.query.Query.join()` or an equivalent can be combined together using the :func:`~sqlalchemy.orm.contains_eager` option. This is typically used
for a query that is already joining to some related entity (more often than not via many-to-one), and you'd like the related entity to also be loaded onto the resulting objects
in one step without the need for additional queries and without the "automatic" join embedded
-by the :func:`~sqlalchemy.orm.eagerload` function:
+by the :func:`~sqlalchemy.orm.joinedload` function:
.. sourcecode:: python+sql
diff --git a/doc/build/reference/dialects/index.rst b/doc/build/reference/dialects/index.rst
index 30781199e..a1808dff9 100644
--- a/doc/build/reference/dialects/index.rst
+++ b/doc/build/reference/dialects/index.rst
@@ -18,6 +18,7 @@ current versions of SQLAlchemy.
oracle
postgresql
sqlite
+ sybase
Unsupported Databases
---------------------
@@ -31,5 +32,4 @@ ported to current versions of SQLAlchemy.
access
informix
maxdb
- sybase
diff --git a/doc/build/reference/dialects/mssql.rst b/doc/build/reference/dialects/mssql.rst
index 029919f36..ebb359867 100644
--- a/doc/build/reference/dialects/mssql.rst
+++ b/doc/build/reference/dialects/mssql.rst
@@ -7,9 +7,9 @@ PyODBC
------
.. automodule:: sqlalchemy.dialects.mssql.pyodbc
-AdoDBAPI
---------
-.. automodule:: sqlalchemy.dialects.mssql.adodbapi
+mxODBC
+------
+.. automodule:: sqlalchemy.dialects.mssql.mxodbc
pymssql
-------
@@ -19,3 +19,8 @@ zxjdbc Notes
--------------
.. automodule:: sqlalchemy.dialects.mssql.zxjdbc
+
+AdoDBAPI
+--------
+.. automodule:: sqlalchemy.dialects.mssql.adodbapi
+
diff --git a/doc/build/reference/dialects/mysql.rst b/doc/build/reference/dialects/mysql.rst
index f05d751c5..0a2af11b5 100644
--- a/doc/build/reference/dialects/mysql.rst
+++ b/doc/build/reference/dialects/mysql.rst
@@ -6,6 +6,8 @@ MySQL
MySQL Column Types
------------------
+.. automodule:: sqlalchemy.dialects.mysql
+
.. autoclass:: NUMERIC
:members: __init__
:show-inheritance:
diff --git a/doc/build/reference/ext/horizontal_shard.rst b/doc/build/reference/ext/horizontal_shard.rst
new file mode 100644
index 000000000..149cf2020
--- /dev/null
+++ b/doc/build/reference/ext/horizontal_shard.rst
@@ -0,0 +1,14 @@
+Horizontal Shard
+=================
+
+.. automodule:: sqlalchemy.ext.horizontal_shard
+
+API Documentation
+-----------------
+
+.. autoclass:: ShardedSession
+ :members:
+
+.. autoclass:: ShardedQuery
+ :members:
+
diff --git a/doc/build/reference/ext/index.rst b/doc/build/reference/ext/index.rst
index b15253ec5..19ea3dedc 100644
--- a/doc/build/reference/ext/index.rst
+++ b/doc/build/reference/ext/index.rst
@@ -17,4 +17,5 @@ core behavior.
serializer
sqlsoup
compiler
+ horizontal_shard
diff --git a/doc/build/reference/ext/orderinglist.rst b/doc/build/reference/ext/orderinglist.rst
index c5308689b..a3581df59 100644
--- a/doc/build/reference/ext/orderinglist.rst
+++ b/doc/build/reference/ext/orderinglist.rst
@@ -1,88 +1,9 @@
orderinglist
============
-.. module: sqlalchemy.ext.orderinglist
-
-:author: Jason Kirtland
-
-``orderinglist`` is a helper for mutable ordered relationships. It will intercept
-list operations performed on a relationship collection and automatically
-synchronize changes in list position with an attribute on the related objects.
-(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.)
-
-Example: Two tables that store slides in a presentation. Each slide
-has a number of bullet points, displayed in order by the 'position'
-column on the bullets table. These bullets can be inserted and re-ordered
-by your end users, and you need to update the 'position' column of all
-affected rows when changes are made.
-
-.. sourcecode:: python+sql
-
- slides_table = Table('Slides', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String))
-
- bullets_table = Table('Bullets', metadata,
- Column('id', Integer, primary_key=True),
- Column('slide_id', Integer, ForeignKey('Slides.id')),
- Column('position', Integer),
- Column('text', String))
-
- class Slide(object):
- pass
- class Bullet(object):
- pass
-
- mapper(Slide, slides_table, properties={
- 'bullets': relationship(Bullet, order_by=[bullets_table.c.position])
- })
- mapper(Bullet, bullets_table)
-
-The standard relationship mapping will produce a list-like attribute on each Slide
-containing all related Bullets, but coping with changes in ordering is totally
-your responsibility. If you insert a Bullet into that list, there is no
-magic- it won't have a position attribute unless you assign it it one, and
-you'll need to manually renumber all the subsequent Bullets in the list to
-accommodate the insert.
-
-An ``orderinglist`` can automate this and manage the 'position' attribute on all
-related bullets for you.
-
-.. sourcecode:: python+sql
-
- mapper(Slide, slides_table, properties={
- 'bullets': relationship(Bullet,
- collection_class=ordering_list('position'),
- order_by=[bullets_table.c.position])
- })
- mapper(Bullet, bullets_table)
-
- s = Slide()
- s.bullets.append(Bullet())
- s.bullets.append(Bullet())
- s.bullets[1].position
- >>> 1
- s.bullets.insert(1, Bullet())
- s.bullets[2].position
- >>> 2
-
-Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
-(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
-
-``ordering_list`` takes the name of the related object's ordering attribute as
-an argument. By default, the zero-based integer index of the object's
-position in the ``ordering_list`` is synchronized with the ordering attribute:
-index 0 will get position 0, index 1 position 1, etc. To start numbering at 1
-or some other integer, provide ``count_from=1``.
+.. automodule:: sqlalchemy.ext.orderinglist
-Ordering values are not limited to incrementing integers. Almost any scheme
-can implemented by supplying a custom ``ordering_func`` that maps a Python list
-index to any value you require. See the [module
-documentation](rel:docstrings_sqlalchemy.ext.orderinglist) for more
-information, and also check out the unit tests for examples of stepped
-numbering, alphabetical and Fibonacci numbering.
+API Reference
+-------------
-.. automodule:: sqlalchemy.ext.orderinglist
- :members:
- :undoc-members:
+.. autofunction:: ordering_list
diff --git a/doc/build/reference/orm/query.rst b/doc/build/reference/orm/query.rst
index b209f6fef..98ebdee59 100644
--- a/doc/build/reference/orm/query.rst
+++ b/doc/build/reference/orm/query.rst
@@ -34,6 +34,8 @@ Query Options
Options which are passed to ``query.options()``, to affect the behavior of loading.
+.. autofunction:: contains_alias
+
.. autofunction:: contains_eager
.. autofunction:: defer
@@ -44,7 +46,15 @@ Options which are passed to ``query.options()``, to affect the behavior of loadi
.. autofunction:: extension
+.. autofunction:: joinedload
+
+.. autofunction:: joinedload_all
+
.. autofunction:: lazyload
+.. autofunction:: subqueryload
+
+.. autofunction:: subqueryload_all
+
.. autofunction:: undefer
diff --git a/doc/build/session.rst b/doc/build/session.rst
index 7bb0ae7e0..0731405b5 100644
--- a/doc/build/session.rst
+++ b/doc/build/session.rst
@@ -393,7 +393,7 @@ Cascading is configured by setting the ``cascade`` keyword argument on a :func:`
The above mapper specifies two relationships, ``items`` and ``customer``. The ``items`` relationship specifies "all, delete-orphan" as its ``cascade`` value, indicating that all ``add``, ``merge``, ``expunge``, ``refresh`` ``delete`` and ``expire`` operations performed on a parent ``Order`` instance should also be performed on the child ``Item`` instances attached to it. The ``delete-orphan`` cascade value additionally indicates that if an ``Item`` instance is no longer associated with an ``Order``, it should also be deleted. The "all, delete-orphan" cascade argument allows a so-called *lifecycle* relationship between an ``Order`` and an ``Item`` object.
-The ``customer`` relationship specifies only the "save-update" cascade value, indicating most operations will not be cascaded from a parent ``Order`` instance to a child ``User`` instance except for the :func:`~sqlalchemy.orm.session.Session.add` operation. "save-update" cascade indicates that an :func:`~sqlalchemy.orm.session.Session.add` on the parent will cascade to all child items, and also that items added to a parent which is already present in the session will also be added. "save-update" cascade also cascades the *pending history* of a relationship()-based attribute, meaning that objects which were removed from a scalar or collection attribute whose changes have not yet been flushed are also placed into the new session - this so that foreign key clear operations and deletions will take place (new in 0.6).
+The ``customer`` relationship specifies only the "save-update" cascade value, indicating most operations will not be cascaded from a parent ``Order`` instance to a child ``User`` instance except for the :func:`~sqlalchemy.orm.session.Session.add` operation. "save-update" cascade indicates that an :func:`~sqlalchemy.orm.session.Session.add` on the parent will cascade to all child items, and also that items added to a parent which is already present in a session will also be added to that same session. "save-update" cascade also cascades the *pending history* of a relationship()-based attribute, meaning that objects which were removed from a scalar or collection attribute whose changes have not yet been flushed are also placed into the new session - this so that foreign key clear operations and deletions will take place (new in 0.6).
Note that the ``delete-orphan`` cascade only functions for relationships where the target object can have a single parent at a time, meaning it is only appropriate for one-to-one or one-to-many relationships. For a :func:`~sqlalchemy.orm.relationship` which establishes one-to-one via a local foreign key, i.e. a many-to-one that stores only a single parent, or one-to-one/one-to-many via a "secondary" (association) table, a warning will be issued if ``delete-orphan`` is configured. To disable this warning, also specify the ``single_parent=True`` flag on the relationship, which constrains objects to allow attachment to only one parent at a time.
diff --git a/doc/build/sqlexpression.rst b/doc/build/sqlexpression.rst
index 8a3b7fc71..971d7394f 100644
--- a/doc/build/sqlexpression.rst
+++ b/doc/build/sqlexpression.rst
@@ -1037,22 +1037,40 @@ Finally, we're back to UPDATE. Updates work a lot like INSERTS, except there is
.. sourcecode:: pycon+sql
>>> # change 'jack' to 'ed'
- {sql}>>> conn.execute(users.update().where(users.c.name=='jack').values(name='ed')) #doctest: +ELLIPSIS
+ {sql}>>> conn.execute(users.update().
+ ... where(users.c.name=='jack').
+ ... values(name='ed')
+ ... ) #doctest: +ELLIPSIS
UPDATE users SET name=? WHERE users.name = ?
('ed', 'jack')
COMMIT
{stop}<sqlalchemy.engine.base.ResultProxy object at 0x...>
>>> # use bind parameters
- >>> u = users.update().where(users.c.name==bindparam('oldname')).values(name=bindparam('newname'))
+ >>> u = users.update().\
+ ... where(users.c.name==bindparam('oldname')).\
+ ... values(name=bindparam('newname'))
{sql}>>> conn.execute(u, oldname='jack', newname='ed') #doctest: +ELLIPSIS
UPDATE users SET name=? WHERE users.name = ?
('ed', 'jack')
COMMIT
{stop}<sqlalchemy.engine.base.ResultProxy object at 0x...>
+ >>> # with binds, you can also update many rows at once
+ {sql}>>> conn.execute(u,
+ ... {'oldname':'jack', 'newname':'ed'},
+ ... {'oldname':'wendy', 'newname':'mary'},
+ ... {'oldname':'jim', 'newname':'jake'},
+ ... ) #doctest: +ELLIPSIS
+ UPDATE users SET name=? WHERE users.name = ?
+ [('ed', 'jack'), ('mary', 'wendy'), ('jake', 'jim')]
+ COMMIT
+ {stop}<sqlalchemy.engine.base.ResultProxy object at 0x...>
+
>>> # update a column to an expression.:
- {sql}>>> conn.execute(users.update().values(fullname="Fullname: " + users.c.name)) #doctest: +ELLIPSIS
+ {sql}>>> conn.execute(users.update().
+ ... values(fullname="Fullname: " + users.c.name)
+ ... ) #doctest: +ELLIPSIS
UPDATE users SET fullname=(? || users.name)
('Fullname: ',)
COMMIT
diff --git a/doc/build/static/docs.css b/doc/build/static/docs.css
index f60eef3a6..33fbca527 100644
--- a/doc/build/static/docs.css
+++ b/doc/build/static/docs.css
@@ -2,12 +2,13 @@
body, td {
font-family: verdana, sans-serif;
- font-size:.95em;
+ font-size:.9em;
}
body {
background-color: #FDFBFC;
- margin:20px 20px 20px 20px;
+ margin:38px;
+ color:#333333;
}
form {
@@ -20,7 +21,10 @@ p {
}
-a {font-weight:normal; text-decoration:underline;}
+a {
+ font-weight:normal;
+ text-decoration:none;
+}
a:link {color:#0000FF;}
a:visited {color:#0000FF;}
a:active {color:#0000FF;}
@@ -57,7 +61,7 @@ strong a {
.topnav .prevnext {
padding: 5px 0px 0px 0px;
- font-size: 0.8em
+ /*font-size: 0.8em*/
}
h1, h2, h3, h4, h5 {
@@ -149,7 +153,7 @@ li.toctree-l1 ul li li
}
-div.note {
+div.note, div.warning {
background-color:#EEFFEF;
}
@@ -160,6 +164,10 @@ div.admonition {
font-size:.9em;
}
+div.warning .admonition-title {
+ color:#FF0000;
+}
+
div.admonition .admonition-title {
font-weight:bold;
}
diff --git a/examples/adjacency_list/adjacency_list.py b/examples/adjacency_list/adjacency_list.py
index 494ae1c27..624239869 100644
--- a/examples/adjacency_list/adjacency_list.py
+++ b/examples/adjacency_list/adjacency_list.py
@@ -2,7 +2,7 @@ from sqlalchemy import MetaData, Table, Column, Sequence, ForeignKey,\
Integer, String, create_engine
from sqlalchemy.orm import sessionmaker, mapper, relationship, backref,\
- eagerload_all
+ joinedload_all
from sqlalchemy.orm.collections import attribute_mapped_collection
@@ -119,7 +119,7 @@ if __name__ == '__main__':
"selecting tree on root, using eager loading to join four levels deep.")
session.expunge_all()
node = session.query(TreeNode).\
- options(eagerload_all("children", "children",
+ options(joinedload_all("children", "children",
"children", "children")).\
filter(TreeNode.name=="rootnode").\
first()
diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py
index 83f53e000..9c280d7ea 100644
--- a/examples/association/basic_association.py
+++ b/examples/association/basic_association.py
@@ -67,7 +67,7 @@ mapper(Order, orders, properties={
})
mapper(Item, items)
mapper(OrderItem, orderitems, properties={
- 'item': relationship(Item, lazy=False)
+ 'item': relationship(Item, lazy='joined')
})
session = create_session()
@@ -101,7 +101,7 @@ print [(order_item.item.description, order_item.price)
for order_item in order.order_items]
# print customers who bought 'MySQL Crowbar' on sale
-q = session.query(Order).join(['order_items', 'item'])
+q = session.query(Order).join('order_items', 'item')
q = q.filter(and_(Item.description == 'MySQL Crowbar',
Item.price > OrderItem.price))
diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py
index 68f8450a5..ac258121e 100644
--- a/examples/association/proxied_association.py
+++ b/examples/association/proxied_association.py
@@ -48,11 +48,11 @@ class Item(object):
mapper(Order, orders, properties={
- 'itemassociations':relationship(OrderItem, cascade="all, delete-orphan", lazy=False)
+ 'itemassociations':relationship(OrderItem, cascade="all, delete-orphan", lazy='joined')
})
mapper(Item, items)
mapper(OrderItem, orderitems, properties={
- 'item':relationship(Item, lazy=False)
+ 'item':relationship(Item, lazy='joined')
})
session = create_session()
@@ -100,6 +100,6 @@ print [(item.description, item.price)
for item in order.items]
# print customers who bought 'MySQL Crowbar' on sale
-orders = session.query(Order).join(['itemassociations', 'item']).filter(
+orders = session.query(Order).join('itemassociations', 'item').filter(
and_(Item.description=='MySQL Crowbar', Item.price > OrderItem.price))
print [order.customer_name for order in orders]
diff --git a/examples/beaker_caching/advanced.py b/examples/beaker_caching/advanced.py
index 8e3361b1e..e14b96973 100644
--- a/examples/beaker_caching/advanced.py
+++ b/examples/beaker_caching/advanced.py
@@ -8,8 +8,8 @@ and collection caching.
import environment
from model import Person, Address, cache_address_bits
-from meta import Session, FromCache, RelationCache
-from sqlalchemy.orm import eagerload
+from meta import Session, FromCache, RelationshipCache
+from sqlalchemy.orm import joinedload
def load_name_range(start, end, invalidate=False):
"""Load Person objects on a range of names.
@@ -22,7 +22,7 @@ def load_name_range(start, end, invalidate=False):
    The `Person.addresses` collections are also cached. It's basically
another level of tuning here, as that particular cache option
- can be transparently replaced with eagerload(Person.addresses).
+ can be transparently replaced with joinedload(Person.addresses).
The effect is that each Person and his/her Address collection
is cached either together or separately, affecting the kind of
SQL that emits for unloaded Person objects as well as the distribution
@@ -35,13 +35,13 @@ def load_name_range(start, end, invalidate=False):
# have the "addresses" collection cached separately
# each lazyload of Person.addresses loads from cache.
- q = q.options(RelationCache("default", "by_person", Person.addresses))
+ q = q.options(RelationshipCache("default", "by_person", Person.addresses))
# alternatively, eagerly load the "addresses" collection, so that they'd
# be cached together. This issues a bigger SQL statement and caches
# a single, larger value in the cache per person rather than two
# separate ones.
- #q = q.options(eagerload(Person.addresses))
+ #q = q.options(joinedload(Person.addresses))
# if requested, invalidate the cache on current criterion.
if invalidate:
diff --git a/examples/beaker_caching/relation_caching.py b/examples/beaker_caching/relation_caching.py
index f5f0fad69..100f5412d 100644
--- a/examples/beaker_caching/relation_caching.py
+++ b/examples/beaker_caching/relation_caching.py
@@ -8,10 +8,10 @@ term cache.
import environment
from model import Person, Address, cache_address_bits
from meta import Session
-from sqlalchemy.orm import eagerload
+from sqlalchemy.orm import joinedload
import os
-for p in Session.query(Person).options(eagerload(Person.addresses), cache_address_bits):
+for p in Session.query(Person).options(joinedload(Person.addresses), cache_address_bits):
print p.format_full()
diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py
index 0ffd0db4b..4d135edcd 100644
--- a/examples/custom_attributes/custom_management.py
+++ b/examples/custom_attributes/custom_management.py
@@ -118,16 +118,16 @@ class MyCollectionAdapter(object):
def fire_append_event(self, item, initiator=None):
if initiator is not False and item is not None:
- self.state.get_impl(self.key).fire_append_event(self.state, item,
+ self.state.get_impl(self.key).fire_append_event(self.state, self.state.dict, item,
initiator)
def fire_remove_event(self, item, initiator=None):
if initiator is not False and item is not None:
- self.state.get_impl(self.key).fire_remove_event(self.state, item,
+ self.state.get_impl(self.key).fire_remove_event(self.state, self.state.dict, item,
initiator)
def fire_pre_remove_event(self, initiator=None):
- self.state.get_impl(self.key).fire_pre_remove_event(self.state,
+ self.state.get_impl(self.key).fire_pre_remove_event(self.state, self.state.dict,
initiator)
class MyCollection(object):
diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py
index ad0f3f607..78d71f3fe 100644
--- a/examples/elementtree/adjacency_list.py
+++ b/examples/elementtree/adjacency_list.py
@@ -79,13 +79,13 @@ class _Attribute(object):
# setup mappers. Document will eagerly load a list of _Node objects.
mapper(Document, documents, properties={
- '_root':relationship(_Node, lazy=False, cascade="all")
+ '_root':relationship(_Node, lazy='joined', cascade="all")
})
mapper(_Node, elements, properties={
'children':relationship(_Node, cascade="all"),
# eagerly load attributes
- 'attributes':relationship(_Attribute, lazy=False, cascade="all, delete-orphan"),
+ 'attributes':relationship(_Attribute, lazy='joined', cascade="all, delete-orphan"),
})
mapper(_Attribute, attributes)
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index 9cd2acc30..98c4e1129 100644
--- a/examples/elementtree/optimized_al.py
+++ b/examples/elementtree/optimized_al.py
@@ -80,7 +80,7 @@ class _Attribute(object):
# they will be ordered in primary key/insert order, so that we can reconstruct
# an ElementTree structure from the list.
mapper(Document, documents, properties={
- '_nodes':relationship(_Node, lazy=False, cascade="all, delete-orphan")
+ '_nodes':relationship(_Node, lazy='joined', cascade="all, delete-orphan")
})
# the _Node objects change the way they load so that a list of _Nodes will organize
@@ -89,7 +89,7 @@ mapper(Document, documents, properties={
# ordering to rows which will suffice.
mapper(_Node, elements, properties={
'children':relationship(_Node, lazy=None), # doesn't load; used only for the save relationship
- 'attributes':relationship(_Attribute, lazy=False, cascade="all, delete-orphan"), # eagerly load attributes
+ 'attributes':relationship(_Attribute, lazy='joined', cascade="all, delete-orphan"), # eagerly load attributes
})
mapper(_Attribute, attributes)
diff --git a/examples/inheritance/polymorph.py b/examples/inheritance/polymorph.py
index 3ebcd6efe..7ca207f9f 100644
--- a/examples/inheritance/polymorph.py
+++ b/examples/inheritance/polymorph.py
@@ -61,7 +61,7 @@ mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engine
mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager')
mapper(Company, companies, properties={
- 'employees': relationship(Person, lazy=False, backref='company', cascade="all, delete-orphan")
+ 'employees': relationship(Person, lazy='joined', backref='company', cascade="all, delete-orphan")
})
session = create_session()
diff --git a/examples/sharding/attribute_shard.py b/examples/sharding/attribute_shard.py
index 352829cb6..07f76c309 100644
--- a/examples/sharding/attribute_shard.py
+++ b/examples/sharding/attribute_shard.py
@@ -3,7 +3,7 @@
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
String, ForeignKey, Float, DateTime)
from sqlalchemy.orm import sessionmaker, mapper, relationship
-from sqlalchemy.orm.shard import ShardedSession
+from sqlalchemy.ext.horizontal_shard import ShardedSession
from sqlalchemy.sql import operators
from sqlalchemy import sql
import datetime
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 13e843801..376b13e64 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -114,6 +114,6 @@ from sqlalchemy.engine import create_engine, engine_from_config
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)))
-__version__ = '0.6beta2'
+__version__ = '0.6beta3'
del inspect, sys
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index 484c11d49..816474d43 100644
--- a/lib/sqlalchemy/connectors/mxodbc.py
+++ b/lib/sqlalchemy/connectors/mxodbc.py
@@ -9,6 +9,7 @@ and 2008, using the SQL Server Native driver. However, it is
possible for this to be used on other database platforms.
For more info on mxODBC, see http://www.egenix.com/
+
"""
import sys
@@ -31,6 +32,9 @@ class MxODBCConnector(Connector):
@classmethod
def dbapi(cls):
+ # this classmethod will normally be replaced by an instance
+ # attribute of the same name, so this is normally only called once.
+ cls._load_mx_exceptions()
platform = sys.platform
if platform == 'win32':
from mx.ODBC import Windows as module
@@ -43,6 +47,16 @@ class MxODBCConnector(Connector):
raise ImportError, "Unrecognized platform for mxODBC import"
return module
+ @classmethod
+ def _load_mx_exceptions(cls):
+ """ Import mxODBC exception classes into the module namespace,
+ as if they had been imported normally. This is done here
+ to avoid requiring all SQLAlchemy users to install mxODBC.
+ """
+ global InterfaceError, ProgrammingError
+ from mx.ODBC import InterfaceError
+ from mx.ODBC import ProgrammingError
+
def on_connect(self):
def connect(conn):
conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
@@ -52,10 +66,9 @@ class MxODBCConnector(Connector):
return connect
def _error_handler(self):
- """Return a handler that adjusts mxODBC's raised Warnings to
+ """ Return a handler that adjusts mxODBC's raised Warnings to
emit Python standard warnings.
"""
-
from mx.ODBC.Error import Warning as MxOdbcWarning
def error_handler(connection, cursor, errorclass, errorvalue):
@@ -85,10 +98,10 @@ class MxODBCConnector(Connector):
"""
opts = url.translate_connect_args(username='user')
opts.update(url.query)
- args = opts['host'],
- kwargs = {'user':opts['user'],
- 'password': opts['password']}
- return args, kwargs
+ args = opts.pop('host')
+ opts.pop('port', None)
+ opts.pop('database', None)
+ return (args,), opts
def is_disconnect(self, e):
# eGenix recommends checking connection.closed here,
@@ -101,6 +114,7 @@ class MxODBCConnector(Connector):
return False
def _get_server_version_info(self, connection):
+ # eGenix suggests using conn.dbms_version instead of what we're doing here
dbapi_con = connection.connection
version = []
r = re.compile('[.\-]')
@@ -112,4 +126,21 @@ class MxODBCConnector(Connector):
version.append(n)
return tuple(version)
-
+ def do_execute(self, cursor, statement, parameters, context=None):
+ if context:
+ native_odbc_execute = context.execution_options.\
+ get('native_odbc_execute', 'auto')
+ if native_odbc_execute is True:
+ # user specified native_odbc_execute=True
+ cursor.execute(statement, parameters)
+ elif native_odbc_execute is False:
+ # user specified native_odbc_execute=False
+ cursor.executedirect(statement, parameters)
+ elif context.is_crud:
+ # statement is UPDATE, DELETE, INSERT
+ cursor.execute(statement, parameters)
+ else:
+ # all other statements
+ cursor.executedirect(statement, parameters)
+ else:
+ cursor.executedirect(statement, parameters)
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index 5cf00bc92..b291f3e16 100644
--- a/lib/sqlalchemy/connectors/pyodbc.py
+++ b/lib/sqlalchemy/connectors/pyodbc.py
@@ -5,49 +5,6 @@ import sys
import re
import urllib
import decimal
-from sqlalchemy import processors, types as sqltypes
-
-class PyODBCNumeric(sqltypes.Numeric):
- """Turns Decimals with adjusted() < -6 into floats, > 7 into strings"""
-
- convert_large_decimals_to_string = False
-
- def bind_processor(self, dialect):
- super_process = super(PyODBCNumeric, self).bind_processor(dialect)
-
- def process(value):
- if self.asdecimal and \
- isinstance(value, decimal.Decimal):
-
- if value.adjusted() < -6:
- return processors.to_float(value)
- elif self.convert_large_decimals_to_string and \
- value.adjusted() > 7:
- return self._large_dec_to_string(value)
-
- if super_process:
- return super_process(value)
- else:
- return value
- return process
-
- def _large_dec_to_string(self, value):
- if 'E' in str(value):
- result = "%s%s%s" % (
- (value < 0 and '-' or ''),
- "".join([str(s) for s in value._int]),
- "0" * (value.adjusted() - (len(value._int)-1)))
- else:
- if (len(value._int) - 1) > value.adjusted():
- result = "%s%s.%s" % (
- (value < 0 and '-' or ''),
- "".join([str(s) for s in value._int][0:value.adjusted() + 1]),
- "".join([str(s) for s in value._int][value.adjusted() + 1:]))
- else:
- result = "%s%s" % (
- (value < 0 and '-' or ''),
- "".join([str(s) for s in value._int][0:value.adjusted() + 1]))
- return result
class PyODBCConnector(Connector):
driver='pyodbc'
diff --git a/lib/sqlalchemy/dialects/access/base.py b/lib/sqlalchemy/dialects/access/base.py
index 7dfb3153e..2b76b93d0 100644
--- a/lib/sqlalchemy/dialects/access/base.py
+++ b/lib/sqlalchemy/dialects/access/base.py
@@ -16,7 +16,7 @@ This dialect is *not* tested on SQLAlchemy 0.6.
"""
from sqlalchemy import sql, schema, types, exc, pool
from sqlalchemy.sql import compiler, expression
-from sqlalchemy.engine import default, base
+from sqlalchemy.engine import default, base, reflection
from sqlalchemy import processors
class AcNumeric(types.Numeric):
@@ -299,7 +299,8 @@ class AccessDialect(default.DefaultDialect):
finally:
dtbs.Close()
- def table_names(self, connection, schema):
+ @reflection.cache
+ def get_table_names(self, connection, schema=None, **kw):
# A fresh DAO connection is opened for each reflection
# This is necessary, so we get the latest updates
dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index a2da132da..70318157c 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -378,7 +378,8 @@ class FBDialect(default.DefaultDialect):
c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
return c.first() is not None
- def table_names(self, connection, schema):
+ @reflection.cache
+ def get_table_names(self, connection, schema=None, **kw):
s = """
SELECT DISTINCT rdb$relation_name
FROM rdb$relation_fields
@@ -387,10 +388,6 @@ class FBDialect(default.DefaultDialect):
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- return self.table_names(connection, schema)
-
- @reflection.cache
def get_view_names(self, connection, schema=None, **kw):
s = """
SELECT distinct rdb$view_name
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
index 54aae6eb3..266a74a7b 100644
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ b/lib/sqlalchemy/dialects/informix/base.py
@@ -193,7 +193,8 @@ class InformixDialect(default.DefaultDialect):
cu.execute('SET LOCK MODE TO WAIT')
#cu.execute('SET ISOLATION TO REPEATABLE READ')
- def table_names(self, connection, schema):
+ @reflection.cache
+ def get_table_names(self, connection, schema=None, **kw):
s = "select tabname from systables"
return [row[0] for row in connection.execute(s)]
diff --git a/lib/sqlalchemy/dialects/maxdb/base.py b/lib/sqlalchemy/dialects/maxdb/base.py
index 758cfaf05..2e1d6a58f 100644
--- a/lib/sqlalchemy/dialects/maxdb/base.py
+++ b/lib/sqlalchemy/dialects/maxdb/base.py
@@ -63,7 +63,7 @@ import datetime, itertools, re
from sqlalchemy import exc, schema, sql, util, processors
from sqlalchemy.sql import operators as sql_operators, expression as sql_expr
from sqlalchemy.sql import compiler, visitors
-from sqlalchemy.engine import base as engine_base, default
+from sqlalchemy.engine import base as engine_base, default, reflection
from sqlalchemy import types as sqltypes
@@ -880,7 +880,8 @@ class MaxDBDialect(default.DefaultDialect):
rp = connection.execute(sql, bind)
return bool(rp.first())
- def table_names(self, connection, schema):
+ @reflection.cache
+ def get_table_names(self, connection, schema=None, **kw):
if schema is None:
sql = (" SELECT TABLENAME FROM TABLES WHERE "
" SCHEMANAME=CURRENT_SCHEMA ")
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index 9e12a944d..502a02acc 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -1,3 +1,7 @@
+"""
+The adodbapi dialect is not implemented for 0.6 at this time.
+
+"""
from sqlalchemy import types as sqltypes, util
from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect
import sys
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 7660fe9f7..066ab8d04 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -2,119 +2,10 @@
"""Support for the Microsoft SQL Server database.
-Driver
-------
-
-The MSSQL dialect will work with three different available drivers:
-
-* *pyodbc* - http://pyodbc.sourceforge.net/. This is the recommeded
- driver.
-
-* *pymssql* - http://pymssql.sourceforge.net/
-
-* *adodbapi* - http://adodbapi.sourceforge.net/
-
-Drivers are loaded in the order listed above based on availability.
-
-If you need to load a specific driver pass ``module_name`` when
-creating the engine::
-
- engine = create_engine('mssql+module_name://dsn')
-
-``module_name`` currently accepts: ``pyodbc``, ``pymssql``, and
-``adodbapi``.
-
-Currently the pyodbc driver offers the greatest level of
-compatibility.
-
Connecting
----------
-Connecting with create_engine() uses the standard URL approach of
-``mssql://user:pass@host/dbname[?key=value&key=value...]``.
-
-If the database name is present, the tokens are converted to a
-connection string with the specified values. If the database is not
-present, then the host token is taken directly as the DSN name.
-
-Examples of pyodbc connection string URLs:
-
-* *mssql+pyodbc://mydsn* - connects using the specified DSN named ``mydsn``.
- The connection string that is created will appear like::
-
- dsn=mydsn;Trusted_Connection=Yes
-
-* *mssql+pyodbc://user:pass@mydsn* - connects using the DSN named
- ``mydsn`` passing in the ``UID`` and ``PWD`` information. The
- connection string that is created will appear like::
-
- dsn=mydsn;UID=user;PWD=pass
-
-* *mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english* - connects
- using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD``
- information, plus the additional connection configuration option
- ``LANGUAGE``. The connection string that is created will appear
- like::
-
- dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english
-
-* *mssql+pyodbc://user:pass@host/db* - connects using a connection string
- dynamically created that would appear like::
-
- DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass
-
-* *mssql+pyodbc://user:pass@host:123/db* - connects using a connection
- string that is dynamically created, which also includes the port
- information using the comma syntax. If your connection string
- requires the port information to be passed as a ``port`` keyword
- see the next example. This will create the following connection
- string::
-
- DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass
-
-* *mssql+pyodbc://user:pass@host/db?port=123* - connects using a connection
- string that is dynamically created that includes the port
- information as a separate ``port`` keyword. This will create the
- following connection string::
-
- DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123
-
-If you require a connection string that is outside the options
-presented above, use the ``odbc_connect`` keyword to pass in a
-urlencoded connection string. What gets passed in will be urldecoded
-and passed directly.
-
-For example::
-
- mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb
-
-would create the following connection string::
-
- dsn=mydsn;Database=db
-
-Encoding your connection string can be easily accomplished through
-the python shell. For example::
-
- >>> import urllib
- >>> urllib.quote_plus('dsn=mydsn;Database=db')
- 'dsn%3Dmydsn%3BDatabase%3Ddb'
-
-Additional arguments which may be specified either as query string
-arguments on the URL, or as keyword argument to
-:func:`~sqlalchemy.create_engine()` are:
-
-* *query_timeout* - allows you to override the default query timeout.
- Defaults to ``None``. This is only supported on pymssql.
-
-* *use_scope_identity* - allows you to specify that SCOPE_IDENTITY
- should be used in place of the non-scoped version @@IDENTITY.
- Defaults to True.
-
-* *max_identifier_length* - allows you to se the maximum length of
- identfiers supported by the database. Defaults to 128. For pymssql
- the default is 30.
-
-* *schema_name* - use to set the schema name. Defaults to ``dbo``.
+See the individual driver sections below for details on connecting.
Auto Increment Behavior
-----------------------
@@ -220,9 +111,6 @@ Known Issues
* No support for more than one ``IDENTITY`` column per table
-* pymssql has problems with binary and unicode data that this module
- does **not** work around
-
"""
import datetime, decimal, inspect, operator, sys, re
import itertools
@@ -1149,11 +1037,6 @@ class MSDialect(default.DefaultDialect):
pass
return self.schema_name
- def table_names(self, connection, schema):
- s = select([ischema.tables.c.table_name],
- ischema.tables.c.table_schema==schema)
- return [row[0] for row in connection.execute(s)]
-
def has_table(self, connection, tablename, schema=None):
current_schema = schema or self.default_schema_name
@@ -1182,7 +1065,7 @@ class MSDialect(default.DefaultDialect):
s = sql.select([tables.c.table_name],
sql.and_(
tables.c.table_schema == current_schema,
- tables.c.table_type == 'BASE TABLE'
+ tables.c.table_type == u'BASE TABLE'
),
order_by=[tables.c.table_name]
)
@@ -1196,7 +1079,7 @@ class MSDialect(default.DefaultDialect):
s = sql.select([tables.c.table_name],
sql.and_(
tables.c.table_schema == current_schema,
- tables.c.table_type == 'VIEW'
+ tables.c.table_type == u'VIEW'
),
order_by=[tables.c.table_name]
)
@@ -1320,11 +1203,11 @@ class MSDialect(default.DefaultDialect):
table_fullname = "%s.%s" % (current_schema, tablename)
cursor = connection.execute(
"select ident_seed('%s'), ident_incr('%s')"
- % (tablename, tablename)
+ % (table_fullname, table_fullname)
)
row = cursor.first()
- if not row is None:
+ if row is not None and row[0] is not None:
colmap[ic]['sequence'].update({
'start' : int(row[0]),
'increment' : int(row[1])
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index bb6ff315a..312e83cb1 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -21,7 +21,7 @@ tables = Table("TABLES", ischema,
Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("TABLE_TYPE", String, key="table_type"),
+ Column("TABLE_TYPE", String(convert_unicode=True), key="table_type"),
schema="INFORMATION_SCHEMA")
columns = Table("COLUMNS", ischema,
@@ -42,7 +42,7 @@ constraints = Table("TABLE_CONSTRAINTS", ischema,
Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
Column("TABLE_NAME", CoerceUnicode, key="table_name"),
Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- Column("CONSTRAINT_TYPE", String, key="constraint_type"),
+ Column("CONSTRAINT_TYPE", String(convert_unicode=True), key="constraint_type"),
schema="INFORMATION_SCHEMA")
column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index 7148a3628..efe763659 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -1,9 +1,41 @@
"""
-MSSQL dialect tweaked to work with mxODBC, mainly by making use
-of the MSSQLStrictCompiler.
+Support for MS-SQL via mxODBC.
+
+mxODBC is available at:
+
+ http://www.egenix.com/
This was tested with mxODBC 3.1.2 and the SQL Server Native
Client connected to MSSQL 2005 and 2008 Express Editions.
+
+Connecting
+~~~~~~~~~~
+
+Connection is via DSN::
+
+ mssql+mxodbc://<username>:<password>@<dsnname>
+
+Execution Modes
+~~~~~~~~~~~~~~~
+
+mxODBC features two styles of statement execution, using the ``cursor.execute()``
+and ``cursor.executedirect()`` methods (the second being an extension to the
+DBAPI specification). The former makes use of the native
+parameter binding services of the ODBC driver, while the latter uses string escaping.
+The primary advantage of native parameter binding is that the same statement, when
+executed many times, is prepared only once, whereas the primary advantage of
+string escaping is that the rules for bind parameter placement are relaxed. MS-SQL has very
+strict rules for native binds, including that they cannot be placed within the argument
+lists of function calls, anywhere outside the FROM, or even within subqueries within the
+FROM clause - making the usage of bind parameters within SELECT statements impossible for
+all but the most simplistic statements. For this reason, the mxODBC dialect uses the
+"native" mode by default only for INSERT, UPDATE, and DELETE statements, and uses the
+escaped string mode for all other statements. This behavior can be controlled completely
+via :meth:`~sqlalchemy.sql.expression.Executable.execution_options`
+using the ``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a value of
+``True`` will unconditionally use native bind parameters and a value of ``False`` will
+unconditionally use string-escaped parameters.
+
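+For example, to force escaped-string execution for a single statement, a
+minimal sketch (assuming ``engine`` and a ``Table`` object ``mytable`` are
+already configured)::
+
+    stmt = mytable.select().execution_options(native_odbc_execute=False)
+    result = engine.execute(stmt)
+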
"""
import re
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b3a57d318..ca1c4a142 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -1,40 +1,101 @@
"""
Support for the pymssql dialect.
-Going forward we will be supporting the 1.0 release of pymssql.
+This dialect supports pymssql 1.0 and greater.
+
+pymssql is available at:
+
+ http://pymssql.sourceforge.net/
+
+Connecting
+^^^^^^^^^^
+
+Sample connect string::
+
+ mssql+pymssql://<username>:<password>@<freetds_name>
+
+Adding "?charset=utf8" or similar will cause pymssql to return
+strings as Python unicode objects. This can potentially improve
+performance in some scenarios as decoding of strings is
+handled natively.
+
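+For example, a minimal sketch (credentials and the FreeTDS name are
+placeholders)::
+
+    engine = create_engine('mssql+pymssql://scott:tiger@mydsn/?charset=utf8')
+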
+Limitations
+^^^^^^^^^^^
+
+pymssql inherits a lot of limitations from FreeTDS, including:
+
+* no support for multibyte schema identifiers
+* poor support for large decimals
+* poor support for binary fields
+* poor support for VARCHAR/CHAR fields over 255 characters
+
+Please consult the pymssql documentation for further information.
"""
from sqlalchemy.dialects.mssql.base import MSDialect
-from sqlalchemy import types as sqltypes
+from sqlalchemy import types as sqltypes, util, processors
+import re
+import decimal
+class _MSNumeric_pymssql(sqltypes.Numeric):
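+    # when asdecimal=False, result values are coerced to float via
+    # processors.to_float instead of being returned as Decimal objects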
+ def result_processor(self, dialect, type_):
+ if not self.asdecimal:
+ return processors.to_float
+ else:
+ return sqltypes.Numeric.result_processor(self, dialect, type_)
class MSDialect_pymssql(MSDialect):
supports_sane_rowcount = False
max_identifier_length = 30
driver = 'pymssql'
-
+
+ colspecs = util.update_copy(
+ MSDialect.colspecs,
+ {
+ sqltypes.Numeric:_MSNumeric_pymssql,
+ sqltypes.Float:sqltypes.Float,
+ }
+ )
@classmethod
def dbapi(cls):
- import pymssql as module
+ module = __import__('pymssql')
# pymssql doesn't have a Binary method. We use string
# TODO: monkeypatching here is less than ideal
- module.Binary = lambda st: str(st)
+ module.Binary = str
+
+ client_ver = tuple(int(x) for x in module.__version__.split("."))
+ if client_ver < (1, ):
+ util.warn("The pymssql dialect expects at least "
+ "the 1.0 series of the pymssql DBAPI.")
return module
def __init__(self, **params):
super(MSDialect_pymssql, self).__init__(**params)
self.use_scope_identity = True
+ def _get_server_version_info(self, connection):
+ vers = connection.scalar("select @@version")
+ m = re.match(r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers)
+ if m:
+ return tuple(int(x) for x in m.group(1, 2, 3, 4))
+ else:
+ return None
def create_connect_args(self, url):
- keys = url.query
- if keys.get('port'):
- # pymssql expects port as host:port, not a separate arg
- keys['host'] = ''.join([keys.get('host', ''), ':', str(keys['port'])])
- del keys['port']
- return [[], keys]
+ opts = url.translate_connect_args(username='user')
+ opts.update(url.query)
+ opts.pop('port', None)
+ return [[], opts]
def is_disconnect(self, e):
- return isinstance(e, self.dbapi.DatabaseError) and "Error 10054" in str(e)
+ for msg in (
+ "Error 10054",
+ "Not connected to any MS SQL server",
+ "Connection is closed"
+ ):
+ if msg in str(e):
+ return True
+ else:
+ return False
dialect = MSDialect_pymssql
\ No newline at end of file
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 8e7e90629..c74be0e53 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -1,22 +1,134 @@
"""
Support for MS-SQL via pyodbc.
-http://pypi.python.org/pypi/pyodbc/
+pyodbc is available at:
-Connect strings are of the form::
+ http://pypi.python.org/pypi/pyodbc/
- mssql+pyodbc://<username>:<password>@<dsn>/
- mssql+pyodbc://<username>:<password>@<host>/<database>
+Connecting
+^^^^^^^^^^
+
+Examples of pyodbc connection string URLs:
+
+* ``mssql+pyodbc://mydsn`` - connects using the specified DSN named ``mydsn``.
+ The connection string that is created will appear like::
+
+ dsn=mydsn;Trusted_Connection=Yes
+
+* ``mssql+pyodbc://user:pass@mydsn`` - connects using the DSN named
+ ``mydsn`` passing in the ``UID`` and ``PWD`` information. The
+ connection string that is created will appear like::
+
+ dsn=mydsn;UID=user;PWD=pass
+
+* ``mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english`` - connects
+ using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD``
+ information, plus the additional connection configuration option
+ ``LANGUAGE``. The connection string that is created will appear
+ like::
+
+ dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english
+
+* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection string
+ dynamically created that would appear like::
+
+ DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass
+
+* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection
+ string that is dynamically created, which also includes the port
+ information using the comma syntax. If your connection string
+ requires the port information to be passed as a ``port`` keyword
+ see the next example. This will create the following connection
+ string::
+
+ DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass
+
+* ``mssql+pyodbc://user:pass@host/db?port=123`` - connects using a connection
+ string that is dynamically created that includes the port
+ information as a separate ``port`` keyword. This will create the
+ following connection string::
+
+ DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123
+
+If you require a connection string that is outside the options
+presented above, use the ``odbc_connect`` keyword to pass in a
+urlencoded connection string. What gets passed in will be urldecoded
+and passed directly.
+
+For example::
+
+ mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb
+
+would create the following connection string::
+
+ dsn=mydsn;Database=db
+
+Encoding your connection string can be easily accomplished through
+the python shell. For example::
+
+ >>> import urllib
+ >>> urllib.quote_plus('dsn=mydsn;Database=db')
+ 'dsn%3Dmydsn%3BDatabase%3Ddb'
"""
from sqlalchemy.dialects.mssql.base import MSExecutionContext, MSDialect
-from sqlalchemy.connectors.pyodbc import PyODBCConnector, PyODBCNumeric
+from sqlalchemy.connectors.pyodbc import PyODBCConnector
from sqlalchemy import types as sqltypes, util
+import decimal
+
+class _MSNumeric_pyodbc(sqltypes.Numeric):
+ """Turns Decimals with adjusted() < 0 or > 7 into strings.
+
+ This is the only method that is proven to work with Pyodbc+MSSQL
+ without crashing (floats can be used but seem to cause sporadic
+ crashes).
+
+ """
+
+ def bind_processor(self, dialect):
+ super_process = super(_MSNumeric_pyodbc, self).bind_processor(dialect)
+
+ def process(value):
+ if self.asdecimal and \
+ isinstance(value, decimal.Decimal):
+
+ adjusted = value.adjusted()
+ if adjusted < 0:
+ return self._small_dec_to_string(value)
+ elif adjusted > 7:
+ return self._large_dec_to_string(value)
-class _MSNumeric_pyodbc(PyODBCNumeric):
- convert_large_decimals_to_string = True
+ if super_process:
+ return super_process(value)
+ else:
+ return value
+ return process
+
+ def _small_dec_to_string(self, value):
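+        # e.g. (a sketch) Decimal("1.23E-8") renders as "0.0000000123"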
+ return "%s0.%s%s" % (
+ (value < 0 and '-' or ''),
+ '0' * (abs(value.adjusted()) - 1),
+ "".join([str(nint) for nint in value._int]))
+
+ def _large_dec_to_string(self, value):
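+        # e.g. (a sketch) Decimal("1.23E+10") renders as "12300000000"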
+ if 'E' in str(value):
+ result = "%s%s%s" % (
+ (value < 0 and '-' or ''),
+ "".join([str(s) for s in value._int]),
+ "0" * (value.adjusted() - (len(value._int)-1)))
+ else:
+ if (len(value._int) - 1) > value.adjusted():
+ result = "%s%s.%s" % (
+ (value < 0 and '-' or ''),
+ "".join([str(s) for s in value._int][0:value.adjusted() + 1]),
+ "".join([str(s) for s in value._int][value.adjusted() + 1:]))
+ else:
+ result = "%s%s" % (
+ (value < 0 and '-' or ''),
+ "".join([str(s) for s in value._int][0:value.adjusted() + 1]))
+ return result
class MSExecutionContext_pyodbc(MSExecutionContext):
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index e4ecccdfc..f37a0c766 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -6,12 +6,12 @@ base.dialect = mysqldb.dialect
from sqlalchemy.dialects.mysql.base import \
BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, DOUBLE, ENUM, DECIMAL,\
FLOAT, INTEGER, INTEGER, LONGBLOB, LONGTEXT, MEDIUMBLOB, MEDIUMINT, MEDIUMTEXT, NCHAR, \
- NVARCHAR, NUMERIC, SET, SMALLINT, TEXT, TIME, TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT,\
+ NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT,\
VARBINARY, VARCHAR, YEAR, dialect
__all__ = (
'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE',
'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT',
-'MEDIUMTEXT', 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'TEXT', 'TIME', 'TIMESTAMP',
+'MEDIUMTEXT', 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP',
'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR', 'YEAR', 'dialect'
)
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 2311b06df..6a0761476 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -1,37 +1,13 @@
# -*- fill-column: 78 -*-
-# mysql.py
+# mysql/base.py
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
+# and Jason Kirtland.
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the MySQL database.
-Overview
---------
-
-For normal SQLAlchemy usage, importing this module is unnecessary. It will be
-loaded on-demand when a MySQL connection is needed. The generic column types
-like :class:`~sqlalchemy.String` and :class:`~sqlalchemy.Integer` will
-automatically be adapted to the optimal matching MySQL column type.
-
-But if you would like to use one of the MySQL-specific or enhanced column
-types when creating tables with your :class:`~sqlalchemy.Table` definitions,
-then you will need to import them from this module::
-
- from sqlalchemy.dialect.mysql import base as mysql
-
- Table('mytable', metadata,
- Column('id', Integer, primary_key=True),
- Column('ittybittyblob', mysql.TINYBLOB),
- Column('biggy', mysql.BIGINT(unsigned=True)))
-
-All standard MySQL column types are supported. The OpenGIS types are
-available for use via table reflection but have no special support or mapping
-to Python classes. If you're using these types and have opinions about how
-OpenGIS can be smartly integrated into SQLAlchemy please join the mailing
-list!
-
Supported Versions and Features
-------------------------------
@@ -44,10 +20,7 @@ in the suite 100%. No heroic measures are taken to work around major missing
SQL features- if your server version does not support sub-selects, for
example, they won't work in SQLAlchemy either.
-Currently, the only DB-API driver supported is `MySQL-Python` (also referred to
-as `MySQLdb`). Either 1.2.1 or 1.2.2 are recommended. The alpha, beta and
-gamma releases of 1.2.1 and 1.2.2 should be avoided. Support for Jython and
-IronPython is planned.
+Most available DBAPI drivers are supported; see below.
===================================== ===============
Feature Minimum Version
@@ -64,6 +37,37 @@ Nested Transactions 5.0.3
See the official MySQL documentation for detailed information about features
supported in any given server release.
+Connecting
+----------
+
+See the API documentation on individual drivers for details on connecting.
+
+Data Types
+----------
+
+All of MySQL's standard types are supported. These can also be specified within
+table metadata, for the purpose of issuing CREATE TABLE statements
+which include MySQL-specific extensions. The types are available
+from the module, as in::
+
+ from sqlalchemy.dialects import mysql
+
+ Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('ittybittyblob', mysql.TINYBLOB),
+ Column('biggy', mysql.BIGINT(unsigned=True)))
+
+See the API documentation on specific column types for further details.
+
+Connection Timeouts
+-------------------
+
+MySQL features automatic closing of connections that have been idle for
+eight hours or more. To avoid this issue, use the ``pool_recycle``
+option, which controls the maximum age of any connection::
+
+ engine = create_engine('mysql+mysqldb://...', pool_recycle=3600)
+
Storage Engines
---------------
@@ -159,20 +163,13 @@ And of course any valid MySQL statement can be executed as a string as well.
Some limited direct support for MySQL extensions to SQL is currently
available.
- * SELECT pragma::
-
- select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT'])
+* SELECT pragma::
- * UPDATE with LIMIT::
+ select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT'])
- update(..., mysql_limit=10)
+* UPDATE with LIMIT::
-Boolean Types
--------------
-
-MySQL's BOOL type is a synonym for SMALLINT, so is actually a numeric value,
-and additionally MySQL doesn't support CHECK constraints. Therefore SQLA's
-Boolean type cannot fully constrain values to just "True" and "False" the way it does for most other backends.
+ update(..., mysql_limit=10)
Troubleshooting
---------------
@@ -1154,7 +1151,10 @@ class MySQLCompiler(compiler.SQLCompiler):
def visit_match_op(self, binary, **kw):
return "MATCH (%s) AGAINST (%s IN BOOLEAN MODE)" % (self.process(binary.left), self.process(binary.right))
-
+
+ def get_from_hint_text(self, table, text):
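+        # a sketch: select([tbl]).with_hint(tbl, "USE INDEX (ix_a)", 'mysql')
+        # would render the hint text verbatim after the table in the FROM clause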
+ return text
+
def visit_typeclause(self, typeclause):
type_ = typeclause.type.dialect_impl(self.dialect)
if isinstance(type_, sqltypes.Integer):
@@ -1204,11 +1204,11 @@ class MySQLCompiler(compiler.SQLCompiler):
# support can be added, preferably after dialects are
# refactored to be version-sensitive.
return ''.join(
- (self.process(join.left, asfrom=True),
+ (self.process(join.left, asfrom=True, **kwargs),
(join.isouter and " LEFT OUTER JOIN " or " INNER JOIN "),
- self.process(join.right, asfrom=True),
+ self.process(join.right, asfrom=True, **kwargs),
" ON ",
- self.process(join.onclause)))
+ self.process(join.onclause, **kwargs)))
def for_update_clause(self, select):
if select.for_update == 'read':
@@ -1766,24 +1766,20 @@ class MySQLDialect(default.DefaultDialect):
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
+ """Return a Unicode SHOW TABLES from a given schema."""
if schema is not None:
current_schema = schema
else:
current_schema = self.default_schema_name
- table_names = self.table_names(connection, current_schema)
- return table_names
-
- def table_names(self, connection, schema):
- """Return a Unicode SHOW TABLES from a given schema."""
charset = self._connection_charset
if self.server_version_info < (5, 0, 2):
rp = connection.execute("SHOW TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(schema))
+ self.identifier_preparer.quote_identifier(current_schema))
return [row[0] for row in self._compat_fetchall(rp, charset=charset)]
else:
rp = connection.execute("SHOW FULL TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(schema))
+ self.identifier_preparer.quote_identifier(current_schema))
return [row[0] for row in self._compat_fetchall(rp, charset=charset)\
if row[1] == 'BASE TABLE']
@@ -1796,7 +1792,7 @@ class MySQLDialect(default.DefaultDialect):
if schema is None:
schema = self.default_schema_name
if self.server_version_info < (5, 0, 2):
- return self.table_names(connection, schema)
+ return self.get_table_names(connection, schema)
charset = self._connection_charset
rp = connection.execute("SHOW FULL TABLES FROM %s" %
self.identifier_preparer.quote_identifier(schema))
@@ -1946,7 +1942,7 @@ class MySQLDialect(default.DefaultDialect):
# For winxx database hosts. TODO: is this really needed?
if casing == 1 and table.name != table.name.lower():
table.name = table.name.lower()
- lc_alias = schema._get_table_key(table.name, table.schema)
+ lc_alias = sa_schema._get_table_key(table.name, table.schema)
table.metadata.tables[lc_alias] = table
def _detect_charset(self, connection):
@@ -2208,13 +2204,6 @@ class MySQLTableDefinitionParser(object):
name, type_, args, notnull = \
spec['name'], spec['coltype'], spec['arg'], spec['notnull']
- # Convention says that TINYINT(1) columns == BOOLEAN
- if type_ == 'tinyint' and args == '1':
- type_ = 'boolean'
- args = None
- spec['unsigned'] = None
- spec['zerofill'] = None
-
try:
col_type = self.dialect.ischema_names[type_]
except KeyError:
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index 981e1e204..2da18e50f 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -1,6 +1,15 @@
"""Support for the MySQL database via the MySQL Connector/Python adapter.
-# TODO: add docs/notes here regarding MySQL Connector/Python
+MySQL Connector/Python is available at:
+
+ https://launchpad.net/myconnpy
+
+Connecting
+-----------
+
+Connect string format::
+
+ mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
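+
+For example, a minimal sketch (credentials and database name are placeholders)::
+
+    engine = create_engine('mysql+mysqlconnector://scott:tiger@localhost/test')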
"""
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 9d34939a1..6e6bb0ecc 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -1,5 +1,18 @@
"""Support for the MySQL database via the MySQL-python adapter.
+MySQL-Python is available at:
+
+ http://sourceforge.net/projects/mysql-python
+
+At least version 1.2.1 or 1.2.2 should be used.
+
+Connecting
+-----------
+
+Connect string format::
+
+ mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
+
Character Sets
--------------
@@ -14,10 +27,21 @@ enabling ``use_unicode`` in the driver by default. For regular encoded
strings, also pass ``use_unicode=0`` in the connection arguments::
# set client encoding to utf8; all strings come back as unicode
- create_engine('mysql:///mydb?charset=utf8')
+ create_engine('mysql+mysqldb:///mydb?charset=utf8')
# set client encoding to utf8; all strings come back as utf8 str
- create_engine('mysql:///mydb?charset=utf8&use_unicode=0')
+ create_engine('mysql+mysqldb:///mydb?charset=utf8&use_unicode=0')
+
+Known Issues
+-------------
+
+MySQL-python, at least as of version 1.2.2, has a serious memory leak related
+to unicode conversion, a feature which is disabled via ``use_unicode=0``.
+The recommended connection form with SQLAlchemy is::
+
+    engine = create_engine('mysql+mysqldb://scott:tiger@localhost/test?charset=utf8&use_unicode=0', pool_recycle=3600)
+
+
"""
import re
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index f26bc4da2..ebc726482 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -1,5 +1,16 @@
"""Support for the MySQL database via the oursql adapter.
+OurSQL is available at:
+
+ http://packages.python.org/oursql/
+
+Connecting
+-----------
+
+Connect string format::
+
+ mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
+
Character Sets
--------------
@@ -151,8 +162,8 @@ class MySQLDialect_oursql(MySQLDialect):
**kw
)
- def table_names(self, connection, schema):
- return MySQLDialect.table_names(self,
+ def get_table_names(self, connection, schema=None, **kw):
+ return MySQLDialect.get_table_names(self,
connection.connect().\
execution_options(_oursql_plain_query=True),
schema
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index 5add45b21..1f73c6ef1 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -1,5 +1,24 @@
"""Support for the MySQL database via the pyodbc adapter.
+pyodbc is available at:
+
+ http://pypi.python.org/pypi/pyodbc/
+
+Connecting
+----------
+
+Connect string::
+
+ mysql+pyodbc://<username>:<password>@<dsnname>
+
+Limitations
+-----------
+
+The mysql+pyodbc dialect is subject to unresolved character encoding issues
+within the currently available ODBC drivers
+(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider using
+OurSQL, MySQLdb, or MySQL Connector/Python instead.
+
"""
from sqlalchemy.dialects.mysql.base import MySQLDialect, MySQLExecutionContext
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index f4cf0013c..06d3e6616 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -6,6 +6,13 @@ JDBC Driver
The official MySQL JDBC driver is at
http://dev.mysql.com/downloads/connector/j/.
+Connecting
+----------
+
+Connect string format::
+
+ mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/<database>
+
Character Sets
--------------
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index f76edabf2..475730988 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -225,6 +225,8 @@ ischema_names = {
'CLOB' : CLOB,
'NCLOB' : NCLOB,
'TIMESTAMP' : TIMESTAMP,
+ 'TIMESTAMP WITH TIME ZONE' : TIMESTAMP,
+ 'INTERVAL DAY TO SECOND' : INTERVAL,
'RAW' : RAW,
'FLOAT' : FLOAT,
'DOUBLE PRECISION' : DOUBLE_PRECISION,
@@ -256,7 +258,13 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
"(%d)" % type_.second_precision or
"",
)
-
+
+ def visit_TIMESTAMP(self, type_):
+ if type_.timezone:
+ return "TIMESTAMP WITH TIME ZONE"
+ else:
+ return "TIMESTAMP"
+
def visit_DOUBLE_PRECISION(self, type_):
return self._generate_numeric(type_, "DOUBLE PRECISION")
@@ -278,7 +286,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale}
def visit_VARCHAR(self, type_):
- return "VARCHAR(%(length)s)" % {'length' : type_.length}
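+        # note: when the server supports CHAR length semantics (Oracle 9 and
+        # later, per supports_char_length), VARCHAR(50) renders as
+        # "VARCHAR(50 CHAR)", measuring length in characters rather than bytes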
+ if self.dialect.supports_char_length:
+ return "VARCHAR(%(length)s CHAR)" % {'length' : type_.length}
+ else:
+ return "VARCHAR(%(length)s)" % {'length' : type_.length}
def visit_NVARCHAR(self, type_):
return "NVARCHAR2(%(length)s)" % {'length' : type_.length}
@@ -331,6 +342,11 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_match_op(self, binary, **kw):
return "CONTAINS (%s, %s)" % (self.process(binary.left), self.process(binary.right))
+ def get_select_hint_text(self, byfroms):
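+        # a sketch: select([tbl]).with_hint(tbl, "FIRST_ROWS(10)", 'oracle')
+        # would render as "SELECT /*+ FIRST_ROWS(10) */ ..." via the join below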
+ return " ".join(
+ "/*+ %s */" % text for table, text in byfroms.items()
+ )
+
def function_argspec(self, fn, **kw):
if len(fn.clauses) > 0:
return compiler.SQLCompiler.function_argspec(self, fn, **kw)
@@ -349,7 +365,9 @@ class OracleCompiler(compiler.SQLCompiler):
if self.dialect.use_ansi:
return compiler.SQLCompiler.visit_join(self, join, **kwargs)
else:
- return self.process(join.left, asfrom=True) + ", " + self.process(join.right, asfrom=True)
+ kwargs['asfrom'] = True
+ return self.process(join.left, **kwargs) + \
+ ", " + self.process(join.right, **kwargs)
def _get_nonansi_join_whereclause(self, froms):
clauses = []
@@ -381,14 +399,18 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_sequence(self, seq):
return self.dialect.identifier_preparer.format_sequence(seq) + ".nextval"
- def visit_alias(self, alias, asfrom=False, **kwargs):
+ def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
"""Oracle doesn't like ``FROM table AS alias``. Is the AS standard SQL??"""
-
- if asfrom:
+
+ if asfrom or ashint:
alias_name = isinstance(alias.name, expression._generated_label) and \
self._truncated_identifier("alias", alias.name) or alias.name
-
- return self.process(alias.original, asfrom=asfrom, **kwargs) + " " + self.preparer.format_alias(alias, alias_name)
+
+ if ashint:
+ return alias_name
+ elif asfrom:
+ return self.process(alias.original, asfrom=asfrom, **kwargs) + \
+ " " + self.preparer.format_alias(alias, alias_name)
else:
return self.process(alias.original, **kwargs)
@@ -561,7 +583,8 @@ class OracleDialect(default.DefaultDialect):
execution_ctx_cls = OracleExecutionContext
reflection_options = ('oracle_resolve_synonyms', )
-
+
+ supports_char_length = True
def __init__(self,
use_ansi=True,
@@ -576,6 +599,8 @@ class OracleDialect(default.DefaultDialect):
self.implicit_returning = self.server_version_info > (10, ) and \
self.__dict__.get('implicit_returning', True)
+ self.supports_char_length = self.server_version_info >= (9, )
+
if self.server_version_info < (9,):
self.colspecs = self.colspecs.copy()
self.colspecs.pop(sqltypes.Interval)
@@ -631,18 +656,6 @@ class OracleDialect(default.DefaultDialect):
def _get_default_schema_name(self, connection):
return self.normalize_name(connection.execute(u'SELECT USER FROM DUAL').scalar())
- def table_names(self, connection, schema):
- # note that table_names() isnt loading DBLINKed or synonym'ed tables
- if schema is None:
- schema = self.default_schema_name
- s = sql.text(
- "SELECT table_name FROM all_tables "
- "WHERE nvl(tablespace_name, 'no tablespace') NOT IN ('SYSTEM', 'SYSAUX') "
- "AND OWNER = :owner "
- "AND IOT_NAME IS NULL")
- cursor = connection.execute(s, owner=self.denormalize_name(schema))
- return [self.normalize_name(row[0]) for row in cursor]
-
def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
"""search for a local synonym matching the given desired owner/name.
@@ -712,7 +725,18 @@ class OracleDialect(default.DefaultDialect):
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
schema = self.denormalize_name(schema or self.default_schema_name)
- return self.table_names(connection, schema)
+
+    # note that get_table_names() isn't loading DBLINKed or synonym'ed tables
+ if schema is None:
+ schema = self.default_schema_name
+ s = sql.text(
+ "SELECT table_name FROM all_tables "
+ "WHERE nvl(tablespace_name, 'no tablespace') NOT IN ('SYSTEM', 'SYSAUX') "
+ "AND OWNER = :owner "
+ "AND IOT_NAME IS NULL")
+ cursor = connection.execute(s, owner=schema)
+ return [self.normalize_name(row[0]) for row in cursor]
+
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
@@ -742,11 +766,16 @@ class OracleDialect(default.DefaultDialect):
resolve_synonyms, dblink,
info_cache=info_cache)
columns = []
+ if self.supports_char_length:
+ char_length_col = 'char_length'
+ else:
+ char_length_col = 'data_length'
+
c = connection.execute(sql.text(
- "SELECT column_name, data_type, data_length, data_precision, data_scale, "
+ "SELECT column_name, data_type, %(char_length_col)s, data_precision, data_scale, "
"nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "
"WHERE table_name = :table_name AND owner = :owner "
- "ORDER BY column_id" % {'dblink': dblink}),
+ "ORDER BY column_id" % {'dblink': dblink, 'char_length_col':char_length_col}),
table_name=table_name, owner=schema)
for row in c:
@@ -755,8 +784,10 @@ class OracleDialect(default.DefaultDialect):
if coltype == 'NUMBER' :
coltype = NUMBER(precision, scale)
- elif coltype=='CHAR' or coltype=='VARCHAR2':
+ elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'):
coltype = self.ischema_names.get(coltype)(length)
+ elif 'WITH TIME ZONE' in coltype:
+ coltype = TIMESTAMP(timezone=True)
else:
coltype = re.sub(r'\(\d+\)', '', coltype)
try:
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index c6e9cea5d..91af6620b 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -207,11 +207,19 @@ class OracleCompiler_cx_oracle(OracleCompiler):
class OracleExecutionContext_cx_oracle(OracleExecutionContext):
def pre_exec(self):
- quoted_bind_names = getattr(self.compiled, '_quoted_bind_names', {})
+ quoted_bind_names = \
+ getattr(self.compiled, '_quoted_bind_names', None)
if quoted_bind_names:
+ if not self.dialect.supports_unicode_binds:
+ quoted_bind_names = \
+ dict(
+ (fromname, toname.encode(self.dialect.encoding))
+ for fromname, toname in
+ quoted_bind_names.items()
+ )
for param in self.parameters:
- for fromname, toname in self.compiled._quoted_bind_names.iteritems():
- param[toname.encode(self.dialect.encoding)] = param[fromname]
+ for fromname, toname in quoted_bind_names.items():
+ param[toname] = param[fromname]
del param[fromname]
if self.dialect.auto_setinputsizes:
@@ -219,14 +227,12 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
# on String, including that outparams/RETURNING
# breaks for varchars
self.set_input_sizes(quoted_bind_names,
- exclude_types=self.dialect._cx_oracle_string_types
+ exclude_types=self.dialect._cx_oracle_string_types
)
-
+
+ # if a single execute, check for outparams
if len(self.compiled_parameters) == 1:
- for key in self.compiled.binds:
- bindparam = self.compiled.binds[key]
- name = self.compiled.bind_names[bindparam]
- value = self.compiled_parameters[0][name]
+                for bindparam in self.compiled.binds.values():
+                    name = self.compiled.bind_names[bindparam]
if bindparam.isoutparam:
dbtype = bindparam.type.dialect_impl(self.dialect).\
get_dbapi_type(self.dialect.dbapi)
@@ -238,6 +244,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
" cx_oracle" %
(name, bindparam.type)
)
self.out_parameters[name] = self.cursor.var(dbtype)
self.parameters[0][quoted_bind_names.get(name, name)] = \
self.out_parameters[name]
@@ -250,7 +257,10 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
def get_result_proxy(self):
if hasattr(self, 'out_parameters') and self.compiled.returning:
- returning_params = dict((k, v.getvalue()) for k, v in self.out_parameters.items())
+ returning_params = dict(
+ (k, v.getvalue())
+ for k, v in self.out_parameters.items()
+ )
return ReturningResultProxy(self, returning_params)
result = None
@@ -264,10 +274,11 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
result = base.ResultProxy(self)
if hasattr(self, 'out_parameters'):
- if self.compiled_parameters is not None and len(self.compiled_parameters) == 1:
+ if self.compiled_parameters is not None and \
+ len(self.compiled_parameters) == 1:
result.out_parameters = out_parameters = {}
- for bind, name in self.compiled.bind_names.iteritems():
+ for bind, name in self.compiled.bind_names.items():
if name in self.out_parameters:
type = bind.type
impl_type = type.dialect_impl(self.dialect)
@@ -291,12 +302,14 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle):
"""Support WITH_UNICODE in Python 2.xx.
- WITH_UNICODE allows cx_Oracle's Python 3 unicode handling behavior under Python 2.x.
- This mode in some cases disallows and in other cases silently
- passes corrupted data when non-Python-unicode strings (a.k.a. plain old Python strings)
- are passed as arguments to connect(), the statement sent to execute(), or any of the bind
- parameter keys or values sent to execute(). This optional context
- therefore ensures that all statements are passed as Python unicode objects.
+ WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
+ behavior under Python 2.x. This mode in some cases disallows
+ and in other cases silently passes corrupted data when
+ non-Python-unicode strings (a.k.a. plain old Python strings)
+ are passed as arguments to connect(), the statement sent to execute(),
+ or any of the bind parameter keys or values sent to execute().
+ This optional context therefore ensures that all statements are
+ passed as Python unicode objects.
"""
def __init__(self, *arg, **kw):
@@ -373,17 +386,19 @@ class OracleDialect_cx_oracle(OracleDialect):
if hasattr(self.dbapi, 'version'):
cx_oracle_ver = tuple([int(x) for x in self.dbapi.version.split('.')])
- self.supports_unicode_binds = cx_oracle_ver >= (5, 0)
- self._cx_oracle_native_nvarchar = cx_oracle_ver >= (5, 0)
else:
- cx_oracle_ver = None
+ cx_oracle_ver = (0, 0, 0)
def types(*names):
- return set([getattr(self.dbapi, name, None) for name in names]).difference([None])
+ return set([
+ getattr(self.dbapi, name, None) for name in names
+ ]).difference([None])
self._cx_oracle_string_types = types("STRING", "UNICODE", "NCLOB", "CLOB")
self._cx_oracle_unicode_types = types("UNICODE", "NCLOB")
self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
+ self.supports_unicode_binds = cx_oracle_ver >= (5, 0)
+ self._cx_oracle_native_nvarchar = cx_oracle_ver >= (5, 0)
if cx_oracle_ver is None:
# this occurs in tests with mock DBAPIs
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index cbd92ccfe..bef2f1c61 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -78,7 +78,7 @@ from sqlalchemy import types as sqltypes
from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \
CHAR, TEXT, FLOAT, NUMERIC, \
- TIMESTAMP, TIME, DATE, BOOLEAN
+ DATE, BOOLEAN
class REAL(sqltypes.Float):
__visit_name__ = "REAL"
@@ -101,6 +101,16 @@ class MACADDR(sqltypes.TypeEngine):
__visit_name__ = "MACADDR"
PGMacAddr = MACADDR
+class TIMESTAMP(sqltypes.TIMESTAMP):
+ def __init__(self, timezone=False, precision=None):
+ super(TIMESTAMP, self).__init__(timezone=timezone)
+ self.precision = precision
+
+class TIME(sqltypes.TIME):
+ def __init__(self, timezone=False, precision=None):
+ super(TIME, self).__init__(timezone=timezone)
+ self.precision = precision
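+
+# a sketch of the DDL rendered by the type compiler for the types above:
+#   TIMESTAMP(precision=6, timezone=True) -> "TIMESTAMP(6) WITH TIME ZONE"
+#   TIME(precision=3)                     -> "TIME(3) WITHOUT TIME ZONE"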
+
class INTERVAL(sqltypes.TypeEngine):
__visit_name__ = 'INTERVAL'
def __init__(self, precision=None):
@@ -466,10 +476,16 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
return self.dialect.identifier_preparer.format_type(type_)
def visit_TIMESTAMP(self, type_):
- return "TIMESTAMP " + (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
+ return "TIMESTAMP%s %s" % (
+ getattr(type_, 'precision', None) and "(%d)" % type_.precision or "",
+ (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
+ )
def visit_TIME(self, type_):
- return "TIME " + (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
+ return "TIME%s %s" % (
+ getattr(type_, 'precision', None) and "(%d)" % type_.precision or "",
+ (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
+ )
def visit_INTERVAL(self, type_):
if type_.precision is not None:
@@ -725,17 +741,6 @@ class PGDialect(default.DefaultDialect):
cursor = connection.execute(sql.text(query, bindparams=bindparams))
return bool(cursor.scalar())
- def table_names(self, connection, schema):
- result = connection.execute(
- sql.text(u"SELECT relname FROM pg_class c "
- "WHERE relkind = 'r' "
- "AND '%s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) " %
- schema,
- typemap = {'relname':sqltypes.Unicode}
- )
- )
- return [row[0] for row in result]
-
def _get_server_version_info(self, connection):
v = connection.execute("select version()").scalar()
m = re.match('PostgreSQL (\d+)\.(\d+)(?:\.(\d+))?(?:devel)?', v)
@@ -805,8 +810,17 @@ class PGDialect(default.DefaultDialect):
current_schema = schema
else:
current_schema = self.default_schema_name
- table_names = self.table_names(connection, current_schema)
- return table_names
+
+ result = connection.execute(
+ sql.text(u"SELECT relname FROM pg_class c "
+ "WHERE relkind = 'r' "
+ "AND '%s' = (select nspname from pg_namespace n where n.oid = c.relnamespace) " %
+ current_schema,
+ typemap = {'relname':sqltypes.Unicode}
+ )
+ )
+ return [row[0] for row in result]
+
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
@@ -877,39 +891,48 @@ class PGDialect(default.DefaultDialect):
# format columns
columns = []
for name, format_type, default, notnull, attnum, table_oid in rows:
- ## strip (30) from character varying(30)
- attype = re.search('([^\([]+)', format_type).group(1)
+ ## strip (5) from character varying(5), timestamp(5) with time zone, etc
+ attype = re.sub(r'\([\d,]+\)', '', format_type)
+
+ # strip '[]' from integer[], etc.
+ attype = re.sub(r'\[\]', '', attype)
+
nullable = not notnull
is_array = format_type.endswith('[]')
- try:
- charlen = re.search('\(([\d,]+)\)', format_type).group(1)
- except:
- charlen = False
- numericprec = False
- numericscale = False
+ charlen = re.search('\(([\d,]+)\)', format_type)
+ if charlen:
+ charlen = charlen.group(1)
+ kwargs = {}
+
if attype == 'numeric':
- if charlen is False:
- numericprec, numericscale = (None, None)
+ if charlen:
+ prec, scale = charlen.split(',')
+ args = (int(prec), int(scale))
else:
- numericprec, numericscale = charlen.split(',')
- charlen = False
+ args = ()
elif attype == 'double precision':
- numericprec, numericscale = (53, False)
- charlen = False
+ args = (53, )
elif attype == 'integer':
- numericprec, numericscale = (32, 0)
- charlen = False
- args = []
- for a in (charlen, numericprec, numericscale):
- if a is None:
- args.append(None)
- elif a is not False:
- args.append(int(a))
- kwargs = {}
- if attype == 'timestamp with time zone':
+ args = (32, 0)
+ elif attype in ('timestamp with time zone', 'time with time zone'):
kwargs['timezone'] = True
- elif attype == 'timestamp without time zone':
+ if charlen:
+ kwargs['precision'] = int(charlen)
+ args = ()
+ elif attype in ('timestamp without time zone', 'time without time zone', 'time'):
kwargs['timezone'] = False
+ if charlen:
+ kwargs['precision'] = int(charlen)
+ args = ()
+ elif attype in ('interval','interval year to month','interval day to second'):
+ if charlen:
+ kwargs['precision'] = int(charlen)
+ args = ()
+ elif charlen:
+ args = (int(charlen),)
+ else:
+ args = ()
+
if attype in self.ischema_names:
coltype = self.ischema_names[attype]
elif attype in enums:
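The PG-specific ``TIMESTAMP`` and ``TIME`` subclasses added above carry an optional fractional-second ``precision`` which ``visit_TIMESTAMP()`` / ``visit_TIME()`` render into the DDL string. A minimal sketch of the expected rendering (type construction only; the compiled strings follow from the visit methods above)::

    from sqlalchemy.dialects.postgresql.base import TIMESTAMP, TIME

    # per visit_TIMESTAMP(), renders "TIMESTAMP(6) WITH TIME ZONE"
    ts = TIMESTAMP(timezone=True, precision=6)

    # per visit_TIME(), renders "TIME WITHOUT TIME ZONE" (no precision)
    t = TIME()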
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index c239a3ee0..f21c9a558 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -12,7 +12,7 @@ Note that psycopg1 is **not** supported.
Connecting
----------
-URLs are of the form `postgresql+psycopg2://user@password@host:port/dbname[?key=value&key=value...]`.
+URLs are of the form `postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]`.
psycopg2-specific keyword arguments which are accepted by :func:`~sqlalchemy.create_engine()` are:
@@ -34,6 +34,15 @@ Transactions
The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
+NOTICE logging
+---------------
+
+The psycopg2 dialect will log Postgresql NOTICE messages via the
+``sqlalchemy.dialects.postgresql`` logger::
+
+ import logging
+ logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
+
Per-Statement Execution Options
-------------------------------
@@ -46,8 +55,10 @@ The following per-statement execution options are respected:
"""
-import random, re
+import random
+import re
import decimal
+import logging
from sqlalchemy import util
from sqlalchemy import processors
@@ -59,6 +70,10 @@ from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler, \
PGIdentifierPreparer, PGExecutionContext, \
ENUM, ARRAY
+
+logger = logging.getLogger('sqlalchemy.dialects.postgresql')
+
+
class _PGNumeric(sqltypes.Numeric):
def bind_processor(self, dialect):
return None
@@ -130,11 +145,22 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
return self._connection.connection.cursor()
def get_result_proxy(self):
+ if logger.isEnabledFor(logging.INFO):
+ self._log_notices(self.cursor)
+
if self.__is_server_side:
return base.BufferedRowResultProxy(self)
else:
return base.ResultProxy(self)
+ def _log_notices(self, cursor):
+ for notice in cursor.connection.notices:
+ # NOTICE messages have a
+ # newline character at the end
+ logger.info(notice.rstrip())
+
+ cursor.connection.notices[:] = []
+
class PGCompiler_psycopg2(PGCompiler):
def visit_mod(self, binary, **kw):
@@ -190,7 +216,7 @@ class PGDialect_psycopg2(PGDialect):
return connect
else:
return base_on_connect
-
+
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
if 'port' in opts:
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index d7637e71b..ca0a39136 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -331,6 +331,9 @@ class SQLiteDialect(default.DefaultDialect):
colspecs = colspecs
isolation_level = None
+ supports_cast = True
+ supports_default_values = True
+
def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
if isolation_level and isolation_level not in ('SERIALIZABLE',
@@ -345,6 +348,13 @@ class SQLiteDialect(default.DefaultDialect):
# conversions (and perhaps datetime/time as well on some
# hypothetical driver ?)
self.native_datetime = native_datetime
+
+ if self.dbapi is not None:
+ self.supports_default_values = \
+ self.dbapi.sqlite_version_info >= (3, 3, 8)
+ self.supports_cast = \
+ self.dbapi.sqlite_version_info >= (3, 2, 3)
+
def on_connect(self):
if self.isolation_level is not None:
@@ -360,8 +370,9 @@ class SQLiteDialect(default.DefaultDialect):
return connect
else:
return None
-
- def table_names(self, connection, schema):
+
+ @reflection.cache
+ def get_table_names(self, connection, schema=None, **kw):
if schema is not None:
qschema = self.identifier_preparer.quote_identifier(schema)
master = '%s.sqlite_master' % qschema
@@ -401,10 +412,6 @@ class SQLiteDialect(default.DefaultDialect):
return (row is not None)
@reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- return self.table_names(connection, schema)
-
- @reflection.cache
def get_view_names(self, connection, schema=None, **kw):
if schema is not None:
qschema = self.identifier_preparer.quote_identifier(schema)
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index b48abbb7d..575cb37f2 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -187,20 +187,15 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
def __init__(self, **kwargs):
SQLiteDialect.__init__(self, **kwargs)
- def vers(num):
- return tuple([int(x) for x in num.split('.')])
+
if self.dbapi is not None:
sqlite_ver = self.dbapi.version_info
- if sqlite_ver < (2, 1, '3'):
+ if sqlite_ver < (2, 1, 3):
util.warn(
("The installed version of pysqlite2 (%s) is out-dated "
"and will cause errors in some cases. Version 2.1.3 "
"or greater is recommended.") %
'.'.join([str(subver) for subver in sqlite_ver]))
- if self.dbapi.sqlite_version_info < (3, 3, 8):
- self.supports_default_values = False
- self.supports_cast = (self.dbapi is None or vers(self.dbapi.sqlite_version) >= vers("3.2.3"))
-
@classmethod
def dbapi(cls):
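The guard above compares ``self.dbapi.version_info``, a tuple of ints, against ``(2, 1, 3)``; the replaced ``(2, 1, '3')`` mixed a string into an int tuple, and under Python 2's mixed-type ordering (numbers sort before strings) the comparison gave wrong answers for patch releases. A quick illustration of the difference::

    >>> (2, 1, 4) < (2, 1, '3')   # old check: 4 < '3' is True in Python 2
    True
    >>> (2, 1, 4) < (2, 1, 3)     # fixed check
    False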
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index bdaab2eb7..6719b422b 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -1,6 +1,9 @@
-# sybase.py
-# Copyright (C) 2007 Fisch Asset Management AG http://www.fam.ch
-# Coding: Alexander Houben alexander.houben@thor-solutions.ch
+# sybase/base.py
+# Copyright (C) 2010 Michael Bayer mike_mp@zzzcomputing.com
+# get_select_precolumns(), limit_clause() implementation
+# copyright (C) 2007 Fisch Asset Management
+# AG http://www.fam.ch, with coding by Alexander Houben
+# alexander.houben@thor-solutions.ch
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -277,6 +280,9 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
s += "START AT %s " % (select._offset+1,)
return s
+ def get_from_hint_text(self, table, text):
+ return text
+
def limit_clause(self, select):
# Limit in sybase is after the select keyword
return ""
@@ -310,8 +316,6 @@ class SybaseDDLCompiler(compiler.DDLCompiler):
"columns in order to generate DDL")
seq_col = column.table._autoincrement_column
-
-
# install a IDENTITY Sequence if we have an implicit IDENTITY column
if seq_col is column:
sequence = isinstance(column.default, sa_schema.Sequence) and column.default
@@ -382,9 +386,6 @@ class SybaseDialect(default.DefaultDialect):
def get_table_names(self, connection, schema=None, **kw):
if schema is None:
schema = self.default_schema_name
- return self.table_names(connection, schema)
-
- def table_names(self, connection, schema):
result = connection.execute(
text("select sysobjects.name from sysobjects, sysusers "
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 19ad70fe8..e34f2605c 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -29,12 +29,34 @@ Currently *not* supported are::
"""
from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext
-from sqlalchemy.connectors.pyodbc import PyODBCConnector, PyODBCNumeric
+from sqlalchemy.connectors.pyodbc import PyODBCConnector
+import decimal
+from sqlalchemy import types as sqltypes, util, processors
-from sqlalchemy import types as sqltypes, util
+class _SybNumeric_pyodbc(sqltypes.Numeric):
+ """Turns Decimals with adjusted() < -6 into floats.
+
+ It's not yet known how to get decimals with many
+ significant digits or very large adjusted() into Sybase
+ via pyodbc.
+
+ """
+
+ def bind_processor(self, dialect):
+ super_process = super(_SybNumeric_pyodbc, self).bind_processor(dialect)
+
+ def process(value):
+ if self.asdecimal and \
+ isinstance(value, decimal.Decimal):
-class _SybNumeric_pyodbc(PyODBCNumeric):
- convert_large_decimals_to_string = False
+ if value.adjusted() < -6:
+ return processors.to_float(value)
+
+ if super_process:
+ return super_process(value)
+ else:
+ return value
+ return process
class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
def set_ddl_autocommit(self, connection, value):
@@ -43,8 +65,6 @@ class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
else:
connection.autocommit = False
-
-
class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
execution_ctx_cls = SybaseExecutionContext_pyodbc
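``_SybNumeric_pyodbc`` above keys its float conversion off ``Decimal.adjusted()``, the exponent of a value's most significant digit, so the ``< -6`` cutoff selects very small magnitudes. For example::

    >>> import decimal
    >>> decimal.Decimal("0.00000001").adjusted()   # converted to float when bound
    -8
    >>> decimal.Decimal("1234.5").adjusted()       # passed through normally
    3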
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 9a53545df..9b3dbedd8 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -107,10 +107,11 @@ def create_engine(*args, **kwargs):
arguments sent as options to the dialect and resulting Engine.
The URL is a string in the form
- ``dialect://user:password@host/dbname[?key=value..]``, where
- ``dialect`` is a name such as ``mysql``, ``oracle``, ``postgresql``,
- etc. Alternatively, the URL can be an instance of
- :class:`~sqlalchemy.engine.url.URL`.
+ ``dialect+driver://user:password@host/dbname[?key=value..]``, where
+ ``dialect`` is a database name such as ``mysql``, ``oracle``,
+ ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
+ ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
+ the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
`**kwargs` takes a wide variety of options which are routed
towards their appropriate components. Arguments may be
@@ -120,11 +121,11 @@ def create_engine(*args, **kwargs):
that are common to most ``create_engine()`` usage.
:param assert_unicode: Deprecated. A warning is raised in all cases when a non-Unicode
- object is passed when SQLAlchemy would coerce into an encoding
- (note: but **not** when the DBAPI handles unicode objects natively).
- To suppress or raise this warning to an
- error, use the Python warnings filter documented at:
- http://docs.python.org/library/warnings.html
+ object is passed when SQLAlchemy would coerce into an encoding
+ (note: but **not** when the DBAPI handles unicode objects natively).
+ To suppress or raise this warning to an
+ error, use the Python warnings filter documented at:
+ http://docs.python.org/library/warnings.html
:param connect_args: a dictionary of options which will be
passed directly to the DBAPI's ``connect()`` method as
@@ -144,11 +145,6 @@ def create_engine(*args, **kwargs):
connections. Usage of this function causes connection
parameters specified in the URL argument to be bypassed.
- :param logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.engine" logger. Defaults to a hexstring of the
- object's id.
-
:param echo=False: if True, the Engine will log all statements
as well as a repr() of their parameter lists to the engines
logger, which defaults to sys.stdout. The ``echo`` attribute of
@@ -158,11 +154,6 @@ def create_engine(*args, **kwargs):
controls a Python logger; see :ref:`dbengine_logging` for
information on how to configure logging directly.
- :param pool_logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
- id.
-
:param echo_pool=False: if True, the connection pool will log
all checkouts/checkins to the logging stream, which defaults to
sys.stdout. This flag ultimately controls a Python logger; see
@@ -178,6 +169,20 @@ def create_engine(*args, **kwargs):
characters. If less than 6, labels are generated as
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
+
+ :param listeners: A list of one or more
+ :class:`~sqlalchemy.interfaces.PoolListener` objects which will
+ receive connection pool events.
+
+ :param logging_name: String identifier which will be used within
+ the "name" field of logging records generated within the
+ "sqlalchemy.engine" logger. Defaults to a hexstring of the
+ object's id.
+
+ :param max_overflow=10: the number of connections to allow in
+ connection pool "overflow", that is connections that can be
+ opened above and beyond the pool_size setting, which defaults
+ to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.
:param module=None: used by database implementations which
support multiple DBAPI modules, this is a reference to a DBAPI2
@@ -199,10 +204,10 @@ def create_engine(*args, **kwargs):
instantiate the pool in this case, you just indicate what type
of pool to be used.
- :param max_overflow=10: the number of connections to allow in
- connection pool "overflow", that is connections that can be
- opened above and beyond the pool_size setting, which defaults
- to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`.
+ :param pool_logging_name: String identifier which will be used within
+ the "name" field of logging records generated within the
+ "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
+ id.
:param pool_size=5: the number of connections to keep open
inside the connection pool. This used with :class:`~sqlalchemy.pool.QueuePool` as
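Tying the reorganized parameter list to the new URL form, a representative call might look like the following sketch (the credentials, database name and logging name are illustrative only)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        pool_size=5,               # connections held open by QueuePool
        max_overflow=10,           # extra connections beyond pool_size
        logging_name="myengine")   # "name" field in sqlalchemy.engine logs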
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 095f7a960..dc42ed957 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1420,6 +1420,9 @@ class Engine(Connectable, log.Identified):
"""
Connects a :class:`~sqlalchemy.pool.Pool` and :class:`~sqlalchemy.engine.base.Dialect`
together to provide a source of database connectivity and behavior.
+
+ An :class:`Engine` object is instantiated publicly using the :func:`~sqlalchemy.create_engine`
+ function.
"""
@@ -1569,7 +1572,7 @@ class Engine(Connectable, log.Identified):
if not schema:
schema = self.dialect.default_schema_name
try:
- return self.dialect.table_names(conn, schema)
+ return self.dialect.get_table_names(conn, schema)
finally:
if connection is None:
conn.close()
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 720edf66c..6fb0a14a5 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -381,7 +381,10 @@ class DefaultExecutionContext(base.ExecutionContext):
self.execution_options = self.execution_options.union(connection._execution_options)
self.cursor = self.create_cursor()
-
+ @util.memoized_property
+ def is_crud(self):
+ return self.isinsert or self.isupdate or self.isdelete
+
@util.memoized_property
def should_autocommit(self):
autocommit = self.execution_options.get('autocommit',
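The new ``is_crud`` attribute relies on ``util.memoized_property``, which evaluates once per instance and caches the result; a minimal sketch of that descriptor pattern (not SQLAlchemy's exact implementation)::

    class memoized_property(object):
        """Non-data descriptor which computes the value on first
        access and stores it in the instance __dict__, shadowing
        the descriptor on subsequent lookups."""

        def __init__(self, fget):
            self.fget = fget
            self.__name__ = fget.__name__

        def __get__(self, obj, cls):
            if obj is None:
                return self
            obj.__dict__[self.__name__] = result = self.fget(obj)
            return result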
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index 3226b0efd..dde49e232 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -165,7 +165,7 @@ A big part of using the compiler extension is subclassing SQLAlchemy expression
def compiles(class_, *specs):
def decorate(fn):
- existing = getattr(class_, '_compiler_dispatcher', None)
+ existing = class_.__dict__.get('_compiler_dispatcher', None)
if not existing:
existing = _dispatcher()
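Using ``class_.__dict__.get()`` instead of ``getattr()`` keeps each class's dispatcher distinct: ``getattr()`` would find a ``_compiler_dispatcher`` inherited from a superclass, so registering a compiler for a subclass would write into (and potentially override entries in) the parent's dispatcher. A sketch of the case this protects, using hypothetical constructs::

    from sqlalchemy.sql.expression import ColumnClause
    from sqlalchemy.ext.compiler import compiles

    class MyElement(ColumnClause):
        pass

    class MySpecialElement(MyElement):
        pass

    @compiles(MyElement)
    def visit_my_element(element, compiler, **kw):
        return "MY_ELEMENT"

    # with getattr(), this registration would have landed in
    # MyElement's inherited dispatcher; with __dict__.get(),
    # MySpecialElement receives its own.
    @compiles(MySpecialElement)
    def visit_my_special_element(element, compiler, **kw):
        return "MY_SPECIAL_ELEMENT"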
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index 775efbff1..1f4658b60 100644
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -507,7 +507,7 @@ Mapped instances then make usage of
from sqlalchemy.schema import Table, Column, MetaData
from sqlalchemy.orm import synonym as _orm_synonym, mapper, comparable_property, class_mapper
from sqlalchemy.orm.interfaces import MapperProperty
-from sqlalchemy.orm.properties import PropertyLoader, ColumnProperty
+from sqlalchemy.orm.properties import RelationshipProperty, ColumnProperty
from sqlalchemy.orm.util import _is_mapped_class
from sqlalchemy import util, exceptions
from sqlalchemy.sql import util as sql_util
@@ -531,31 +531,41 @@ def instrument_declarative(cls, registry, metadata):
def _as_declarative(cls, classname, dict_):
- # doing it this way enables these attributes to be descriptors,
- # see below...
- get_mapper_args = '__mapper_args__' in dict_
- get_table_args = '__table_args__' in dict_
-
# dict_ will be a dictproxy, which we can't write to, and we need to!
dict_ = dict(dict_)
column_copies = dict()
-
+ unmapped_mixins = False
for base in cls.__bases__:
names = dir(base)
if not _is_mapped_class(base):
+ unmapped_mixins = True
for name in names:
- obj = getattr(base,name)
+ obj = getattr(base,name, None)
if isinstance(obj, Column):
+ if obj.foreign_keys:
+ raise exceptions.InvalidRequestError(
+ "Columns with foreign keys to other columns "
+ "are not allowed on declarative mixins at this time."
+ )
dict_[name]=column_copies[obj]=obj.copy()
- get_mapper_args = get_mapper_args or getattr(base,'__mapper_args__',None)
- get_table_args = get_table_args or getattr(base,'__table_args__',None)
- tablename = getattr(base,'__tablename__',None)
- if tablename:
- # subtle: if tablename is a descriptor here, we actually
- # put the wrong value in, but it serves as a marker to get
- # the right value value...
- dict_['__tablename__']=tablename
+ elif isinstance(obj, RelationshipProperty):
+ raise exceptions.InvalidRequestError(
+ "relationships are not allowed on "
+ "declarative mixins at this time.")
+
+ # doing it this way enables these attributes to be descriptors
+ get_mapper_args = '__mapper_args__' in dict_
+ get_table_args = '__table_args__' in dict_
+ if unmapped_mixins:
+ get_mapper_args = get_mapper_args or getattr(cls,'__mapper_args__',None)
+ get_table_args = get_table_args or getattr(cls,'__table_args__',None)
+ tablename = getattr(cls,'__tablename__',None)
+ if tablename:
+ # subtle: if tablename is a descriptor here, we actually
+ # put the wrong value in, but it serves as a marker to get
+ # the right value...
+ dict_['__tablename__']=tablename
# now that we know whether or not to get these, get them from the class
# if we should, enabling them to be decorators
@@ -777,7 +787,7 @@ def _deferred_relationship(cls, prop):
prop.parent, arg, n.args[0], cls))
return return_cls
- if isinstance(prop, PropertyLoader):
+ if isinstance(prop, RelationshipProperty):
for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_foreign_keys', 'remote_side'):
v = getattr(prop, attr)
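The reworked logic treats unmapped base classes as mixins: their ``Column`` objects are copied onto the mapped subclass, while columns carrying foreign keys and ``RelationshipProperty`` attributes raise ``InvalidRequestError``. A sketch of the supported pattern, with hypothetical classes::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class MyMixin(object):
        # plain columns are copied onto each mapped subclass; a
        # ForeignKey column or relationship() here would raise
        # InvalidRequestError under the checks above.
        id = Column(Integer, primary_key=True)

    class MyModel(MyMixin, Base):
        __tablename__ = 'my_model'
        name = Column(String(50))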
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
new file mode 100644
index 000000000..78e3f5953
--- /dev/null
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -0,0 +1,125 @@
+# horizontal_shard.py
+# Copyright (C) the SQLAlchemy authors and contributors
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Horizontal sharding support.
+
+Defines a rudimentary 'horizontal sharding' system which allows a Session to
+distribute queries and persistence operations across multiple databases.
+
+For a usage example, see the :ref:`examples_sharding` example included in
+the source distribution.
+
+"""
+
+import sqlalchemy.exceptions as sa_exc
+from sqlalchemy import util
+from sqlalchemy.orm.session import Session
+from sqlalchemy.orm.query import Query
+
+__all__ = ['ShardedSession', 'ShardedQuery']
+
+
+class ShardedSession(Session):
+ def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, **kwargs):
+ """Construct a ShardedSession.
+
+ :param shard_chooser: A callable which, passed a Mapper, a mapped instance, and possibly a
+ SQL clause, returns a shard ID. This id may be based off of the
+ attributes present within the object, or on some round-robin
+ scheme. If the scheme is based on a selection, it should set
+ whatever state on the instance to mark it in the future as
+ participating in that shard.
+
+ :param id_chooser: A callable, passed a query and a tuple of identity values, which
+ should return a list of shard ids where the ID might reside. The
+ databases will be queried in the order of this listing.
+
+ :param query_chooser: For a given Query, returns the list of shard_ids where the query
+ should be issued. Results from all shards returned will be combined
+ together into a single listing.
+
+ :param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.base.Engine`
+ objects.
+
+ """
+ super(ShardedSession, self).__init__(**kwargs)
+ self.shard_chooser = shard_chooser
+ self.id_chooser = id_chooser
+ self.query_chooser = query_chooser
+ self.__binds = {}
+ self._mapper_flush_opts = {'connection_callable':self.connection}
+ self._query_cls = ShardedQuery
+ if shards is not None:
+ for k in shards:
+ self.bind_shard(k, shards[k])
+
+ def connection(self, mapper=None, instance=None, shard_id=None, **kwargs):
+ if shard_id is None:
+ shard_id = self.shard_chooser(mapper, instance)
+
+ if self.transaction is not None:
+ return self.transaction.connection(mapper, shard_id=shard_id)
+ else:
+ return self.get_bind(mapper,
+ shard_id=shard_id,
+ instance=instance).contextual_connect(**kwargs)
+
+ def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
+ if shard_id is None:
+ shard_id = self.shard_chooser(mapper, instance, clause=clause)
+ return self.__binds[shard_id]
+
+ def bind_shard(self, shard_id, bind):
+ self.__binds[shard_id] = bind
+
+class ShardedQuery(Query):
+ def __init__(self, *args, **kwargs):
+ super(ShardedQuery, self).__init__(*args, **kwargs)
+ self.id_chooser = self.session.id_chooser
+ self.query_chooser = self.session.query_chooser
+ self._shard_id = None
+
+ def set_shard(self, shard_id):
+ """return a new query, limited to a single shard ID.
+
+ all subsequent operations with the returned query will
+ be against the single shard regardless of other state.
+ """
+
+ q = self._clone()
+ q._shard_id = shard_id
+ return q
+
+ def _execute_and_instances(self, context):
+ if self._shard_id is not None:
+ result = self.session.connection(
+ mapper=self._mapper_zero(),
+ shard_id=self._shard_id).execute(context.statement, self._params)
+ return self.instances(result, context)
+ else:
+ partial = []
+ for shard_id in self.query_chooser(self):
+ result = self.session.connection(
+ mapper=self._mapper_zero(),
+ shard_id=shard_id).execute(context.statement, self._params)
+ partial = partial + list(self.instances(result, context))
+
+ # if some kind of in memory 'sorting'
+ # were done, this is where it would happen
+ return iter(partial)
+
+ def get(self, ident, **kwargs):
+ if self._shard_id is not None:
+ return super(ShardedQuery, self).get(ident)
+ else:
+ ident = util.to_list(ident)
+ for shard_id in self.id_chooser(self, ident):
+ o = self.set_shard(shard_id).get(ident, **kwargs)
+ if o is not None:
+ return o
+ else:
+ return None
+
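Condensing the constructor documentation above into a usage sketch (the chooser logic and engine URLs here are illustrative; see the attribute_shard example for a complete version)::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.horizontal_shard import ShardedSession

    shards = {
        'east': create_engine('sqlite:///east.db'),
        'west': create_engine('sqlite:///west.db'),
    }

    def shard_chooser(mapper, instance, clause=None):
        # hypothetical routing rule based on instance state
        if instance is None or instance.region == 'east':
            return 'east'
        return 'west'

    def id_chooser(query, ident):
        # a primary key could live on any shard; search both
        return ['east', 'west']

    def query_chooser(query):
        # issue arbitrary queries against every shard
        return ['east', 'west']

    session = ShardedSession(
        shard_chooser=shard_chooser,
        id_chooser=id_chooser,
        query_chooser=query_chooser,
        shards=shards)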
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index db0bd2a4e..0d2c3ae5d 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -1,67 +1,92 @@
"""A custom list that manages index/position information for its children.
-``orderinglist`` is a custom list collection implementation for mapped
-relationships that keeps an arbitrary "position" attribute on contained objects in
-sync with each object's position in the Python list.
-
-The collection acts just like a normal Python ``list``, with the added
-behavior that as you manipulate the list (via ``insert``, ``pop``, assignment,
-deletion, what have you), each of the objects it contains is updated as needed
-to reflect its position. This is very useful for managing ordered relationships
-which have a user-defined, serialized order::
-
- >>> from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
- >>> from sqlalchemy.orm import mapper, relationship
- >>> from sqlalchemy.ext.orderinglist import ordering_list
-
-A simple model of users their "top 10" things::
-
- >>> metadata = MetaData()
- >>> users = Table('users', metadata,
- ... Column('id', Integer, primary_key=True))
- >>> blurbs = Table('user_top_ten_list', metadata,
- ... Column('id', Integer, primary_key=True),
- ... Column('user_id', Integer, ForeignKey('users.id')),
- ... Column('position', Integer),
- ... Column('blurb', String(80)))
- >>> class User(object):
- ... pass
- ...
- >>> class Blurb(object):
- ... def __init__(self, blurb):
- ... self.blurb = blurb
- ...
- >>> mapper(User, users, properties={
- ... 'topten': relationship(Blurb, collection_class=ordering_list('position'),
- ... order_by=[blurbs.c.position])})
- <Mapper ...>
- >>> mapper(Blurb, blurbs)
- <Mapper ...>
-
-Acts just like a regular list::
-
- >>> u = User()
- >>> u.topten.append(Blurb('Number one!'))
- >>> u.topten.append(Blurb('Number two!'))
-
-But the ``.position`` attibute is set automatically behind the scenes::
-
- >>> assert [blurb.position for blurb in u.topten] == [0, 1]
-
-The objects will be renumbered automaticaly after any list-changing operation,
-for example an ``insert()``::
-
- >>> u.topten.insert(1, Blurb('I am the new Number Two.'))
- >>> assert [blurb.position for blurb in u.topten] == [0, 1, 2]
- >>> assert u.topten[1].blurb == 'I am the new Number Two.'
- >>> assert u.topten[1].position == 1
-
-Numbering and serialization are both highly configurable. See the docstrings
-in this module and the main SQLAlchemy documentation for more information and
-examples.
-
-The :class:`~sqlalchemy.ext.orderinglist.ordering_list` factory function is the
-ORM-compatible constructor for `OrderingList` instances.
+:author: Jason Kirtland
+
+``orderinglist`` is a helper for mutable ordered relationships. It will intercept
+list operations performed on a relationship collection and automatically
+synchronize changes in list position with an attribute on the related objects.
+(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.)
+
+Example: Two tables that store slides in a presentation. Each slide
+has a number of bullet points, displayed in order by the 'position'
+column on the bullets table. These bullets can be inserted and re-ordered
+by your end users, and you need to update the 'position' column of all
+affected rows when changes are made.
+
+.. sourcecode:: python+sql
+
+ slides_table = Table('Slides', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('name', String))
+
+ bullets_table = Table('Bullets', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('slide_id', Integer, ForeignKey('Slides.id')),
+ Column('position', Integer),
+ Column('text', String))
+
+ class Slide(object):
+ pass
+ class Bullet(object):
+ pass
+
+ mapper(Slide, slides_table, properties={
+ 'bullets': relationship(Bullet, order_by=[bullets_table.c.position])
+ })
+ mapper(Bullet, bullets_table)
+
+The standard relationship mapping will produce a list-like attribute on each Slide
+containing all related Bullets, but coping with changes in ordering is totally
+your responsibility. If you insert a Bullet into that list, there is no
+magic: it won't have a position attribute unless you assign it one, and
+you'll need to manually renumber all the subsequent Bullets in the list to
+accommodate the insert.
+
+An ``orderinglist`` can automate this and manage the 'position' attribute on all
+related bullets for you.
+
+.. sourcecode:: python+sql
+
+ mapper(Slide, slides_table, properties={
+ 'bullets': relationship(Bullet,
+ collection_class=ordering_list('position'),
+ order_by=[bullets_table.c.position])
+ })
+ mapper(Bullet, bullets_table)
+
+ s = Slide()
+ s.bullets.append(Bullet())
+ s.bullets.append(Bullet())
+ s.bullets[1].position
+ # 1
+ s.bullets.insert(1, Bullet())
+ s.bullets[2].position
+ # 2
+
+Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
+(as in the mapper example above). This implementation depends on the list
+starting in the proper order, so be SURE to put an order_by on your relationship.
+
+.. warning:: ``ordering_list`` only provides limited functionality when a primary
+ key column or unique column is the target of the sort. Since changing the order of
+ entries often means that two rows must trade values, this is not possible when
+ the value is constrained by a primary key or unique constraint, since one of the rows
+ would temporarily have to point to a third available value so that the other row
+ could take its old value. ``ordering_list`` doesn't do any of this for you,
+ nor does SQLAlchemy itself.
+
+``ordering_list`` takes the name of the related object's ordering attribute as
+an argument. By default, the zero-based integer index of the object's
+position in the ``ordering_list`` is synchronized with the ordering attribute:
+index 0 will get position 0, index 1 position 1, etc. To start numbering at 1
+or some other integer, provide ``count_from=1``.
+
+Ordering values are not limited to incrementing integers. Almost any scheme
+can be implemented by supplying a custom ``ordering_func`` that maps a Python list
+index to any value you require.
+
+
+
"""
from sqlalchemy.orm.collections import collection
@@ -288,7 +313,3 @@ class OrderingList(list):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
-if __name__ == '__main__':
- import doctest
- doctest.testmod(optionflags=doctest.ELLIPSIS)
-
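Following the rewritten docstring, a sketch of the ``count_from`` option it mentions, reusing the Slide/Bullet mapping from the example above and numbering positions from 1::

    from sqlalchemy.ext.orderinglist import ordering_list

    mapper(Slide, slides_table, properties={
        'bullets': relationship(Bullet,
            collection_class=ordering_list('position', count_from=1),
            order_by=[bullets_table.c.position])
    })

    s = Slide()
    s.bullets.append(Bullet())
    s.bullets.append(Bullet())
    # positions are now 1 and 2 rather than 0 and 1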
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 3337287d8..206c8d0c2 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -83,6 +83,8 @@ __all__ = (
'eagerload_all',
'extension',
'join',
+ 'joinedload',
+ 'joinedload_all',
'lazyload',
'mapper',
'make_transient',
@@ -96,6 +98,8 @@ __all__ = (
'relation',
'scoped_session',
'sessionmaker',
+ 'subqueryload',
+ 'subqueryload_all',
'synonym',
'undefer',
'undefer_group',
@@ -226,24 +230,32 @@ def relationship(argument, secondary=None, **kwargs):
Available cascades are:
- ``save-update`` - cascade the "add()" operation (formerly
- known as save() and update())
+ * ``save-update`` - cascade the :meth:`~sqlalchemy.orm.session.Session.add`
+ operation. This cascade applies both to future and
+ past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
+ meaning new items added to a collection or scalar relationship
+ get placed into the same session as that of the parent, and
+ also applies to items which have been removed from this
+ relationship but are still part of unflushed history.
- ``merge`` - cascade the "merge()" operation
+ * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
+ operation
- ``expunge`` - cascade the "expunge()" operation
+ * ``expunge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.expunge`
+ operation
- ``delete`` - cascade the "delete()" operation
+ * ``delete`` - cascade the :meth:`~sqlalchemy.orm.session.Session.delete`
+ operation
- ``delete-orphan`` - if an item of the child's type with no
+ * ``delete-orphan`` - if an item of the child's type with no
parent is detected, mark it for deletion. Note that this
option prevents a pending item of the child's class from being
persisted without a parent present.
- ``refresh-expire`` - cascade the expire() and refresh()
- operations
+ * ``refresh-expire`` - cascade the :meth:`~sqlalchemy.orm.session.Session.expire`
+ and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
- ``all`` - shorthand for "save-update,merge, refresh-expire,
+ * ``all`` - shorthand for "save-update,merge, refresh-expire,
expunge, delete"
:param collection_class:
@@ -263,7 +275,6 @@ def relationship(argument, secondary=None, **kwargs):
change the value used in the operation.
:param foreign_keys:
-
a list of columns which are to be used as "foreign key" columns.
this parameter should be used in conjunction with explicit
``primaryjoin`` and ``secondaryjoin`` (if needed) arguments, and
@@ -276,7 +287,7 @@ def relationship(argument, secondary=None, **kwargs):
the table-defined foreign keys.
:param innerjoin=False:
- when ``True``, eager loads will use an inner join to join
+ when ``True``, joined eager loads will use an inner join to join
against related tables instead of an outer join. The purpose
of this option is strictly one of performance, as inner joins
generally perform better than outer joins. This flag can
@@ -287,33 +298,47 @@ def relationship(argument, secondary=None, **kwargs):
:param join_depth:
when non-``None``, an integer value indicating how many levels
- deep eagerload joins should be constructed on a self-referring
- or cyclical relationship. The number counts how many times the
- same Mapper shall be present in the loading condition along a
- particular join branch. When left at its default of ``None``,
- eager loads will automatically stop chaining joins when they
- encounter a mapper which is already higher up in the chain.
-
- :param lazy=(True|False|None|'dynamic'):
- specifies how the related items should be loaded. Values include:
-
- True - items should be loaded lazily when the property is first
- accessed.
-
- False - items should be loaded "eagerly" in the same query as
- that of the parent, using a JOIN or LEFT OUTER JOIN.
-
- None - no loading should occur at any time. This is to support
- "write-only" attributes, or attributes which are
- populated in some manner specific to the application.
-
- 'dynamic' - a ``DynaLoader`` will be attached, which returns a
- ``Query`` object for all read operations. The
- dynamic- collection supports only ``append()`` and
- ``remove()`` for write operations; changes to the
- dynamic property will not be visible until the data
- is flushed to the database.
-
+ deep "eager" loaders should join on a self-referring or cyclical
+ relationship. The number counts how many times the same Mapper
+ shall be present in the loading condition along a particular join
+ branch. When left at its default of ``None``, eager loaders
+ will stop chaining when they encounter the same target mapper
+ which is already higher up in the chain. This option applies
+ both to joined- and subquery- eager loaders.
+
+ :param lazy=('select'|'joined'|'subquery'|'noload'|'dynamic'): specifies
+ how the related items should be loaded. Values include:
+
+ * 'select' - items should be loaded lazily when the property is first
+ accessed.
+
+ * 'joined' - items should be loaded "eagerly" in the same query as
+ that of the parent, using a JOIN or LEFT OUTER JOIN.
+
+ * 'subquery' - items should be loaded "eagerly" within the same
+ query as that of the parent, using a second SQL statement
+ which issues a JOIN to a subquery of the original
+ statement.
+
+ * 'noload' - no loading should occur at any time. This is to
+ support "write-only" attributes, or attributes which are
+ populated in some manner specific to the application.
+
+ * 'dynamic' - the attribute will return a pre-configured
+ :class:`~sqlalchemy.orm.query.Query` object for all read
+ operations, onto which further filtering operations can be
+ applied before iterating the results. The dynamic
+ collection supports a limited set of mutation operations,
+ allowing ``append()`` and ``remove()``. Changes to the
+ collection will not be visible until flushed
+ to the database, where it is then refetched upon iteration.
+
+ * True - a synonym for 'select'
+
+ * False - a synonym for 'joined'
+
+ * None - a synonym for 'noload'
+
:param order_by:
indicates the ordering that should be applied when loading these
items.
@@ -904,76 +929,148 @@ def extension(ext):
return ExtensionOption(ext)
@sa_util.accepts_a_list_as_starargs(list_deprecation='deprecated')
-def eagerload(*keys, **kw):
+def joinedload(*keys, **kw):
"""Return a ``MapperOption`` that will convert the property of the given
- name into an eager load.
+ name into a joined eager load.
+
+ .. note:: This function is known as :func:`eagerload` in all versions
+ of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4 series.
+ :func:`eagerload` will remain available for
+ the foreseeable future in order to enable cross-compatibility.
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
examples::
- # eagerload the "orders" colleciton on "User"
- query(User).options(eagerload(User.orders))
+ # joined-load the "orders" colleciton on "User"
+ query(User).options(joinedload(User.orders))
- # eagerload the "keywords" collection on each "Item",
+ # joined-load the "keywords" collection on each "Item",
# but not the "items" collection on "Order" - those
# remain lazily loaded.
- query(Order).options(eagerload(Order.items, Item.keywords))
+ query(Order).options(joinedload(Order.items, Item.keywords))
- # to eagerload across both, use eagerload_all()
- query(Order).options(eagerload_all(Order.items, Item.keywords))
+ # to joined-load across both, use joinedload_all()
+ query(Order).options(joinedload_all(Order.items, Item.keywords))
- :func:`eagerload` also accepts a keyword argument `innerjoin=True` which
+ :func:`joinedload` also accepts a keyword argument `innerjoin=True` which
indicates using an inner join instead of an outer::
- query(Order).options(eagerload(Order.user, innerjoin=True))
+ query(Order).options(joinedload(Order.user, innerjoin=True))
- Note that the join created by :func:`eagerload` is aliased such that
- no other aspects of the query will affect what it loads. To use eager
+ Note that the join created by :func:`joinedload` is aliased such that
+ no other aspects of the query will affect what it loads. To use joined eager
loading with a join that is constructed manually using :meth:`~sqlalchemy.orm.query.Query.join`
or :func:`~sqlalchemy.orm.join`, see :func:`contains_eager`.
+ See also: :func:`subqueryload`, :func:`lazyload`
+
"""
innerjoin = kw.pop('innerjoin', None)
if innerjoin is not None:
return (
- strategies.EagerLazyOption(keys, lazy=False),
+ strategies.EagerLazyOption(keys, lazy='joined'),
strategies.EagerJoinOption(keys, innerjoin)
)
else:
- return strategies.EagerLazyOption(keys, lazy=False)
+ return strategies.EagerLazyOption(keys, lazy='joined')
@sa_util.accepts_a_list_as_starargs(list_deprecation='deprecated')
-def eagerload_all(*keys, **kw):
+def joinedload_all(*keys, **kw):
"""Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path into an eager load.
+ given dot-separated path into a joined eager load.
+
+ .. note:: This function is known as :func:`eagerload_all` in all versions
+ of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4 series.
+ :func:`eagerload_all` will remain available for
+ the foreseeable future in order to enable cross-compatibility.
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
For example::
- query.options(eagerload_all('orders.items.keywords'))...
+ query.options(joinedload_all('orders.items.keywords'))...
will set all of 'orders', 'orders.items', and 'orders.items.keywords' to
- load in one eager load.
+ load in one joined eager load.
Individual descriptors are accepted as arguments as well::
- query.options(eagerload_all(User.orders, Order.items, Item.keywords))
+ query.options(joinedload_all(User.orders, Order.items, Item.keywords))
The keyword arguments accept a flag `innerjoin=True|False` which will
override the value of the `innerjoin` flag specified on the relationship().
+ See also: :func:`subqueryload_all`, :func:`lazyload`
+
"""
innerjoin = kw.pop('innerjoin', None)
if innerjoin is not None:
return (
- strategies.EagerLazyOption(keys, lazy=False, chained=True),
+ strategies.EagerLazyOption(keys, lazy='joined', chained=True),
strategies.EagerJoinOption(keys, innerjoin, chained=True)
)
else:
- return strategies.EagerLazyOption(keys, lazy=False, chained=True)
+ return strategies.EagerLazyOption(keys, lazy='joined', chained=True)
+
+def eagerload(*args, **kwargs):
+ """A synonym for :func:`joinedload()`."""
+ return joinedload(*args, **kwargs)
+
+def eagerload_all(*args, **kwargs):
+ """A synonym for :func:`joinedload_all()`"""
+ return joinedload_all(*args, **kwargs)
+
+def subqueryload(*keys):
+ """Return a ``MapperOption`` that will convert the property
+ of the given name into a subquery eager load.
+
+ .. note:: This function is new as of SQLAlchemy version 0.6beta3.
+
+ Used with :meth:`~sqlalchemy.orm.query.Query.options`.
+ examples::
+
+ # subquery-load the "orders" colleciton on "User"
+ query(User).options(subqueryload(User.orders))
+
+ # subquery-load the "keywords" collection on each "Item",
+ # but not the "items" collection on "Order" - those
+ # remain lazily loaded.
+ query(Order).options(subqueryload(Order.items, Item.keywords))
+
+ # to subquery-load across both, use subqueryload_all()
+ query(Order).options(subqueryload_all(Order.items, Item.keywords))
+
+ See also: :func:`joinedload`, :func:`lazyload`
+
+ """
+ return strategies.EagerLazyOption(keys, lazy="subquery")
+
+def subqueryload_all(*keys):
+ """Return a ``MapperOption`` that will convert all properties along the
+ given dot-separated path into a subquery eager load.
+
+ .. note:: This function is new as of SQLAlchemy version 0.6beta3.
+
+ Used with :meth:`~sqlalchemy.orm.query.Query.options`.
+
+ For example::
+
+ query.options(subqueryload_all('orders.items.keywords'))...
+
+ will set all of 'orders', 'orders.items', and 'orders.items.keywords' to
+ load in one subquery eager load.
+
+ Individual descriptors are accepted as arguments as well::
+
+ query.options(subqueryload_all(User.orders, Order.items, Item.keywords))
+
+ See also: :func:`joinedload_all`, :func:`lazyload`
+
+ """
+ return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
+
@sa_util.accepts_a_list_as_starargs(list_deprecation='deprecated')
def lazyload(*keys):
"""Return a ``MapperOption`` that will convert the property of the given
@@ -981,6 +1078,8 @@ def lazyload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
+ See also: :func:`eagerload`, :func:`subqueryload`
+
"""
return strategies.EagerLazyOption(keys, lazy=True)
@@ -990,6 +1089,8 @@ def noload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
+ See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
+
"""
return strategies.EagerLazyOption(keys, lazy=None)
@@ -1041,7 +1142,7 @@ def contains_eager(*keys, **kwargs):
raise exceptions.ArgumentError("Invalid kwargs for contains_eager: %r" % kwargs.keys())
return (
- strategies.EagerLazyOption(keys, lazy=False, propagate_to_loaders=False),
+ strategies.EagerLazyOption(keys, lazy='joined', propagate_to_loaders=False),
strategies.LoadEagerFromAliasOption(keys, alias=alias)
)
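Taken together, the renamed options map onto the ``lazy`` strategies documented earlier; a sketch assuming the User/Order mapping from the docstring examples::

    from sqlalchemy.orm import joinedload, subqueryload, lazyload

    # one query with a LEFT OUTER JOIN to the orders table
    session.query(User).options(joinedload(User.orders)).all()

    # a second SELECT joining to a subquery of the original statement
    session.query(User).options(subqueryload(User.orders)).all()

    # per-access loading, equivalent to lazy='select' on relationship()
    session.query(User).options(lazyload(User.orders)).all()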
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 412fabc23..ca9676469 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -482,6 +482,12 @@ class MapperProperty(object):
self.do_init()
self._compile_finished = True
+ @property
+ def class_attribute(self):
+ """Return the class-bound descriptor corresponding to this MapperProperty."""
+
+ return getattr(self.parent.class_, self.key)
+
def do_init(self):
"""Perform subclass-specific initialization post-mapper-creation steps.
@@ -623,7 +629,7 @@ class StrategizedProperty(MapperProperty):
"""
- def __get_context_strategy(self, context, path):
+ def _get_context_strategy(self, context, path):
cls = context.attributes.get(("loaderstrategy", _reduce_path(path)), None)
if cls:
try:
@@ -645,11 +651,11 @@ class StrategizedProperty(MapperProperty):
return strategy
def setup(self, context, entity, path, adapter, **kwargs):
- self.__get_context_strategy(context, path + (self.key,)).\
+ self._get_context_strategy(context, path + (self.key,)).\
setup_query(context, entity, path, adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
- return self.__get_context_strategy(context, path + (self.key,)).\
+ return self._get_context_strategy(context, path + (self.key,)).\
create_row_processor(context, path, mapper, row, adapter)
def do_init(self):
@@ -734,33 +740,13 @@ class PropertyOption(MapperOption):
self._process(query, False)
def _process(self, query, raiseerr):
- paths, mappers = self.__get_paths(query, raiseerr)
+ paths, mappers = self._get_paths(query, raiseerr)
if paths:
self.process_query_property(query, paths, mappers)
def process_query_property(self, query, paths, mappers):
pass
- def __find_entity(self, query, mapper, raiseerr):
- from sqlalchemy.orm.util import _class_to_mapper, _is_aliased_class
-
- if _is_aliased_class(mapper):
- searchfor = mapper
- isa = False
- else:
- searchfor = _class_to_mapper(mapper)
- isa = True
-
- for ent in query._mapper_entities:
- if searchfor is ent.path_entity or (isa and searchfor.common_parent(ent.path_entity)):
- return ent
- else:
- if raiseerr:
- raise sa_exc.ArgumentError("Can't find entity %s in Query. Current list: %r"
- % (searchfor, [str(m.path_entity) for m in query._entities]))
- else:
- return None
-
def __getstate__(self):
d = self.__dict__.copy()
d['key'] = ret = []
@@ -782,7 +768,32 @@ class PropertyOption(MapperOption):
state['key'] = tuple(ret)
self.__dict__ = state
- def __get_paths(self, query, raiseerr):
+ def _find_entity(self, query, mapper, raiseerr):
+ from sqlalchemy.orm.util import _class_to_mapper, _is_aliased_class
+
+ if _is_aliased_class(mapper):
+ searchfor = mapper
+ isa = False
+ else:
+ searchfor = _class_to_mapper(mapper)
+ isa = True
+
+ for ent in query._mapper_entities:
+ if searchfor is ent.path_entity or (
+ isa and
+ searchfor.common_parent(ent.path_entity)):
+ return ent
+ else:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Can't find entity %s in Query. Current list: %r"
+ % (searchfor, [
+ str(m.path_entity) for m in query._entities
+ ]))
+ else:
+ return None
+
+ def _get_paths(self, query, raiseerr):
path = None
entity = None
l = []
@@ -792,61 +803,71 @@ class PropertyOption(MapperOption):
# with an existing path
current_path = list(query._current_path)
- if self.mapper:
- entity = self.__find_entity(query, self.mapper, raiseerr)
- mapper = entity.mapper
- path_element = entity.path_entity
-
+ tokens = []
for key in util.to_list(self.key):
if isinstance(key, basestring):
- tokens = key.split('.')
+ tokens += key.split('.')
else:
- tokens = [key]
- for token in tokens:
- if isinstance(token, basestring):
- if not entity:
- entity = query._entity_zero()
- path_element = entity.path_entity
- mapper = entity.mapper
- mappers.append(mapper)
- prop = mapper.get_property(token, resolve_synonyms=True, raiseerr=raiseerr)
- key = token
- elif isinstance(token, PropComparator):
- prop = token.property
- if not entity:
- entity = self.__find_entity(query, token.parententity, raiseerr)
- if not entity:
- return [], []
- path_element = entity.path_entity
- mappers.append(prop.parent)
- key = prop.key
- else:
- raise sa_exc.ArgumentError("mapper option expects string key "
- "or list of attributes")
-
- if current_path and key == current_path[1]:
- current_path = current_path[2:]
- continue
+ tokens += [key]
+
+ for token in tokens:
+ if isinstance(token, basestring):
+ if not entity:
+ if current_path:
+ if current_path[1] == token:
+ current_path = current_path[2:]
+ continue
- if prop is None:
- return [], []
-
- path = build_path(path_element, prop.key, path)
- l.append(path)
- if getattr(token, '_of_type', None):
- path_element = mapper = token._of_type
- else:
- path_element = mapper = getattr(prop, 'mapper', None)
-
- if path_element:
- path_element = path_element
+ entity = query._entity_zero()
+ path_element = entity.path_entity
+ mapper = entity.mapper
+ mappers.append(mapper)
+ prop = mapper.get_property(
+ token,
+ resolve_synonyms=True,
+ raiseerr=raiseerr)
+ key = token
+ elif isinstance(token, PropComparator):
+ prop = token.property
+ if not entity:
+ if current_path:
+ if current_path[0:2] == [token.parententity, prop.key]:
+ current_path = current_path[2:]
+ continue
+
+ entity = self._find_entity(
+ query,
+ token.parententity,
+ raiseerr)
+ if not entity:
+ return [], []
+ path_element = entity.path_entity
+ mapper = entity.mapper
+ mappers.append(prop.parent)
+ key = prop.key
+ else:
+ raise sa_exc.ArgumentError("mapper option expects string key "
+ "or list of attributes")
+
+ if prop is None:
+ return [], []
+
+ path = build_path(path_element, prop.key, path)
+ l.append(path)
+ if getattr(token, '_of_type', None):
+ path_element = mapper = token._of_type
+ else:
+ path_element = mapper = getattr(prop, 'mapper', None)
+
+ if path_element:
+ path_element = path_element
# if current_path tokens remain, then
# we didn't have an exact path match.
if current_path:
return [], []
-
+
return l, mappers
class AttributeExtension(object):
@@ -894,16 +915,15 @@ class StrategizedOption(PropertyOption):
for an operation by a StrategizedProperty.
"""
- def is_chained(self):
- return False
+ is_chained = False
def process_query_property(self, query, paths, mappers):
- # __get_context_strategy may receive the path in terms of
+ # _get_context_strategy may receive the path in terms of
# a base mapper - e.g. options(eagerload_all(Company.employees, Engineer.machines))
# in the polymorphic tests leads to "(Person, 'machines')" in
# the path due to the mechanics of how the eager strategy builds
# up the path
- if self.is_chained():
+ if self.is_chained:
for path in paths:
query._attributes[("loaderstrategy", _reduce_path(path))] = \
self.get_strategy_class()
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 7de02d3f0..ec21b27d6 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -391,19 +391,15 @@ class RelationshipProperty(StrategizedProperty):
self.comparator_factory = comparator_factory or RelationshipProperty.Comparator
self.comparator = self.comparator_factory(self, None)
util.set_creation_order(self)
-
+
if strategy_class:
self.strategy_class = strategy_class
- elif self.lazy == 'dynamic':
+ elif self.lazy == 'dynamic':
from sqlalchemy.orm import dynamic
self.strategy_class = dynamic.DynaLoader
- elif self.lazy is False:
- self.strategy_class = strategies.EagerLoader
- elif self.lazy is None:
- self.strategy_class = strategies.NoLoader
else:
- self.strategy_class = strategies.LazyLoader
-
+ self.strategy_class = strategies.factory(self.lazy)
+
self._reverse_property = set()
if cascade is not False:
@@ -411,8 +407,12 @@ class RelationshipProperty(StrategizedProperty):
else:
self.cascade = CascadeOptions("save-update, merge")
- if self.passive_deletes == 'all' and ("delete" in self.cascade or "delete-orphan" in self.cascade):
- raise sa_exc.ArgumentError("Can't set passive_deletes='all' in conjunction with 'delete' or 'delete-orphan' cascade")
+ if self.passive_deletes == 'all' and \
+ ("delete" in self.cascade or
+ "delete-orphan" in self.cascade):
+ raise sa_exc.ArgumentError(
+ "Can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade")
self.order_by = order_by
@@ -420,7 +420,9 @@ class RelationshipProperty(StrategizedProperty):
if self.back_populates:
if backref:
- raise sa_exc.ArgumentError("backref and back_populates keyword arguments are mutually exclusive")
+ raise sa_exc.ArgumentError(
+ "backref and back_populates keyword arguments "
+ "are mutually exclusive")
self.backref = None
else:
self.backref = backref
@@ -467,7 +469,10 @@ class RelationshipProperty(StrategizedProperty):
return op(self, *other, **kwargs)
def of_type(self, cls):
- return RelationshipProperty.Comparator(self.property, self.mapper, cls, adapter=self.adapter)
+ return RelationshipProperty.Comparator(
+ self.property,
+ self.mapper,
+ cls, adapter=self.adapter)
def in_(self, other):
raise NotImplementedError("in_() not yet supported for relationships. For a "
@@ -480,11 +485,21 @@ class RelationshipProperty(StrategizedProperty):
if self.property.direction in [ONETOMANY, MANYTOMANY]:
return ~self._criterion_exists()
else:
- return _orm_annotate(self.property._optimized_compare(None, adapt_source=self.adapter))
+ return _orm_annotate(
+ self.property._optimized_compare(
+ None,
+ adapt_source=self.adapter)
+ )
elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection to an object or collection; use contains() to test for membership.")
+ raise sa_exc.InvalidRequestError(
+ "Can't compare a collection to an object or "
+ "collection; use contains() to test for membership.")
else:
- return _orm_annotate(self.property._optimized_compare(other, adapt_source=self.adapter))
+ return _orm_annotate(
+ self.property._optimized_compare(
+ other,
+ adapt_source=self.adapter)
+ )
def _criterion_exists(self, criterion=None, **kwargs):
if getattr(self, '_of_type', None):
@@ -508,7 +523,10 @@ class RelationshipProperty(StrategizedProperty):
source_selectable = None
pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True, dest_selectable=to_selectable, source_selectable=source_selectable)
+ self.property._create_joins(
+ dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
for k in kwargs:
crit = self.property.mapper.class_manager[k] == kwargs[k]
@@ -517,9 +535,9 @@ class RelationshipProperty(StrategizedProperty):
else:
criterion = criterion & crit
- # annotate the *local* side of the join condition, in the case of pj + sj this
- # is the full primaryjoin, in the case of just pj its the local side of
- # the primaryjoin.
+ # annotate the *local* side of the join condition, in the case
+ # of pj + sj this is the full primaryjoin, in the case of just
+ # pj it's the local side of the primaryjoin.
if sj is not None:
j = _orm_annotate(pj) & sj
else:
@@ -529,8 +547,10 @@ class RelationshipProperty(StrategizedProperty):
# limit this adapter to annotated only?
criterion = target_adapter.traverse(criterion)
- # only have the "joined left side" of what we return be subject to Query adaption. The right
- # side of it is used for an exists() subquery and should not correlate or otherwise reach out
+ # only have the "joined left side" of what we
+ # return be subject to Query adaption. The right
+ # side of it is used for an exists() subquery and
+ # should not correlate or otherwise reach out
# to anything in the enclosing query.
if criterion is not None:
criterion = criterion._annotate({'_halt_adapt': True})
@@ -541,18 +561,25 @@ class RelationshipProperty(StrategizedProperty):
def any(self, criterion=None, **kwargs):
if not self.property.uselist:
- raise sa_exc.InvalidRequestError("'any()' not implemented for scalar attributes. Use has().")
+ raise sa_exc.InvalidRequestError(
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
return self._criterion_exists(criterion, **kwargs)
def has(self, criterion=None, **kwargs):
if self.property.uselist:
- raise sa_exc.InvalidRequestError("'has()' not implemented for collections. Use any().")
+ raise sa_exc.InvalidRequestError(
+ "'has()' not implemented for collections. "
+ "Use any().")
return self._criterion_exists(criterion, **kwargs)
def contains(self, other, **kwargs):
if not self.property.uselist:
- raise sa_exc.InvalidRequestError("'contains' not implemented for scalar attributes. Use ==")
+ raise sa_exc.InvalidRequestError(
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
clause = self.property._optimized_compare(other, adapt_source=self.adapter)
if self.property.secondaryjoin is not None:
@@ -563,7 +590,6 @@ class RelationshipProperty(StrategizedProperty):
def __negated_contains_or_equals(self, other):
if self.property.direction == MANYTOONE:
state = attributes.instance_state(other)
- strategy = self.property._get_strategy(strategies.LazyLoader)
def state_bindparam(state, col):
o = state.obj() # strong ref
@@ -575,14 +601,20 @@ class RelationshipProperty(StrategizedProperty):
else:
return col
- if strategy.use_get:
+ if self.property._use_get:
return sql.and_(*[
sql.or_(
adapt(x) != state_bindparam(state, y),
adapt(x) == None)
for (x, y) in self.property.local_remote_pairs])
- criterion = sql.and_(*[x==y for (x, y) in zip(self.property.mapper.primary_key, self.property.mapper.primary_key_from_instance(other))])
+ criterion = sql.and_(*[x==y for (x, y) in
+ zip(
+ self.property.mapper.primary_key,
+ self.property.\
+ mapper.\
+ primary_key_from_instance(other))
+ ])
return ~self._criterion_exists(criterion)
def __ne__(self, other):
@@ -592,7 +624,9 @@ class RelationshipProperty(StrategizedProperty):
else:
return self._criterion_exists()
elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection to an object or collection; use contains() to test for membership.")
+ raise sa_exc.InvalidRequestError(
+ "Can't compare a collection to an object or "
+ "collection; use contains() to test for membership.")
else:
return self.__negated_contains_or_equals(other)
@@ -629,7 +663,13 @@ class RelationshipProperty(StrategizedProperty):
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
- def merge(self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive):
+ def merge(self,
+ session,
+ source_state,
+ source_dict,
+ dest_state,
+ dest_dict,
+ load, _recursive):
if load:
# TODO: no test coverage for recursive check
for r in self._reverse_property:
@@ -702,6 +742,8 @@ class RelationshipProperty(StrategizedProperty):
else:
instances = state.value_as_iterable(self.key, passive=passive)
+ skip_pending = type_ == 'refresh-expire' and 'delete-orphan' not in self.cascade
+
if instances:
for c in instances:
if c is not None and \
@@ -717,12 +759,17 @@ class RelationshipProperty(StrategizedProperty):
str(self.parent.class_),
str(c.__class__)
))
+ instance_state = attributes.instance_state(c)
+
+ if skip_pending and not instance_state.key:
+ continue
+
visited_instances.add(c)
# cascade using the mapper local to this
# object, so that its individual properties are located
- instance_mapper = object_mapper(c)
- yield (c, instance_mapper, attributes.instance_state(c))
+ instance_mapper = instance_state.manager.mapper
+ yield (c, instance_mapper, instance_state)
def _add_reverse_property(self, key):
other = self.mapper._get_property(key)
@@ -870,7 +917,10 @@ class RelationshipProperty(StrategizedProperty):
]
if not eq_pairs:
- if not self.viewonly and criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=self._foreign_keys, any_operator=True):
+ if not self.viewonly and criterion_as_pairs(
+ self.primaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True):
raise sa_exc.ArgumentError("Could not locate any equated, locally "
"mapped column pairs for primaryjoin condition '%s' on relationship %s. "
"For more relaxed rules on join conditions, the relationship may be "
@@ -891,11 +941,24 @@ class RelationshipProperty(StrategizedProperty):
self.synchronize_pairs = eq_pairs
if self.secondaryjoin is not None:
- sq_pairs = criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=self._foreign_keys, any_operator=self.viewonly)
- sq_pairs = [(l, r) for l, r in sq_pairs if (self._col_is_part_of_mappings(l) and self._col_is_part_of_mappings(r)) or r in self._foreign_keys]
+ sq_pairs = criterion_as_pairs(
+ self.secondaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=self.viewonly)
+
+ sq_pairs = [
+ (l, r)
+ for l, r in sq_pairs
+ if (self._col_is_part_of_mappings(l) and
+ self._col_is_part_of_mappings(r)) or
+ r in self._foreign_keys
+ ]
if not sq_pairs:
- if not self.viewonly and criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=self._foreign_keys, any_operator=True):
+ if not self.viewonly and criterion_as_pairs(
+ self.secondaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True):
raise sa_exc.ArgumentError("Could not locate any equated, locally mapped "
"column pairs for secondaryjoin condition '%s' on relationship %s. "
"For more relaxed rules on join conditions, the "
@@ -1004,17 +1067,29 @@ class RelationshipProperty(StrategizedProperty):
if self.secondaryjoin is not None:
eq_pairs += self.secondary_synchronize_pairs
else:
- eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=self._foreign_keys, any_operator=True)
+ eq_pairs = criterion_as_pairs(
+ self.primaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True)
if self.secondaryjoin is not None:
- eq_pairs += criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=self._foreign_keys, any_operator=True)
- eq_pairs = [(l, r) for l, r in eq_pairs if self._col_is_part_of_mappings(l) and self._col_is_part_of_mappings(r)]
+ eq_pairs += criterion_as_pairs(
+ self.secondaryjoin,
+ consider_as_foreign_keys=self._foreign_keys,
+ any_operator=True)
+
+ eq_pairs = [
+ (l, r) for l, r in eq_pairs
+ if self._col_is_part_of_mappings(l) and
+ self._col_is_part_of_mappings(r)
+ ]
if self.direction is MANYTOONE:
self.local_remote_pairs = [(r, l) for l, r in eq_pairs]
else:
self.local_remote_pairs = eq_pairs
elif self.remote_side:
- raise sa_exc.ArgumentError("remote_side argument is redundant against more detailed _local_remote_side argument.")
+ raise sa_exc.ArgumentError("remote_side argument is redundant "
+ "against more detailed _local_remote_side argument.")
for l, r in self.local_remote_pairs:
@@ -1028,16 +1103,20 @@ class RelationshipProperty(StrategizedProperty):
"Specify remote_side argument to indicate which column lazy "
"join condition should bind." % (r, self.mapper))
- self.local_side, self.remote_side = [util.ordered_column_set(x) for x in zip(*list(self.local_remote_pairs))]
+ self.local_side, self.remote_side = [
+ util.ordered_column_set(x) for x in
+ zip(*list(self.local_remote_pairs))]
def _assert_is_primary(self):
if not self.is_primary() and \
- not mapper.class_mapper(self.parent.class_, compile=False)._get_property(self.key, raiseerr=False):
+ not mapper.class_mapper(self.parent.class_, compile=False).\
+ _get_property(self.key, raiseerr=False):
raise sa_exc.ArgumentError("Attempting to assign a new relationship '%s' to "
"a non-primary mapper on class '%s'. New relationships can only be "
"added to the primary mapper, i.e. the very first "
- "mapper created for class '%s' " % (self.key, self.parent.class_.__name__, self.parent.class_.__name__))
+ "mapper created for class '%s' " %
+ (self.key, self.parent.class_.__name__, self.parent.class_.__name__))
def _generate_backref(self):
if not self.is_primary():
@@ -1093,17 +1172,27 @@ class RelationshipProperty(StrategizedProperty):
def _post_init(self):
self.logger.info("%s setup primary join %s", self, self.primaryjoin)
self.logger.info("%s setup secondary join %s", self, self.secondaryjoin)
- self.logger.info("%s synchronize pairs [%s]", self, ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
- self.logger.info("%s secondary synchronize pairs [%s]", self, ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
- self.logger.info("%s local/remote pairs [%s]", self, ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
+ self.logger.info("%s synchronize pairs [%s]", self,
+ ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs))
+ self.logger.info("%s secondary synchronize pairs [%s]", self,
+ ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])))
+ self.logger.info("%s local/remote pairs [%s]", self,
+ ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs))
self.logger.info("%s relationship direction %s", self, self.direction)
if self.uselist is None:
self.uselist = self.direction is not MANYTOONE
-
+
if not self.viewonly:
self._dependency_processor = dependency.create_dependency_processor(self)
-
+
+ @util.memoized_property
+ def _use_get(self):
+ """memoize the 'use_get' attribute of this RelationshipLoader's lazyloader."""
+
+ strategy = self._get_strategy(strategies.LazyLoader)
+ return strategy.use_get
+
def _refers_to_parent_table(self):
for c, f in self.synchronize_pairs:
if c.table is f.table:
@@ -1114,7 +1203,9 @@ class RelationshipProperty(StrategizedProperty):
def _is_self_referential(self):
return self.mapper.common_parent(self.parent)
- def _create_joins(self, source_polymorphic=False, source_selectable=None, dest_polymorphic=False, dest_selectable=None, of_type=None):
+ def _create_joins(self, source_polymorphic=False,
+ source_selectable=None, dest_polymorphic=False,
+ dest_selectable=None, of_type=None):
if source_selectable is None:
if source_polymorphic and self.parent.with_polymorphic:
source_selectable = self.parent._with_polymorphic_selectable
@@ -1157,7 +1248,10 @@ class RelationshipProperty(StrategizedProperty):
secondary = secondary.alias()
primary_aliasizer = ClauseAdapter(secondary)
if dest_selectable is not None:
- secondary_aliasizer = ClauseAdapter(dest_selectable, equivalents=self.mapper._equivalent_columns).chain(primary_aliasizer)
+ secondary_aliasizer = \
+ ClauseAdapter(dest_selectable,
+ equivalents=self.mapper._equivalent_columns).\
+ chain(primary_aliasizer)
else:
secondary_aliasizer = primary_aliasizer
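
A minimal usage sketch of the comparator semantics enforced by the error messages above: any() applies to collections, has() to scalar references, and contains() tests collection membership. The User/Address mapping is an assumed fixture for illustration, not part of this changeset:

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        addresses = relationship("Address", backref="user")

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey('users.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    ed = User(name='ed', addresses=[Address(email='ed@example.com')])
    session.add(ed)
    session.commit()

    # any() is the EXISTS test for collections ...
    session.query(User).filter(
        User.addresses.any(Address.email == 'ed@example.com')).all()

    # ... has() is its counterpart for scalar (many-to-one) references ...
    session.query(Address).filter(Address.user.has(User.name == 'ed')).all()

    # ... and contains() tests membership of an instance in a collection;
    # scalar references are compared with == / != instead.
    session.query(User).filter(
        User.addresses.contains(ed.addresses[0])).all()
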
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 682aa2bbf..e98ad8937 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -84,6 +84,7 @@ class Query(object):
_params = util.frozendict()
_attributes = util.frozendict()
_with_options = ()
+ _with_hints = ()
def __init__(self, entities, session=None):
self.session = session
@@ -114,7 +115,8 @@ class Query(object):
mapper, selectable, is_aliased_class = _entity_info(entity)
if not is_aliased_class and mapper.with_polymorphic:
with_polymorphic = mapper._with_polymorphic_mappers
- self.__mapper_loads_polymorphically_with(mapper,
+ if mapper.mapped_table not in self._polymorphic_adapters:
+ self.__mapper_loads_polymorphically_with(mapper,
sql_util.ColumnAdapter(selectable, mapper._equivalent_columns))
adapter = None
elif is_aliased_class:
@@ -133,7 +135,7 @@ class Query(object):
self._polymorphic_adapters[m.mapped_table] = self._polymorphic_adapters[m.local_table] = adapter
def _set_select_from(self, *obj):
-
+
fa = []
for from_obj in obj:
if isinstance(from_obj, expression._SelectBaseMixin):
@@ -142,9 +144,8 @@ class Query(object):
self._from_obj = tuple(fa)
- # TODO: only use this adapter for from_self() ? right
- # now its usage is somewhat arbitrary.
- if len(self._from_obj) == 1 and isinstance(self._from_obj[0], expression.Alias):
+ if len(self._from_obj) == 1 and \
+ isinstance(self._from_obj[0], expression.Alias):
equivs = self.__all_equivs()
self._from_obj_alias = sql_util.ColumnAdapter(self._from_obj[0], equivs)
@@ -198,7 +199,13 @@ class Query(object):
@_generative()
def _adapt_all_clauses(self):
self._disable_orm_filtering = True
-
+
+ def _adapt_col_list(self, cols):
+ return [
+ self._adapt_clause(expression._literal_as_text(o), True, True)
+ for o in cols
+ ]
+
def _adapt_clause(self, clause, as_filter, orm_only):
adapters = []
if as_filter and self._filter_aliases:
@@ -375,7 +382,8 @@ class Query(object):
statement._annotate({'_halt_adapt': True})
def subquery(self):
- """return the full SELECT statement represented by this Query, embedded within an Alias.
+ """return the full SELECT statement represented by this Query,
+ embedded within an Alias.
Eager JOIN generation within the query is disabled.
@@ -391,11 +399,14 @@ class Query(object):
@_generative()
def enable_eagerloads(self, value):
- """Control whether or not eager joins are rendered.
+ """Control whether or not eager joins and subqueries are
+ rendered.
When set to False, the returned Query will not render
- eager joins regardless of eagerload() options
- or mapper-level lazy=False configurations.
+ eager joins regardless of :func:`~sqlalchemy.orm.joinedload`,
+ :func:`~sqlalchemy.orm.subqueryload` options
+ or mapper-level ``lazy='joined'``/``lazy='subquery'``
+ configurations.
This is used primarily when nesting the Query's
statement into a subquery or other
@@ -502,13 +513,16 @@ class Query(object):
overwritten.
In particular, it's usually impossible to use this setting with
- eagerly loaded collections (i.e. any lazy=False) since those
- collections will be cleared for a new load when encountered in a
- subsequent result batch.
+ eagerly loaded collections (i.e. any lazy='joined' or 'subquery')
+ since those collections will be cleared for a new load when
+ encountered in a subsequent result batch. In the case of 'subquery'
+ loading, the full result for all rows is fetched which generally
+ defeats the purpose of :meth:`~sqlalchemy.orm.query.Query.yield_per`.
Also note that many DBAPIs do not "stream" results, pre-buffering
all rows before making them available, including mysql-python and
- psycopg2. yield_per() will also set the ``stream_results`` execution
+ psycopg2. :meth:`~sqlalchemy.orm.query.Query.yield_per` will also
+ set the ``stream_results`` execution
option to ``True``, which currently is only understood by psycopg2
and causes server side cursors to be used.
@@ -618,17 +632,20 @@ class Query(object):
those being selected.
"""
- fromclause = self.with_labels().enable_eagerloads(False).statement.correlate(None)
+ fromclause = self.with_labels().enable_eagerloads(False).\
+ statement.correlate(None)
q = self._from_selectable(fromclause)
if entities:
q._set_entities(entities)
return q
-
+
@_generative()
def _from_selectable(self, fromclause):
- self._statement = self._criterion = None
- self._order_by = self._group_by = self._distinct = False
- self._limit = self._offset = None
+ for attr in ('_statement', '_criterion', '_order_by', '_group_by',
+ '_limit', '_offset', '_joinpath', '_joinpoint',
+ '_distinct'
+ ):
+ self.__dict__.pop(attr, None)
self._set_select_from(fromclause)
old_entities = self._entities
self._entities = []
@@ -659,16 +676,25 @@ class Query(object):
return None
@_generative()
- def add_column(self, column):
- """Add a SQL ColumnElement to the list of result columns to be returned."""
+ def add_columns(self, *column):
+ """Add one or more column expressions to the list
+ of result columns to be returned."""
self._entities = list(self._entities)
l = len(self._entities)
- _ColumnEntity(self, column)
+ for c in column:
+ _ColumnEntity(self, c)
# _ColumnEntity may add many entities if the
# given arg is a FROM clause
self._setup_aliasizers(self._entities[l:])
+    @util.pending_deprecation("add_column() superseded by add_columns()")
+ def add_column(self, column):
+ """Add a column expression to the list of result columns
+ to be returned."""
+
+ return self.add_columns(column)
+
def options(self, *args):
"""Return a new Query object, applying the given list of
MapperOptions.
@@ -694,6 +720,21 @@ class Query(object):
opt.process_query(self)
@_generative()
+ def with_hint(self, selectable, text, dialect_name=None):
+ """Add an indexing hint for the given entity or selectable to
+ this :class:`Query`.
+
+ Functionality is passed straight through to
+ :meth:`~sqlalchemy.sql.expression.Select.with_hint`,
+ with the addition that ``selectable`` can be a
+        :class:`Table`, :class:`Alias`, or ORM entity / mapped class,
+        etc.
+ """
+ mapper, selectable, is_aliased_class = _entity_info(selectable)
+
+ self._with_hints += ((selectable, text, dialect_name),)
+
+ @_generative()
def execution_options(self, **kwargs):
""" Set non-SQL options which take effect during execution.
@@ -761,7 +802,6 @@ class Query(object):
return self.filter(sql.and_(*clauses))
-
@_generative(_no_statement_condition, _no_limit_offset)
@util.accepts_a_list_as_starargs(list_deprecation='deprecated')
def order_by(self, *criterion):
@@ -770,7 +810,7 @@ class Query(object):
if len(criterion) == 1 and criterion[0] is None:
self._order_by = None
else:
- criterion = [self._adapt_clause(expression._literal_as_text(o), True, True) for o in criterion]
+ criterion = self._adapt_col_list(criterion)
if self._order_by is False or self._order_by is None:
self._order_by = criterion
@@ -784,7 +824,7 @@ class Query(object):
criterion = list(chain(*[_orm_columns(c) for c in criterion]))
- criterion = [self._adapt_clause(expression._literal_as_text(o), True, True) for o in criterion]
+ criterion = self._adapt_col_list(criterion)
if self._group_by is False:
self._group_by = criterion
@@ -1013,6 +1053,18 @@ class Query(object):
descriptor, prop = _entity_descriptor(left_entity, onclause)
onclause = descriptor
+
+ # check for q.join(Class.propname, from_joinpoint=True)
+ # and Class is that of the current joinpoint
+ elif from_joinpoint and isinstance(onclause, interfaces.PropComparator):
+ left_entity = onclause.parententity
+
+ left_mapper, left_selectable, left_is_aliased = \
+ _entity_info(self._joinpoint_zero())
+ if left_mapper is left_entity:
+ left_entity = self._joinpoint_zero()
+ descriptor, prop = _entity_descriptor(left_entity, onclause.key)
+ onclause = descriptor
if isinstance(onclause, interfaces.PropComparator):
if right_entity is None:
@@ -1022,7 +1074,7 @@ class Query(object):
right_entity = of_type
else:
right_entity = onclause.property.mapper
-
+
left_entity = onclause.parententity
prop = onclause.property
@@ -1051,6 +1103,12 @@ class Query(object):
if left is None:
left = self._joinpoint_zero()
+ if left is right and \
+ not create_aliases:
+ raise sa_exc.InvalidRequestError(
+ "Can't construct a join from %s to %s, they are the same entity" %
+ (left, right))
+
left_mapper, left_selectable, left_is_aliased = _entity_info(left)
right_mapper, right_selectable, is_aliased_class = _entity_info(right)
@@ -1312,7 +1370,7 @@ class Query(object):
first() applies a limit of one within the generated SQL, so that
only one primary entity row is generated on the server side
- (note this may consist of multiple result rows if eagerly loaded
+ (note this may consist of multiple result rows if join-loaded
collections are present).
Calling ``first()`` results in an execution of the underlying query.
@@ -2011,7 +2069,10 @@ class Query(object):
order_by=context.order_by,
**self._select_args
)
-
+
+ for hint in self._with_hints:
+ inner = inner.with_hint(*hint)
+
if self._correlate:
inner = inner.correlate(*self._correlate)
@@ -2066,6 +2127,10 @@ class Query(object):
order_by=context.order_by,
**self._select_args
)
+
+ for hint in self._with_hints:
+ statement = statement.with_hint(*hint)
+
if self._execution_options:
statement = statement.execution_options(**self._execution_options)
@@ -2166,14 +2231,14 @@ class _MapperEntity(_QueryEntity):
query._entities.append(self)
def _get_entity_clauses(self, query, context):
-
+
adapter = None
if not self.is_aliased_class and query._polymorphic_adapters:
adapter = query._polymorphic_adapters.get(self.mapper, None)
if not adapter and self.adapter:
adapter = self.adapter
-
+
if adapter:
if query._from_obj_alias:
ret = adapter.wrap(query._from_obj_alias)
@@ -2247,7 +2312,6 @@ class _MapperEntity(_QueryEntity):
def __str__(self):
return str(self.mapper)
-
class _ColumnEntity(_QueryEntity):
"""Column/expression based entity."""
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 0a3fbe79e..0810175bf 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -883,7 +883,7 @@ class Session(object):
state.commit_all(dict_, self.identity_map)
def refresh(self, instance, attribute_names=None, lockmode=None):
- """Refresh the attributes on the given instance.
+ """Expire and refresh the attributes on the given instance.
A query will be issued to the database and all attributes will be
refreshed with their current database value.
@@ -907,7 +907,9 @@ class Session(object):
state = attributes.instance_state(instance)
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
- self._validate_persistent(state)
+
+ self._expire_state(state, attribute_names)
+
if self.query(_object_mapper(instance))._get(
state.key, refresh_state=state,
lockmode=lockmode,
@@ -939,18 +941,31 @@ class Session(object):
state = attributes.instance_state(instance)
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
+ self._expire_state(state, attribute_names)
+
+ def _expire_state(self, state, attribute_names):
self._validate_persistent(state)
if attribute_names:
_expire_state(state, state.dict,
- attribute_names=attribute_names, instance_dict=self.identity_map)
+ attribute_names=attribute_names,
+ instance_dict=self.identity_map)
else:
# pre-fetch the full cascade since the expire is going to
# remove associations
cascaded = list(_cascade_state_iterator('refresh-expire', state))
- _expire_state(state, state.dict, None, instance_dict=self.identity_map)
+ self._conditional_expire(state)
for (state, m, o) in cascaded:
- _expire_state(state, state.dict, None, instance_dict=self.identity_map)
-
+ self._conditional_expire(state)
+
+ def _conditional_expire(self, state):
+ """Expire a state if persistent, else expunge if pending"""
+
+ if state.key:
+ _expire_state(state, state.dict, None, instance_dict=self.identity_map)
+ elif state in self._new:
+ self._new.pop(state)
+ state.detach()
+
def prune(self):
"""Remove unreferenced instances cached in the identity map.
diff --git a/lib/sqlalchemy/orm/shard.py b/lib/sqlalchemy/orm/shard.py
index b6026bbc3..9cb26db79 100644
--- a/lib/sqlalchemy/orm/shard.py
+++ b/lib/sqlalchemy/orm/shard.py
@@ -4,114 +4,12 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Horizontal sharding support.
-
-Defines a rudimental 'horizontal sharding' system which allows a Session to
-distribute queries and persistence operations across multiple databases.
-
-For a usage example, see the file ``examples/sharding/attribute_shard.py``
-included in the source distrbution.
-
-"""
-
-import sqlalchemy.exceptions as sa_exc
from sqlalchemy import util
-from sqlalchemy.orm.session import Session
-from sqlalchemy.orm.query import Query
-
-__all__ = ['ShardedSession', 'ShardedQuery']
-
-
-class ShardedSession(Session):
- def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, **kwargs):
- """Construct a ShardedSession.
-
- shard_chooser
- A callable which, passed a Mapper, a mapped instance, and possibly a
- SQL clause, returns a shard ID. This id may be based off of the
- attributes present within the object, or on some round-robin
- scheme. If the scheme is based on a selection, it should set
- whatever state on the instance to mark it in the future as
- participating in that shard.
-
- id_chooser
- A callable, passed a query and a tuple of identity values, which
- should return a list of shard ids where the ID might reside. The
- databases will be queried in the order of this listing.
-
- query_chooser
- For a given Query, returns the list of shard_ids where the query
- should be issued. Results from all shards returned will be combined
- together into a single listing.
-
- """
- super(ShardedSession, self).__init__(**kwargs)
- self.shard_chooser = shard_chooser
- self.id_chooser = id_chooser
- self.query_chooser = query_chooser
- self.__binds = {}
- self._mapper_flush_opts = {'connection_callable':self.connection}
- self._query_cls = ShardedQuery
- if shards is not None:
- for k in shards:
- self.bind_shard(k, shards[k])
-
- def connection(self, mapper=None, instance=None, shard_id=None, **kwargs):
- if shard_id is None:
- shard_id = self.shard_chooser(mapper, instance)
-
- if self.transaction is not None:
- return self.transaction.connection(mapper, shard_id=shard_id)
- else:
- return self.get_bind(mapper, shard_id=shard_id, instance=instance).contextual_connect(**kwargs)
-
- def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
- if shard_id is None:
- shard_id = self.shard_chooser(mapper, instance, clause=clause)
- return self.__binds[shard_id]
- def bind_shard(self, shard_id, bind):
- self.__binds[shard_id] = bind
+util.warn_deprecated(
+ "Horizontal sharding is now importable via "
+ "'import sqlalchemy.ext.horizontal_shard"
+)
-class ShardedQuery(Query):
- def __init__(self, *args, **kwargs):
- super(ShardedQuery, self).__init__(*args, **kwargs)
- self.id_chooser = self.session.id_chooser
- self.query_chooser = self.session.query_chooser
- self._shard_id = None
-
- def set_shard(self, shard_id):
- """return a new query, limited to a single shard ID.
-
- all subsequent operations with the returned query will
- be against the single shard regardless of other state.
- """
-
- q = self._clone()
- q._shard_id = shard_id
- return q
-
- def _execute_and_instances(self, context):
- if self._shard_id is not None:
- result = self.session.connection(mapper=self._mapper_zero(), shard_id=self._shard_id).execute(context.statement, self._params)
- return self.instances(result, context)
- else:
- partial = []
- for shard_id in self.query_chooser(self):
- result = self.session.connection(mapper=self._mapper_zero(), shard_id=shard_id).execute(context.statement, self._params)
- partial = partial + list(self.instances(result, context))
- # if some kind of in memory 'sorting' were done, this is where it would happen
- return iter(partial)
+from sqlalchemy.ext.horizontal_shard import *
- def get(self, ident, **kwargs):
- if self._shard_id is not None:
- return super(ShardedQuery, self).get(ident)
- else:
- ident = util.to_list(ident)
- for shard_id in self.id_chooser(self, ident):
- o = self.set_shard(shard_id).get(ident, **kwargs)
- if o is not None:
- return o
- else:
- return None
-
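
The module body now reduces to the deprecation warning plus a wildcard re-export, so existing imports keep working. The before/after spelling, using the names from the removed __all__:

    # deprecated location; importing it now emits a warning:
    # from sqlalchemy.orm.shard import ShardedSession, ShardedQuery

    # new location:
    from sqlalchemy.ext.horizontal_shard import ShardedSession, ShardedQuery
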
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index ce19667c6..93b1170f4 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -4,7 +4,8 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""sqlalchemy.orm.interfaces.LoaderStrategy implementations, and related MapperOptions."""
+"""sqlalchemy.orm.interfaces.LoaderStrategy
+ implementations, and related MapperOptions."""
from sqlalchemy import exc as sa_exc
from sqlalchemy import sql, util, log
@@ -17,6 +18,7 @@ from sqlalchemy.orm.interfaces import (
)
from sqlalchemy.orm import session as sessionlib
from sqlalchemy.orm import util as mapperutil
+import itertools
def _register_attribute(strategy, mapper, useobject,
compare_function=None,
@@ -38,7 +40,9 @@ def _register_attribute(strategy, mapper, useobject,
attribute_ext.insert(0, _SingleParentValidator(prop))
if prop.key in prop.parent._validators:
- attribute_ext.insert(0, mapperutil.Validator(prop.key, prop.parent._validators[prop.key]))
+ attribute_ext.insert(0,
+ mapperutil.Validator(prop.key, prop.parent._validators[prop.key])
+ )
if useobject:
attribute_ext.append(sessionlib.UOWEventHandler(prop.key))
@@ -66,7 +70,7 @@ def _register_attribute(strategy, mapper, useobject,
)
class UninstrumentedColumnLoader(LoaderStrategy):
- """Represent the strategy for a MapperProperty that doesn't instrument the class.
+ """Represent the a non-instrumented MapperProperty.
The polymorphic_on argument of mapper() often results in this,
if the argument is against the with_polymorphic selectable.
@@ -75,14 +79,15 @@ class UninstrumentedColumnLoader(LoaderStrategy):
def init(self):
self.columns = self.parent_property.columns
- def setup_query(self, context, entity, path, adapter, column_collection=None, **kwargs):
+ def setup_query(self, context, entity, path, adapter,
+ column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
column_collection.append(c)
def create_row_processor(self, selectcontext, path, mapper, row, adapter):
- return (None, None)
+ return None, None
class ColumnLoader(LoaderStrategy):
"""Strategize the loading of a plain column-based MapperProperty."""
@@ -91,7 +96,8 @@ class ColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')
- def setup_query(self, context, entity, path, adapter, column_collection=None, **kwargs):
+ def setup_query(self, context, entity, path, adapter,
+ column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
@@ -135,7 +141,8 @@ class CompositeColumnLoader(ColumnLoader):
def copy(obj):
if obj is None:
return None
- return self.parent_property.composite_class(*obj.__composite_values__())
+ return self.parent_property.\
+ composite_class(*obj.__composite_values__())
def compare(a, b):
if a is None or b is None:
@@ -156,7 +163,8 @@ class CompositeColumnLoader(ColumnLoader):
#active_history ?
)
- def create_row_processor(self, selectcontext, path, mapper, row, adapter):
+ def create_row_processor(self, selectcontext, path, mapper,
+ row, adapter):
key = self.key
columns = self.columns
composite_class = self.parent_property.composite_class
@@ -203,7 +211,8 @@ class DeferredColumnLoader(LoaderStrategy):
def init(self):
if hasattr(self.parent_property, 'composite_class'):
- raise NotImplementedError("Deferred loading for composite types not implemented yet")
+ raise NotImplementedError("Deferred loading for composite "
+ "types not implemented yet")
self.columns = self.parent_property.columns
self.group = self.parent_property.group
@@ -218,13 +227,15 @@ class DeferredColumnLoader(LoaderStrategy):
expire_missing=False
)
- def setup_query(self, context, entity, path, adapter, only_load_props=None, **kwargs):
- if \
- (self.group is not None and context.attributes.get(('undefer', self.group), False)) or \
- (only_load_props and self.key in only_load_props):
-
+ def setup_query(self, context, entity, path, adapter,
+ only_load_props=None, **kwargs):
+ if (
+ self.group is not None and
+ context.attributes.get(('undefer', self.group), False)
+ ) or (only_load_props and self.key in only_load_props):
self.parent_property._get_strategy(ColumnLoader).\
- setup_query(context, entity, path, adapter, **kwargs)
+ setup_query(context, entity,
+ path, adapter, **kwargs)
def _class_level_loader(self, state):
if not mapperutil._state_has_identity(state):
@@ -276,14 +287,15 @@ class LoadDeferredColumns(object):
session = sessionlib._state_session(state)
if session is None:
raise orm_exc.DetachedInstanceError(
- "Parent instance %s is not bound to a Session; "
- "deferred load operation of attribute '%s' cannot proceed" %
- (mapperutil.state_str(state), self.key)
- )
+ "Parent instance %s is not bound to a Session; "
+ "deferred load operation of attribute '%s' cannot proceed" %
+ (mapperutil.state_str(state), self.key)
+ )
query = session.query(localparent)
ident = state.key[1]
- query._get(None, ident=ident, only_load_props=group, refresh_state=state)
+ query._get(None, ident=ident,
+ only_load_props=group, refresh_state=state)
return attributes.ATTR_WAS_SET
class DeferredOption(StrategizedOption):
@@ -309,7 +321,7 @@ class UndeferGroupOption(MapperOption):
query._attributes[('undefer', self.group)] = True
class AbstractRelationshipLoader(LoaderStrategy):
- """LoaderStratgies which deal with related objects as opposed to scalars."""
+ """LoaderStratgies which deal with related objects."""
def init(self):
self.mapper = self.parent_property.mapper
@@ -363,31 +375,47 @@ class LazyLoader(AbstractRelationshipLoader):
for c in self.mapper._equivalent_columns[col]:
self._equated_columns[c] = self._equated_columns[col]
- self.logger.info("%s will use query.get() to optimize instance loads" % self)
+ self.logger.info("%s will use query.get() to "
+ "optimize instance loads" % self)
def init_class_attribute(self, mapper):
self.is_class_level = True
- # MANYTOONE currently only needs the "old" value for delete-orphan
- # cascades. the required _SingleParentValidator will enable active_history
- # in that case. otherwise we don't need the "old" value during backref operations.
+ # MANYTOONE currently only needs the
+ # "old" value for delete-orphan
+ # cascades. the required _SingleParentValidator
+ # will enable active_history
+ # in that case. otherwise we don't need the
+ # "old" value during backref operations.
_register_attribute(self,
mapper,
useobject=True,
callable_=self._class_level_loader,
uselist = self.parent_property.uselist,
typecallable = self.parent_property.collection_class,
- active_history = self.parent_property.direction is not interfaces.MANYTOONE or not self.use_get,
+ active_history = \
+ self.parent_property.direction is not \
+ interfaces.MANYTOONE or \
+ not self.use_get,
)
- def lazy_clause(self, state, reverse_direction=False, alias_secondary=False, adapt_source=None):
+ def lazy_clause(self, state, reverse_direction=False,
+ alias_secondary=False, adapt_source=None):
if state is None:
- return self._lazy_none_clause(reverse_direction, adapt_source=adapt_source)
+ return self._lazy_none_clause(
+ reverse_direction,
+ adapt_source=adapt_source)
if not reverse_direction:
- (criterion, bind_to_col, rev) = (self.__lazywhere, self.__bind_to_col, self._equated_columns)
+ criterion, bind_to_col, rev = \
+ self.__lazywhere, \
+ self.__bind_to_col, \
+ self._equated_columns
else:
- (criterion, bind_to_col, rev) = LazyLoader._create_lazy_clause(self.parent_property, reverse_direction=reverse_direction)
+ criterion, bind_to_col, rev = \
+ LazyLoader._create_lazy_clause(
+ self.parent_property,
+ reverse_direction=reverse_direction)
if reverse_direction:
mapper = self.parent_property.mapper
@@ -396,25 +424,38 @@ class LazyLoader(AbstractRelationshipLoader):
def visit_bindparam(bindparam):
if bindparam.key in bind_to_col:
- # use the "committed" (database) version to get query column values
- # also its a deferred value; so that when used by Query, the committed value is used
+ # use the "committed" (database) version to get
+ # query column values
+                # also it's a deferred value, so that when used
+ # by Query, the committed value is used
# after an autoflush occurs
o = state.obj() # strong ref
- bindparam.value = lambda: mapper._get_committed_attr_by_column(o, bind_to_col[bindparam.key])
+ bindparam.value = \
+ lambda: mapper._get_committed_attr_by_column(
+ o, bind_to_col[bindparam.key])
if self.parent_property.secondary is not None and alias_secondary:
- criterion = sql_util.ClauseAdapter(self.parent_property.secondary.alias()).traverse(criterion)
+ criterion = sql_util.ClauseAdapter(
+ self.parent_property.secondary.alias()).\
+ traverse(criterion)
- criterion = visitors.cloned_traverse(criterion, {}, {'bindparam':visit_bindparam})
+ criterion = visitors.cloned_traverse(
+ criterion, {}, {'bindparam':visit_bindparam})
if adapt_source:
criterion = adapt_source(criterion)
return criterion
def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
if not reverse_direction:
- (criterion, bind_to_col, rev) = (self.__lazywhere, self.__bind_to_col, self._equated_columns)
+ criterion, bind_to_col, rev = \
+ self.__lazywhere, \
+ self.__bind_to_col,\
+ self._equated_columns
else:
- (criterion, bind_to_col, rev) = LazyLoader._create_lazy_clause(self.parent_property, reverse_direction=reverse_direction)
+ criterion, bind_to_col, rev = \
+ LazyLoader._create_lazy_clause(
+ self.parent_property,
+ reverse_direction=reverse_direction)
criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col)
@@ -432,22 +473,30 @@ class LazyLoader(AbstractRelationshipLoader):
key = self.key
if not self.is_class_level:
def new_execute(state, dict_, row):
- # we are not the primary manager for this attribute on this class - set up a
- # per-instance lazyloader, which will override the class-level behavior.
- # this currently only happens when using a "lazyload" option on a "no load"
- # attribute - "eager" attributes always have a class-level lazyloader
- # installed.
+ # we are not the primary manager for this attribute
+ # on this class - set up a
+ # per-instance lazyloader, which will override the
+ # class-level behavior.
+ # this currently only happens when using a
+ # "lazyload" option on a "no load"
+ # attribute - "eager" attributes always have a
+ # class-level lazyloader installed.
state.set_callable(dict_, key, LoadLazyAttribute(state, key))
else:
def new_execute(state, dict_, row):
- # we are the primary manager for this attribute on this class - reset its
- # per-instance attribute state, so that the class-level lazy loader is
- # executed when next referenced on this instance. this is needed in
- # populate_existing() types of scenarios to reset any existing state.
+ # we are the primary manager for this attribute on
+ # this class - reset its
+ # per-instance attribute state, so that the class-level
+ # lazy loader is
+ # executed when next referenced on this instance.
+ # this is needed in
+ # populate_existing() types of scenarios to reset
+ # any existing state.
state.reset(dict_, key)
return new_execute, None
-
+
+ @classmethod
def _create_lazy_clause(cls, prop, reverse_direction=False):
binds = util.column_dict()
lookup = util.column_dict()
@@ -477,18 +526,19 @@ class LazyLoader(AbstractRelationshipLoader):
lazywhere = prop.primaryjoin
if prop.secondaryjoin is None or not reverse_direction:
- lazywhere = visitors.replacement_traverse(lazywhere, {}, col_to_bind)
+ lazywhere = visitors.replacement_traverse(
+ lazywhere, {}, col_to_bind)
if prop.secondaryjoin is not None:
secondaryjoin = prop.secondaryjoin
if reverse_direction:
- secondaryjoin = visitors.replacement_traverse(secondaryjoin, {}, col_to_bind)
+ secondaryjoin = visitors.replacement_traverse(
+ secondaryjoin, {}, col_to_bind)
lazywhere = sql.and_(lazywhere, secondaryjoin)
bind_to_col = dict((binds[col].key, col) for col in binds)
- return (lazywhere, bind_to_col, equated_columns)
- _create_lazy_clause = classmethod(_create_lazy_clause)
+ return lazywhere, bind_to_col, equated_columns
log.class_logger(LazyLoader)
@@ -510,12 +560,14 @@ class LoadLazyAttribute(object):
prop = instance_mapper.get_property(self.key)
strategy = prop._get_strategy(LazyLoader)
- if kw.get('passive') is attributes.PASSIVE_NO_FETCH and not strategy.use_get:
+ if kw.get('passive') is attributes.PASSIVE_NO_FETCH and \
+ not strategy.use_get:
return attributes.PASSIVE_NO_RESULT
if strategy._should_log_debug():
strategy.logger.debug("loading %s",
- mapperutil.state_attribute_str(state, self.key))
+ mapperutil.state_attribute_str(
+ state, self.key))
session = sessionlib._state_session(state)
if session is None:
@@ -536,8 +588,11 @@ class LoadLazyAttribute(object):
ident = []
allnulls = True
for primary_key in prop.mapper.primary_key:
- val = instance_mapper._get_committed_state_attr_by_column(
- state, strategy._equated_columns[primary_key], **kw)
+ val = instance_mapper.\
+ _get_committed_state_attr_by_column(
+ state,
+ strategy._equated_columns[primary_key],
+ **kw)
if val is attributes.PASSIVE_NO_RESULT:
return val
allnulls = allnulls and val is None
@@ -556,8 +611,17 @@ class LoadLazyAttribute(object):
if prop.order_by:
q = q.order_by(*util.to_list(prop.order_by))
+ for rev in prop._reverse_property:
+ # reverse props that are MANYTOONE are loading *this*
+ # object from get(), so don't need to eager out to those.
+ if rev.direction is interfaces.MANYTOONE and \
+ rev._use_get and \
+ not isinstance(rev.strategy, LazyLoader):
+ q = q.options(EagerLazyOption(rev.key, lazy='select'))
+
if state.load_options:
q = q._conditional_options(*state.load_options)
+
q = q.filter(strategy.lazy_clause(state))
result = q.all()
@@ -569,24 +633,244 @@ class LoadLazyAttribute(object):
if l > 1:
util.warn(
"Multiple rows returned with "
- "uselist=False for lazily-loaded attribute '%s' " % prop)
+ "uselist=False for lazily-loaded attribute '%s' "
+ % prop)
return result[0]
else:
return None
+class SubqueryLoader(AbstractRelationshipLoader):
+ def init(self):
+ super(SubqueryLoader, self).init()
+ self.join_depth = self.parent_property.join_depth
+
+ def init_class_attribute(self, mapper):
+ self.parent_property.\
+ _get_strategy(LazyLoader).\
+ init_class_attribute(mapper)
+
+ def setup_query(self, context, entity,
+ path, adapter, column_collection=None,
+ parentmapper=None, **kwargs):
+
+ if not context.query._enable_eagerloads:
+ return
+
+ path = path + (self.key, )
+
+ # build up a path indicating the path from the leftmost
+ # entity to the thing we're subquery loading.
+ subq_path = context.attributes.get(('subquery_path', None), ())
+
+ subq_path = subq_path + path
+
+ reduced_path = interfaces._reduce_path(path)
+
+ # join-depth / recursion check
+ if ("loaderstrategy", reduced_path) not in context.attributes:
+ if self.join_depth:
+ if len(path) / 2 > self.join_depth:
+ return
+ else:
+ if self.mapper.base_mapper in interfaces._reduce_path(subq_path):
+ return
+
+ orig_query = context.attributes.get(
+ ("orig_query", SubqueryLoader),
+ context.query)
+
+ # determine attributes of the leftmost mapper
+ if self.parent.isa(subq_path[0]) and self.key==subq_path[1]:
+ leftmost_mapper, leftmost_prop = \
+ self.parent, self.parent_property
+ else:
+ leftmost_mapper, leftmost_prop = \
+ subq_path[0], \
+ subq_path[0].get_property(subq_path[1])
+ leftmost_cols, remote_cols = self._local_remote_columns(leftmost_prop)
+
+ leftmost_attr = [
+ leftmost_mapper._get_col_to_prop(c).class_attribute
+ for c in leftmost_cols
+ ]
+
+ # reformat the original query
+ # to look only for significant columns
+ q = orig_query._clone()
+    # TODO: why does polymorphic etc. require hardcoding
+ # into _adapt_col_list ? Does query.add_columns(...) work
+ # with polymorphic loading ?
+ q._set_entities(q._adapt_col_list(leftmost_attr))
+
+ # don't need ORDER BY if no limit/offset
+ if q._limit is None and q._offset is None:
+ q._order_by = None
+
+ # the original query now becomes a subquery
+ # which we'll join onto.
+ embed_q = q.with_labels().subquery()
+ left_alias = mapperutil.AliasedClass(leftmost_mapper, embed_q)
+
+ # q becomes a new query. basically doing a longhand
+ # "from_self()". (from_self() itself not quite industrial
+ # strength enough for all contingencies...but very close)
+
+ q = q.session.query(self.mapper)
+ q._attributes = {
+ ("orig_query", SubqueryLoader): orig_query,
+ ('subquery_path', None) : subq_path
+ }
+
+ # figure out what's being joined. a.k.a. the fun part
+ to_join = [
+ (subq_path[i], subq_path[i+1])
+ for i in xrange(0, len(subq_path), 2)
+ ]
+
+ if len(to_join) < 2:
+ parent_alias = left_alias
+ else:
+ parent_alias = mapperutil.AliasedClass(self.parent)
+
+ local_cols, remote_cols = \
+ self._local_remote_columns(self.parent_property)
+
+ local_attr = [
+ getattr(parent_alias, self.parent._get_col_to_prop(c).key)
+ for c in local_cols
+ ]
+ q = q.order_by(*local_attr)
+ q = q.add_columns(*local_attr)
+
+ for i, (mapper, key) in enumerate(to_join):
+
+ # we need to use query.join() as opposed to
+ # orm.join() here because of the
+ # rich behavior it brings when dealing with
+ # "with_polymorphic" mappers. "aliased"
+ # and "from_joinpoint" take care of most of
+ # the chaining and aliasing for us.
+
+ first = i == 0
+ middle = i < len(to_join) - 1
+ second_to_last = i == len(to_join) - 2
+
+ if first:
+ attr = getattr(left_alias, key)
+ else:
+ attr = key
+
+ if second_to_last:
+ q = q.join((parent_alias, attr), from_joinpoint=True)
+ else:
+ q = q.join(attr, aliased=middle, from_joinpoint=True)
+
+ # propagate loader options etc. to the new query.
+ # these will fire relative to subq_path.
+ q = q._with_current_path(subq_path)
+ q = q._conditional_options(*orig_query._with_options)
+
+ if self.parent_property.order_by:
+ # if there's an ORDER BY, alias it the same
+ # way joinedloader does, but we have to pull out
+ # the "eagerjoin" from the query.
+ # this really only picks up the "secondary" table
+ # right now.
+ eagerjoin = q._from_obj[0]
+ eager_order_by = \
+ eagerjoin._target_adapter.\
+ copy_and_process(
+ util.to_list(
+ self.parent_property.order_by
+ )
+ )
+ q = q.order_by(*eager_order_by)
+
+ # add new query to attributes to be picked up
+ # by create_row_processor
+ context.attributes[('subquery', reduced_path)] = q
+
+ def _local_remote_columns(self, prop):
+ if prop.secondary is None:
+ return zip(*prop.local_remote_pairs)
+ else:
+ return \
+ [p[0] for p in prop.synchronize_pairs],\
+ [
+ p[0] for p in prop.
+ secondary_synchronize_pairs
+ ]
+
+ def create_row_processor(self, context, path, mapper, row, adapter):
+ path = path + (self.key,)
+
+ path = interfaces._reduce_path(path)
+
+ if ('subquery', path) not in context.attributes:
+ return None, None
+
+ local_cols, remote_cols = self._local_remote_columns(self.parent_property)
+
+ remote_attr = [
+ self.mapper._get_col_to_prop(c).key
+ for c in remote_cols]
+
+ q = context.attributes[('subquery', path)]
+
+ collections = dict(
+ (k, [v[0] for v in v])
+ for k, v in itertools.groupby(
+ q,
+ lambda x:x[1:]
+ ))
+
+ if adapter:
+ local_cols = [adapter.columns[c] for c in local_cols]
+
+ if self.uselist:
+ def execute(state, dict_, row):
+ collection = collections.get(
+ tuple([row[col] for col in local_cols]),
+ ()
+ )
+ state.get_impl(self.key).\
+ set_committed_value(state, dict_, collection)
+ else:
+ def execute(state, dict_, row):
+ collection = collections.get(
+ tuple([row[col] for col in local_cols]),
+ (None,)
+ )
+ if len(collection) > 1:
+ util.warn(
+ "Multiple rows returned with "
+ "uselist=False for eagerly-loaded attribute '%s' "
+ % self)
+
+ scalar = collection[0]
+ state.get_impl(self.key).\
+ set_committed_value(state, dict_, scalar)
+
+ return execute, None
+
+log.class_logger(SubqueryLoader)
+
class EagerLoader(AbstractRelationshipLoader):
- """Strategize a relationship() that loads within the process of the parent object being selected."""
+ """Strategize a relationship() that loads within the process
+ of the parent object being selected."""
def init(self):
super(EagerLoader, self).init()
self.join_depth = self.parent_property.join_depth
def init_class_attribute(self, mapper):
- self.parent_property._get_strategy(LazyLoader).init_class_attribute(mapper)
+ self.parent_property.\
+ _get_strategy(LazyLoader).init_class_attribute(mapper)
def setup_query(self, context, entity, path, adapter, \
- column_collection=None, parentmapper=None, **kwargs):
+ column_collection=None, parentmapper=None,
+ **kwargs):
"""Add a left outer join to the statement thats being constructed."""
if not context.query._enable_eagerloads:
@@ -597,16 +881,21 @@ class EagerLoader(AbstractRelationshipLoader):
reduced_path = interfaces._reduce_path(path)
# check for user-defined eager alias
- if ("user_defined_eager_row_processor", reduced_path) in context.attributes:
- clauses = context.attributes[("user_defined_eager_row_processor", reduced_path)]
+ if ("user_defined_eager_row_processor", reduced_path) in\
+ context.attributes:
+ clauses = context.attributes[
+ ("user_defined_eager_row_processor",
+ reduced_path)]
adapter = entity._get_entity_clauses(context.query, context)
if adapter and clauses:
- context.attributes[("user_defined_eager_row_processor", reduced_path)] = \
- clauses = clauses.wrap(adapter)
+ context.attributes[
+ ("user_defined_eager_row_processor",
+ reduced_path)] = clauses = clauses.wrap(adapter)
elif adapter:
- context.attributes[("user_defined_eager_row_processor", reduced_path)] = \
- clauses = adapter
+ context.attributes[
+ ("user_defined_eager_row_processor",
+ reduced_path)] = clauses = adapter
add_to_collection = context.primary_columns
@@ -622,18 +911,24 @@ class EagerLoader(AbstractRelationshipLoader):
if self.mapper.base_mapper in reduced_path:
return
- clauses = mapperutil.ORMAdapter(mapperutil.AliasedClass(self.mapper),
- equivalents=self.mapper._equivalent_columns, adapt_required=True)
+ clauses = mapperutil.ORMAdapter(
+ mapperutil.AliasedClass(self.mapper),
+ equivalents=self.mapper._equivalent_columns,
+ adapt_required=True)
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
context.create_eager_joins.append(
- (self._create_eager_join, context, entity, path, adapter, parentmapper, clauses)
+ (self._create_eager_join, context,
+ entity, path, adapter,
+ parentmapper, clauses)
)
add_to_collection = context.secondary_columns
- context.attributes[("eager_row_processor", reduced_path)] = clauses
+ context.attributes[
+ ("eager_row_processor", reduced_path)
+ ] = clauses
for value in self.mapper._iterate_polymorphic_properties():
value.setup(
@@ -644,7 +939,8 @@ class EagerLoader(AbstractRelationshipLoader):
parentmapper=self.mapper,
column_collection=add_to_collection)
- def _create_eager_join(self, context, entity, path, adapter, parentmapper, clauses):
+ def _create_eager_join(self, context, entity,
+ path, adapter, parentmapper, clauses):
if parentmapper is None:
localparent = entity.mapper
@@ -662,12 +958,13 @@ class EagerLoader(AbstractRelationshipLoader):
not should_nest_selectable and \
context.from_clause:
index, clause = \
- sql_util.find_join_source(context.from_clause, entity.selectable)
+ sql_util.find_join_source(
+ context.from_clause, entity.selectable)
if clause is not None:
# join to an existing FROM clause on the query.
# key it to its list index in the eager_joins dict.
- # Query._compile_context will adapt as needed and append to the
- # FROM clause of the select().
+ # Query._compile_context will adapt as needed and
+ # append to the FROM clause of the select().
entity_key, default_towrap = index, clause
if entity_key is None:
@@ -678,28 +975,38 @@ class EagerLoader(AbstractRelationshipLoader):
join_to_left = False
if adapter:
if getattr(adapter, 'aliased_class', None):
- onclause = getattr(adapter.aliased_class, self.key, self.parent_property)
+ onclause = getattr(
+ adapter.aliased_class, self.key,
+ self.parent_property)
else:
- onclause = getattr(mapperutil.AliasedClass(self.parent, adapter.selectable),
- self.key, self.parent_property)
+ onclause = getattr(
+ mapperutil.AliasedClass(
+ self.parent,
+ adapter.selectable
+ ),
+ self.key, self.parent_property
+ )
if onclause is self.parent_property:
- # TODO: this is a temporary hack to account for polymorphic eager loads where
+ # TODO: this is a temporary hack to
+ # account for polymorphic eager loads where
# the eagerload is referencing via of_type().
join_to_left = True
else:
onclause = self.parent_property
- innerjoin = context.attributes.get(("eager_join_type", path),
- self.parent_property.innerjoin)
+ innerjoin = context.attributes.get(
+ ("eager_join_type", path),
+ self.parent_property.innerjoin)
- context.eager_joins[entity_key] = eagerjoin = mapperutil.join(
- towrap,
- clauses.aliased_class,
- onclause,
- join_to_left=join_to_left,
- isouter=not innerjoin
- )
+ context.eager_joins[entity_key] = eagerjoin = \
+ mapperutil.join(
+ towrap,
+ clauses.aliased_class,
+ onclause,
+ join_to_left=join_to_left,
+ isouter=not innerjoin
+ )
# send a hint to the Query as to where it may "splice" this join
eagerjoin.stop_on = entity.selectable
@@ -707,11 +1014,14 @@ class EagerLoader(AbstractRelationshipLoader):
if self.parent_property.secondary is None and \
not parentmapper:
# for parentclause that is the non-eager end of the join,
- # ensure all the parent cols in the primaryjoin are actually in the
+ # ensure all the parent cols in the primaryjoin are actually
+ # in the
# columns clause (i.e. are not deferred), so that aliasing applied
- # by the Query propagates those columns outward. This has the effect
+ # by the Query propagates those columns outward.
+ # This has the effect
# of "undefering" those columns.
- for col in sql_util.find_columns(self.parent_property.primaryjoin):
+ for col in sql_util.find_columns(
+ self.parent_property.primaryjoin):
if localparent.mapped_table.c.contains_column(col):
if adapter:
col = adapter.columns[col]
@@ -721,22 +1031,29 @@ class EagerLoader(AbstractRelationshipLoader):
context.eager_order_by += \
eagerjoin._target_adapter.\
copy_and_process(
- util.to_list(self.parent_property.order_by)
+ util.to_list(
+ self.parent_property.order_by
+ )
)
def _create_eager_adapter(self, context, row, adapter, path):
reduced_path = interfaces._reduce_path(path)
- if ("user_defined_eager_row_processor", reduced_path) in context.attributes:
- decorator = context.attributes[("user_defined_eager_row_processor", reduced_path)]
- # user defined eagerloads are part of the "primary" portion of the load.
+ if ("user_defined_eager_row_processor", reduced_path) in \
+ context.attributes:
+ decorator = context.attributes[
+ ("user_defined_eager_row_processor",
+ reduced_path)]
+ # user defined eagerloads are part of the "primary"
+ # portion of the load.
# the adapters applied to the Query should be honored.
if context.adapter and decorator:
decorator = decorator.wrap(context.adapter)
elif context.adapter:
decorator = context.adapter
elif ("eager_row_processor", reduced_path) in context.attributes:
- decorator = context.attributes[("eager_row_processor", reduced_path)]
+ decorator = context.attributes[
+ ("eager_row_processor", reduced_path)]
else:
return False
@@ -751,7 +1068,10 @@ class EagerLoader(AbstractRelationshipLoader):
def create_row_processor(self, context, path, mapper, row, adapter):
path = path + (self.key,)
- eager_adapter = self._create_eager_adapter(context, row, adapter, path)
+ eager_adapter = self._create_eager_adapter(
+ context,
+ row,
+ adapter, path)
if eager_adapter is not False:
key = self.key
@@ -780,8 +1100,8 @@ class EagerLoader(AbstractRelationshipLoader):
return new_execute, existing_execute
else:
def new_execute(state, dict_, row):
- collection = attributes.init_state_collection(state, dict_,
- key)
+ collection = attributes.init_state_collection(
+ state, dict_, key)
result_list = util.UniqueAppender(collection,
'append_without_event')
context.attributes[(state, key)] = result_list
@@ -797,36 +1117,56 @@ class EagerLoader(AbstractRelationshipLoader):
# distinct sets of result columns
collection = attributes.init_state_collection(state,
dict_, key)
- result_list = util.UniqueAppender(collection,
- 'append_without_event')
+ result_list = util.UniqueAppender(
+ collection,
+ 'append_without_event')
context.attributes[(state, key)] = result_list
_instance(row, result_list)
return new_execute, existing_execute
else:
- return self.parent_property._get_strategy(LazyLoader).\
- create_row_processor(context, path, mapper, row, adapter)
+ return self.parent_property.\
+ _get_strategy(LazyLoader).\
+ create_row_processor(
+ context, path,
+ mapper, row, adapter)
log.class_logger(EagerLoader)
class EagerLazyOption(StrategizedOption):
-
- def __init__(self, key, lazy=True, chained=False, mapper=None, propagate_to_loaders=True):
- super(EagerLazyOption, self).__init__(key, mapper)
+ def __init__(self, key, lazy=True, chained=False,
+ propagate_to_loaders=True
+ ):
+ super(EagerLazyOption, self).__init__(key)
self.lazy = lazy
self.chained = chained
self.propagate_to_loaders = propagate_to_loaders
-
+ self.strategy_cls = factory(lazy)
+
+ @property
+ def is_eager(self):
+ return self.lazy in (False, 'joined', 'subquery')
+
+ @property
def is_chained(self):
- return not self.lazy and self.chained
-
- def get_strategy_class(self):
- if self.lazy:
- return LazyLoader
- elif self.lazy is False:
- return EagerLoader
- elif self.lazy is None:
- return NoLoader
+ return self.is_eager and self.chained
+ def get_strategy_class(self):
+ return self.strategy_cls
+
+def factory(identifier):
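+ """Return the loader strategy class for a given 'lazy' argument value."""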
+ if identifier is False or identifier == 'joined':
+ return EagerLoader
+ elif identifier is None or identifier == 'noload':
+ return NoLoader
+ elif identifier is True or identifier == 'select':
+ return LazyLoader
+ elif identifier == 'subquery':
+ return SubqueryLoader
+ else:
+ return LazyLoader
+
+
+
class EagerJoinOption(PropertyOption):
def __init__(self, key, innerjoin, chained=False):
@@ -881,8 +1221,10 @@ class _SingleParentValidator(interfaces.AttributeExtension):
if value is not None:
hasparent = initiator.hasparent(attributes.instance_state(value))
if hasparent and oldvalue is not value:
- raise sa_exc.InvalidRequestError("Instance %s is already associated with an instance "
- "of %s via its %s attribute, and is only allowed a single parent." %
+ raise sa_exc.InvalidRequestError(
+ "Instance %s is already associated with an instance "
+ "of %s via its %s attribute, and is only allowed a "
+ "single parent." %
(mapperutil.instance_str(value), state.class_, self.prop)
)
return value
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 3be63ced3..31ab7facc 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -747,35 +747,10 @@ class StaticPool(Pool):
"""
- def __init__(self, creator, **params):
- """
- Construct a StaticPool.
-
- :param creator: a callable function that returns a DB-API
- connection object. The function will be called with
- parameters.
-
- :param echo: If True, connections being pulled and retrieved
- from the pool will be logged to the standard output, as well
- as pool sizing information. Echoing can also be achieved by
- enabling logging for the "sqlalchemy.pool"
- namespace. Defaults to False.
-
- :param reset_on_return: If true, reset the database state of
- connections returned to the pool. This is typically a
- ROLLBACK to release locks and transaction resources.
- Disable at your own peril. Defaults to True.
-
- :param listeners: A list of
- :class:`~sqlalchemy.interfaces.PoolListener`-like objects or
- dictionaries of callables that receive events when DB-API
- connections are created, checked out and checked in to the
- pool.
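+ # create the connection lazily upon first use rather than at
+ # construction time; recreate() then won't itself force a connect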
+ @memoized_property
+ def _conn(self):
+ return self._creator()
- """
- Pool.__init__(self, creator, **params)
- self._conn = creator()
-
@memoized_property
def connection(self):
return _ConnectionRecord(self)
@@ -784,8 +759,9 @@ class StaticPool(Pool):
return "StaticPool"
def dispose(self):
- self._conn.close()
- self._conn = None
+ if '_conn' in self.__dict__:
+ self._conn.close()
+ self._conn = None
def recreate(self):
self.logger.info("Pool recreating")
@@ -837,7 +813,8 @@ class AssertionPool(Pool):
def dispose(self):
self._checked_out = False
- self._conn.close()
+ if self._conn:
+ self._conn.close()
def recreate(self):
self.logger.info("Pool recreating")
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 4e9175ae8..78c65771b 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -305,11 +305,13 @@ class SQLCompiler(engine.Compiled):
def visit_grouping(self, grouping, asfrom=False, **kwargs):
return "(" + self.process(grouping.element, **kwargs) + ")"
- def visit_label(self, label, result_map=None, within_columns_clause=False, **kw):
+ def visit_label(self, label, result_map=None,
+ within_label_clause=False,
+ within_columns_clause=False, **kw):
# only render labels within the columns clause
# or ORDER BY clause of a select. dialect-specific compilers
# can modify this behavior.
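+ # "within_label_clause" indicates we are inside an enclosing
+ # label's element; don't render a second "AS <name>" in that case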
- if within_columns_clause:
+ if within_columns_clause and not within_label_clause:
labelname = isinstance(label.name, sql._generated_label) and \
self._truncated_identifier("colident", label.name) or label.name
@@ -318,13 +320,14 @@ class SQLCompiler(engine.Compiled):
(label.name, (label, label.element, labelname), label.element.type)
return self.process(label.element,
- within_columns_clause=within_columns_clause,
+ within_columns_clause=True,
+ within_label_clause=True,
**kw) + \
OPERATORS[operators.as_] + \
self.preparer.format_label(label, labelname)
else:
return self.process(label.element,
- within_columns_clause=within_columns_clause,
+ within_columns_clause=False,
**kw)
def visit_column(self, column, result_map=None, **kwargs):
@@ -625,13 +628,22 @@ class SQLCompiler(engine.Compiled):
else:
return self.bindtemplate % {'name':name}
- def visit_alias(self, alias, asfrom=False, **kwargs):
- if asfrom:
+ def visit_alias(self, alias, asfrom=False, ashint=False, fromhints=None, **kwargs):
+ if asfrom or ashint:
alias_name = isinstance(alias.name, sql._generated_label) and \
self._truncated_identifier("alias", alias.name) or alias.name
-
- return self.process(alias.original, asfrom=True, **kwargs) + " AS " + \
+ if ashint:
+ return self.preparer.format_alias(alias, alias_name)
+ elif asfrom:
+ ret = self.process(alias.original, asfrom=True, **kwargs) + " AS " + \
self.preparer.format_alias(alias, alias_name)
+
+ if fromhints and alias in fromhints:
+ hinttext = self.get_from_hint_text(alias, fromhints[alias])
+ if hinttext:
+ ret += " " + hinttext
+
+ return ret
else:
return self.process(alias.original, **kwargs)
@@ -658,8 +670,15 @@ class SQLCompiler(engine.Compiled):
else:
return column
+ def get_select_hint_text(self, byfroms):
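+ """Return hint text to render just after the SELECT keyword, or None."""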
+ return None
+
+ def get_from_hint_text(self, table, text):
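+ """Return hint text to render after a table or alias in the FROM clause, or None."""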
+ return None
+
def visit_select(self, select, asfrom=False, parens=True,
- iswrapper=False, compound_index=1, **kwargs):
+ iswrapper=False, fromhints=None,
+ compound_index=1, **kwargs):
entry = self.stack and self.stack[-1] or {}
@@ -694,6 +713,18 @@ class SQLCompiler(engine.Compiled):
]
text = "SELECT " # we're off to a good start !
+
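+ # gather hint text per FROM element, for hints registered against
+ # this dialect or against all dialects via '*'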
+ if select._hints:
+ byfrom = dict([
+ (from_, hinttext % {'name':self.process(from_, ashint=True)})
+ for (from_, dialect), hinttext in
+ select._hints.iteritems()
+ if dialect in ('*', self.dialect.name)
+ ])
+ hint_text = self.get_select_hint_text(byfrom)
+ if hint_text:
+ text += hint_text + " "
+
if select._prefixes:
text += " ".join(self.process(x, **kwargs) for x in select._prefixes) + " "
text += self.get_select_precolumns(select)
@@ -701,7 +732,16 @@ class SQLCompiler(engine.Compiled):
if froms:
text += " \nFROM "
- text += ', '.join(self.process(f, asfrom=True, **kwargs) for f in froms)
+
+ if select._hints:
+ text += ', '.join([self.process(f,
+ asfrom=True, fromhints=byfrom,
+ **kwargs)
+ for f in froms])
+ else:
+ text += ', '.join([self.process(f,
+ asfrom=True, **kwargs)
+ for f in froms])
else:
text += self.default_from()
@@ -764,20 +804,26 @@ class SQLCompiler(engine.Compiled):
text += " OFFSET " + str(select._offset)
return text
- def visit_table(self, table, asfrom=False, **kwargs):
- if asfrom:
+ def visit_table(self, table, asfrom=False, ashint=False, fromhints=None, **kwargs):
+ if asfrom or ashint:
if getattr(table, "schema", None):
- return self.preparer.quote_schema(table.schema, table.quote_schema) + \
+ ret = self.preparer.quote_schema(table.schema, table.quote_schema) + \
"." + self.preparer.quote(table.name, table.quote)
else:
- return self.preparer.quote(table.name, table.quote)
+ ret = self.preparer.quote(table.name, table.quote)
+ if fromhints and table in fromhints:
+ hinttext = self.get_from_hint_text(table, fromhints[table])
+ if hinttext:
+ ret += " " + hinttext
+ return ret
else:
return ""
def visit_join(self, join, asfrom=False, **kwargs):
- return (self.process(join.left, asfrom=True) + \
+ return (self.process(join.left, asfrom=True, **kwargs) + \
(join.isouter and " LEFT OUTER JOIN " or " JOIN ") + \
- self.process(join.right, asfrom=True) + " ON " + self.process(join.onclause))
+ self.process(join.right, asfrom=True, **kwargs) + " ON " + \
+ self.process(join.onclause, **kwargs))
def visit_sequence(self, seq):
return None
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 1e02ba96a..3aaa06fd6 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -3557,6 +3557,7 @@ class Select(_SelectBaseMixin, FromClause):
__visit_name__ = 'select'
_prefixes = ()
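+ # hints defaults to an immutable empty mapping; with_hint()
+ # generatively replaces it with an updated copy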
+ _hints = util.frozendict()
def __init__(self,
columns,
@@ -3659,7 +3660,34 @@ class Select(_SelectBaseMixin, FromClause):
"""Return the displayed list of FromClause elements."""
return self._get_display_froms()
-
+
+ @_generative
+ def with_hint(self, selectable, text, dialect_name=None):
+ """Add an indexing hint for the given selectable to this :class:`Select`.
+
+ The text of the hint is written in a form specific to each backend, and
+ typically uses Python string substitution syntax to render the name
+ of the table or alias, such as for Oracle::
+
+ select([mytable]).with_hint(mytable, "+ index(%(name)s ix_mytable)")
+
+ Would render SQL as::
+
+ select /*+ index(mytable ix_mytable) */ ... from mytable
+
+ The ``dialect_name`` option limits the rendering of a particular hint
+ to a particular backend. For example, to add hints for both Oracle and
+ Sybase simultaneously::
+
+ select([mytable]).\
+ with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\
+ with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
+
+ """
+ if not dialect_name:
+ dialect_name = '*'
+ self._hints = self._hints.union({(selectable, dialect_name):text})
+
@property
def type(self):
raise exc.InvalidRequestError("Select objects don't have a type. "
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index 74651a9d1..d5575e0e7 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -579,7 +579,7 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
return None
elif self.exclude and col in self.exclude:
return None
-
+
return self._corresponding_column(col, True)
class ColumnAdapter(ClauseAdapter):
@@ -587,11 +587,13 @@ class ColumnAdapter(ClauseAdapter):
Provides the ability to "wrap" this ClauseAdapter
around another, a columns dictionary which returns
- cached, adapted elements given an original, and an
+ adapted elements given an original, and an
adapted_row() factory.
"""
- def __init__(self, selectable, equivalents=None, chain_to=None, include=None, exclude=None, adapt_required=False):
+ def __init__(self, selectable, equivalents=None,
+ chain_to=None, include=None,
+ exclude=None, adapt_required=False):
ClauseAdapter.__init__(self, selectable, equivalents, include, exclude)
if chain_to:
self.chain(chain_to)
@@ -617,7 +619,7 @@ class ColumnAdapter(ClauseAdapter):
return locate
def _locate_col(self, col):
- c = self._corresponding_column(col, False)
+ c = self._corresponding_column(col, True)
if c is None:
c = self.adapt_clause(col)
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index 4a54375f8..799486c02 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -40,16 +40,17 @@ class VisitableType(type):
# set up an optimized visit dispatch function
# for use by the compiler
- visit_name = cls.__visit_name__
- if isinstance(visit_name, str):
- getter = operator.attrgetter("visit_%s" % visit_name)
- def _compiler_dispatch(self, visitor, **kw):
- return getter(visitor)(self, **kw)
- else:
- def _compiler_dispatch(self, visitor, **kw):
- return getattr(visitor, 'visit_%s' % self.__visit_name__)(self, **kw)
-
- cls._compiler_dispatch = _compiler_dispatch
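+ # produce a dispatcher only for classes that define __visit_name__
+ # directly; subclasses without one inherit the parent's dispatch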
+ if '__visit_name__' in cls.__dict__:
+ visit_name = cls.__visit_name__
+ if isinstance(visit_name, str):
+ getter = operator.attrgetter("visit_%s" % visit_name)
+ def _compiler_dispatch(self, visitor, **kw):
+ return getter(visitor)(self, **kw)
+ else:
+ def _compiler_dispatch(self, visitor, **kw):
+ return getattr(visitor, 'visit_%s' % self.__visit_name__)(self, **kw)
+
+ cls._compiler_dispatch = _compiler_dispatch
super(VisitableType, cls).__init__(clsname, bases, clsdict)
diff --git a/lib/sqlalchemy/test/requires.py b/lib/sqlalchemy/test/requires.py
index c4c745c54..bf911c2c2 100644
--- a/lib/sqlalchemy/test/requires.py
+++ b/lib/sqlalchemy/test/requires.py
@@ -149,6 +149,18 @@ def sequences(fn):
no_support('sybase', 'no SEQUENCE support'),
)
+def update_nowait(fn):
+ """Target database must support SELECT...FOR UPDATE NOWAIT"""
+ return _chain_decorators_on(
+ fn,
+ no_support('access', 'no FOR UPDATE NOWAIT support'),
+ no_support('firebird', 'no FOR UPDATE NOWAIT support'),
+ no_support('mssql', 'no FOR UPDATE NOWAIT support'),
+ no_support('mysql', 'no FOR UPDATE NOWAIT support'),
+ no_support('sqlite', 'no FOR UPDATE NOWAIT support'),
+ no_support('sybase', 'no FOR UPDATE NOWAIT support'),
+ )
+
def subqueries(fn):
"""Target database must support subqueries."""
return _chain_decorators_on(
@@ -224,6 +236,7 @@ def unicode_ddl(fn):
no_support('maxdb', 'database support flakey'),
no_support('oracle', 'FIXME: no support in database?'),
no_support('sybase', 'FIXME: guessing, needs confirmation'),
+ no_support('mssql+pymssql', 'no FreeTDS support'),
exclude('mysql', '<', (4, 1, 1), 'no unicode connection support'),
)
diff --git a/lib/sqlalchemy/topological.py b/lib/sqlalchemy/topological.py
index 3f2ff6399..324995889 100644
--- a/lib/sqlalchemy/topological.py
+++ b/lib/sqlalchemy/topological.py
@@ -141,3 +141,36 @@ def sort(tuples, allitems):
queue.append(childnode)
return output
+
+def _find_cycles(edges):
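+ """Yield the collection of edges involved in each cycle found in the given edge set."""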
+ cycles = {}
+
+ def traverse(node, cycle, goal):
+ for (n, key) in edges.edges_by_parent(node):
+ if key in cycle:
+ continue
+ cycle.add(key)
+ if key is goal:
+ cycset = set(cycle)
+ for x in cycle:
+ if x in cycles:
+ existing_set = cycles[x]
+ existing_set.update(cycset)
+ for y in existing_set:
+ cycles[y] = existing_set
+ cycset = existing_set
+ else:
+ cycles[x] = cycset
+ else:
+ traverse(key, cycle, goal)
+ cycle.remove(key)
+
+ for parent in edges.get_parents():
+ traverse(parent, set(), parent)
+
+ unique_cycles = set(tuple(s) for s in cycles.values())
+
+ for cycle in unique_cycles:
+ edgecollection = [edge for edge in edges
+ if edge[0] in cycle and edge[1] in cycle]
+ yield edgecollection
diff --git a/setup.cfg b/setup.cfg
index e380eadd2..df713b739 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,5 @@
[egg_info]
tag_build = dev
-tag_svn_revision = true
[nosetests]
with-sqlalchemy = true
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index 0c090acb7..d62a87e7d 100644
--- a/test/aaa_profiling/test_zoomark.py
+++ b/test/aaa_profiling/test_zoomark.py
@@ -343,7 +343,7 @@ class ZooMarkTest(TestBase):
def test_profile_4_expressions(self):
self.test_baseline_4_expressions()
- @profiling.function_call_count(1311, {'2.4': 904})
+ @profiling.function_call_count(1311, {'2.4': 904, '2.6+cextension':1226})
def test_profile_5_aggregates(self):
self.test_baseline_5_aggregates()
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index 8304c9383..5b962b695 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -300,7 +300,7 @@ class ZooMarkTest(TestBase):
self.test_baseline_2_insert()
# this number...
- @profiling.function_call_count(6783, {'2.6':7194})
+ @profiling.function_call_count(6783, {'2.6':7194, '2.6+cextension':7184})
def test_profile_3_properties(self):
self.test_baseline_3_properties()
@@ -309,7 +309,7 @@ class ZooMarkTest(TestBase):
def test_profile_4_expressions(self):
self.test_baseline_4_expressions()
- @profiling.function_call_count(1313)
+ @profiling.function_call_count(1313, {'2.6+cextension':1236})
def test_profile_5_aggregates(self):
self.test_baseline_5_aggregates()
diff --git a/test/dialect/test_mssql.py b/test/dialect/test_mssql.py
index 264636ba7..21395bd36 100644
--- a/test/dialect/test_mssql.py
+++ b/test/dialect/test_mssql.py
@@ -49,8 +49,7 @@ class CompileTest(TestBase, AssertsCompiledSQL):
)
]:
self.assert_compile(expr, compile, dialect=mxodbc_dialect)
-
-
+
def test_in_with_subqueries(self):
"""Test that when using subqueries in a binary expression
the == and != are changed to IN and NOT IN respectively.
@@ -763,7 +762,7 @@ class TypesTest(TestBase, AssertsExecutionResults, ComparesTables):
def teardown(self):
metadata.drop_all()
-
+
@testing.fails_on_everything_except('mssql+pyodbc', 'this is some pyodbc-specific feature')
def test_decimal_notation(self):
import decimal
diff --git a/test/dialect/test_mxodbc.py b/test/dialect/test_mxodbc.py
new file mode 100644
index 000000000..938d457fb
--- /dev/null
+++ b/test/dialect/test_mxodbc.py
@@ -0,0 +1,69 @@
+from sqlalchemy import *
+from sqlalchemy.test.testing import eq_, TestBase
+from sqlalchemy.test import engines
+
+# TODO: we should probably build mock bases for
+# these to share with test_reconnect, test_parseconnect
+class MockDBAPI(object):
+ paramstyle = 'qmark'
+ def __init__(self):
+ self.log = []
+ def connect(self, *args, **kwargs):
+ return MockConnection(self)
+
+class MockConnection(object):
+ def __init__(self, parent):
+ self.parent = parent
+ def cursor(self):
+ return MockCursor(self)
+ def close(self):
+ pass
+ def rollback(self):
+ pass
+ def commit(self):
+ pass
+
+class MockCursor(object):
+ description = None
+ rowcount = None
+ def __init__(self, parent):
+ self.parent = parent
+ def execute(self, *args, **kwargs):
+ self.parent.parent.log.append('execute')
+ def executedirect(self, *args, **kwargs):
+ self.parent.parent.log.append('executedirect')
+ def close(self):
+ pass
+
+
+class MxODBCTest(TestBase):
+ def test_native_odbc_execute(self):
+ t1 = Table('t1', MetaData(), Column('c1', Integer))
+
+ dbapi = MockDBAPI()
+ engine = engines.testing_engine(
+ 'mssql+mxodbc://localhost',
+ options={'module':dbapi,
+ '_initialize':False}
+ )
+ conn = engine.connect()
+
+ # crud: uses execute
+ conn.execute(t1.insert().values(c1='foo'))
+ conn.execute(t1.delete().where(t1.c.c1=='foo'))
+ conn.execute(t1.update().where(t1.c.c1=='foo').values(c1='bar'))
+
+ # select: uses executedirect
+ conn.execute(t1.select())
+
+ # manual flagging
+ conn.execution_options(native_odbc_execute=True).execute(t1.select())
+ conn.execution_options(native_odbc_execute=False).execute(t1.insert().values(c1='foo'))
+
+ eq_(
+ dbapi.log,
+ ['execute', 'execute', 'execute',
+ 'executedirect', 'execute', 'executedirect']
+ )
+
\ No newline at end of file
diff --git a/test/dialect/test_mysql.py b/test/dialect/test_mysql.py
index 23a85b6bb..b0859210d 100644
--- a/test/dialect/test_mysql.py
+++ b/test/dialect/test_mysql.py
@@ -399,8 +399,17 @@ class TypesTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
roundtrip([False, False, 0, 0, 0], [False, False, 0, 0, 0])
meta2 = MetaData(testing.db)
- # replace with reflected
table = Table('mysql_bool', meta2, autoload=True)
+ eq_(colspec(table.c.b3), 'b3 TINYINT(1)')
+ eq_(colspec(table.c.b4), 'b4 TINYINT(1) UNSIGNED')
+
+ meta2 = MetaData(testing.db)
+ table = Table('mysql_bool', meta2,
+ Column('b1', BOOLEAN),
+ Column('b2', Boolean),
+ Column('b3', BOOLEAN),
+ Column('b4', BOOLEAN),
+ autoload=True)
eq_(colspec(table.c.b3), 'b3 BOOL')
eq_(colspec(table.c.b4), 'b4 BOOL')
diff --git a/test/dialect/test_oracle.py b/test/dialect/test_oracle.py
index f7f349250..bcb34b05c 100644
--- a/test/dialect/test_oracle.py
+++ b/test/dialect/test_oracle.py
@@ -33,8 +33,16 @@ create or replace procedure foo(x_in IN number, x_out OUT number, y_out OUT numb
def test_out_params(self):
result = testing.db.execute(text("begin foo(:x_in, :x_out, :y_out, :z_out); end;",
- bindparams=[bindparam('x_in', Numeric), outparam('x_out', Integer), outparam('y_out', Numeric), outparam('z_out', String)]), x_in=5)
- assert result.out_parameters == {'x_out':10, 'y_out':75, 'z_out':None}, result.out_parameters
+ bindparams=[
+ bindparam('x_in', Numeric),
+ outparam('x_out', Integer),
+ outparam('y_out', Numeric),
+ outparam('z_out', String)]),
+ x_in=5)
+ eq_(
+ result.out_parameters,
+ {'x_out':10, 'y_out':75, 'z_out':None}
+ )
assert isinstance(result.out_parameters['x_out'], int)
@classmethod
@@ -54,7 +62,9 @@ class CompileTest(TestBase, AssertsCompiledSQL):
Column('parent_id', Integer, ForeignKey('ed.parent.id')),
schema = 'ed')
- self.assert_compile(parent.join(child), "ed.parent JOIN ed.child ON ed.parent.id = ed.child.parent_id")
+ self.assert_compile(
+ parent.join(child),
+ "ed.parent JOIN ed.child ON ed.parent.id = ed.child.parent_id")
def test_subquery(self):
t = table('sometable', column('col1'), column('col2'))
@@ -62,7 +72,8 @@ class CompileTest(TestBase, AssertsCompiledSQL):
s = select([s.c.col1, s.c.col2])
self.assert_compile(s, "SELECT col1, col2 FROM (SELECT "
- "sometable.col1 AS col1, sometable.col2 AS col2 FROM sometable)")
+ "sometable.col1 AS col1, sometable.col2 "
+ "AS col2 FROM sometable)")
def test_limit(self):
t = table('sometable', column('col1'), column('col2'))
@@ -73,7 +84,8 @@ class CompileTest(TestBase, AssertsCompiledSQL):
s = select([t]).limit(10).offset(20)
- self.assert_compile(s, "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
+ self.assert_compile(s,
+ "SELECT col1, col2 FROM (SELECT col1, col2, ROWNUM AS ora_rn "
"FROM (SELECT sometable.col1 AS col1, sometable.col2 AS col2 "
"FROM sometable) WHERE ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1"
)
@@ -128,15 +140,17 @@ class CompileTest(TestBase, AssertsCompiledSQL):
anon = a_table.alias()
self.assert_compile(
-
select([other_table, anon]).select_from(
other_table.outerjoin(anon)
).apply_labels(),
"SELECT other_thirty_characters_table_.id AS other_thirty_characters__1, "
- "other_thirty_characters_table_.thirty_characters_table_id AS other_thirty_characters__2, "
- "thirty_characters_table__1.id AS thirty_characters_table__3 FROM other_thirty_characters_table_ "
- "LEFT OUTER JOIN thirty_characters_table_xxxxxx AS thirty_characters_table__1 ON "
- "thirty_characters_table__1.id = other_thirty_characters_table_.thirty_characters_table_id",
+ "other_thirty_characters_table_.thirty_characters_table_id AS "
+ "other_thirty_characters__2, "
+ "thirty_characters_table__1.id AS thirty_characters_table__3 FROM "
+ "other_thirty_characters_table_ "
+ "LEFT OUTER JOIN thirty_characters_table_xxxxxx AS thirty_characters_table__1 "
+ "ON thirty_characters_table__1.id = "
+ "other_thirty_characters_table_.thirty_characters_table_id",
dialect=dialect
)
self.assert_compile(
@@ -145,10 +159,13 @@ class CompileTest(TestBase, AssertsCompiledSQL):
other_table.outerjoin(anon)
).apply_labels(),
"SELECT other_thirty_characters_table_.id AS other_thirty_characters__1, "
- "other_thirty_characters_table_.thirty_characters_table_id AS other_thirty_characters__2, "
- "thirty_characters_table__1.id AS thirty_characters_table__3 FROM other_thirty_characters_table_ "
+ "other_thirty_characters_table_.thirty_characters_table_id AS "
+ "other_thirty_characters__2, "
+ "thirty_characters_table__1.id AS thirty_characters_table__3 FROM "
+ "other_thirty_characters_table_ "
"LEFT OUTER JOIN thirty_characters_table_xxxxxx thirty_characters_table__1 ON "
- "thirty_characters_table__1.id = other_thirty_characters_table_.thirty_characters_table_id",
+ "thirty_characters_table__1.id = "
+ "other_thirty_characters_table_.thirty_characters_table_id",
dialect=ora_dialect
)
@@ -185,57 +202,84 @@ class CompileTest(TestBase, AssertsCompiledSQL):
"SELECT mytable.myid, mytable.name, mytable.description, myothertable.otherid, "
"myothertable.othername FROM mytable, myothertable WHERE "
"(mytable.name = :name_1 OR mytable.myid = :myid_1 OR "
- "myothertable.othername != :othername_1 OR EXISTS (select yay from foo where boo = lar)) "
+ "myothertable.othername != :othername_1 OR EXISTS (select yay "
+ "from foo where boo = lar)) "
"AND mytable.myid = myothertable.otherid(+)",
dialect=oracle.OracleDialect(use_ansi = False))
- query = table1.outerjoin(table2, table1.c.myid==table2.c.otherid).outerjoin(table3, table3.c.userid==table2.c.otherid)
- self.assert_compile(query.select(), "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, myothertable.othername, thirdtable.userid, thirdtable.otherstuff "
- "FROM mytable LEFT OUTER JOIN myothertable ON mytable.myid = myothertable.otherid LEFT OUTER "
- "JOIN thirdtable ON thirdtable.userid = myothertable.otherid")
- self.assert_compile(query.select(), "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, myothertable.othername, thirdtable.userid, thirdtable.otherstuff FROM "
- "mytable, myothertable, thirdtable WHERE thirdtable.userid(+) = myothertable.otherid AND "
- "mytable.myid = myothertable.otherid(+)", dialect=oracle.dialect(use_ansi=False))
-
- query = table1.join(table2, table1.c.myid==table2.c.otherid).join(table3, table3.c.userid==table2.c.otherid)
- self.assert_compile(query.select(), "SELECT mytable.myid, mytable.name, mytable.description, "
- "myothertable.otherid, myothertable.othername, thirdtable.userid, thirdtable.otherstuff FROM "
- "mytable, myothertable, thirdtable WHERE thirdtable.userid = myothertable.otherid AND "
+ query = table1.outerjoin(table2, table1.c.myid==table2.c.otherid).\
+ outerjoin(table3, table3.c.userid==table2.c.otherid)
+ self.assert_compile(query.select(),
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "myothertable.otherid, myothertable.othername, thirdtable.userid,"
+ " thirdtable.otherstuff "
+ "FROM mytable LEFT OUTER JOIN myothertable ON mytable.myid ="
+ " myothertable.otherid LEFT OUTER "
+ "JOIN thirdtable ON thirdtable.userid = myothertable.otherid")
+
+ self.assert_compile(query.select(),
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "myothertable.otherid, myothertable.othername, thirdtable.userid,"
+ " thirdtable.otherstuff FROM "
+ "mytable, myothertable, thirdtable WHERE thirdtable.userid(+) ="
+ " myothertable.otherid AND "
+ "mytable.myid = myothertable.otherid(+)",
+ dialect=oracle.dialect(use_ansi=False))
+
+ query = table1.join(table2, table1.c.myid==table2.c.otherid).\
+ join(table3, table3.c.userid==table2.c.otherid)
+ self.assert_compile(query.select(),
+ "SELECT mytable.myid, mytable.name, mytable.description, "
+ "myothertable.otherid, myothertable.othername, thirdtable.userid, "
+ "thirdtable.otherstuff FROM "
+ "mytable, myothertable, thirdtable WHERE thirdtable.userid = "
+ "myothertable.otherid AND "
"mytable.myid = myothertable.otherid", dialect=oracle.dialect(use_ansi=False))
- query = table1.join(table2, table1.c.myid==table2.c.otherid).outerjoin(table3, table3.c.userid==table2.c.otherid)
+ query = table1.join(table2, table1.c.myid==table2.c.otherid).\
+ outerjoin(table3, table3.c.userid==table2.c.otherid)
self.assert_compile(query.select().order_by(table1.c.name).limit(10).offset(5),
"SELECT myid, name, description, otherid, othername, userid, "
"otherstuff FROM (SELECT myid, name, description, "
- "otherid, othername, userid, otherstuff, ROWNUM AS ora_rn FROM (SELECT "
- "mytable.myid AS myid, mytable.name AS name, mytable.description AS description, "
- "myothertable.otherid AS otherid, myothertable.othername AS othername, "
- "thirdtable.userid AS userid, thirdtable.otherstuff AS otherstuff FROM mytable, "
- "myothertable, thirdtable WHERE thirdtable.userid(+) = myothertable.otherid AND "
- "mytable.myid = myothertable.otherid ORDER BY mytable.name) WHERE "
- "ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1", dialect=oracle.dialect(use_ansi=False))
+ "otherid, othername, userid, otherstuff, "
+ "ROWNUM AS ora_rn FROM (SELECT "
+ "mytable.myid AS myid, mytable.name AS name, "
+ "mytable.description AS description, "
+ "myothertable.otherid AS otherid, myothertable.othername "
+ "AS othername, "
+ "thirdtable.userid AS userid, thirdtable.otherstuff AS "
+ "otherstuff FROM mytable, "
+ "myothertable, thirdtable WHERE thirdtable.userid(+) = "
+ "myothertable.otherid AND "
+ "mytable.myid = myothertable.otherid ORDER BY "
+ "mytable.name) WHERE "
+ "ROWNUM <= :ROWNUM_1) WHERE ora_rn > :ora_rn_1",
+ dialect=oracle.dialect(use_ansi=False))
subq = select([table1]).\
- select_from(table1.outerjoin(table2, table1.c.myid==table2.c.otherid)).alias()
- q = select([table3]).select_from(table3.outerjoin(subq, table3.c.userid==subq.c.myid))
+ select_from(
+ table1.outerjoin(table2, table1.c.myid==table2.c.otherid)
+ ).alias()
+ q = select([table3]).select_from(
+ table3.outerjoin(subq, table3.c.userid==subq.c.myid)
+ )
self.assert_compile(q, "SELECT thirdtable.userid, thirdtable.otherstuff "
- "FROM thirdtable LEFT OUTER JOIN (SELECT mytable.myid AS myid, mytable.name"
+ "FROM thirdtable LEFT OUTER JOIN (SELECT mytable.myid AS "
+ "myid, mytable.name"
" AS name, mytable.description AS description "
"FROM mytable LEFT OUTER JOIN myothertable ON mytable.myid = "
"myothertable.otherid) anon_1 ON thirdtable.userid = anon_1.myid",
- dialect=oracle.dialect(use_ansi=True))
+ dialect=oracle.dialect(use_ansi=True))
self.assert_compile(q, "SELECT thirdtable.userid, thirdtable.otherstuff "
- "FROM thirdtable, (SELECT mytable.myid AS myid, mytable.name AS name, "
- "mytable.description AS description FROM mytable, myothertable "
- "WHERE mytable.myid = myothertable.otherid(+)) anon_1 "
- "WHERE thirdtable.userid = anon_1.myid(+)",
- dialect=oracle.dialect(use_ansi=False))
+ "FROM thirdtable, (SELECT mytable.myid AS myid, mytable.name AS name, "
+ "mytable.description AS description FROM mytable, myothertable "
+ "WHERE mytable.myid = myothertable.otherid(+)) anon_1 "
+ "WHERE thirdtable.userid = anon_1.myid(+)",
+ dialect=oracle.dialect(use_ansi=False))
def test_alias_outer_join(self):
address_types = table('address_types',
@@ -251,13 +295,20 @@ class CompileTest(TestBase, AssertsCompiledSQL):
at_alias = address_types.alias()
s = select([at_alias, addresses]).\
- select_from(addresses.outerjoin(at_alias, addresses.c.address_type_id==at_alias.c.id)).\
+ select_from(
+ addresses.outerjoin(at_alias,
+ addresses.c.address_type_id==at_alias.c.id)
+ ).\
where(addresses.c.user_id==7).\
order_by(addresses.c.id, address_types.c.id)
- self.assert_compile(s, "SELECT address_types_1.id, address_types_1.name, addresses.id, addresses.user_id, "
- "addresses.address_type_id, addresses.email_address FROM addresses LEFT OUTER JOIN address_types address_types_1 "
- "ON addresses.address_type_id = address_types_1.id WHERE addresses.user_id = :user_id_1 ORDER BY addresses.id, "
- "address_types.id")
+ self.assert_compile(s,
+ "SELECT address_types_1.id, address_types_1.name, addresses.id, "
+ "addresses.user_id, "
+ "addresses.address_type_id, addresses.email_address FROM addresses "
+ "LEFT OUTER JOIN address_types address_types_1 "
+ "ON addresses.address_type_id = address_types_1.id WHERE "
+ "addresses.user_id = :user_id_1 ORDER BY addresses.id, "
+ "address_types.id")
def test_compound(self):
t1 = table('t1', column('c1'), column('c2'), column('c3'), )
@@ -295,8 +346,10 @@ create table test_schema.child(
create synonym test_schema.ptable for test_schema.parent;
create synonym test_schema.ctable for test_schema.child;
--- can't make a ref from local schema to the remote schema's table without this,
--- *and* cant give yourself a grant ! so we give it to public. ideas welcome.
+-- can't make a ref from local schema to the
+-- remote schema's table without this,
+-- *and* can't give yourself a grant!
+-- so we give it to public. ideas welcome.
grant references on test_schema.parent to public;
grant references on test_schema.child to public;
""".split(";"):
@@ -357,8 +410,12 @@ drop synonym test_schema.ptable;
parent = Table('parent', meta, autoload=True, schema='test_schema')
child = Table('child', meta, autoload=True, schema='test_schema')
- self.assert_compile(parent.join(child), "test_schema.parent JOIN test_schema.child ON test_schema.parent.id = test_schema.child.parent_id")
- select([parent, child]).select_from(parent.join(child)).execute().fetchall()
+ self.assert_compile(parent.join(child),
+ "test_schema.parent JOIN test_schema.child ON "
+ "test_schema.parent.id = test_schema.child.parent_id")
+ select([parent, child]).\
+ select_from(parent.join(child)).\
+ execute().fetchall()
def test_reflect_local_to_remote(self):
testing.db.execute("CREATE TABLE localtable "
@@ -368,8 +425,12 @@ drop synonym test_schema.ptable;
meta = MetaData(testing.db)
lcl = Table('localtable', meta, autoload=True)
parent = meta.tables['test_schema.parent']
- self.assert_compile(parent.join(lcl), "test_schema.parent JOIN localtable ON test_schema.parent.id = localtable.parent_id")
- select([parent, lcl]).select_from(parent.join(lcl)).execute().fetchall()
+ self.assert_compile(parent.join(lcl),
+ "test_schema.parent JOIN localtable ON "
+ "test_schema.parent.id = localtable.parent_id")
+ select([parent, lcl]).\
+ select_from(parent.join(lcl)).\
+ execute().fetchall()
finally:
testing.db.execute("DROP TABLE localtable")
@@ -378,7 +439,9 @@ drop synonym test_schema.ptable;
parent = Table('parent', meta, autoload=True, schema='test_schema')
child = Table('child', meta, autoload=True, schema='test_schema')
- self.assert_compile(parent.join(child), "test_schema.parent JOIN test_schema.child ON test_schema.parent.id = test_schema.child.parent_id")
+ self.assert_compile(parent.join(child),
+ "test_schema.parent JOIN test_schema.child ON "
+ "test_schema.parent.id = test_schema.child.parent_id")
select([parent, child]).select_from(parent.join(child)).execute().fetchall()
def test_reflect_alt_owner_synonyms(self):
@@ -389,44 +452,54 @@ drop synonym test_schema.ptable;
meta = MetaData(testing.db)
lcl = Table('localtable', meta, autoload=True, oracle_resolve_synonyms=True)
parent = meta.tables['test_schema.ptable']
- self.assert_compile(parent.join(lcl), "test_schema.ptable JOIN localtable ON test_schema.ptable.id = localtable.parent_id")
+ self.assert_compile(parent.join(lcl),
+ "test_schema.ptable JOIN localtable ON "
+ "test_schema.ptable.id = localtable.parent_id")
select([parent, lcl]).select_from(parent.join(lcl)).execute().fetchall()
finally:
testing.db.execute("DROP TABLE localtable")
def test_reflect_remote_synonyms(self):
meta = MetaData(testing.db)
- parent = Table('ptable', meta, autoload=True, schema='test_schema', oracle_resolve_synonyms=True)
- child = Table('ctable', meta, autoload=True, schema='test_schema', oracle_resolve_synonyms=True)
- self.assert_compile(parent.join(child), "test_schema.ptable JOIN test_schema.ctable ON test_schema.ptable.id = test_schema.ctable.parent_id")
+ parent = Table('ptable', meta, autoload=True,
+ schema='test_schema',
+ oracle_resolve_synonyms=True)
+ child = Table('ctable', meta, autoload=True,
+ schema='test_schema',
+ oracle_resolve_synonyms=True)
+ self.assert_compile(parent.join(child),
+ "test_schema.ptable JOIN test_schema.ctable ON "
+ "test_schema.ptable.id = test_schema.ctable.parent_id")
select([parent, child]).select_from(parent.join(child)).execute().fetchall()
class ConstraintTest(TestBase):
__only_on__ = 'oracle'
- def test_oracle_has_no_on_update_cascade(self):
- m = MetaData(testing.db)
+ def setup(self):
+ global metadata
+ metadata = MetaData(testing.db)
- foo = Table('foo', m,
+ foo = Table('foo', metadata,
Column('id', Integer, primary_key=True),
)
foo.create(checkfirst=True)
- try:
- bar = Table('bar', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer, ForeignKey('foo.id', onupdate="CASCADE"))
- )
- assert_raises(exc.SAWarning, bar.create)
+
+ def teardown(self):
+ metadata.drop_all()
- bat = Table('bat', m,
- Column('id', Integer, primary_key=True),
- Column('foo_id', Integer),
- ForeignKeyConstraint(['foo_id'], ['foo.id'], onupdate="CASCADE")
- )
- assert_raises(exc.SAWarning, bat.create)
-
- finally:
- m.drop_all()
+ def test_oracle_has_no_on_update_cascade(self):
+ bar = Table('bar', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer, ForeignKey('foo.id', onupdate="CASCADE"))
+ )
+ assert_raises(exc.SAWarning, bar.create)
+
+ bat = Table('bat', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('foo_id', Integer),
+ ForeignKeyConstraint(['foo_id'], ['foo.id'], onupdate="CASCADE")
+ )
+ assert_raises(exc.SAWarning, bat.create)
class TypesTest(TestBase, AssertsCompiledSQL):
@@ -434,8 +507,9 @@ class TypesTest(TestBase, AssertsCompiledSQL):
__dialect__ = oracle.OracleDialect()
def test_no_clobs_for_string_params(self):
- """test that simple string params get a DBAPI type of VARCHAR, not CLOB.
- this is to prevent setinputsizes from setting up cx_oracle.CLOBs on
+ """test that simple string params get a DBAPI type of
+ VARCHAR, not CLOB. This is to prevent setinputsizes
+ from setting up cx_oracle.CLOBs on
string-based bind params [ticket:793]."""
class FakeDBAPI(object):
@@ -497,7 +571,8 @@ class TypesTest(TestBase, AssertsCompiledSQL):
(NCHAR(), cx_oracle._OracleNVarChar),
(oracle.RAW(50), cx_oracle._OracleRaw),
]:
- assert isinstance(start.dialect_impl(dialect), test), "wanted %r got %r" % (test, start.dialect_impl(dialect))
+ assert isinstance(start.dialect_impl(dialect), test), \
+ "wanted %r got %r" % (test, start.dialect_impl(dialect))
@testing.requires.returning
def test_int_not_float(self):
@@ -516,14 +591,24 @@ class TypesTest(TestBase, AssertsCompiledSQL):
finally:
t1.drop()
- @testing.fails_on('+zxjdbc', 'Not yet known how to pass values of the INTERVAL type')
+ @testing.fails_on('+zxjdbc',
+ 'Not yet known how to pass values of the INTERVAL type')
def test_interval(self):
for type_, expected in [
(oracle.INTERVAL(), "INTERVAL DAY TO SECOND"),
- (oracle.INTERVAL(day_precision=3), "INTERVAL DAY(3) TO SECOND"),
- (oracle.INTERVAL(second_precision=5), "INTERVAL DAY TO SECOND(5)"),
- (oracle.INTERVAL(day_precision=2, second_precision=5), "INTERVAL DAY(2) TO SECOND(5)"),
+ (
+ oracle.INTERVAL(day_precision=3),
+ "INTERVAL DAY(3) TO SECOND"
+ ),
+ (
+ oracle.INTERVAL(second_precision=5),
+ "INTERVAL DAY TO SECOND(5)"
+ ),
+ (
+ oracle.INTERVAL(day_precision=2, second_precision=5),
+ "INTERVAL DAY(2) TO SECOND(5)"
+ ),
]:
self.assert_compile(type_, expected)
@@ -591,15 +676,39 @@ class TypesTest(TestBase, AssertsCompiledSQL):
finally:
t1.drop()
-
+ def test_reflect_dates(self):
+ metadata = MetaData(testing.db)
+ Table(
+ "date_types", metadata,
+ Column('d1', DATE),
+ Column('d2', TIMESTAMP),
+ Column('d3', TIMESTAMP(timezone=True)),
+ Column('d4', oracle.INTERVAL(second_precision=5)),
+ )
+ metadata.create_all()
+ try:
+ m = MetaData(testing.db)
+ t1 = Table(
+ "date_types", m,
+ autoload=True)
+ assert isinstance(t1.c.d1.type, DATE)
+ assert isinstance(t1.c.d2.type, TIMESTAMP)
+ assert not t1.c.d2.type.timezone
+ assert isinstance(t1.c.d3.type, TIMESTAMP)
+ assert t1.c.d3.type.timezone
+ assert isinstance(t1.c.d4.type, oracle.INTERVAL)
+
+ finally:
+ metadata.drop_all()
+
def test_reflect_raw(self):
- types_table = Table(
- 'all_types', MetaData(testing.db),
+ types_table = Table('all_types', MetaData(testing.db),
Column('owner', String(30), primary_key=True),
Column('type_name', String(30), primary_key=True),
autoload=True, oracle_resolve_synonyms=True
)
- [[row[k] for k in row.keys()] for row in types_table.select().execute().fetchall()]
+ for row in types_table.select().execute().fetchall():
+ [row[k] for k in row.keys()]
def test_reflect_nvarchar(self):
metadata = MetaData(testing.db)
@@ -626,7 +735,47 @@ class TypesTest(TestBase, AssertsCompiledSQL):
assert isinstance(res, unicode)
finally:
metadata.drop_all()
-
+
+ def test_char_length(self):
+ self.assert_compile(
+ VARCHAR(50),
+ "VARCHAR(50 CHAR)",
+ )
+
+ oracle8dialect = oracle.dialect()
+ oracle8dialect.supports_char_length = False
+ self.assert_compile(
+ VARCHAR(50),
+ "VARCHAR(50)",
+ dialect=oracle8dialect
+ )
+
+ self.assert_compile(
+ NVARCHAR(50),
+ "NVARCHAR2(50)",
+ )
+ self.assert_compile(
+ CHAR(50),
+ "CHAR(50)",
+ )
+ metadata = MetaData(testing.db)
+ t1 = Table('t1', metadata,
+ Column("c1", VARCHAR(50)),
+ Column("c2", NVARCHAR(250)),
+ Column("c3", CHAR(200))
+ )
+ t1.create()
+ try:
+ m2 = MetaData(testing.db)
+ t2 = Table('t1', m2, autoload=True)
+ eq_(t2.c.c1.type.length, 50)
+ eq_(t2.c.c2.type.length, 250)
+ eq_(t2.c.c3.type.length, 200)
+ finally:
+ t1.drop()
+
+
+
def test_longstring(self):
metadata = MetaData(testing.db)
testing.db.execute("""
@@ -651,7 +800,9 @@ class TypesTest(TestBase, AssertsCompiledSQL):
Column('data', Text), Column('bindata', LargeBinary))
t.create(engine)
try:
- engine.execute(t.insert(), id=1, data='this is text', bindata='this is binary')
+ engine.execute(t.insert(), id=1,
+ data='this is text',
+ bindata='this is binary')
row = engine.execute(t.select()).first()
eq_(row['data'].read(), 'this is text')
eq_(row['bindata'].read(), 'this is binary')
@@ -701,7 +852,9 @@ class BufferedColumnTest(TestBase, AssertsCompiledSQL):
Column('data', LargeBinary)
)
meta.create_all()
- stream = os.path.join(os.path.dirname(__file__), "..", 'binary_data_one.dat')
+ stream = os.path.join(
+ os.path.dirname(__file__), "..",
+ 'binary_data_one.dat')
stream = file(stream).read(12000)
for i in range(1, 11):
@@ -727,7 +880,9 @@ class UnsupportedIndexReflectTest(TestBase):
def setup(self):
global metadata
metadata = MetaData(testing.db)
- t1 = Table('test_index_reflect', metadata, Column('data', String(20), primary_key=True))
+ t1 = Table('test_index_reflect', metadata,
+ Column('data', String(20), primary_key=True)
+ )
metadata.create_all()
def teardown(self):
diff --git a/test/dialect/test_postgresql.py b/test/dialect/test_postgresql.py
index 472b12e50..1d39d5653 100644
--- a/test/dialect/test_postgresql.py
+++ b/test/dialect/test_postgresql.py
@@ -13,6 +13,7 @@ from sqlalchemy.test.util import round_decimal
from sqlalchemy.sql import table, column
from sqlalchemy.test.testing import eq_
from test.engine._base import TablesTest
+import logging
+import logging.handlers
class SequenceTest(TestBase, AssertsCompiledSQL):
def test_basic(self):
@@ -395,9 +396,6 @@ class EnumTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
finally:
metadata.drop_all()
-
-
-
class InsertTest(TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'
@@ -964,7 +962,6 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults):
finally:
postgresql.PGDialect.ischema_names = ischema_names
-
class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
__only_on__ = 'postgresql'
@@ -1001,6 +998,29 @@ class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
]:
eq_(testing.db.dialect._get_server_version_info(MockConn(string)), version)
+
+ @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature')
+ def test_notice_logging(self):
+ log = logging.getLogger('sqlalchemy.dialects.postgresql')
+ buf = logging.handlers.BufferingHandler(100)
+ lev = log.level
+ log.addHandler(buf)
+ log.setLevel(logging.INFO)
+ try:
+ conn = testing.db.connect()
+ trans = conn.begin()
+ try:
+ conn.execute("create table foo (id serial primary key)")
+ finally:
+ trans.rollback()
+ finally:
+ log.removeHandler(buf)
+ log.setLevel(lev)
+
+ msgs = " ".join(b.msg for b in buf.buffer)
+ assert "will create implicit sequence" in msgs
+ assert "will create implicit index" in msgs
+
def test_pg_weirdchar_reflection(self):
meta1 = MetaData(testing.db)
@@ -1264,7 +1284,7 @@ class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
assert_raises(exception_cls, eng.execute, "show transaction isolation level")
-class TimezoneTest(TestBase, AssertsExecutionResults):
+class TimezoneTest(TestBase):
"""Test timezone-aware datetimes.
psycopg will return a datetime with a tzinfo attached to it, if postgresql
@@ -1292,6 +1312,7 @@ class TimezoneTest(TestBase, AssertsExecutionResults):
Column("name", String(20)),
)
metadata.create_all()
+
@classmethod
def teardown_class(cls):
metadata.drop_all()
@@ -1299,17 +1320,82 @@ class TimezoneTest(TestBase, AssertsExecutionResults):
def test_with_timezone(self):
# get a date with a tzinfo
somedate = testing.db.connect().scalar(func.current_timestamp().select())
+ assert somedate.tzinfo
+
tztable.insert().execute(id=1, name='row1', date=somedate)
- c = tztable.update(tztable.c.id==1).execute(name='newname')
- print tztable.select(tztable.c.id==1).execute().first()
+
+ row = select([tztable.c.date], tztable.c.id==1).execute().first()
+ eq_(row[0], somedate)
+ eq_(somedate.tzinfo.utcoffset(somedate), row[0].tzinfo.utcoffset(row[0]))
+
+ result = tztable.update(tztable.c.id==1).\
+ returning(tztable.c.date).execute(name='newname')
+ row = result.first()
+ assert row[0] >= somedate
def test_without_timezone(self):
# get a date without a tzinfo
- somedate = datetime.datetime(2005, 10,20, 11, 52, 00)
+ somedate = datetime.datetime(2005, 10, 20, 11, 52, 0)
+ assert not somedate.tzinfo
+
notztable.insert().execute(id=1, name='row1', date=somedate)
- c = notztable.update(notztable.c.id==1).execute(name='newname')
- print notztable.select(tztable.c.id==1).execute().first()
+ row = select([notztable.c.date], notztable.c.id==1).execute().first()
+ eq_(row[0], somedate)
+ eq_(row[0].tzinfo, None)
+
+ result = notztable.update(notztable.c.id==1).\
+ returning(notztable.c.date).execute(name='newname')
+ row = result.first()
+ assert row[0] >= somedate
+
+class TimePrecisionTest(TestBase, AssertsCompiledSQL):
+ __dialect__ = postgresql.dialect()
+
+ def test_compile(self):
+ for (type_, expected) in [
+ (postgresql.TIME(), "TIME WITHOUT TIME ZONE"),
+ (postgresql.TIME(precision=5), "TIME(5) WITHOUT TIME ZONE"),
+ (postgresql.TIME(timezone=True, precision=5), "TIME(5) WITH TIME ZONE"),
+ (postgresql.TIMESTAMP(), "TIMESTAMP WITHOUT TIME ZONE"),
+ (postgresql.TIMESTAMP(precision=5), "TIMESTAMP(5) WITHOUT TIME ZONE"),
+ (postgresql.TIMESTAMP(timezone=True, precision=5), "TIMESTAMP(5) WITH TIME ZONE"),
+ ]:
+ self.assert_compile(type_, expected)
+
+ @testing.only_on('postgresql', 'DB specific feature')
+ def test_reflection(self):
+ m1 = MetaData(testing.db)
+ t1 = Table('t1', m1,
+ Column('c1', postgresql.TIME()),
+ Column('c2', postgresql.TIME(precision=5)),
+ Column('c3', postgresql.TIME(timezone=True, precision=5)),
+ Column('c4', postgresql.TIMESTAMP()),
+ Column('c5', postgresql.TIMESTAMP(precision=5)),
+ Column('c6', postgresql.TIMESTAMP(timezone=True, precision=5)),
+
+ )
+ t1.create()
+ try:
+ m2 = MetaData(testing.db)
+ t2 = Table('t1', m2, autoload=True)
+ eq_(t2.c.c1.type.precision, None)
+ eq_(t2.c.c2.type.precision, 5)
+ eq_(t2.c.c3.type.precision, 5)
+ eq_(t2.c.c4.type.precision, None)
+ eq_(t2.c.c5.type.precision, 5)
+ eq_(t2.c.c6.type.precision, 5)
+ eq_(t2.c.c1.type.timezone, False)
+ eq_(t2.c.c2.type.timezone, False)
+ eq_(t2.c.c3.type.timezone, True)
+ eq_(t2.c.c4.type.timezone, False)
+ eq_(t2.c.c5.type.timezone, False)
+ eq_(t2.c.c6.type.timezone, True)
+ finally:
+ t1.drop()
+
+
+
class ArrayTest(TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'
diff --git a/test/dialect/test_sqlite.py b/test/dialect/test_sqlite.py
index bb08fe341..7f5f553bd 100644
--- a/test/dialect/test_sqlite.py
+++ b/test/dialect/test_sqlite.py
@@ -260,7 +260,7 @@ class DialectTest(TestBase, AssertsExecutionResults):
try:
cx.execute('ATTACH DATABASE ":memory:" AS test_schema')
dialect = cx.dialect
- assert dialect.table_names(cx, 'test_schema') == []
+ assert dialect.get_table_names(cx, 'test_schema') == []
meta = MetaData(cx)
Table('created', meta, Column('id', Integer),
@@ -269,7 +269,7 @@ class DialectTest(TestBase, AssertsExecutionResults):
schema='test_schema')
meta.create_all(cx)
- eq_(dialect.table_names(cx, 'test_schema'),
+ eq_(dialect.get_table_names(cx, 'test_schema'),
['created'])
assert len(alt_master.c) > 0
@@ -293,7 +293,7 @@ class DialectTest(TestBase, AssertsExecutionResults):
# note that sqlite_master is cleared, above
meta.drop_all()
- assert dialect.table_names(cx, 'test_schema') == []
+ assert dialect.get_table_names(cx, 'test_schema') == []
finally:
cx.execute('DETACH DATABASE test_schema')
@@ -303,7 +303,7 @@ class DialectTest(TestBase, AssertsExecutionResults):
try:
cx.execute('CREATE TEMPORARY TABLE tempy (id INT)')
- assert 'tempy' in cx.dialect.table_names(cx, None)
+ assert 'tempy' in cx.dialect.get_table_names(cx, None)
meta = MetaData(cx)
tempy = Table('tempy', meta, autoload=True)
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index 93bcae654..6b0b187e6 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -81,7 +81,26 @@ class PoolTest(PoolTestBase):
expected = [(1,)]
for row in cursor:
eq_(row, expected.pop(0))
+
+ def test_no_connect_on_recreate(self):
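+ # constructing, disposing and recreating a pool should not
+ # invoke the creator; the creator below raises if called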
+ def creator():
+ raise Exception("no creates allowed")
+
+ for cls in (pool.SingletonThreadPool, pool.StaticPool,
+ pool.QueuePool, pool.NullPool, pool.AssertionPool):
+ p = cls(creator=creator)
+ p.dispose()
+ p.recreate()
+ mock_dbapi = MockDBAPI()
+ p = cls(creator=mock_dbapi.connect)
+ conn = p.connect()
+ conn.close()
+ mock_dbapi.throw_error = True
+ p.dispose()
+ p.recreate()
+
+
def testthreadlocal_del(self):
self._do_testthreadlocal(useclose=False)
diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py
index ea1864c92..df51c322b 100644
--- a/test/ext/test_associationproxy.py
+++ b/test/ext/test_associationproxy.py
@@ -91,7 +91,7 @@ class _CollectionOperations(TestBase):
self.name = name
mapper(Parent, parents_table, properties={
- '_children': relationship(Child, lazy=False,
+ '_children': relationship(Child, lazy='joined',
collection_class=collection_class)})
mapper(Child, children_table)
@@ -654,7 +654,7 @@ class ProxyFactoryTest(ListTest):
self.name = name
mapper(Parent, parents_table, properties={
- '_children': relationship(Child, lazy=False,
+ '_children': relationship(Child, lazy='joined',
collection_class=list)})
mapper(Child, children_table)
@@ -701,7 +701,7 @@ class ScalarTest(TestBase):
setattr(self, attr, kw[attr])
mapper(Parent, parents_table, properties={
- 'child': relationship(Child, lazy=False,
+ 'child': relationship(Child, lazy='joined',
backref='parent', uselist=False)})
mapper(Child, children_table)
@@ -836,7 +836,7 @@ class LazyLoadTest(TestBase):
Parent, Child = self.Parent, self.Child
mapper(Parent, self.table, properties={
- '_children': relationship(Child, lazy=True,
+ '_children': relationship(Child, lazy='select',
collection_class=list)})
p = Parent('p')
@@ -854,7 +854,7 @@ class LazyLoadTest(TestBase):
Parent, Child = self.Parent, self.Child
mapper(Parent, self.table, properties={
- '_children': relationship(Child, lazy=False,
+ '_children': relationship(Child, lazy='joined',
collection_class=list)})
p = Parent('p')
@@ -869,7 +869,7 @@ class LazyLoadTest(TestBase):
Parent, Child = self.Parent, self.Child
mapper(Parent, self.table, properties={
- '_children': relationship(Child, lazy=True, uselist=False)})
+ '_children': relationship(Child, lazy='select', uselist=False)})
p = Parent('p')
@@ -884,7 +884,7 @@ class LazyLoadTest(TestBase):
Parent, Child = self.Parent, self.Child
mapper(Parent, self.table, properties={
- '_children': relationship(Child, lazy=False, uselist=False)})
+ '_children': relationship(Child, lazy='joined', uselist=False)})
p = Parent('p')
diff --git a/test/ext/test_compiler.py b/test/ext/test_compiler.py
index d625ae0ca..2d33b382a 100644
--- a/test/ext/test_compiler.py
+++ b/test/ext/test_compiler.py
@@ -176,6 +176,61 @@ class UserDefinedTest(TestBase, AssertsCompiledSQL):
"timezone('utc', current_timestamp)",
dialect=postgresql.dialect()
)
-
+
+ def test_subclasses_one(self):
+ class Base(FunctionElement):
+ name = 'base'
+
+ class Sub1(Base):
+ name = 'sub1'
+
+ class Sub2(Base):
+ name = 'sub2'
+
+ @compiles(Base)
+ def visit_base(element, compiler, **kw):
+ return element.name
+
+ @compiles(Sub1)
+ def visit_base(element, compiler, **kw):
+ return "FOO" + element.name
+
+ self.assert_compile(
+ select([Sub1(), Sub2()]),
+ 'SELECT FOOsub1, sub2',
+ use_default_dialect=True
+ )
+
+ def test_subclasses_two(self):
+ class Base(FunctionElement):
+ name = 'base'
+ class Sub1(Base):
+ name = 'sub1'
+
+ @compiles(Base)
+ def visit_base(element, compiler, **kw):
+ return element.name
+
+ class Sub2(Base):
+ name = 'sub2'
+
+ class SubSub1(Sub1):
+ name = 'subsub1'
+
+ self.assert_compile(
+ select([Sub1(), Sub2(), SubSub1()]),
+ 'SELECT sub1, sub2, subsub1',
+ use_default_dialect=True
+ )
+
+ @compiles(Sub1)
+ def visit_base(element, compiler, **kw):
+ return "FOO" + element.name
+
+ self.assert_compile(
+ select([Sub1(), Sub2(), SubSub1()]),
+ 'SELECT FOOsub1, sub2, FOOsubsub1',
+ use_default_dialect=True
+ )
\ No newline at end of file
diff --git a/test/ext/test_declarative.py b/test/ext/test_declarative.py
index aa04cdf11..01ce7a635 100644
--- a/test/ext/test_declarative.py
+++ b/test/ext/test_declarative.py
@@ -6,7 +6,9 @@ import sqlalchemy as sa
from sqlalchemy.test import testing
from sqlalchemy import MetaData, Integer, String, ForeignKey, ForeignKeyConstraint, asc, Index
from sqlalchemy.test.schema import Table, Column
-from sqlalchemy.orm import relationship, create_session, class_mapper, eagerload, compile_mappers, backref, clear_mappers, polymorphic_union, deferred
+from sqlalchemy.orm import relationship, create_session, class_mapper, \
+ joinedload, compile_mappers, backref, clear_mappers, \
+ polymorphic_union, deferred
from sqlalchemy.test.testing import eq_
from sqlalchemy.util import classproperty
@@ -75,7 +77,9 @@ class DeclarativeTest(DeclarativeTestBase):
__table__ = t
foo = Column(Integer, primary_key=True)
# can't specify new columns not already in the table
- assert_raises_message(sa.exc.ArgumentError, "Can't add additional column 'foo' when specifying __table__", go)
+ assert_raises_message(sa.exc.ArgumentError,
+ "Can't add additional column 'foo' when specifying __table__",
+ go)
# regular re-mapping works tho
class Bar(Base):
@@ -84,6 +88,33 @@ class DeclarativeTest(DeclarativeTestBase):
assert class_mapper(Bar).get_property('some_data').columns[0] is t.c.data
+ def test_difficult_class(self):
+ """test no getattr() errors with a customized class"""
+
+ # metaclass to mock the way zope.interface breaks getattr()
+ class BrokenMeta(type):
+ def __getattribute__(self, attr):
+ if attr == 'xyzzy':
+ raise AttributeError, 'xyzzy'
+ else:
+ return object.__getattribute__(self, attr)
+
+ # even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
+ # fails
+ class BrokenParent(object):
+ __metaclass__ = BrokenMeta
+ xyzzy = "magic"
+
+ # _as_declarative() inspects obj.__class__.__bases__
+ class User(BrokenParent, ComparableEntity):
+ __tablename__ = 'users'
+ id = Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True)
+ name = Column('name', String(50))
+
+ decl.instrument_declarative(User, {}, Base.metadata)
+
+
def test_undefer_column_name(self):
# TODO: not sure if there was an explicit
# test for this elsewhere
@@ -406,7 +437,7 @@ class DeclarativeTest(DeclarativeTestBase):
sess.add(u1)
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).options(eagerload(User.addresses)).all(), [User(name='u1', addresses=[
+ eq_(sess.query(User).options(joinedload(User.addresses)).all(), [User(name='u1', addresses=[
Address(email='one'),
Address(email='two'),
])])
@@ -1883,6 +1914,24 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(obj.name,'testing')
eq_(obj.foo(),'bar1')
eq_(obj.baz,'fu')
+
+ def test_not_allowed(self):
+ class MyMixin:
+ foo = Column(Integer, ForeignKey('bar.id'))
+
+ def go():
+ class MyModel(Base, MyMixin):
+ __tablename__ = 'foo'
+
+ assert_raises(sa.exc.InvalidRequestError, go)
+
+ class MyRelMixin:
+ foo = relationship("Bar")
+ def go():
+ class MyModel(Base, MyRelMixin):
+ __tablename__ = 'foo'
+ assert_raises(sa.exc.InvalidRequestError, go)
+
def test_table_name_inherited(self):
@@ -1927,6 +1976,18 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(MyModel.__table__.name,'mymodel1')
+ def test_table_name_dependent_on_subclass(self):
+ class MyHistoryMixin:
+ @classproperty
+ def __tablename__(cls):
+ return cls.parent_name + '_changelog'
+
+ class MyModel(Base, MyHistoryMixin):
+ parent_name = 'foo'
+ id = Column(Integer, primary_key=True)
+
+ eq_(MyModel.__table__.name, 'foo_changelog')
+
def test_table_args_inherited(self):
class MyMixin:
diff --git a/test/orm/sharding/test_shard.py b/test/ext/test_horizontal_shard.py
index 2418dd8d4..b21768d1f 100644
--- a/test/orm/sharding/test_shard.py
+++ b/test/ext/test_horizontal_shard.py
@@ -2,7 +2,7 @@ import datetime, os
from sqlalchemy import *
from sqlalchemy import sql
from sqlalchemy.orm import *
-from sqlalchemy.orm.shard import ShardedSession
+from sqlalchemy.ext.horizontal_shard import ShardedSession
from sqlalchemy.sql import operators
from sqlalchemy.test import *
from sqlalchemy.test.testing import eq_
diff --git a/test/ext/test_orderinglist.py b/test/ext/test_orderinglist.py
index 479e44d63..96c1c90cf 100644
--- a/test/ext/test_orderinglist.py
+++ b/test/ext/test_orderinglist.py
@@ -77,7 +77,7 @@ class OrderingListTest(TestBase):
return '<Bullet "%s" pos %s>' % (self.text, self.position)
mapper(Slide, slides_table, properties={
- 'bullets': relationship(Bullet, lazy=False,
+ 'bullets': relationship(Bullet, lazy='joined',
collection_class=test_collection_class,
backref='slide',
order_by=[bullets_table.c.position])
diff --git a/test/ext/test_serializer.py b/test/ext/test_serializer.py
index f06677eb9..4e26e5b9a 100644
--- a/test/ext/test_serializer.py
+++ b/test/ext/test_serializer.py
@@ -6,7 +6,7 @@ from sqlalchemy.test import testing
from sqlalchemy import MetaData, Integer, String, ForeignKey, select, desc, func, util
from sqlalchemy.test.schema import Table
from sqlalchemy.test.schema import Column
-from sqlalchemy.orm import relationship, sessionmaker, scoped_session, class_mapper, mapper, eagerload, compile_mappers, aliased
+from sqlalchemy.orm import relationship, sessionmaker, scoped_session, class_mapper, mapper, joinedload, compile_mappers, aliased
from sqlalchemy.test.testing import eq_
from test.orm._base import ComparableEntity, MappedTest
@@ -97,7 +97,7 @@ class SerializeTest(MappedTest):
)
def test_query(self):
- q = Session.query(User).filter(User.name=='ed').options(eagerload(User.addresses))
+ q = Session.query(User).filter(User.name=='ed').options(joinedload(User.addresses))
eq_(q.all(), [User(name='ed', addresses=[Address(id=2), Address(id=3), Address(id=4)])])
q2 = serializer.loads(serializer.dumps(q, -1), users.metadata, Session)
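The import and option changes in this file are part of the tree-wide rename of eagerload()/eagerload_all() to joinedload()/joinedload_all(); the older spellings remain available as synonyms in 0.6, so behavior is unchanged. A self-contained sketch of the renamed option against an in-memory SQLite database; User and Address here are illustrative stand-ins, not the fixture classes:

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import joinedload, relationship, sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        addresses = relationship("Address", backref="user")

    class Address(Base):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.id'))
        email = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(User(name='ed', addresses=[Address(email='ed@example.com')]))
    session.commit()

    # one SELECT with a LEFT OUTER JOIN; the collection arrives populated
    users = session.query(User).options(joinedload(User.addresses)).all()
    assert users[0].addresses[0].email == 'ed@example.com'
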
diff --git a/test/orm/_fixtures.py b/test/orm/_fixtures.py
index e9d6ac165..a8df63b4a 100644
--- a/test/orm/_fixtures.py
+++ b/test/orm/_fixtures.py
@@ -378,5 +378,48 @@ class CannedResults(object):
keywords=[]),
Item(id=5,
keywords=[])]
+
+ @property
+ def user_item_keyword_result(self):
+ item1, item2, item3, item4, item5 = \
+ Item(id=1,
+ keywords=[
+ Keyword(name='red'),
+ Keyword(name='big'),
+ Keyword(name='round')]),\
+ Item(id=2,
+ keywords=[
+ Keyword(name='red'),
+ Keyword(name='small'),
+ Keyword(name='square')]),\
+ Item(id=3,
+ keywords=[
+ Keyword(name='green'),
+ Keyword(name='big'),
+ Keyword(name='round')]),\
+ Item(id=4,
+ keywords=[]),\
+ Item(id=5,
+ keywords=[])
+
+ user_result = [
+ User(id=7,
+ orders=[
+ Order(id=1,
+ items=[item1, item2, item3]),
+ Order(id=3,
+ items=[item3, item4, item5]),
+ Order(id=5,
+ items=[item5])]),
+ User(id=8, orders=[]),
+ User(id=9,
+ orders=[
+ Order(id=2,
+ items=[item1, item2, item3]),
+ Order(id=4,
+ items=[item1, item5])]),
+ User(id=10, orders=[])]
+ return user_result
+
FixtureTest.static = CannedResults()
diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py
index 9477fddab..2f9295e17 100644
--- a/test/orm/inheritance/test_basic.py
+++ b/test/orm/inheritance/test_basic.py
@@ -383,8 +383,8 @@ class EagerLazyTest(_base.MappedTest):
foos = mapper(Foo, foo)
bars = mapper(Bar, bar, inherits=foos)
- bars.add_property('lazy', relationship(foos, bar_foo, lazy=True))
- bars.add_property('eager', relationship(foos, bar_foo, lazy=False))
+ bars.add_property('lazy', relationship(foos, bar_foo, lazy='select'))
+ bars.add_property('eager', relationship(foos, bar_foo, lazy='joined'))
foo.insert().execute(data='foo1')
bar.insert().execute(id=1, data='bar1')
@@ -455,7 +455,7 @@ class EagerTargetingTest(_base.MappedTest):
eq_(node.children[0], B(id=2, name='b2',b_data='l'))
sess.expunge_all()
- node = sess.query(B).options(eagerload(B.children)).filter(B.id==bid).all()[0]
+ node = sess.query(B).options(joinedload(B.children)).filter(B.id==bid).all()[0]
eq_(node, B(id=1, name='b1',b_data='i'))
eq_(node.children[0], B(id=2, name='b2',b_data='l'))
@@ -492,7 +492,7 @@ class FlushTest(_base.MappedTest):
class Admin(User):pass
role_mapper = mapper(Role, roles)
user_mapper = mapper(User, users, properties = {
- 'roles' : relationship(Role, secondary=user_roles, lazy=False)
+ 'roles' : relationship(Role, secondary=user_roles, lazy='joined')
}
)
admin_mapper = mapper(Admin, admins, inherits=user_mapper)
@@ -528,7 +528,7 @@ class FlushTest(_base.MappedTest):
role_mapper = mapper(Role, roles)
user_mapper = mapper(User, users, properties = {
- 'roles' : relationship(Role, secondary=user_roles, lazy=False)
+ 'roles' : relationship(Role, secondary=user_roles, lazy='joined')
}
)
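The lazy=True / lazy=False edits in these mappers are the other half of the rename: the lazy parameter now takes a loader-strategy name, with lazy='select' replacing lazy=True and lazy='joined' replacing lazy=False (the boolean spellings remain accepted for compatibility). A sketch in the classical mapper() style these tests use; the tables and classes are illustrative, not the fixtures:

    from sqlalchemy import (Column, ForeignKey, Integer, MetaData, String,
                            Table, create_engine)
    from sqlalchemy.orm import mapper, relationship, sessionmaker

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    addresses = Table('addresses', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('user_id', Integer, ForeignKey('users.id')),
                      Column('email', String(50)))

    class User(object):
        pass

    class Address(object):
        pass

    mapper(Address, addresses)
    mapper(User, users, properties={
        # was lazy=False: loaded via LEFT OUTER JOIN in the parent SELECT
        'addresses': relationship(Address, lazy='joined'),
    })

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.query(User).all()  # one JOINed statement, no per-row loads
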
diff --git a/test/orm/inheritance/test_concrete.py b/test/orm/inheritance/test_concrete.py
index be921b00b..a8fe06867 100644
--- a/test/orm/inheritance/test_concrete.py
+++ b/test/orm/inheritance/test_concrete.py
@@ -313,7 +313,7 @@ class ConcreteTest(_base.MappedTest):
self.assert_sql_count(testing.db, go, 2)
session.expunge_all()
def go():
- c2 = session.query(Company).options(eagerload(Company.employees)).get(c.id)
+ c2 = session.query(Company).options(joinedload(Company.employees)).get(c.id)
assert set([repr(x) for x in c2.employees]) == set(["Engineer Kurt knows how to hack", "Manager Tom knows how to manage things"])
self.assert_sql_count(testing.db, go, 1)
@@ -462,7 +462,7 @@ class PropertyInheritanceTest(_base.MappedTest):
def go():
eq_(
[C(many_a=[A(aname='a1'), B(bname='b1'), B(bname='b2')]), C(many_a=[A(aname='a2')])],
- sess.query(C).options(eagerload(C.many_a)).order_by(C.id).all(),
+ sess.query(C).options(joinedload(C.many_a)).order_by(C.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
diff --git a/test/orm/inheritance/test_magazine.py b/test/orm/inheritance/test_magazine.py
index 27f6e3753..36ac7f919 100644
--- a/test/orm/inheritance/test_magazine.py
+++ b/test/orm/inheritance/test_magazine.py
@@ -132,7 +132,7 @@ def generate_round_trip_test(use_unions=False, use_joins=False):
location_name_mapper = mapper(LocationName, location_name_table)
location_mapper = mapper(Location, location_table, properties = {
- 'issue': relationship(Issue, backref=backref('locations', lazy=False, cascade="all, delete-orphan")),
+ 'issue': relationship(Issue, backref=backref('locations', lazy='joined', cascade="all, delete-orphan")),
'_name': relationship(LocationName),
})
diff --git a/test/orm/inheritance/test_manytomany.py b/test/orm/inheritance/test_manytomany.py
index 1bdbe795b..8390e2a1b 100644
--- a/test/orm/inheritance/test_manytomany.py
+++ b/test/orm/inheritance/test_manytomany.py
@@ -56,7 +56,7 @@ class InheritTest(_base.MappedTest):
mapper(Group, groups, inherits=Principal, properties={
'users': relationship(User, secondary=user_group_map,
- lazy=True, backref="groups")
+ lazy='select', backref="groups")
})
g = Group(name="group1")
@@ -116,7 +116,7 @@ class InheritTest2(_base.MappedTest):
pass
mapper(Bar, bar, inherits=Foo, properties={
- 'foos': relationship(Foo, secondary=foo_bar, lazy=False)
+ 'foos': relationship(Foo, secondary=foo_bar, lazy='joined')
})
sess = create_session()
@@ -185,7 +185,7 @@ class InheritTest3(_base.MappedTest):
return "Bar id %d, data %s" % (self.id, self.data)
mapper(Bar, bar, inherits=Foo, properties={
- 'foos' :relationship(Foo, secondary=bar_foo, lazy=True)
+ 'foos' :relationship(Foo, secondary=bar_foo, lazy='select')
})
sess = create_session()
@@ -220,8 +220,8 @@ class InheritTest3(_base.MappedTest):
return "Blub id %d, data %s, bars %s, foos %s" % (self.id, self.data, repr([b for b in self.bars]), repr([f for f in self.foos]))
mapper(Blub, blub, inherits=Bar, properties={
- 'bars':relationship(Bar, secondary=blub_bar, lazy=False),
- 'foos':relationship(Foo, secondary=blub_foo, lazy=False),
+ 'bars':relationship(Bar, secondary=blub_bar, lazy='joined'),
+ 'foos':relationship(Foo, secondary=blub_foo, lazy='joined'),
})
sess = create_session()
diff --git a/test/orm/inheritance/test_poly_linked_list.py b/test/orm/inheritance/test_poly_linked_list.py
index bc954c47d..e16c95555 100644
--- a/test/orm/inheritance/test_poly_linked_list.py
+++ b/test/orm/inheritance/test_poly_linked_list.py
@@ -87,7 +87,7 @@ class PolymorphicCircularTest(_base.MappedTest):
# currently, the "eager" relationships degrade to lazy relationships
# due to the polymorphic load.
- # the "nxt" relationship used to have a "lazy=False" on it, but the EagerLoader raises the "self-referential"
+ # the "nxt" relationship used to have a "lazy='joined'" on it, but the EagerLoader raises the "self-referential"
# exception now. Since eager loading would never work for that relationship anyway, it's better that the user
# gets an exception instead of it silently not eager loading.
# NOTE: using "nxt" instead of "next" to avoid 2to3 turning it into __next__() for some reason.
@@ -99,7 +99,7 @@ class PolymorphicCircularTest(_base.MappedTest):
'nxt': relationship(Table1,
backref=backref('prev', remote_side=table1.c.id, uselist=False),
uselist=False, primaryjoin=table1.c.id==table1.c.related_id),
- 'data':relationship(mapper(Data, data), lazy=False, order_by=data.c.id)
+ 'data':relationship(mapper(Data, data), lazy='joined', order_by=data.c.id)
},
order_by=table1.c.id
)
diff --git a/test/orm/inheritance/test_polymorph2.py b/test/orm/inheritance/test_polymorph2.py
index 94939b33c..9852e8b09 100644
--- a/test/orm/inheritance/test_polymorph2.py
+++ b/test/orm/inheritance/test_polymorph2.py
@@ -167,14 +167,14 @@ class RelationshipTest2(_base.MappedTest):
if usedata:
mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id==managers.c.person_id, polymorphic_identity='manager',
properties={
- 'colleague':relationship(Person, primaryjoin=managers.c.manager_id==people.c.person_id, lazy=True, uselist=False),
+ 'colleague':relationship(Person, primaryjoin=managers.c.manager_id==people.c.person_id, lazy='select', uselist=False),
'data':relationship(Data, uselist=False)
}
)
else:
mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id==managers.c.person_id, polymorphic_identity='manager',
properties={
- 'colleague':relationship(Person, primaryjoin=managers.c.manager_id==people.c.person_id, lazy=True, uselist=False)
+ 'colleague':relationship(Person, primaryjoin=managers.c.manager_id==people.c.person_id, lazy='select', uselist=False)
}
)
@@ -384,7 +384,7 @@ class RelationshipTest4(_base.MappedTest):
session.expunge_all()
def go():
- testcar = session.query(Car).options(eagerload('employee')).get(car1.car_id)
+ testcar = session.query(Car).options(joinedload('employee')).get(car1.car_id)
assert str(testcar.employee) == "Engineer E4, status X"
self.assert_sql_count(testing.db, go, 1)
@@ -407,7 +407,7 @@ class RelationshipTest4(_base.MappedTest):
# and now for the lightning round, eager!
def go():
- testcar = session.query(Car).options(eagerload('employee')).get(car1.car_id)
+ testcar = session.query(Car).options(joinedload('employee')).get(car1.car_id)
assert str(testcar.employee) == "Engineer E4, status X"
self.assert_sql_count(testing.db, go, 1)
@@ -462,7 +462,7 @@ class RelationshipTest5(_base.MappedTest):
person_mapper = mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person')
engineer_mapper = mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer')
manager_mapper = mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager')
- car_mapper = mapper(Car, cars, properties= {'manager':relationship(manager_mapper, lazy=False)})
+ car_mapper = mapper(Car, cars, properties= {'manager':relationship(manager_mapper, lazy='joined')})
sess = create_session()
car1 = Car()
@@ -505,7 +505,7 @@ class RelationshipTest6(_base.MappedTest):
# to parent.mapped_table
mapper(Manager, managers, inherits=Person, inherit_condition=people.c.person_id==managers.c.person_id,
properties={
- 'colleague':relationship(Manager, primaryjoin=managers.c.colleague_id==managers.c.person_id, lazy=True, uselist=False)
+ 'colleague':relationship(Manager, primaryjoin=managers.c.colleague_id==managers.c.person_id, lazy='select', uselist=False)
}
)
@@ -1049,7 +1049,7 @@ class InheritingEagerTest(_base.MappedTest):
self.label = label
mapper(Person, people, polymorphic_on=people.c._type,polymorphic_identity='person', properties={
- 'tags': relationship(Tag, secondary=peopleTags,backref='people', lazy=False)
+ 'tags': relationship(Tag, secondary=peopleTags,backref='people', lazy='joined')
})
mapper(Employee, employees, inherits=Person,polymorphic_identity='employee')
mapper(Tag, tags)
diff --git a/test/orm/inheritance/test_productspec.py b/test/orm/inheritance/test_productspec.py
index 450900355..dc81d9245 100644
--- a/test/orm/inheritance/test_productspec.py
+++ b/test/orm/inheritance/test_productspec.py
@@ -95,12 +95,12 @@ class InheritTest(_base.MappedTest):
master=relationship(Assembly,
foreign_keys=[specification_table.c.master_id],
primaryjoin=specification_table.c.master_id==products_table.c.product_id,
- lazy=True, backref=backref('specification'),
+ lazy='select', backref=backref('specification'),
uselist=False),
slave=relationship(Product,
foreign_keys=[specification_table.c.slave_id],
primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
- lazy=True, uselist=False),
+ lazy='select', uselist=False),
quantity=specification_table.c.quantity,
)
)
@@ -139,7 +139,7 @@ class InheritTest(_base.MappedTest):
slave=relationship(Product,
foreign_keys=[specification_table.c.slave_id],
primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
- lazy=True, uselist=False),
+ lazy='select', uselist=False),
)
)
@@ -168,12 +168,12 @@ class InheritTest(_base.MappedTest):
specification_mapper = mapper(SpecLine, specification_table,
properties=dict(
- master=relationship(Assembly, lazy=False, uselist=False,
+ master=relationship(Assembly, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.master_id],
primaryjoin=specification_table.c.master_id==products_table.c.product_id,
backref=backref('specification', cascade="all, delete-orphan"),
),
- slave=relationship(Product, lazy=False, uselist=False,
+ slave=relationship(Product, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.slave_id],
primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
),
@@ -187,7 +187,7 @@ class InheritTest(_base.MappedTest):
properties=dict(
name=documents_table.c.name,
data=deferred(documents_table.c.data),
- product=relationship(Product, lazy=True, backref=backref('documents', cascade="all, delete-orphan")),
+ product=relationship(Product, lazy='select', backref=backref('documents', cascade="all, delete-orphan")),
),
)
raster_document_mapper = mapper(RasterDocument, inherits=document_mapper,
@@ -227,7 +227,7 @@ class InheritTest(_base.MappedTest):
properties=dict(
name=documents_table.c.name,
data=deferred(documents_table.c.data),
- product=relationship(Product, lazy=True, backref=backref('documents', cascade="all, delete-orphan")),
+ product=relationship(Product, lazy='select', backref=backref('documents', cascade="all, delete-orphan")),
),
)
raster_document_mapper = mapper(RasterDocument, inherits=document_mapper,
@@ -260,12 +260,12 @@ class InheritTest(_base.MappedTest):
specification_mapper = mapper(SpecLine, specification_table,
properties=dict(
- master=relationship(Assembly, lazy=False, uselist=False,
+ master=relationship(Assembly, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.master_id],
primaryjoin=specification_table.c.master_id==products_table.c.product_id,
backref=backref('specification'),
),
- slave=relationship(Product, lazy=False, uselist=False,
+ slave=relationship(Product, lazy='joined', uselist=False,
foreign_keys=[specification_table.c.slave_id],
primaryjoin=specification_table.c.slave_id==products_table.c.product_id,
),
@@ -276,7 +276,7 @@ class InheritTest(_base.MappedTest):
product_mapper = mapper(Product, products_table,
polymorphic_on=products_table.c.product_type,
polymorphic_identity='product', properties={
- 'documents' : relationship(Document, lazy=True,
+ 'documents' : relationship(Document, lazy='select',
backref='product', cascade='all, delete-orphan'),
})
diff --git a/test/orm/inheritance/test_query.py b/test/orm/inheritance/test_query.py
index 83297e165..9e944ca6f 100644
--- a/test/orm/inheritance/test_query.py
+++ b/test/orm/inheritance/test_query.py
@@ -185,20 +185,40 @@ def _produce_test(select_type):
eq_(sess.query(Person).all(), all_employees)
self.assert_sql_count(testing.db, go, {'':14, 'Polymorphic':9}.get(select_type, 10))
+ def test_foo(self):
+ sess = create_session()
+
+ def go():
+ eq_(sess.query(Person).options(subqueryload(Engineer.machines)).all(), all_employees)
+ self.assert_sql_count(testing.db, go, {'':14, 'Unions':8, 'Polymorphic':7}.get(select_type, 8))
+
def test_primary_eager_aliasing(self):
sess = create_session()
+
+ # for both joinedload() and subqueryload(), if the original query is not
+ # loading the subclass table, the eager load doesn't happen.
def go():
- eq_(sess.query(Person).options(eagerload(Engineer.machines))[1:3], all_employees[1:3])
+ eq_(sess.query(Person).options(joinedload(Engineer.machines))[1:3], all_employees[1:3])
self.assert_sql_count(testing.db, go, {'':6, 'Polymorphic':3}.get(select_type, 4))
sess = create_session()
+
+ def go():
+ eq_(sess.query(Person).options(subqueryload(Engineer.machines)).all(), all_employees)
+ self.assert_sql_count(testing.db, go, {'':14, 'Unions':8, 'Polymorphic':7}.get(select_type, 8))
+
+ sess = create_session()
# assert the JOINs don't over-JOIN
- assert sess.query(Person).with_polymorphic('*').options(eagerload(Engineer.machines)).limit(2).offset(1).with_labels().subquery().count().scalar() == 2
+ assert sess.query(Person).with_polymorphic('*').options(joinedload(Engineer.machines)).\
+ limit(2).offset(1).with_labels().subquery().count().scalar() == 2
def go():
- eq_(sess.query(Person).with_polymorphic('*').options(eagerload(Engineer.machines))[1:3], all_employees[1:3])
+ eq_(
+ sess.query(Person).with_polymorphic('*').
+ options(joinedload(Engineer.machines))[1:3],
+ all_employees[1:3])
self.assert_sql_count(testing.db, go, 3)
@@ -467,7 +487,9 @@ def _produce_test(select_type):
def test_relationship_to_polymorphic(self):
assert_result = [
Company(name="MegaCorp, Inc.", employees=[
- Engineer(name="dilbert", engineer_name="dilbert", primary_language="java", status="regular engineer", machines=[Machine(name="IBM ThinkPad"), Machine(name="IPhone")]),
+ Engineer(name="dilbert", engineer_name="dilbert",
+ primary_language="java", status="regular engineer",
+ machines=[Machine(name="IBM ThinkPad"), Machine(name="IPhone")]),
Engineer(name="wally", engineer_name="wally", primary_language="c++", status="regular engineer"),
Boss(name="pointy haired boss", golf_swing="fore", manager_name="pointy", status="da boss"),
Manager(name="dogbert", manager_name="dogbert", status="regular manager"),
@@ -486,23 +508,56 @@ def _produce_test(select_type):
sess = create_session()
def go():
- # currently, it doesn't matter if we say Company.employees, or Company.employees.of_type(Engineer). eagerloader doesn't
+ # currently, it doesn't matter if we say Company.employees,
+ # or Company.employees.of_type(Engineer). joinedloader doesn't
# pick up on the "of_type()" as of yet.
- eq_(sess.query(Company).options(eagerload_all(Company.employees.of_type(Engineer), Engineer.machines)).all(), assert_result)
+ eq_(
+ sess.query(Company).options(
+ joinedload_all(Company.employees.of_type(Engineer), Engineer.machines
+ )).all(),
+ assert_result)
+
+ # in the case of select_type='', the joinedload doesn't take
+ # effect; it joinedloads company->people, then emits a load for
+ # each of 5 rows, then a lazyload of "machines"
+ self.assert_sql_count(testing.db, go,
+ {'':7, 'Polymorphic':1}.get(select_type, 2)
+ )
- # in the case of select_type='', the eagerload doesn't take in this case;
- # it eagerloads company->people, then a load for each of 5 rows, then lazyload of "machines"
- self.assert_sql_count(testing.db, go, {'':7, 'Polymorphic':1}.get(select_type, 2))
+ sess = create_session()
+ def go():
+ eq_(
+ sess.query(Company).options(
+ subqueryload_all(Company.employees.of_type(Engineer), Engineer.machines
+ )).all(),
+ assert_result)
+
+ self.assert_sql_count(
+ testing.db, go,
+ {'':8,
+ 'Joins':4,
+ 'Unions':4,
+ 'Polymorphic':3,
+ 'AliasedJoins':4}[select_type]
+ )
- def test_eagerload_on_subclass(self):
+ def test_joinedload_on_subclass(self):
sess = create_session()
def go():
- # test load People with eagerload to engineers + machines
- eq_(sess.query(Person).with_polymorphic('*').options(eagerload(Engineer.machines)).filter(Person.name=='dilbert').all(),
+ # test load People with joinedload to engineers + machines
+ eq_(sess.query(Person).with_polymorphic('*').options(joinedload(Engineer.machines)).filter(Person.name=='dilbert').all(),
[Engineer(name="dilbert", engineer_name="dilbert", primary_language="java", status="regular engineer", machines=[Machine(name="IBM ThinkPad"), Machine(name="IPhone")])]
)
self.assert_sql_count(testing.db, go, 1)
+ sess = create_session()
+ def go():
+ # test load People with subqueryload to engineers + machines
+ eq_(sess.query(Person).with_polymorphic('*').options(subqueryload(Engineer.machines)).filter(Person.name=='dilbert').all(),
+ [Engineer(name="dilbert", engineer_name="dilbert", primary_language="java", status="regular engineer", machines=[Machine(name="IBM ThinkPad"), Machine(name="IPhone")])]
+ )
+ self.assert_sql_count(testing.db, go, 2)
+
def test_query_subclass_join_to_base_relationship(self):
sess = create_session()
@@ -1128,7 +1183,7 @@ class SelfReferentialM2MTest(_base.MappedTest, AssertsCompiledSQL):
session.add(c1)
session.flush()
- q = session.query(Child1).options(eagerload('left_child2'))
+ q = session.query(Child1).options(joinedload('left_child2'))
# test that the splicing of the join works here, doesn't break in the middle of "parent join child1"
self.assert_compile(q.limit(1).with_labels().statement,
@@ -1146,9 +1201,21 @@ class SelfReferentialM2MTest(_base.MappedTest, AssertsCompiledSQL):
assert q.limit(1).with_labels().subquery().count().scalar() == 1
assert q.first() is c1
-
+
+ def test_subquery_load(self):
+ session = create_session()
+
+ c1 = Child1()
+ c1.left_child2 = Child2()
+ session.add(c1)
+ session.flush()
+ session.expunge_all()
+
+ for row in session.query(Child1).options(subqueryload('left_child2')).all():
+ assert row.left_child2
+
class EagerToSubclassTest(_base.MappedTest):
- """Test eagerloads to subclass mappers"""
+ """Test joinedloads to subclass mappers"""
run_setup_classes = 'once'
run_setup_mappers = 'once'
@@ -1205,11 +1272,11 @@ class EagerToSubclassTest(_base.MappedTest):
sess.flush()
@testing.resolve_artifact_names
- def test_eagerload(self):
+ def test_joinedload(self):
sess = create_session()
def go():
eq_(
- sess.query(Parent).options(eagerload(Parent.children)).all(),
+ sess.query(Parent).options(joinedload(Parent.children)).all(),
[
Parent(data='p1', children=[Sub(data='s1'), Sub(data='s2'), Sub(data='s3')]),
Parent(data='p2', children=[Sub(data='s4'), Sub(data='s5')])
@@ -1231,8 +1298,8 @@ class EagerToSubclassTest(_base.MappedTest):
)
self.assert_sql_count(testing.db, go, 1)
-class SubClassEagerToSubclassTest(_base.MappedTest):
- """Test eagerloads from subclass to subclass mappers"""
+class SubClassEagerToSubClassTest(_base.MappedTest):
+ """Test joinedloads from subclass to subclass mappers"""
run_setup_classes = 'once'
run_setup_mappers = 'once'
@@ -1282,7 +1349,7 @@ class SubClassEagerToSubclassTest(_base.MappedTest):
def setup_mappers(cls):
mapper(Parent, parent, polymorphic_on=parent.c.type, polymorphic_identity='b')
mapper(Subparent, subparent, inherits=Parent, polymorphic_identity='s', properties={
- 'children':relationship(Sub)
+ 'children':relationship(Sub, order_by=base.c.id)
})
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='b')
mapper(Sub, sub, inherits=Base, polymorphic_identity='s')
@@ -1298,11 +1365,11 @@ class SubClassEagerToSubclassTest(_base.MappedTest):
sess.flush()
@testing.resolve_artifact_names
- def test_eagerload(self):
+ def test_joinedload(self):
sess = create_session()
def go():
eq_(
- sess.query(Subparent).options(eagerload(Subparent.children)).all(),
+ sess.query(Subparent).options(joinedload(Subparent.children)).all(),
[
Subparent(data='p1', children=[Sub(data='s1'), Sub(data='s2'), Sub(data='s3')]),
Subparent(data='p2', children=[Sub(data='s4'), Sub(data='s5')])
@@ -1313,7 +1380,7 @@ class SubClassEagerToSubclassTest(_base.MappedTest):
sess.expunge_all()
def go():
eq_(
- sess.query(Subparent).options(eagerload("children")).all(),
+ sess.query(Subparent).options(joinedload("children")).all(),
[
Subparent(data='p1', children=[Sub(data='s1'), Sub(data='s2'), Sub(data='s3')]),
Subparent(data='p2', children=[Sub(data='s4'), Sub(data='s5')])
@@ -1345,4 +1412,27 @@ class SubClassEagerToSubclassTest(_base.MappedTest):
)
self.assert_sql_count(testing.db, go, 1)
+ @testing.resolve_artifact_names
+ def test_subqueryload(self):
+ sess = create_session()
+ def go():
+ eq_(
+ sess.query(Subparent).options(subqueryload(Subparent.children)).all(),
+ [
+ Subparent(data='p1', children=[Sub(data='s1'), Sub(data='s2'), Sub(data='s3')]),
+ Subparent(data='p2', children=[Sub(data='s4'), Sub(data='s5')])
+ ]
+ )
+ self.assert_sql_count(testing.db, go, 2)
+
+ sess.expunge_all()
+ def go():
+ eq_(
+ sess.query(Subparent).options(subqueryload("children")).all(),
+ [
+ Subparent(data='p1', children=[Sub(data='s1'), Sub(data='s2'), Sub(data='s3')]),
+ Subparent(data='p2', children=[Sub(data='s4'), Sub(data='s5')])
+ ]
+ )
+ self.assert_sql_count(testing.db, go, 2)
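The subqueryload()/subqueryload_all() options appearing throughout this file exercise the new subquery eager loader: rather than folding the collection into the parent statement with a JOIN, it emits a second SELECT that restates the original query as a subquery, which is why these tests assert two statements where the joinedload variants assert one. Continuing the hypothetical User/Address sketch from the serializer notes above:

    from sqlalchemy.orm import subqueryload

    # statement 1: SELECT users; statement 2: SELECT addresses joined to a
    # subquery of statement 1 -- hence assert_sql_count(..., 2) above
    users = session.query(User).options(subqueryload(User.addresses)).all()
    assert users[0].addresses[0].email == 'ed@example.com'
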
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index f54044d64..4b7078eb5 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -286,7 +286,7 @@ class RelationshipToSingleTest(MappedTest):
# eager load join should limit to only "Engineer"
sess.expunge_all()
- eq_(sess.query(Company).options(eagerload('engineers')).order_by(Company.name).all(),
+ eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(),
[
Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
Company(name='c2', engineers=[Engineer(name='Kurt')])
diff --git a/test/orm/sharding/__init__.py b/test/orm/sharding/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/test/orm/sharding/__init__.py
+++ /dev/null
diff --git a/test/orm/test_association.py b/test/orm/test_association.py
index 64915942c..c9b1584bb 100644
--- a/test/orm/test_association.py
+++ b/test/orm/test_association.py
@@ -55,7 +55,7 @@ class AssociationTest(_base.MappedTest):
mapper(Keyword, keywords)
mapper(KeywordAssociation, item_keywords, properties={
- 'keyword':relationship(Keyword, lazy=False)},
+ 'keyword':relationship(Keyword, lazy='joined')},
primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
order_by=[item_keywords.c.data])
diff --git a/test/orm/test_assorted_eager.py b/test/orm/test_assorted_eager.py
index 7fa3b6b8a..20736b8fe 100644
--- a/test/orm/test_assorted_eager.py
+++ b/test/orm/test_assorted_eager.py
@@ -124,7 +124,7 @@ class EagerTest(_base.MappedTest):
eq_(result, [(1, u'Some Category'), (3, u'Some Category')])
@testing.resolve_artifact_names
- def test_withouteagerload(self):
+ def test_withoutjoinedload(self):
s = create_session()
l = (s.query(Thing).
select_from(tests.outerjoin(options,
@@ -139,15 +139,15 @@ class EagerTest(_base.MappedTest):
eq_(result, [u'1 Some Category', u'3 Some Category'])
@testing.resolve_artifact_names
- def test_witheagerload(self):
+ def test_withjoinedload(self):
"""
- Test that an eagerload locates the correct "from" clause with which to
+ Test that a joinedload locates the correct "from" clause to
attach to, when presented with a query that already has a complicated
from clause.
"""
s = create_session()
- q=s.query(Thing).options(sa.orm.eagerload('category'))
+ q=s.query(Thing).options(sa.orm.joinedload('category'))
l=(q.select_from(tests.outerjoin(options,
sa.and_(tests.c.id ==
@@ -163,9 +163,9 @@ class EagerTest(_base.MappedTest):
@testing.resolve_artifact_names
def test_dslish(self):
- """test the same as witheagerload except using generative"""
+ """test the same as withjoinedload except using generative"""
s = create_session()
- q = s.query(Thing).options(sa.orm.eagerload('category'))
+ q = s.query(Thing).options(sa.orm.joinedload('category'))
l = q.filter (
sa.and_(tests.c.owner_id == 1,
sa.or_(options.c.someoption == None,
@@ -179,7 +179,7 @@ class EagerTest(_base.MappedTest):
@testing.resolve_artifact_names
def test_without_outerjoin_literal(self):
s = create_session()
- q = s.query(Thing).options(sa.orm.eagerload('category'))
+ q = s.query(Thing).options(sa.orm.joinedload('category'))
l = (q.filter(
(tests.c.owner_id==1) &
('options.someoption is null or options.someoption=%s' % false)).
@@ -191,7 +191,7 @@ class EagerTest(_base.MappedTest):
@testing.resolve_artifact_names
def test_withoutouterjoin(self):
s = create_session()
- q = s.query(Thing).options(sa.orm.eagerload('category'))
+ q = s.query(Thing).options(sa.orm.joinedload('category'))
l = q.filter(
(tests.c.owner_id==1) &
((options.c.someoption==None) | (options.c.someoption==False))
@@ -238,11 +238,11 @@ class EagerTest2(_base.MappedTest):
mapper(Right, right)
mapper(Middle, middle, properties=dict(
left=relationship(Left,
- lazy=False,
- backref=backref('middle',lazy=False)),
+ lazy='joined',
+ backref=backref('middle',lazy='joined')),
right=relationship(Right,
- lazy=False,
- backref=backref('middle', lazy=False)))),
+ lazy='joined',
+ backref=backref('middle', lazy='joined')))),
@testing.fails_on('maxdb', 'FIXME: unknown')
@testing.resolve_artifact_names
@@ -337,7 +337,7 @@ class EagerTest3(_base.MappedTest):
# now query for Data objects using that above select, adding the
# "order by max desc" separately
q = (session.query(Data).
- options(sa.orm.eagerload('foo')).
+ options(sa.orm.joinedload('foo')).
select_from(datas.join(arb_data, arb_data.c.data_id == datas.c.id)).
order_by(sa.desc(arb_data.c.max)).
limit(10))
@@ -375,7 +375,7 @@ class EagerTest4(_base.MappedTest):
mapper(Employee, employees)
mapper(Department, departments, properties=dict(
employees=relationship(Employee,
- lazy=False,
+ lazy='joined',
backref='department')))
d1 = Department(name='One')
@@ -451,7 +451,7 @@ class EagerTest5(_base.MappedTest):
commentMapper = mapper(Comment, comments)
baseMapper = mapper(Base, base, properties=dict(
- comments=relationship(Comment, lazy=False,
+ comments=relationship(Comment, lazy='joined',
cascade='all, delete-orphan')))
mapper(Derived, derived, inherits=baseMapper)
@@ -528,7 +528,7 @@ class EagerTest6(_base.MappedTest):
p_m = mapper(Part, parts)
mapper(InheritedPart, inherited_part, properties=dict(
- part=relationship(Part, lazy=False)))
+ part=relationship(Part, lazy='joined')))
d_m = mapper(Design, design, properties=dict(
inheritedParts=relationship(InheritedPart,
@@ -538,11 +538,11 @@ class EagerTest6(_base.MappedTest):
mapper(DesignType, design_types)
d_m.add_property(
- "type", relationship(DesignType, lazy=False, backref="designs"))
+ "type", relationship(DesignType, lazy='joined', backref="designs"))
p_m.add_property(
"design", relationship(
- Design, lazy=False,
+ Design, lazy='joined',
backref=backref("parts", cascade="all, delete-orphan")))
@@ -613,10 +613,10 @@ class EagerTest7(_base.MappedTest):
mapper(Address, addresses)
mapper(Company, companies, properties={
- 'addresses' : relationship(Address, lazy=False)})
+ 'addresses' : relationship(Address, lazy='joined')})
mapper(Invoice, invoices, properties={
- 'company': relationship(Company, lazy=False)})
+ 'company': relationship(Company, lazy='joined')})
a1 = Address(address='a1 address')
a2 = Address(address='a2 address')
@@ -646,19 +646,19 @@ class EagerTest7(_base.MappedTest):
mapper(Phone, phone_numbers)
mapper(Address, addresses, properties={
- 'phones': relationship(Phone, lazy=False, backref='address',
+ 'phones': relationship(Phone, lazy='joined', backref='address',
order_by=phone_numbers.c.phone_id)})
mapper(Company, companies, properties={
- 'addresses': relationship(Address, lazy=False, backref='company',
+ 'addresses': relationship(Address, lazy='joined', backref='company',
order_by=addresses.c.address_id)})
mapper(Item, items)
mapper(Invoice, invoices, properties={
- 'items': relationship(Item, lazy=False, backref='invoice',
+ 'items': relationship(Item, lazy='joined', backref='invoice',
order_by=items.c.item_id),
- 'company': relationship(Company, lazy=False, backref='invoices')})
+ 'company': relationship(Company, lazy='joined', backref='invoices')})
c1 = Company(company_name='company 1', addresses=[
Address(address='a1 address',
@@ -776,7 +776,7 @@ class EagerTest8(_base.MappedTest):
jjj = sa.join(task, jj, task.c.id == jj.c.task_id)
mapper(Joined, jjj, properties=dict(
- type=relationship(Task_Type, lazy=False)))
+ type=relationship(Task_Type, lazy='joined')))
session = create_session()
@@ -831,16 +831,16 @@ class EagerTest9(_base.MappedTest):
mapper(Entry, entries, properties=dict(
account=relationship(Account,
uselist=False,
- backref=backref('entries', lazy=True,
+ backref=backref('entries', lazy='select',
order_by=entries.c.entry_id)),
transaction=relationship(Transaction,
uselist=False,
- backref=backref('entries', lazy=False,
+ backref=backref('entries', lazy='joined',
order_by=entries.c.entry_id))))
@testing.fails_on('maxdb', 'FIXME: unknown')
@testing.resolve_artifact_names
- def test_eagerload_on_path(self):
+ def test_joinedload_on_path(self):
session = create_session()
tx1 = Transaction(name='tx1')
@@ -864,7 +864,7 @@ class EagerTest9(_base.MappedTest):
# "accounts" off the immediate "entries"; only the "accounts" off
# the entries->transaction->entries
acc = (session.query(Account).
- options(sa.orm.eagerload_all('entries.transaction.entries.account')).
+ options(sa.orm.joinedload_all('entries.transaction.entries.account')).
order_by(Account.account_id)).first()
# no sql occurs
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index 2725d7b0e..a7152ecc1 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -403,7 +403,7 @@ class M2OCascadeTest(_base.MappedTest):
extra = relationship(Extra, cascade="all, delete")
))
mapper(User, users, properties = dict(
- pref = relationship(Pref, lazy=False, cascade="all, delete-orphan", single_parent=True ),
+ pref = relationship(Pref, lazy='joined', cascade="all, delete-orphan", single_parent=True ),
foo = relationship(Foo) # straight m2o
))
mapper(Foo, foo)
diff --git a/test/orm/test_compile.py b/test/orm/test_compile.py
index 9d70ece16..101e4143a 100644
--- a/test/orm/test_compile.py
+++ b/test/orm/test_compile.py
@@ -52,17 +52,17 @@ class CompileTest(_base.ORMTest):
polymorphic_on=order_join.c.type,
polymorphic_identity='order',
properties={
- 'orderproducts': relationship(OrderProduct, lazy=True, backref='order')}
+ 'orderproducts': relationship(OrderProduct, lazy='select', backref='order')}
)
mapper(Product, product,
properties={
- 'orderproducts': relationship(OrderProduct, lazy=True, backref='product')}
+ 'orderproducts': relationship(OrderProduct, lazy='select', backref='product')}
)
mapper(Employee, employee,
properties={
- 'orders': relationship(Order, lazy=True, backref='employee')})
+ 'orders': relationship(Order, lazy='select', backref='employee')})
mapper(OrderProduct, orderproduct)
@@ -105,12 +105,12 @@ class CompileTest(_base.ORMTest):
polymorphic_on=order_join.c.type,
polymorphic_identity='order',
properties={
- 'orderproducts': relationship(OrderProduct, lazy=True, backref='product')}
+ 'orderproducts': relationship(OrderProduct, lazy='select', backref='product')}
)
mapper(Product, product,
properties={
- 'orderproducts': relationship(OrderProduct, lazy=True, backref='product')}
+ 'orderproducts': relationship(OrderProduct, lazy='select', backref='product')}
)
mapper(OrderProduct, orderproduct)
diff --git a/test/orm/test_cycles.py b/test/orm/test_cycles.py
index fa233f681..c197cb627 100644
--- a/test/orm/test_cycles.py
+++ b/test/orm/test_cycles.py
@@ -47,7 +47,7 @@ class SelfReferentialTest(_base.MappedTest):
'parent':relationship(C1,
primaryjoin=t1.c.parent_c1 == t1.c.c1,
remote_side=t1.c.c1,
- lazy=True,
+ lazy='select',
uselist=False)})
a = C1('head c1')
a.c1s.append(C1('another c1'))
@@ -755,7 +755,7 @@ class SelfReferentialPostUpdateTest(_base.MappedTest):
'children': relationship(
Node,
primaryjoin=node.c.id==node.c.parent_id,
- lazy=True,
+ lazy='select',
cascade="all",
backref=backref("parent", remote_side=node.c.id)
),
@@ -763,13 +763,13 @@ class SelfReferentialPostUpdateTest(_base.MappedTest):
Node,
primaryjoin=node.c.prev_sibling_id==node.c.id,
remote_side=node.c.id,
- lazy=True,
+ lazy='select',
uselist=False),
'next_sibling': relationship(
Node,
primaryjoin=node.c.next_sibling_id==node.c.id,
remote_side=node.c.id,
- lazy=True,
+ lazy='select',
uselist=False,
post_update=True)})
diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py
index 0411a22c8..4c30b3bd9 100644
--- a/test/orm/test_eager_relations.py
+++ b/test/orm/test_eager_relations.py
@@ -3,7 +3,7 @@
from sqlalchemy.test.testing import eq_, is_, is_not_
import sqlalchemy as sa
from sqlalchemy.test import testing
-from sqlalchemy.orm import eagerload, deferred, undefer, eagerload_all, backref
+from sqlalchemy.orm import joinedload, deferred, undefer, joinedload_all, backref
from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, func
from sqlalchemy.test.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, lazyload, aliased
@@ -20,7 +20,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_basic(self):
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=False, order_by=Address.id)
+ 'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=Address.id)
})
sess = create_session()
q = sess.query(User)
@@ -40,7 +40,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
def go():
eq_(
[User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])],
- sess.query(User).options(eagerload('addresses')).filter(User.id==7).all()
+ sess.query(User).options(joinedload('addresses')).filter(User.id==7).all()
)
self.assert_sql_count(testing.db, go, 1)
@@ -48,20 +48,24 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_no_orphan(self):
"""An eagerly loaded child object is not marked as an orphan"""
+
mapper(User, users, properties={
- 'addresses':relationship(Address, cascade="all,delete-orphan", lazy=False)
+ 'addresses':relationship(Address, cascade="all,delete-orphan", lazy='joined')
})
mapper(Address, addresses)
sess = create_session()
user = sess.query(User).get(7)
- assert getattr(User, 'addresses').hasparent(sa.orm.attributes.instance_state(user.addresses[0]), optimistic=True)
- assert not sa.orm.class_mapper(Address)._is_orphan(sa.orm.attributes.instance_state(user.addresses[0]))
+ assert getattr(User, 'addresses').\
+ hasparent(sa.orm.attributes.instance_state(user.addresses[0]), optimistic=True)
+ assert not sa.orm.class_mapper(Address).\
+ _is_orphan(sa.orm.attributes.instance_state(user.addresses[0]))
@testing.resolve_artifact_names
def test_orderby(self):
mapper(User, users, properties = {
- 'addresses':relationship(mapper(Address, addresses), lazy=False, order_by=addresses.c.email_address),
+ 'addresses':relationship(mapper(Address, addresses),
+ lazy='joined', order_by=addresses.c.email_address),
})
q = create_session().query(User)
eq_([
@@ -82,7 +86,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_orderby_multi(self):
mapper(User, users, properties = {
- 'addresses':relationship(mapper(Address, addresses), lazy=False, order_by=[addresses.c.email_address, addresses.c.id]),
+ 'addresses':relationship(mapper(Address, addresses),
+ lazy='joined',
+ order_by=[addresses.c.email_address, addresses.c.id]),
})
q = create_session().query(User)
eq_([
@@ -102,10 +108,12 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_orderby_related(self):
- """A regular mapper select on a single table can order by a relationship to a second table"""
+ """A regular mapper select on a single table can
+ order by a relationship to a second table"""
+
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=False, order_by=addresses.c.id),
+ addresses = relationship(Address, lazy='joined', order_by=addresses.c.id),
))
q = create_session().query(User)
@@ -129,7 +137,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
def test_orderby_desc(self):
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=False,
+ addresses = relationship(Address, lazy='joined',
order_by=[sa.desc(addresses.c.email_address)]),
))
sess = create_session()
@@ -150,14 +158,9 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_deferred_fk_col(self):
- User, Address, Dingaling = self.classes.get_all(
- 'User', 'Address', 'Dingaling')
- users, addresses, dingalings = self.tables.get_all(
- 'users', 'addresses', 'dingalings')
-
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
- 'user':relationship(User, lazy=False)
+ 'user':relationship(User, lazy='joined')
})
mapper(User, users)
@@ -200,7 +203,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
'user_id':deferred(addresses.c.user_id),
})
mapper(User, users, properties={
- 'addresses':relationship(Address, lazy=False)})
+ 'addresses':relationship(Address, lazy='joined')})
for q in [
sess.query(User).filter(User.id==7),
@@ -222,10 +225,10 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
sa.orm.clear_mappers()
mapper(User, users, properties={
- 'addresses':relationship(Address, lazy=False)})
+ 'addresses':relationship(Address, lazy='joined')})
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
- 'dingalings':relationship(Dingaling, lazy=False)})
+ 'dingalings':relationship(Dingaling, lazy='joined')})
mapper(Dingaling, dingalings, properties={
'address_id':deferred(dingalings.c.address_id)})
sess.expunge_all()
@@ -239,15 +242,56 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
self.assert_sql_count(testing.db, go, 1)
@testing.resolve_artifact_names
+ def test_options_pathing(self):
+ mapper(User, users, properties={
+ 'orders':relationship(Order, order_by=orders.c.id), # o2m, m2o
+ })
+ mapper(Order, orders, properties={
+ 'items':relationship(Item,
+ secondary=order_items, order_by=items.c.id), #m2m
+ })
+ mapper(Item, items, properties={
+ 'keywords':relationship(Keyword,
+ secondary=item_keywords,
+ order_by=keywords.c.id) #m2m
+ })
+ mapper(Keyword, keywords)
+
+ for opt, count in [
+ ((
+ joinedload(User.orders, Order.items),
+ ), 10),
+ ((joinedload("orders.items"), ), 10),
+ ((
+ joinedload(User.orders, ),
+ joinedload(User.orders, Order.items),
+ joinedload(User.orders, Order.items, Item.keywords),
+ ), 1),
+ ((
+ joinedload(User.orders, Order.items, Item.keywords),
+ ), 10),
+ ((
+ joinedload(User.orders, Order.items),
+ joinedload(User.orders, Order.items, Item.keywords),
+ ), 5),
+ ]:
+ sess = create_session()
+ def go():
+ eq_(
+ sess.query(User).options(*opt).order_by(User.id).all(),
+ self.static.user_item_keyword_result
+ )
+ self.assert_sql_count(testing.db, go, count)
+
+
+
+ @testing.resolve_artifact_names
def test_many_to_many(self):
- Keyword, Item = self.Keyword, self.Item
- keywords, item_keywords, items = self.tables.get_all(
- 'keywords', 'item_keywords', 'items')
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
keywords = relationship(Keyword, secondary=item_keywords,
- lazy=False, order_by=keywords.c.id)))
+ lazy='joined', order_by=keywords.c.id)))
q = create_session().query(Item).order_by(Item.id)
def go():
@@ -267,20 +311,16 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_eager_option(self):
- Keyword, Item = self.Keyword, self.Item
- keywords, item_keywords, items = self.tables.get_all(
- 'keywords', 'item_keywords', 'items')
-
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
- keywords = relationship(Keyword, secondary=item_keywords, lazy=True,
+ keywords = relationship(Keyword, secondary=item_keywords, lazy='select',
order_by=keywords.c.id)))
q = create_session().query(Item)
def go():
eq_(self.static.item_keyword_result[0:2],
- (q.options(eagerload('keywords')).
+ (q.options(joinedload('keywords')).
join('keywords').filter(keywords.c.name == 'red')).order_by(Item.id).all())
self.assert_sql_count(testing.db, go, 1)
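The new test_options_pathing above pins down how chained loader options resolve: an option applies only to the terminal attribute of the path it names, so joinedload(User.orders, Order.items) eager loads the items hop while User.orders itself still lazy loads (hence 10 statements), and covering the whole chain takes one option per hop (hence 1). An illustrative fragment, not runnable on its own, assuming the suite's User/Order/Item fixture mappings:

    from sqlalchemy.orm import joinedload, joinedload_all

    # only the terminal Order.items hop is joined; User.orders lazy loads
    q1 = session.query(User).options(joinedload(User.orders, Order.items))

    # one option per hop: the whole chain in a single JOINed statement
    q2 = session.query(User).options(
        joinedload(User.orders),
        joinedload(User.orders, Order.items),
        joinedload(User.orders, Order.items, Item.keywords))

    # joinedload_all() is the shorthand that applies to every hop
    q3 = session.query(User).options(joinedload_all("orders.items.keywords"))
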
@@ -288,27 +328,23 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_cyclical(self):
"""A circular eager relationship breaks the cycle with a lazy loader"""
- User, Address = self.User, self.Address
- users, addresses = self.tables.get_all('users', 'addresses')
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=False,
- backref=sa.orm.backref('user', lazy=False), order_by=Address.id)
+ addresses = relationship(Address, lazy='joined',
+ backref=sa.orm.backref('user', lazy='joined'),
+ order_by=Address.id)
))
- is_(sa.orm.class_mapper(User).get_property('addresses').lazy, False)
- is_(sa.orm.class_mapper(Address).get_property('user').lazy, False)
+ eq_(sa.orm.class_mapper(User).get_property('addresses').lazy, 'joined')
+ eq_(sa.orm.class_mapper(Address).get_property('user').lazy, 'joined')
sess = create_session()
eq_(self.static.user_address_result, sess.query(User).order_by(User.id).all())
@testing.resolve_artifact_names
def test_double(self):
- """Eager loading with two relationships simultaneously, from the same table, using aliases."""
- User, Address, Order = self.classes.get_all(
- 'User', 'Address', 'Order')
- users, addresses, orders = self.tables.get_all(
- 'users', 'addresses', 'orders')
+ """Eager loading with two relationships simultaneously,
+ from the same table, using aliases."""
openorders = sa.alias(orders, 'openorders')
closedorders = sa.alias(orders, 'closedorders')
@@ -320,17 +356,17 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
closed_mapper = mapper(Order, closedorders, non_primary=True)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=False, order_by=addresses.c.id),
+ addresses = relationship(Address, lazy='joined', order_by=addresses.c.id),
open_orders = relationship(
open_mapper,
primaryjoin=sa.and_(openorders.c.isopen == 1,
users.c.id==openorders.c.user_id),
- lazy=False, order_by=openorders.c.id),
+ lazy='joined', order_by=openorders.c.id),
closed_orders = relationship(
closed_mapper,
primaryjoin=sa.and_(closedorders.c.isopen == 0,
users.c.id==closedorders.c.user_id),
- lazy=False, order_by=closedorders.c.id)))
+ lazy='joined', order_by=closedorders.c.id)))
q = create_session().query(User).order_by(User.id)
@@ -361,29 +397,26 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_double_same_mappers(self):
- """Eager loading with two relationships simulatneously, from the same table, using aliases."""
- User, Address, Order = self.classes.get_all(
- 'User', 'Address', 'Order')
- users, addresses, orders = self.tables.get_all(
- 'users', 'addresses', 'orders')
+ """Eager loading with two relationships simulatneously,
+ from the same table, using aliases."""
mapper(Address, addresses)
mapper(Order, orders, properties={
- 'items': relationship(Item, secondary=order_items, lazy=False,
+ 'items': relationship(Item, secondary=order_items, lazy='joined',
order_by=items.c.id)})
mapper(Item, items)
mapper(User, users, properties=dict(
- addresses=relationship(Address, lazy=False, order_by=addresses.c.id),
+ addresses=relationship(Address, lazy='joined', order_by=addresses.c.id),
open_orders=relationship(
Order,
primaryjoin=sa.and_(orders.c.isopen == 1,
users.c.id==orders.c.user_id),
- lazy=False, order_by=orders.c.id),
+ lazy='joined', order_by=orders.c.id),
closed_orders=relationship(
Order,
primaryjoin=sa.and_(orders.c.isopen == 0,
users.c.id==orders.c.user_id),
- lazy=False, order_by=orders.c.id)))
+ lazy='joined', order_by=orders.c.id)))
q = create_session().query(User).order_by(User.id)
def go():
@@ -431,15 +464,12 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_no_false_hits(self):
- """Eager loaders don't interpret main table columns as part of their eager load."""
- User, Address, Order = self.classes.get_all(
- 'User', 'Address', 'Order')
- users, addresses, orders = self.tables.get_all(
- 'users', 'addresses', 'orders')
+ """Eager loaders don't interpret main table columns as
+ part of their eager load."""
mapper(User, users, properties={
- 'addresses':relationship(Address, lazy=False),
- 'orders':relationship(Order, lazy=False)
+ 'addresses':relationship(Address, lazy='joined'),
+ 'orders':relationship(Order, lazy='joined')
})
mapper(Address, addresses)
mapper(Order, orders)
@@ -450,7 +480,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
# eager loaders have aliases which should not hit on those columns,
# they should be required to locate only their aliased/fully table
# qualified column name.
- noeagers = create_session().query(User).from_statement("select * from users").all()
+ noeagers = create_session().query(User).\
+ from_statement("select * from users").all()
assert 'orders' not in noeagers[0].__dict__
assert 'addresses' not in noeagers[0].__dict__
@@ -458,18 +489,15 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_limit(self):
"""Limit operations combined with lazy-load relationships."""
- User, Item, Address, Order = self.classes.get_all(
- 'User', 'Item', 'Address', 'Order')
- users, items, order_items, orders, addresses = self.tables.get_all(
- 'users', 'items', 'order_items', 'orders', 'addresses')
mapper(Item, items)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, lazy=False, order_by=items.c.id)
+ 'items':relationship(Item, secondary=order_items, lazy='joined',
+ order_by=items.c.id)
})
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=False, order_by=addresses.c.id),
- 'orders':relationship(Order, lazy=True, order_by=orders.c.id)
+ 'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.id),
+ 'orders':relationship(Order, lazy='select', order_by=orders.c.id)
})
sess = create_session()
@@ -487,7 +515,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
s = sa.union_all(u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=False, order_by=addresses.c.id),
+ 'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.id),
})
sess = create_session()
@@ -503,12 +531,14 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
def test_limit_2(self):
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
- keywords = relationship(Keyword, secondary=item_keywords, lazy=False, order_by=[keywords.c.id]),
+ keywords = relationship(Keyword, secondary=item_keywords, lazy='joined', order_by=[keywords.c.id]),
))
sess = create_session()
q = sess.query(Item)
- l = q.filter((Item.description=='item 2') | (Item.description=='item 5') | (Item.description=='item 3')).\
+ l = q.filter((Item.description=='item 2') |
+ (Item.description=='item 5') |
+ (Item.description=='item 3')).\
order_by(Item.id).limit(2).all()
eq_(self.static.item_keyword_result[1:3], l)
@@ -516,18 +546,20 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.fails_on('maxdb', 'FIXME: unknown')
@testing.resolve_artifact_names
def test_limit_3(self):
- """test that the ORDER BY is propagated from the inner select to the outer select, when using the
- 'wrapped' select statement resulting from the combination of eager loading and limit/offset clauses."""
+ """test that the ORDER BY is propagated from the inner
+ select to the outer select, when using the
+ 'wrapped' select statement resulting from the combination of
+ eager loading and limit/offset clauses."""
mapper(Item, items)
mapper(Order, orders, properties = dict(
- items = relationship(Item, secondary=order_items, lazy=False)
+ items = relationship(Item, secondary=order_items, lazy='joined')
))
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=False, order_by=addresses.c.id),
- orders = relationship(Order, lazy=False, order_by=orders.c.id),
+ addresses = relationship(Address, lazy='joined', order_by=addresses.c.id),
+ orders = relationship(Order, lazy='joined', order_by=orders.c.id),
))
sess = create_session()
@@ -556,11 +588,12 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_limit_4(self):
- # tests the LIMIT/OFFSET aliasing on a mapper against a select. original issue from ticket #904
+ # tests the LIMIT/OFFSET aliasing on a mapper
+ # against a select. original issue from ticket #904
sel = sa.select([users, addresses.c.email_address],
users.c.id==addresses.c.user_id).alias('useralias')
mapper(User, sel, properties={
- 'orders':relationship(Order, primaryjoin=sel.c.id==orders.c.user_id, lazy=False)
+ 'orders':relationship(Order, primaryjoin=sel.c.id==orders.c.user_id, lazy='joined')
})
mapper(Order, orders)
@@ -574,8 +607,33 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
)
@testing.resolve_artifact_names
+ def test_useget_cancels_eager(self):
+ """test that a one to many lazyload cancels the unnecessary
+ eager many-to-one join on the other side."""
+
+ mapper(User, users)
+ mapper(Address, addresses, properties={
+ 'user':relationship(User, lazy='joined', backref='addresses')
+ })
+
+ sess = create_session()
+ u1 = sess.query(User).filter(User.id==8).one()
+ def go():
+ eq_(u1.addresses[0].user, u1)
+ self.assert_sql_execution(testing.db, go,
+ CompiledSQL(
+ "SELECT addresses.id AS addresses_id, addresses.user_id AS "
+ "addresses_user_id, addresses.email_address AS "
+ "addresses_email_address FROM addresses WHERE :param_1 = "
+ "addresses.user_id",
+ {'param_1': 8})
+ )
+
+
+ @testing.resolve_artifact_names
def test_manytoone_limit(self):
- """test that the subquery wrapping only occurs with limit/offset and m2m or o2m joins present."""
+ """test that the subquery wrapping only occurs with
+ limit/offset and m2m or o2m joins present."""
mapper(User, users, properties=odict(
orders=relationship(Order, backref='user')
@@ -590,7 +648,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
sess = create_session()
self.assert_compile(
- sess.query(User).options(eagerload(User.orders)).limit(10),
+ sess.query(User).options(joinedload(User.orders)).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"orders_1.id AS orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
"orders_1_address_id, orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
@@ -601,7 +659,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(Order).options(eagerload(Order.user)).limit(10),
+ sess.query(Order).options(joinedload(Order.user)).limit(10),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, orders.address_id AS "
"orders_address_id, orders.description AS orders_description, orders.isopen AS orders_isopen, "
"users_1.id AS users_1_id, users_1.name AS users_1_name FROM orders LEFT OUTER JOIN users AS "
@@ -610,7 +668,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(Order).options(eagerload(Order.user, innerjoin=True)).limit(10),
+ sess.query(Order).options(joinedload(Order.user, innerjoin=True)).limit(10),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, orders.address_id AS "
"orders_address_id, orders.description AS orders_description, orders.isopen AS orders_isopen, "
"users_1.id AS users_1_id, users_1.name AS users_1_name FROM orders JOIN users AS "
@@ -619,7 +677,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(User).options(eagerload_all("orders.address")).limit(10),
+ sess.query(User).options(joinedload_all("orders.address")).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address, orders_1.id AS orders_1_id, "
@@ -632,7 +690,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(User).options(eagerload_all("orders.items"), eagerload("orders.address")),
+ sess.query(User).options(joinedload_all("orders.items"), joinedload("orders.address")),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS items_1_id, "
"items_1.description AS items_1_description, addresses_1.id AS addresses_1_id, "
"addresses_1.user_id AS addresses_1_user_id, addresses_1.email_address AS "
@@ -647,7 +705,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
)
self.assert_compile(
- sess.query(User).options(eagerload("orders"), eagerload("orders.address", innerjoin=True)).limit(10),
+ sess.query(User).options(joinedload("orders"), joinedload("orders.address", innerjoin=True)).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address, orders_1.id AS orders_1_id, "
@@ -663,7 +721,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_one_to_many_scalar(self):
mapper(User, users, properties = dict(
- address = relationship(mapper(Address, addresses), lazy=False, uselist=False)
+ address = relationship(mapper(Address, addresses),
+ lazy='joined', uselist=False)
))
q = create_session().query(User)
@@ -676,7 +735,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_many_to_one(self):
mapper(Address, addresses, properties = dict(
- user = relationship(mapper(User, users), lazy=False)
+ user = relationship(mapper(User, users), lazy='joined')
))
sess = create_session()
q = sess.query(Address)
@@ -704,7 +763,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
addresses.c.email_address != None
),
- lazy=False)
+ lazy='joined')
))
sess = create_session()
@@ -725,11 +784,11 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
contains a many-to-many relationship to a third object."""
mapper(User, users, properties={
- 'orders':relationship(Order, lazy=False, order_by=orders.c.id)
+ 'orders':relationship(Order, lazy='joined', order_by=orders.c.id)
})
mapper(Item, items)
mapper(Order, orders, properties = dict(
- items = relationship(Item, secondary=order_items, lazy=False, order_by=items.c.id)
+ items = relationship(Item, secondary=order_items, lazy='joined', order_by=items.c.id)
))
q = create_session().query(User)
@@ -743,16 +802,19 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_double_with_aggregate(self):
max_orders_by_user = sa.select([sa.func.max(orders.c.id).label('order_id')],
- group_by=[orders.c.user_id]).alias('max_orders_by_user')
+ group_by=[orders.c.user_id]
+ ).alias('max_orders_by_user')
- max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).alias('max_orders')
+ max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).\
+ alias('max_orders')
mapper(Order, orders)
mapper(User, users, properties={
- 'orders':relationship(Order, backref='user', lazy=False, order_by=orders.c.id),
+ 'orders':relationship(Order, backref='user', lazy='joined',
+ order_by=orders.c.id),
'max_order':relationship(
mapper(Order, max_orders, non_primary=True),
- lazy=False, uselist=False)
+ lazy='joined', uselist=False)
})
q = create_session().query(User)
@@ -776,18 +838,20 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_uselist_false_warning(self):
- """test that multiple rows received by a uselist=False raises a warning."""
+ """test that multiple rows received by a
+ uselist=False raises a warning."""
mapper(User, users, properties={
'order':relationship(Order, uselist=False)
})
mapper(Order, orders)
s = create_session()
- assert_raises(sa.exc.SAWarning, s.query(User).options(eagerload(User.order)).all)
+ assert_raises(sa.exc.SAWarning,
+ s.query(User).options(joinedload(User.order)).all)
@testing.resolve_artifact_names
def test_wide(self):
- mapper(Order, orders, properties={'items':relationship(Item, secondary=order_items, lazy=False,
+ mapper(Order, orders, properties={'items':relationship(Item, secondary=order_items, lazy='joined',
order_by=items.c.id)})
mapper(Item, items)
mapper(User, users, properties = dict(
@@ -805,7 +869,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
s = sa.select([orders], orders.c.isopen==1).alias('openorders')
mapper(Order, s, properties={
- 'user':relationship(User, lazy=False)
+ 'user':relationship(User, lazy='joined')
})
mapper(User, users)
mapper(Item, items)
@@ -823,10 +887,12 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_aliasing(self):
- """test that eager loading uses aliases to insulate the eager load from regular criterion against those tables."""
+ """test that eager loading uses aliases to insulate the eager
+ load from regular criterion against those tables."""
mapper(User, users, properties = dict(
- addresses = relationship(mapper(Address, addresses), lazy=False, order_by=addresses.c.id)
+ addresses = relationship(mapper(Address, addresses),
+ lazy='joined', order_by=addresses.c.id)
))
q = create_session().query(User)
l = q.filter(addresses.c.email_address == 'ed@lala.com').filter(
@@ -836,7 +902,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_inner_join(self):
mapper(User, users, properties = dict(
- addresses = relationship(mapper(Address, addresses), lazy=False, innerjoin=True, order_by=addresses.c.id)
+ addresses = relationship(mapper(Address, addresses), lazy='joined', innerjoin=True, order_by=addresses.c.id)
))
sess = create_session()
eq_(
@@ -865,14 +931,14 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
))
mapper(Item, items)
sess = create_session()
- self.assert_compile(sess.query(User).options(eagerload(User.orders, innerjoin=True)),
+ self.assert_compile(sess.query(User).options(joinedload(User.orders, innerjoin=True)),
"SELECT users.id AS users_id, users.name AS users_name, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
"FROM users JOIN orders AS orders_1 ON users.id = orders_1.user_id ORDER BY orders_1.id"
, use_default_dialect=True)
- self.assert_compile(sess.query(User).options(eagerload_all(User.orders, Order.items, innerjoin=True)),
+ self.assert_compile(sess.query(User).options(joinedload_all(User.orders, Order.items, innerjoin=True)),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS items_1_id, "
"items_1.description AS items_1_description, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
@@ -885,8 +951,8 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
def go():
eq_(
sess.query(User).options(
- eagerload(User.orders, innerjoin=True),
- eagerload(User.orders, Order.items, innerjoin=True)).
+ joinedload(User.orders, innerjoin=True),
+ joinedload(User.orders, Order.items, innerjoin=True)).
order_by(User.id).all(),
[User(id=7,
@@ -903,7 +969,7 @@ class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
# test that default innerjoin setting is used for options
self.assert_compile(
- sess.query(Order).options(eagerload(Order.user)).filter(Order.description == 'foo'),
+ sess.query(Order).options(joinedload(Order.user)).filter(Order.description == 'foo'),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, orders.address_id AS "
"orders_address_id, orders.description AS orders_description, orders.isopen AS "
"orders_isopen, users_1.id AS users_1_id, users_1.name AS users_1_name "
@@ -964,12 +1030,12 @@ class AddEntityTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_mapper_configured(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, lazy=False),
+ 'addresses':relationship(Address, lazy='joined'),
'orders':relationship(Order)
})
mapper(Address, addresses)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, lazy=False, order_by=items.c.id)
+ 'items':relationship(Item, secondary=order_items, lazy='joined', order_by=items.c.id)
})
mapper(Item, items)
@@ -998,15 +1064,15 @@ class AddEntityTest(_fixtures.FixtureTest):
oalias = sa.orm.aliased(Order)
def go():
- ret = sess.query(User, oalias).options(eagerload('addresses')).join(
+ ret = sess.query(User, oalias).options(joinedload('addresses')).join(
('orders', oalias)).order_by(User.id, oalias.id).all()
eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 6)
sess.expunge_all()
def go():
- ret = sess.query(User, oalias).options(eagerload('addresses'),
- eagerload(oalias.items)).join(
+ ret = sess.query(User, oalias).options(joinedload('addresses'),
+ joinedload(oalias.items)).join(
('orders', oalias)).order_by(User.id, oalias.id).all()
eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 1)
@@ -1053,13 +1119,15 @@ class OrderBySecondaryTest(_base.MappedTest):
class B(_base.ComparableEntity):pass
mapper(A, a, properties={
- 'bs':relationship(B, secondary=m2m, lazy=False, order_by=m2m.c.id)
+ 'bs':relationship(B, secondary=m2m, lazy='joined', order_by=m2m.c.id)
})
mapper(B, b)
sess = create_session()
- eq_(sess.query(A).all(), [A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
- A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])])
+ eq_(sess.query(A).all(), [
+ A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
+ A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
+ ])
class SelfReferentialEagerTest(_base.MappedTest):
@@ -1078,7 +1146,9 @@ class SelfReferentialEagerTest(_base.MappedTest):
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=False, join_depth=3, order_by=nodes.c.id)
+ 'children':relationship(Node,
+ lazy='joined',
+ join_depth=3, order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
@@ -1126,7 +1196,8 @@ class SelfReferentialEagerTest(_base.MappedTest):
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=False, join_depth=1, order_by=nodes.c.id)
+ 'children':relationship(Node, lazy='joined', join_depth=1,
+ order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
@@ -1166,7 +1237,8 @@ class SelfReferentialEagerTest(_base.MappedTest):
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=False, join_depth=3, order_by=nodes.c.id),
+ 'children':relationship(Node, lazy='joined', join_depth=3,
+ order_by=nodes.c.id),
'data':deferred(nodes.c.data)
})
sess = create_session()
@@ -1195,7 +1267,8 @@ class SelfReferentialEagerTest(_base.MappedTest):
def go():
eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
- sess.query(Node).options(undefer('data'), undefer('children.data')).first())
+ sess.query(Node).options(undefer('data'),
+ undefer('children.data')).first())
self.assert_sql_count(testing.db, go, 1)
@@ -1206,7 +1279,7 @@ class SelfReferentialEagerTest(_base.MappedTest):
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=True, order_by=nodes.c.id)
+ 'children':relationship(Node, lazy='select', order_by=nodes.c.id)
}, order_by=nodes.c.id)
sess = create_session()
n1 = Node(data='n1')
@@ -1220,7 +1293,8 @@ class SelfReferentialEagerTest(_base.MappedTest):
sess.flush()
sess.expunge_all()
def go():
- d = sess.query(Node).filter_by(data='n1').options(eagerload('children.children')).first()
+ d = sess.query(Node).filter_by(data='n1').\
+ options(joinedload('children.children')).first()
eq_(Node(data='n1', children=[
Node(data='n11'),
Node(data='n12', children=[
@@ -1233,7 +1307,8 @@ class SelfReferentialEagerTest(_base.MappedTest):
self.assert_sql_count(testing.db, go, 2)
def go():
- d = sess.query(Node).filter_by(data='n1').options(eagerload('children.children')).first()
+ d = sess.query(Node).filter_by(data='n1').\
+ options(joinedload('children.children')).first()
# test that the query isn't wrapping the initial query for eager loading.
self.assert_sql_execution(testing.db, go,
@@ -1252,7 +1327,7 @@ class SelfReferentialEagerTest(_base.MappedTest):
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=False)
+ 'children':relationship(Node, lazy='joined')
})
sess = create_session()
n1 = Node(data='n1')
@@ -1282,14 +1357,14 @@ class MixedSelfReferentialEagerTest(_base.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('a_table', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True)
- )
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True)
+ )
Table('b_table', metadata,
- Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
- Column('parent_b1_id', Integer, ForeignKey('b_table.id')),
- Column('parent_a_id', Integer, ForeignKey('a_table.id')),
- Column('parent_b2_id', Integer, ForeignKey('b_table.id')))
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('parent_b1_id', Integer, ForeignKey('b_table.id')),
+ Column('parent_a_id', Integer, ForeignKey('a_table.id')),
+ Column('parent_b2_id', Integer, ForeignKey('b_table.id')))
@classmethod
@@ -1341,7 +1416,11 @@ class MixedSelfReferentialEagerTest(_base.MappedTest):
session = create_session()
def go():
eq_(
- session.query(B).options(eagerload('parent_b1'),eagerload('parent_b2'),eagerload('parent_z')).
+ session.query(B).\
+ options(
+ joinedload('parent_b1'),
+ joinedload('parent_b2'),
+ joinedload('parent_z')).
filter(B.id.in_([2, 8, 11])).order_by(B.id).all(),
[
B(id=2, parent_z=A(id=1), parent_b1=B(id=1), parent_b2=None),
@@ -1374,7 +1453,7 @@ class SelfReferentialM2MEagerTest(_base.MappedTest):
'children': relationship(Widget, secondary=widget_rel,
primaryjoin=widget_rel.c.parent_id==widget.c.id,
secondaryjoin=widget_rel.c.child_id==widget.c.id,
- lazy=False, join_depth=1,
+ lazy='joined', join_depth=1,
)
})
@@ -1422,7 +1501,7 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
sess.query(User, Order).filter(User.id==Order.user_id).\
- options(eagerload(User.addresses), eagerload(Order.items)).filter(User.id==9).\
+ options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\
order_by(User.id, Order.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
@@ -1434,7 +1513,7 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
- sess.query(User, Order).join(User.orders).options(eagerload(User.addresses), eagerload(Order.items)).filter(User.id==9).\
+ sess.query(User, Order).join(User.orders).options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\
order_by(User.id, Order.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
@@ -1472,8 +1551,8 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
),
],
sess.query(User, Order, u1, o1).\
- join((Order, User.orders)).options(eagerload(User.addresses), eagerload(Order.items)).filter(User.id==9).\
- join((o1, u1.orders)).options(eagerload(u1.addresses), eagerload(o1.items)).filter(u1.id==7).\
+ join((Order, User.orders)).options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\
+ join((o1, u1.orders)).options(joinedload(u1.addresses), joinedload(o1.items)).filter(u1.id==7).\
filter(Order.id<o1.id).\
order_by(User.id, Order.id, u1.id, o1.id).all(),
)
@@ -1495,7 +1574,7 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
sess.query(User, oalias).filter(User.id==oalias.user_id).\
- options(eagerload(User.addresses), eagerload(oalias.items)).filter(User.id==9).\
+ options(joinedload(User.addresses), joinedload(oalias.items)).filter(User.id==9).\
order_by(User.id, oalias.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
@@ -1507,7 +1586,7 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
- sess.query(User, oalias).join((User.orders, oalias)).options(eagerload(User.addresses), eagerload(oalias.items)).filter(User.id==9).\
+ sess.query(User, oalias).join((User.orders, oalias)).options(joinedload(User.addresses), joinedload(oalias.items)).filter(User.id==9).\
order_by(User.id, oalias.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
@@ -1517,7 +1596,7 @@ class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
# improper setup: oalias in the columns clause but join to usual
# orders alias. this should create two FROM clauses even though the
# query has a from_clause set up via the join
- self.assert_compile(sess.query(User, oalias).join(User.orders).options(eagerload(oalias.items)).with_labels().statement,
+ self.assert_compile(sess.query(User, oalias).join(User.orders).options(joinedload(oalias.items)).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, orders_1.id AS orders_1_id, "\
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "\
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen, items_1.id AS items_1_id, "\
@@ -1559,7 +1638,7 @@ class CyclicalInheritingEagerTest(_base.MappedTest):
mapper(T, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1')
mapper(SubT, None, inherits=T, polymorphic_identity='subt1', properties={
- 't2s':relationship(SubT2, lazy=False, backref=sa.orm.backref('subt', lazy=False))
+ 't2s':relationship(SubT2, lazy='joined', backref=sa.orm.backref('subt', lazy='joined'))
})
mapper(T2, t2, polymorphic_on=t2.c.type, polymorphic_identity='t2')
mapper(SubT2, None, inherits=T2, polymorphic_identity='subt2')
@@ -1626,7 +1705,7 @@ class SubqueryTest(_base.MappedTest):
mapper(User, users_table, properties={
- 'tags': relationship(Tag, backref='user', lazy=False),
+ 'tags': relationship(Tag, backref='user', lazy='joined'),
'query_score': sa.orm.column_property(user_score),
})
@@ -1657,7 +1736,7 @@ class CorrelatedSubqueryTest(_base.MappedTest):
"""
- # another argument for eagerload learning about inner joins
+ # another argument for joinedload learning about inner joins
__requires__ = ('correlated_outer_joins', )
@@ -1773,7 +1852,7 @@ class CorrelatedSubqueryTest(_base.MappedTest):
sess = create_session()
def go():
eq_(
- sess.query(User).order_by(User.name).options(eagerload('stuff')).all(),
+ sess.query(User).order_by(User.name).options(joinedload('stuff')).all(),
[
User(name='user1', stuff=[Stuff(id=2)]),
User(name='user2', stuff=[Stuff(id=4)]),
@@ -1793,7 +1872,7 @@ class CorrelatedSubqueryTest(_base.MappedTest):
sess = create_session()
def go():
eq_(
- sess.query(User).order_by(User.name).options(eagerload('stuff')).first(),
+ sess.query(User).order_by(User.name).options(joinedload('stuff')).first(),
User(name='user1', stuff=[Stuff(id=2)])
)
self.assert_sql_count(testing.db, go, 1)
@@ -1801,7 +1880,7 @@ class CorrelatedSubqueryTest(_base.MappedTest):
sess = create_session()
def go():
eq_(
- sess.query(User).filter(User.id==2).options(eagerload('stuff')).one(),
+ sess.query(User).filter(User.id==2).options(joinedload('stuff')).one(),
User(name='user2', stuff=[Stuff(id=4)])
)
self.assert_sql_count(testing.db, go, 1)
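
A minimal sketch, assuming the usual User/Address fixtures, of how the renames
exercised throughout this file line up (the old spellings remain available as
synonyms in 0.6):

    from sqlalchemy.orm import mapper, relationship, joinedload

    # relationship(lazy=...) moves from booleans/None to strings:
    #   lazy=False -> lazy='joined'  (joined eager load)
    #   lazy=True  -> lazy='select'  (lazy load, the default)
    #   lazy=None  -> lazy='noload'  (no load at all)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='joined'),
    })

    # query options follow suit: eagerload() -> joinedload(),
    # eagerload_all() -> joinedload_all()
    q = session.query(User).options(joinedload('addresses'))
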
diff --git a/test/orm/test_expire.py b/test/orm/test_expire.py
index e801fe6e8..926ae0629 100644
--- a/test/orm/test_expire.py
+++ b/test/orm/test_expire.py
@@ -9,7 +9,7 @@ from sqlalchemy.test.schema import Table
from sqlalchemy.test.schema import Column
from sqlalchemy.orm import mapper, relationship, create_session, \
attributes, deferred, exc as orm_exc, defer, undefer,\
- strategies, state, lazyload
+ strategies, state, lazyload, backref
from test.orm import _base, _fixtures
@@ -295,11 +295,63 @@ class ExpireTest(_fixtures.FixtureTest):
u.addresses[0].email_address = 'someotheraddress'
s.expire(u)
- u.name
- print attributes.instance_state(u).dict
assert u.addresses[0].email_address == 'ed@wood.com'
@testing.resolve_artifact_names
+ def test_refresh_cascade(self):
+ mapper(User, users, properties={
+ 'addresses':relationship(Address, cascade="all, refresh-expire")
+ })
+ mapper(Address, addresses)
+ s = create_session()
+ u = s.query(User).get(8)
+ assert u.addresses[0].email_address == 'ed@wood.com'
+
+ u.addresses[0].email_address = 'someotheraddress'
+ s.refresh(u)
+ assert u.addresses[0].email_address == 'ed@wood.com'
+
+ def test_expire_cascade_pending_orphan(self):
+ cascade = 'save-update, refresh-expire, delete, delete-orphan'
+ self._test_cascade_to_pending(cascade, True)
+
+ def test_refresh_cascade_pending_orphan(self):
+ cascade = 'save-update, refresh-expire, delete, delete-orphan'
+ self._test_cascade_to_pending(cascade, False)
+
+ def test_expire_cascade_pending(self):
+ cascade = 'save-update, refresh-expire'
+ self._test_cascade_to_pending(cascade, True)
+
+ def test_refresh_cascade_pending(self):
+ cascade = 'save-update, refresh-expire'
+ self._test_cascade_to_pending(cascade, False)
+
+ @testing.resolve_artifact_names
+ def _test_cascade_to_pending(self, cascade, expire_or_refresh):
+ mapper(User, users, properties={
+ 'addresses':relationship(Address, cascade=cascade)
+ })
+ mapper(Address, addresses)
+ s = create_session()
+
+ u = s.query(User).get(8)
+ a = Address(id=12, email_address='foobar')
+
+ u.addresses.append(a)
+ if expire_or_refresh:
+ s.expire(u)
+ else:
+ s.refresh(u)
+ if "delete-orphan" in cascade:
+ assert a not in s
+ else:
+ assert a in s
+
+ assert a not in u.addresses
+ s.flush()
+
+ @testing.resolve_artifact_names
def test_expired_lazy(self):
mapper(User, users, properties={
'addresses':relationship(Address, backref='user'),
@@ -324,7 +376,7 @@ class ExpireTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_expired_eager(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=False),
+ 'addresses':relationship(Address, backref='user', lazy='joined'),
})
mapper(Address, addresses)
@@ -359,7 +411,7 @@ class ExpireTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_relationship_changes_preserved(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=False),
+ 'addresses':relationship(Address, backref='user', lazy='joined'),
})
mapper(Address, addresses)
sess = create_session()
@@ -373,19 +425,19 @@ class ExpireTest(_fixtures.FixtureTest):
assert len(u.addresses) == 2
@testing.resolve_artifact_names
- def test_eagerload_props_dontload(self):
+ def test_joinedload_props_dontload(self):
# relationships currently have to load separately from scalar instances.
# the use case is: expire "addresses". then access it. lazy load
# fires off to load "addresses", but needs foreign key or primary key
# attributes in order to lazy load; hits those attributes, such as
# below it hits "u.id". "u.id" triggers full unexpire operation,
- # eagerloads addresses since lazy=False. this is all wihtin lazy load
- # which fires unconditionally; so an unnecessary eagerload (or
+ # joinedloads addresses since lazy='joined'. this is all within lazy load
+ # which fires unconditionally; so an unnecessary joinedload (or
# lazyload) was issued. would prefer not to complicate lazyloading to
# "figure out" that the operation should be aborted right now.
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=False),
+ 'addresses':relationship(Address, backref='user', lazy='joined'),
})
mapper(Address, addresses)
sess = create_session()
@@ -514,7 +566,7 @@ class ExpireTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_partial_expire_eager(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=False),
+ 'addresses':relationship(Address, backref='user', lazy='joined'),
})
mapper(Address, addresses)
@@ -568,7 +620,7 @@ class ExpireTest(_fixtures.FixtureTest):
sess.expire(u, ['name', 'addresses'])
assert 'name' not in u.__dict__
assert 'addresses' not in u.__dict__
- (sess.query(User).options(sa.orm.eagerload('addresses')).
+ (sess.query(User).options(sa.orm.joinedload('addresses')).
filter_by(id=8).all())
assert 'name' in u.__dict__
assert 'addresses' in u.__dict__
@@ -641,9 +693,9 @@ class ExpireTest(_fixtures.FixtureTest):
self.assert_sql_count(testing.db, go, 1)
@testing.resolve_artifact_names
- def test_eagerload_query_refreshes(self):
+ def test_joinedload_query_refreshes(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=False),
+ 'addresses':relationship(Address, backref='user', lazy='joined'),
})
mapper(Address, addresses)
@@ -660,7 +712,7 @@ class ExpireTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_expire_all(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=False),
+ 'addresses':relationship(Address, backref='user', lazy='joined'),
})
mapper(Address, addresses)
@@ -768,7 +820,7 @@ class ExpireTest(_fixtures.FixtureTest):
def test_state_noload_to_lazy(self):
"""Behavioral test to verify the current activity of loader callables."""
- mapper(User, users, properties={'addresses':relationship(Address, lazy=None)})
+ mapper(User, users, properties={'addresses':relationship(Address, lazy='noload')})
mapper(Address, addresses)
sess = create_session()
@@ -1002,7 +1054,7 @@ class RefreshTest(_fixtures.FixtureTest):
"""test that a refresh/expire operation loads rows properly and sends correct "isnew" state to eager loaders"""
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=False)
+ 'addresses':relationship(mapper(Address, addresses), lazy='joined')
})
s = create_session()
@@ -1025,7 +1077,7 @@ class RefreshTest(_fixtures.FixtureTest):
s = create_session()
mapper(Address, addresses)
- mapper(User, users, properties = dict(addresses=relationship(Address,cascade="all, delete-orphan",lazy=False)) )
+ mapper(User, users, properties = dict(addresses=relationship(Address,cascade="all, delete-orphan",lazy='joined')) )
u = User()
u.name='Justin'
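
The new refresh/expire cascade tests above depend on behavior along these
lines; a minimal sketch using the same User/Address fixture:

    from sqlalchemy.orm import mapper, relationship, create_session

    mapper(User, users, properties={
        'addresses': relationship(Address, cascade="all, refresh-expire")
    })
    mapper(Address, addresses)

    sess = create_session()
    u = sess.query(User).get(8)
    u.addresses[0].email_address = 'someotheraddress'

    # "refresh-expire" cascades the refresh to already-loaded children, so
    # the pending in-Python change is overwritten from the database row
    sess.refresh(u)
    assert u.addresses[0].email_address == 'ed@wood.com'
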
diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py
index 502747bfd..f6147a3eb 100644
--- a/test/orm/test_lazy_relations.py
+++ b/test/orm/test_lazy_relations.py
@@ -22,7 +22,7 @@ class LazyTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_basic(self):
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=True)
+ 'addresses':relationship(mapper(Address, addresses), lazy='select')
})
sess = create_session()
q = sess.query(User)
@@ -33,7 +33,7 @@ class LazyTest(_fixtures.FixtureTest):
"""test the error raised when parent object is not bound."""
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=True)
+ 'addresses':relationship(mapper(Address, addresses), lazy='select')
})
sess = create_session()
q = sess.query(User)
@@ -44,7 +44,7 @@ class LazyTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_orderby(self):
mapper(User, users, properties = {
- 'addresses':relationship(mapper(Address, addresses), lazy=True, order_by=addresses.c.email_address),
+ 'addresses':relationship(mapper(Address, addresses), lazy='select', order_by=addresses.c.email_address),
})
q = create_session().query(User)
assert [
@@ -69,7 +69,7 @@ class LazyTest(_fixtures.FixtureTest):
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=True),
+ addresses = relationship(Address, lazy='select'),
))
q = create_session().query(User)
l = q.filter(users.c.id==addresses.c.user_id).order_by(addresses.c.email_address).all()
@@ -92,7 +92,7 @@ class LazyTest(_fixtures.FixtureTest):
mapper(Address, addresses)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=True, order_by=[sa.desc(addresses.c.email_address)]),
+ addresses = relationship(Address, lazy='select', order_by=[sa.desc(addresses.c.email_address)]),
))
sess = create_session()
assert [
@@ -115,7 +115,7 @@ class LazyTest(_fixtures.FixtureTest):
"""test that a lazily loaded child object is not marked as an orphan"""
mapper(User, users, properties={
- 'addresses':relationship(Address, cascade="all,delete-orphan", lazy=True)
+ 'addresses':relationship(Address, cascade="all,delete-orphan", lazy='select')
})
mapper(Address, addresses)
@@ -130,11 +130,11 @@ class LazyTest(_fixtures.FixtureTest):
mapper(Item, items)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, lazy=True)
+ 'items':relationship(Item, secondary=order_items, lazy='select')
})
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=True),
- 'orders':relationship(Order, lazy=True)
+ 'addresses':relationship(mapper(Address, addresses), lazy='select'),
+ 'orders':relationship(Order, lazy='select')
})
sess = create_session()
@@ -151,11 +151,11 @@ class LazyTest(_fixtures.FixtureTest):
def test_distinct(self):
mapper(Item, items)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, lazy=True)
+ 'items':relationship(Item, secondary=order_items, lazy='select')
})
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses), lazy=True),
- 'orders':relationship(Order, lazy=True)
+ 'addresses':relationship(mapper(Address, addresses), lazy='select'),
+ 'orders':relationship(Order, lazy='select')
})
sess = create_session()
@@ -182,7 +182,7 @@ class LazyTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_one_to_many_scalar(self):
mapper(User, users, properties = dict(
- address = relationship(mapper(Address, addresses), lazy=True, uselist=False)
+ address = relationship(mapper(Address, addresses), lazy='select', uselist=False)
))
q = create_session().query(User)
l = q.filter(users.c.id == 7).all()
@@ -224,8 +224,8 @@ class LazyTest(_fixtures.FixtureTest):
closed_mapper = mapper(Order, closedorders, non_primary=True)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy = True),
- open_orders = relationship(open_mapper, primaryjoin = sa.and_(openorders.c.isopen == 1, users.c.id==openorders.c.user_id), lazy=True),
- closed_orders = relationship(closed_mapper, primaryjoin = sa.and_(closedorders.c.isopen == 0, users.c.id==closedorders.c.user_id), lazy=True)
+ open_orders = relationship(open_mapper, primaryjoin = sa.and_(openorders.c.isopen == 1, users.c.id==openorders.c.user_id), lazy='select'),
+ closed_orders = relationship(closed_mapper, primaryjoin = sa.and_(closedorders.c.isopen == 0, users.c.id==closedorders.c.user_id), lazy='select')
))
q = create_session().query(User)
@@ -262,7 +262,7 @@ class LazyTest(_fixtures.FixtureTest):
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
- keywords = relationship(Keyword, secondary=item_keywords, lazy=True),
+ keywords = relationship(Keyword, secondary=item_keywords, lazy='select'),
))
q = create_session().query(Item)
@@ -280,7 +280,7 @@ class LazyTest(_fixtures.FixtureTest):
addresses.c.user_id==users.c.id
):
mapper(Address, addresses, properties = dict(
- user = relationship(mapper(User, users), lazy=True, primaryjoin=pj)
+ user = relationship(mapper(User, users), lazy='select', primaryjoin=pj)
))
sess = create_session()
@@ -349,7 +349,7 @@ class LazyTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_many_to_one(self):
mapper(Address, addresses, properties = dict(
- user = relationship(mapper(User, users), lazy=True)
+ user = relationship(mapper(User, users), lazy='select')
))
sess = create_session()
q = sess.query(Address)
diff --git a/test/orm/test_manytomany.py b/test/orm/test_manytomany.py
index bb7ca8783..84c60fc28 100644
--- a/test/orm/test_manytomany.py
+++ b/test/orm/test_manytomany.py
@@ -88,7 +88,7 @@ class M2MTest(_base.MappedTest):
Place.mapper, secondary=place_place, primaryjoin=place.c.place_id==place_place.c.pl1_id,
secondaryjoin=place.c.place_id==place_place.c.pl2_id,
order_by=place_place.c.pl2_id,
- lazy=True,
+ lazy='select',
))
sess = create_session()
@@ -134,12 +134,12 @@ class M2MTest(_base.MappedTest):
two different association tables. aliases are required."""
Place.mapper = mapper(Place, place, properties = {
- 'thingies':relationship(mapper(PlaceThingy, place_thingy), lazy=False)
+ 'thingies':relationship(mapper(PlaceThingy, place_thingy), lazy='joined')
})
Transition.mapper = mapper(Transition, transition, properties = dict(
- inputs = relationship(Place.mapper, place_output, lazy=False),
- outputs = relationship(Place.mapper, place_input, lazy=False),
+ inputs = relationship(Place.mapper, place_output, lazy='joined'),
+ outputs = relationship(Place.mapper, place_input, lazy='joined'),
)
)
@@ -164,8 +164,8 @@ class M2MTest(_base.MappedTest):
"""tests a many-to-many backrefs"""
Place.mapper = mapper(Place, place)
Transition.mapper = mapper(Transition, transition, properties = dict(
- inputs = relationship(Place.mapper, place_output, lazy=True, backref='inputs'),
- outputs = relationship(Place.mapper, place_input, lazy=True, backref='outputs'),
+ inputs = relationship(Place.mapper, place_output, lazy='select', backref='inputs'),
+ outputs = relationship(Place.mapper, place_input, lazy='select', backref='outputs'),
)
)
@@ -263,7 +263,7 @@ class M2MTest2(_base.MappedTest):
mapper(Student, student)
mapper(Course, course, properties = {
- 'students': relationship(Student, enroll, lazy=True,
+ 'students': relationship(Student, enroll, lazy='select',
backref='courses')})
sess = create_session()
@@ -316,11 +316,11 @@ class M2MTest3(_base.MappedTest):
mapper(A, a, properties={
'tbs': relationship(B, primaryjoin=sa.and_(b.c.a1 == a.c.a1,
b.c.b2 == True),
- lazy=False)})
+ lazy='joined')})
mapper(C, c, properties={
- 'a1s': relationship(A, secondary=c2a1, lazy=False),
- 'a2s': relationship(A, secondary=c2a2, lazy=False)})
+ 'a1s': relationship(A, secondary=c2a1, lazy='joined'),
+ 'a2s': relationship(A, secondary=c2a2, lazy='joined')})
assert create_session().query(C).with_labels().statement is not None
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 09d1387f7..02be04edc 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -674,7 +674,7 @@ class MapperTest(_fixtures.FixtureTest):
def test_many_to_many_count(self):
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, item_keywords, lazy=True)))
+ keywords = relationship(Keyword, item_keywords, lazy='select')))
session = create_session()
q = (session.query(Item).
@@ -731,7 +731,7 @@ class MapperTest(_fixtures.FixtureTest):
uname = extendedproperty(_get_name, _set_name)
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=True),
+ addresses = relationship(mapper(Address, addresses), lazy='select'),
uname = synonym('name'),
adlist = synonym('addresses'),
adname = synonym('addresses')
@@ -810,7 +810,7 @@ class MapperTest(_fixtures.FixtureTest):
mapper(Address, addresses)
mapper(User, users, properties = {
- 'addresses':relationship(Address, lazy=True),
+ 'addresses':relationship(Address, lazy='select'),
'name':synonym('_name', map_column=True)
})
@@ -1086,7 +1086,7 @@ class OptionsTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_synonym_options(self):
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=True,
+ addresses = relationship(mapper(Address, addresses), lazy='select',
order_by=addresses.c.id),
adlist = synonym('addresses')))
@@ -1095,7 +1095,7 @@ class OptionsTest(_fixtures.FixtureTest):
sess = create_session()
u = (sess.query(User).
order_by(User.id).
- options(sa.orm.eagerload('adlist')).
+ options(sa.orm.joinedload('adlist')).
filter_by(name='jack')).one()
eq_(u.adlist,
[self.static.user_address_result[0].addresses[0]])
@@ -1111,7 +1111,7 @@ class OptionsTest(_fixtures.FixtureTest):
sess = create_session()
l = (sess.query(User).
order_by(User.id).
- options(sa.orm.eagerload('addresses'))).all()
+ options(sa.orm.joinedload('addresses'))).all()
def go():
eq_(l, self.static.user_address_result)
@@ -1121,11 +1121,11 @@ class OptionsTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_eager_options_with_limit(self):
mapper(User, users, properties=dict(
- addresses=relationship(mapper(Address, addresses), lazy=True)))
+ addresses=relationship(mapper(Address, addresses), lazy='select')))
sess = create_session()
u = (sess.query(User).
- options(sa.orm.eagerload('addresses')).
+ options(sa.orm.joinedload('addresses')).
filter_by(id=8)).one()
def go():
@@ -1143,7 +1143,7 @@ class OptionsTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_lazy_options_with_limit(self):
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=False)))
+ addresses = relationship(mapper(Address, addresses), lazy='joined')))
sess = create_session()
u = (sess.query(User).
@@ -1159,7 +1159,7 @@ class OptionsTest(_fixtures.FixtureTest):
def test_eager_degrade(self):
"""An eager relationship automatically degrades to a lazy relationship if eager columns are not available"""
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=False)))
+ addresses = relationship(mapper(Address, addresses), lazy='joined')))
sess = create_session()
# first test straight eager load, 1 statement
@@ -1192,17 +1192,17 @@ class OptionsTest(_fixtures.FixtureTest):
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
- lazy=False,
+ lazy='joined',
order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
- items=relationship(Item, secondary=order_items, lazy=False,
+ items=relationship(Item, secondary=order_items, lazy='joined',
order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
- addresses=relationship(Address, lazy=False,
+ addresses=relationship(Address, lazy='joined',
order_by=addresses.c.id),
- orders=relationship(Order, lazy=False,
+ orders=relationship(Order, lazy='joined',
order_by=orders.c.id)))
sess = create_session()
@@ -1227,7 +1227,7 @@ class OptionsTest(_fixtures.FixtureTest):
def test_lazy_options(self):
"""An eager relationship can be upgraded to a lazy relationship."""
mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=False)
+ addresses = relationship(mapper(Address, addresses), lazy='joined')
))
sess = create_session()
@@ -1252,7 +1252,7 @@ class OptionsTest(_fixtures.FixtureTest):
sess = create_session()
oalias = aliased(Order)
- opt1 = sa.orm.eagerload(User.orders, Order.items)
+ opt1 = sa.orm.joinedload(User.orders, Order.items)
opt2a, opt2b = sa.orm.contains_eager(User.orders, Order.items, alias=oalias)
u1 = sess.query(User).join((oalias, User.orders)).options(opt1, opt2a, opt2b).first()
ustate = attributes.instance_state(u1)
@@ -1284,7 +1284,7 @@ class DeepOptionsTest(_fixtures.FixtureTest):
def test_deep_options_1(self):
sess = create_session()
- # eagerload nothing.
+ # joinedload nothing.
u = sess.query(User).all()
def go():
x = u[0].orders[1].items[0].keywords[1]
@@ -1292,12 +1292,20 @@ class DeepOptionsTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_deep_options_2(self):
+ """test (joined|subquery)load_all() options"""
+
+ sess = create_session()
+
+ l = (sess.query(User).
+ options(sa.orm.joinedload_all('orders.items.keywords'))).all()
+ def go():
+ x = l[0].orders[1].items[0].keywords[1]
+ self.sql_count_(0, go)
+
sess = create_session()
- # eagerload orders.items.keywords; eagerload_all() implies eager load
- # of orders, orders.items
l = (sess.query(User).
- options(sa.orm.eagerload_all('orders.items.keywords'))).all()
+ options(sa.orm.subqueryload_all('orders.items.keywords'))).all()
def go():
x = l[0].orders[1].items[0].keywords[1]
self.sql_count_(0, go)
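
A minimal sketch of the distinction these deep-options tests draw, assuming
the User/Order/Item/Keyword fixtures above:

    import sqlalchemy.orm as orm

    # joinedload_all() applies joined loading to every link in the path
    q1 = sess.query(User).options(orm.joinedload_all('orders.items.keywords'))

    # plain joinedload() on a dotted path affects only the last link;
    # 'orders' and 'orders.items' still lazy load
    q2 = sess.query(User).options(orm.joinedload('orders.items.keywords'))
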
@@ -1309,9 +1317,9 @@ class DeepOptionsTest(_fixtures.FixtureTest):
# same thing, with separate options calls
q2 = (sess.query(User).
- options(sa.orm.eagerload('orders')).
- options(sa.orm.eagerload('orders.items')).
- options(sa.orm.eagerload('orders.items.keywords')))
+ options(sa.orm.joinedload('orders')).
+ options(sa.orm.joinedload('orders.items')).
+ options(sa.orm.joinedload('orders.items.keywords')))
u = q2.all()
def go():
x = u[0].orders[1].items[0].keywords[1]
@@ -1325,12 +1333,20 @@ class DeepOptionsTest(_fixtures.FixtureTest):
sa.exc.ArgumentError,
r"Can't find entity Mapper\|Order\|orders in Query. "
r"Current list: \['Mapper\|User\|users'\]",
- sess.query(User).options, sa.orm.eagerload(Order.items))
+ sess.query(User).options, sa.orm.joinedload(Order.items))
- # eagerload "keywords" on items. it will lazy load "orders", then
+ # joinedload "keywords" on items. it will lazy load "orders", then
# lazy load the "items" on the order, but on "items" it will eager
# load the "keywords"
- q3 = sess.query(User).options(sa.orm.eagerload('orders.items.keywords'))
+ q3 = sess.query(User).options(sa.orm.joinedload('orders.items.keywords'))
+ u = q3.all()
+ def go():
+ x = u[0].orders[1].items[0].keywords[1]
+ self.sql_count_(2, go)
+
+ sess = create_session()
+ q3 = sess.query(User).options(
+ sa.orm.joinedload(User.orders, Order.items, Item.keywords))
u = q3.all()
def go():
x = u[0].orders[1].items[0].keywords[1]
@@ -1850,10 +1866,10 @@ class SecondaryOptionsTest(_base.MappedTest):
)
@testing.resolve_artifact_names
- def test_eagerload_on_other(self):
+ def test_joinedload_on_other(self):
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.eagerload(Child1.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.related)).order_by(Child1.id)
def go():
eq_(
@@ -1871,7 +1887,7 @@ class SecondaryOptionsTest(_base.MappedTest):
"SELECT base.id AS base_id, child2.id AS child2_id, base.type AS base_type "
"FROM base JOIN child2 ON base.id = child2.id WHERE base.id = :param_1",
-# eagerload- this shouldn't happen
+# joinedload- this shouldn't happen
# "SELECT base.id AS base_id, child2.id AS child2_id, base.type AS base_type, "
# "related_1.id AS related_1_id FROM base JOIN child2 ON base.id = child2.id "
# "LEFT OUTER JOIN related AS related_1 ON base.id = related_1.id WHERE base.id = :param_1",
@@ -1880,10 +1896,10 @@ class SecondaryOptionsTest(_base.MappedTest):
)
@testing.resolve_artifact_names
- def test_eagerload_on_same(self):
+ def test_joinedload_on_same(self):
sess = create_session()
- child1s = sess.query(Child1).join(Child1.related).options(sa.orm.eagerload(Child1.child2, Child2.related)).order_by(Child1.id)
+ child1s = sess.query(Child1).join(Child1.related).options(sa.orm.joinedload(Child1.child2, Child2.related)).order_by(Child1.id)
def go():
eq_(
@@ -1894,7 +1910,7 @@ class SecondaryOptionsTest(_base.MappedTest):
c1 = child1s[0]
- # this *does* eagerload
+ # this *does* joinedload
self.assert_sql_execution(
testing.db,
lambda: c1.child2,
@@ -1964,17 +1980,17 @@ class DeferredPopulationTest(_base.MappedTest):
self._test(thing)
@testing.resolve_artifact_names
- def test_eagerload_with_clear(self):
+ def test_joinedload_with_clear(self):
session = create_session()
- human = session.query(Human).options(sa.orm.eagerload("thing")).first()
+ human = session.query(Human).options(sa.orm.joinedload("thing")).first()
session.expunge_all()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@testing.resolve_artifact_names
- def test_eagerload_no_clear(self):
+ def test_joinedload_no_clear(self):
session = create_session()
- human = session.query(Human).options(sa.orm.eagerload("thing")).first()
+ human = session.query(Human).options(sa.orm.joinedload("thing")).first()
thing = session.query(Thing).options(sa.orm.undefer("name")).first()
self._test(thing)
@@ -2084,7 +2100,7 @@ class CompositeTypesTest(_base.MappedTest):
sess.expunge_all()
def go():
g2 = (sess.query(Graph).
- options(sa.orm.eagerload('edges'))).get([g.id, g.version_id])
+ options(sa.orm.joinedload('edges'))).get([g.id, g.version_id])
for e1, e2 in zip(g.edges, g2.edges):
eq_(e1.start, e2.start)
eq_(e1.end, e2.end)
@@ -2301,7 +2317,7 @@ class NoLoadTest(_fixtures.FixtureTest):
def test_basic(self):
"""A basic one-to-many lazy load"""
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=None)
+ addresses = relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m)
l = [None]
@@ -2318,7 +2334,7 @@ class NoLoadTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_options(self):
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=None)
+ addresses = relationship(mapper(Address, addresses), lazy='noload')
))
q = create_session().query(m).options(sa.orm.lazyload('addresses'))
l = [None]
@@ -2700,7 +2716,7 @@ class RequirementsTest(_base.MappedTest):
h1.h2s.extend([H2(), H2()])
s.flush()
- h1s = s.query(H1).options(sa.orm.eagerload('h2s')).all()
+ h1s = s.query(H1).options(sa.orm.joinedload('h2s')).all()
eq_(len(h1s), 5)
self.assert_unordered_result(h1s, H1,
@@ -2712,12 +2728,12 @@ class RequirementsTest(_base.MappedTest):
{'h2s': []},
{'h2s': (H2, [{'value': 'abc'}])})
- h1s = s.query(H1).options(sa.orm.eagerload('h3s')).all()
+ h1s = s.query(H1).options(sa.orm.joinedload('h3s')).all()
eq_(len(h1s), 5)
- h1s = s.query(H1).options(sa.orm.eagerload_all('t6a.h1b'),
- sa.orm.eagerload('h2s'),
- sa.orm.eagerload_all('h3s.h1s')).all()
+ h1s = s.query(H1).options(sa.orm.joinedload_all('t6a.h1b'),
+ sa.orm.joinedload('h2s'),
+ sa.orm.joinedload_all('h3s.h1s')).all()
eq_(len(h1s), 5)
@testing.resolve_artifact_names
diff --git a/test/orm/test_merge.py b/test/orm/test_merge.py
index 9f8861ad8..e80b92699 100644
--- a/test/orm/test_merge.py
+++ b/test/orm/test_merge.py
@@ -708,7 +708,7 @@ class MergeTest(_fixtures.FixtureTest):
sess.flush()
sess2 = create_session()
- u2 = sess2.query(User).options(sa.orm.eagerload('addresses')).get(7)
+ u2 = sess2.query(User).options(sa.orm.joinedload('addresses')).get(7)
sess3 = create_session()
u3 = sess3.merge(u2, load=False)
diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py
index 806558a2b..1befbe8c0 100644
--- a/test/orm/test_naturalpks.py
+++ b/test/orm/test_naturalpks.py
@@ -423,7 +423,7 @@ class ReversePKsTest(_base.MappedTest):
class SelfRefTest(_base.MappedTest):
- __unsupported_on__ = 'mssql' # mssql doesn't allow ON UPDATE on self-referential keys
+ __unsupported_on__ = ('mssql',) # mssql doesn't allow ON UPDATE on self-referential keys
@classmethod
def define_tables(cls, metadata):
@@ -696,6 +696,8 @@ class CascadeToFKPKTest(_base.MappedTest, testing.AssertsCompiledSQL):
class JoinedInheritanceTest(_base.MappedTest):
"""Test cascades of pk->pk/fk on joined table inh."""
+
+ __unsupported_on__ = ('mssql',) # mssql doesn't allow ON UPDATE on self-referential keys
@classmethod
def define_tables(cls, metadata):
diff --git a/test/orm/test_pickled.py b/test/orm/test_pickled.py
index abe4c91a2..4cdfa4181 100644
--- a/test/orm/test_pickled.py
+++ b/test/orm/test_pickled.py
@@ -205,11 +205,11 @@ class PickleTest(_fixtures.FixtureTest):
sess.expunge_all()
for opt in [
- sa.orm.eagerload(User.addresses),
- sa.orm.eagerload("addresses"),
+ sa.orm.joinedload(User.addresses),
+ sa.orm.joinedload("addresses"),
sa.orm.defer("name"),
sa.orm.defer(User.name),
- sa.orm.eagerload("addresses", User.addresses),
+ sa.orm.joinedload("addresses", User.addresses),
]:
opt2 = pickle.loads(pickle.dumps(opt))
eq_(opt.key, opt2.key)
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 6de4a31e4..89b66fd86 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -209,7 +209,7 @@ class GetTest(QueryTest):
assert u.orders[1].items[2].description == 'item 12'
# eager load does
- s.query(User).options(eagerload('addresses'), eagerload_all('orders.items')).populate_existing().all()
+ s.query(User).options(joinedload('addresses'), joinedload_all('orders.items')).populate_existing().all()
assert u.addresses[0].email_address == 'jack@bean.com'
assert u.orders[1].items[2].description == 'item 5'
@@ -563,6 +563,9 @@ class RawSelectTest(QueryTest, AssertsCompiledSQL):
self.assert_compile(sess.query(x).filter(x==5).statement,
"SELECT lala(users.id) AS foo FROM users WHERE lala(users.id) = :param_1", dialect=default.DefaultDialect())
+ self.assert_compile(sess.query(func.sum(x).label('bar')).statement,
+ "SELECT sum(lala(users.id)) AS bar FROM users", dialect=default.DefaultDialect())
+
class ExpressionTest(QueryTest, AssertsCompiledSQL):
def test_deferred_instances(self):
@@ -690,6 +693,12 @@ class FilterTest(QueryTest):
assert [] == create_session().query(User).order_by(User.id)[3:3]
assert [] == create_session().query(User).order_by(User.id)[0:0]
+ @testing.requires.boolean_col_expressions
+ def test_exists(self):
+ sess = create_session(testing.db)
+
+ assert sess.query(exists().where(User.id==9)).scalar()
+ assert not sess.query(exists().where(User.id==29)).scalar()
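
A minimal sketch of the EXISTS usage this new test covers; the id values come
from the standard fixtures:

    from sqlalchemy import exists

    sess = create_session(testing.db)
    # Query can select a bare boolean expression; .scalar() returns its value
    assert sess.query(exists().where(User.id == 9)).scalar()
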
def test_one_filter(self):
assert [User(id=8), User(id=9)] == create_session().query(User).filter(User.name.endswith('ed')).all()
@@ -877,15 +886,15 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL):
[(7, 1), (8, 3), (9, 1)]
)
- def test_no_eagerload(self):
- """test that eagerloads are pushed outwards and not rendered in subqueries."""
+ def test_no_joinedload(self):
+ """test that joinedloads are pushed outwards and not rendered in subqueries."""
s = create_session()
oracle_as = not testing.against('oracle') and "AS " or ""
self.assert_compile(
- s.query(User).options(eagerload(User.addresses)).from_self().statement,
+ s.query(User).options(joinedload(User.addresses)).from_self().statement,
"SELECT anon_1.users_id, anon_1.users_name, addresses_1.id, addresses_1.user_id, "\
"addresses_1.email_address FROM (SELECT users.id AS users_id, users.name AS users_name FROM users) %(oracle_as)sanon_1 "\
"LEFT OUTER JOIN addresses %(oracle_as)saddresses_1 ON anon_1.users_id = addresses_1.user_id ORDER BY addresses_1.id" % {
@@ -942,7 +951,7 @@ class FromSelfTest(QueryTest, AssertsCompiledSQL):
)
eq_(
- sess.query(User, Address).filter(User.id==Address.user_id).filter(Address.id.in_([2, 5])).from_self().options(eagerload('addresses')).first(),
+ sess.query(User, Address).filter(User.id==Address.user_id).filter(Address.id.in_([2, 5])).from_self().options(joinedload('addresses')).first(),
# order_by(User.id, Address.id).first(),
(User(id=8, addresses=[Address(), Address(), Address()]), Address(id=2)),
@@ -1059,7 +1068,7 @@ class SetOpsTest(QueryTest, AssertsCompiledSQL):
def go():
eq_(
- fred.union(ed).order_by(User.name).options(eagerload(User.addresses)).all(),
+ fred.union(ed).order_by(User.name).options(joinedload(User.addresses)).all(),
[
User(name='ed', addresses=[Address(), Address(), Address()]),
User(name='fred', addresses=[Address()])
@@ -1156,8 +1165,8 @@ class DistinctTest(QueryTest):
sess.expunge_all()
- # test that it works on embedded eagerload/LIMIT subquery
- q = sess.query(User).join('addresses').distinct().options(eagerload('addresses')).order_by(desc(Address.email_address)).limit(2)
+ # test that it works on embedded joinedload/LIMIT subquery
+ q = sess.query(User).join('addresses').distinct().options(joinedload('addresses')).order_by(desc(Address.email_address)).limit(2)
def go():
assert [
@@ -1190,6 +1199,39 @@ class YieldTest(QueryTest):
except StopIteration:
pass
+class HintsTest(QueryTest, AssertsCompiledSQL):
+ def test_hints(self):
+ from sqlalchemy.dialects import mysql
+ dialect = mysql.dialect()
+
+ sess = create_session()
+
+ self.assert_compile(
+ sess.query(User).with_hint(User, 'USE INDEX (col1_index,col2_index)'),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users USE INDEX (col1_index,col2_index)",
+ dialect=dialect
+ )
+
+ self.assert_compile(
+ sess.query(User).with_hint(User, 'WITH INDEX col1_index', 'sybase'),
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users",
+ dialect=dialect
+ )
+
+ ualias = aliased(User)
+ self.assert_compile(
+ sess.query(User, ualias).with_hint(ualias, 'USE INDEX (col1_index,col2_index)').
+ join((ualias, ualias.id > User.id)),
+ "SELECT users.id AS users_id, users.name AS users_name, "
+ "users_1.id AS users_1_id, users_1.name AS users_1_name "
+ "FROM users INNER JOIN users AS users_1 USE INDEX (col1_index,col2_index) "
+ "ON users.id < users_1.id",
+ dialect=dialect
+ )
+
+
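
A minimal sketch of the Query.with_hint() API these tests introduce: the hint
text is dialect-specific, and the optional third argument restricts the hint
to a named dialect:

    # the hint applies only when compiled for MySQL; other dialects ignore it
    q = sess.query(User).with_hint(
        User, 'USE INDEX (col1_index,col2_index)', 'mysql')
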
class TextTest(QueryTest):
def test_fulltext(self):
assert [User(id=7), User(id=8), User(id=9),User(id=10)] == create_session().query(User).from_statement("select * from users order by id").all()
@@ -1325,7 +1367,9 @@ class InheritedJoinTest(_base.MappedTest, AssertsCompiledSQL):
mapper(Machine, machines)
mapper(Person, people,
- polymorphic_on=people.c.type, polymorphic_identity='person', order_by=people.c.person_id,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ order_by=people.c.person_id,
properties={
'paperwork':relationship(Paperwork, order_by=paperwork.c.paperwork_id)
})
@@ -1382,11 +1426,14 @@ class InheritedJoinTest(_base.MappedTest, AssertsCompiledSQL):
self.assert_compile(
sess.query(Company).join(Company.employees.of_type(Engineer)),
"SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies JOIN (SELECT people.person_id AS people_person_id, people.company_id AS "
- "people_company_id, people.name AS people_name, people.type AS people_type, engineers.person_id AS "
- "engineers_person_id, engineers.status AS engineers_status, engineers.engineer_name AS engineers_engineer_name, "
+ "FROM companies JOIN (SELECT people.person_id AS people_person_id, "
+ "people.company_id AS people_company_id, people.name AS people_name, "
+ "people.type AS people_type, engineers.person_id AS "
+ "engineers_person_id, engineers.status AS engineers_status, "
+ "engineers.engineer_name AS engineers_engineer_name, "
"engineers.primary_language AS engineers_primary_language "
- "FROM people JOIN engineers ON people.person_id = engineers.person_id) AS anon_1 ON companies.company_id = anon_1.people_company_id"
+ "FROM people JOIN engineers ON people.person_id = engineers.person_id) AS "
+ "anon_1 ON companies.company_id = anon_1.people_company_id"
, use_default_dialect = True
)
@@ -1395,19 +1442,28 @@ class InheritedJoinTest(_base.MappedTest, AssertsCompiledSQL):
sess = create_session()
self.assert_compile(
- sess.query(Person).with_polymorphic(Manager).join('paperwork').filter(Paperwork.description.like('%review%')),
- "SELECT people.person_id AS people_person_id, people.company_id AS people_company_id, "
- "people.name AS people_name, people.type AS people_type, managers.person_id AS managers_person_id, "
- "managers.status AS managers_status, managers.manager_name AS managers_manager_name FROM people "
- "LEFT OUTER JOIN managers ON people.person_id = managers.person_id JOIN paperwork ON people.person_id = "
- "paperwork.person_id WHERE paperwork.description LIKE :description_1 ORDER BY people.person_id"
+ sess.query(Person).with_polymorphic(Manager).
+ join('paperwork').filter(Paperwork.description.like('%review%')),
+ "SELECT people.person_id AS people_person_id, people.company_id AS"
+ " people_company_id, "
+ "people.name AS people_name, people.type AS people_type, managers.person_id "
+ "AS managers_person_id, "
+ "managers.status AS managers_status, managers.manager_name AS "
+ "managers_manager_name FROM people "
+ "LEFT OUTER JOIN managers ON people.person_id = managers.person_id JOIN "
+ "paperwork ON people.person_id = "
+ "paperwork.person_id WHERE paperwork.description LIKE :description_1 "
+ "ORDER BY people.person_id"
, use_default_dialect=True
)
self.assert_compile(
- sess.query(Person).with_polymorphic(Manager).join('paperwork', aliased=True).filter(Paperwork.description.like('%review%')),
+ sess.query(Person).with_polymorphic(Manager).
+ join('paperwork', aliased=True).
+ filter(Paperwork.description.like('%review%')),
"SELECT people.person_id AS people_person_id, people.company_id AS people_company_id, "
- "people.name AS people_name, people.type AS people_type, managers.person_id AS managers_person_id, "
+ "people.name AS people_name, people.type AS people_type, managers.person_id "
+ "AS managers_person_id, "
"managers.status AS managers_status, managers.manager_name AS managers_manager_name "
"FROM people LEFT OUTER JOIN managers ON people.person_id = managers.person_id JOIN "
"paperwork AS paperwork_1 ON people.person_id = paperwork_1.person_id "
@@ -1421,24 +1477,35 @@ class InheritedJoinTest(_base.MappedTest, AssertsCompiledSQL):
self.assert_compile(
sess.query(Company).join(Engineer).filter(Engineer.engineer_name=='vlad'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies JOIN (SELECT people.person_id AS people_person_id, people.company_id AS "
- "people_company_id, people.name AS people_name, people.type AS people_type, engineers.person_id AS "
- "engineers_person_id, engineers.status AS engineers_status, engineers.engineer_name AS engineers_engineer_name, "
+ "SELECT companies.company_id AS companies_company_id, companies.name AS "
+ "companies_name "
+ "FROM companies JOIN (SELECT people.person_id AS people_person_id, "
+ "people.company_id AS "
+ "people_company_id, people.name AS people_name, people.type AS people_type,"
+ " engineers.person_id AS "
+ "engineers_person_id, engineers.status AS engineers_status, "
+ "engineers.engineer_name AS engineers_engineer_name, "
"engineers.primary_language AS engineers_primary_language "
- "FROM people JOIN engineers ON people.person_id = engineers.person_id) AS anon_1 ON "
+ "FROM people JOIN engineers ON people.person_id = engineers.person_id) "
+ "AS anon_1 ON "
"companies.company_id = anon_1.people_company_id "
"WHERE anon_1.engineers_engineer_name = :engineer_name_1"
, use_default_dialect=True
)
self.assert_compile(
- sess.query(Company).join((Engineer, Company.company_id==Engineer.company_id)).filter(Engineer.engineer_name=='vlad'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies JOIN (SELECT people.person_id AS people_person_id, people.company_id AS "
- "people_company_id, people.name AS people_name, people.type AS people_type, engineers.person_id AS "
- "engineers_person_id, engineers.status AS engineers_status, engineers.engineer_name AS engineers_engineer_name, "
+ sess.query(Company).join((Engineer, Company.company_id==Engineer.company_id)).
+ filter(Engineer.engineer_name=='vlad'),
+ "SELECT companies.company_id AS companies_company_id, companies.name "
+ "AS companies_name "
+ "FROM companies JOIN (SELECT people.person_id AS people_person_id, "
+ "people.company_id AS "
+ "people_company_id, people.name AS people_name, people.type AS "
+ "people_type, engineers.person_id AS "
+ "engineers_person_id, engineers.status AS engineers_status, "
+ "engineers.engineer_name AS engineers_engineer_name, "
"engineers.primary_language AS engineers_primary_language "
- "FROM people JOIN engineers ON people.person_id = engineers.person_id) AS anon_1 ON "
+ "FROM people JOIN engineers ON people.person_id = engineers.person_id) AS "
+ "anon_1 ON "
"companies.company_id = anon_1.people_company_id "
"WHERE anon_1.engineers_engineer_name = :engineer_name_1"
, use_default_dialect=True
@@ -1446,39 +1513,151 @@ class InheritedJoinTest(_base.MappedTest, AssertsCompiledSQL):
@testing.resolve_artifact_names
def test_multiple_adaption(self):
- """test that multiple filter() adapters get chained together and work correctly within a multiple-entry join()."""
+ """test that multiple filter() adapters get chained together "
+ and work correctly within a multiple-entry join()."""
sess = create_session()
self.assert_compile(
sess.query(Company).join((people.join(engineers), Company.employees)).
filter(Engineer.name=='dilbert'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies JOIN (SELECT people.person_id AS people_person_id, people.company_id AS "
- "people_company_id, people.name AS people_name, people.type AS people_type, engineers.person_id "
- "AS engineers_person_id, engineers.status AS engineers_status, engineers.engineer_name AS engineers_engineer_name, "
- "engineers.primary_language AS engineers_primary_language FROM people JOIN engineers ON people.person_id = "
- "engineers.person_id) AS anon_1 ON companies.company_id = anon_1.people_company_id WHERE anon_1.people_name = :name_1"
+ "SELECT companies.company_id AS companies_company_id, companies.name AS "
+ "companies_name "
+ "FROM companies JOIN (SELECT people.person_id AS people_person_id, "
+ "people.company_id AS "
+ "people_company_id, people.name AS people_name, people.type AS "
+ "people_type, engineers.person_id "
+ "AS engineers_person_id, engineers.status AS engineers_status, "
+ "engineers.engineer_name AS engineers_engineer_name, "
+ "engineers.primary_language AS engineers_primary_language FROM people "
+ "JOIN engineers ON people.person_id = "
+ "engineers.person_id) AS anon_1 ON companies.company_id = "
+ "anon_1.people_company_id WHERE anon_1.people_name = :name_1"
, use_default_dialect = True
)
mach_alias = machines.select()
self.assert_compile(
- sess.query(Company).join((people.join(engineers), Company.employees), (mach_alias, Engineer.machines)).
+ sess.query(Company).join((people.join(engineers), Company.employees),
+ (mach_alias, Engineer.machines)).
filter(Engineer.name=='dilbert').filter(Machine.name=='foo'),
- "SELECT companies.company_id AS companies_company_id, companies.name AS companies_name "
- "FROM companies JOIN (SELECT people.person_id AS people_person_id, people.company_id AS "
- "people_company_id, people.name AS people_name, people.type AS people_type, engineers.person_id "
- "AS engineers_person_id, engineers.status AS engineers_status, engineers.engineer_name AS engineers_engineer_name, "
- "engineers.primary_language AS engineers_primary_language FROM people JOIN engineers ON people.person_id = "
- "engineers.person_id) AS anon_1 ON companies.company_id = anon_1.people_company_id JOIN "
- "(SELECT machines.machine_id AS machine_id, machines.name AS name, machines.engineer_id AS engineer_id "
+ "SELECT companies.company_id AS companies_company_id, companies.name AS "
+ "companies_name "
+ "FROM companies JOIN (SELECT people.person_id AS people_person_id, "
+ "people.company_id AS "
+ "people_company_id, people.name AS people_name, people.type AS people_type,"
+ " engineers.person_id "
+ "AS engineers_person_id, engineers.status AS engineers_status, "
+ "engineers.engineer_name AS engineers_engineer_name, "
+ "engineers.primary_language AS engineers_primary_language FROM people "
+ "JOIN engineers ON people.person_id = "
+ "engineers.person_id) AS anon_1 ON companies.company_id = "
+ "anon_1.people_company_id JOIN "
+ "(SELECT machines.machine_id AS machine_id, machines.name AS name, "
+ "machines.engineer_id AS engineer_id "
"FROM machines) AS anon_2 ON anon_1.engineers_person_id = anon_2.engineer_id "
"WHERE anon_1.people_name = :name_1 AND anon_2.name = :name_2"
, use_default_dialect = True
)
+
+class AddEntityEquivalenceTest(_base.MappedTest, AssertsCompiledSQL):
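+    """test that entities named up front in query() and entities added
+    via add_entity() produce equivalent results."""
+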
+ run_setup_mappers = 'once'
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('a', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(20)),
+ Column('bid', Integer, ForeignKey('b.id'))
+ )
+
+ Table('b', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('name', String(50)),
+ Column('type', String(20))
+ )
+
+ Table('c', metadata,
+ Column('id', Integer, ForeignKey('b.id'), primary_key=True),
+ Column('age', Integer)
+ )
+
+ Table('d', metadata,
+ Column('id', Integer, ForeignKey('a.id'), primary_key=True),
+ Column('dede', Integer)
+ )
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def setup_classes(cls):
+ class A(_fixtures.Base):
+ pass
+
+ class B(_fixtures.Base):
+ pass
+ class C(B):
+ pass
+
+ class D(A):
+ pass
+
+ mapper(A, a,
+ polymorphic_identity='a',
+ polymorphic_on=a.c.type,
+              with_polymorphic=('*', None),
+ properties={
+              'link':relationship(B, uselist=False, backref='back')
+ })
+ mapper(B, b,
+ polymorphic_identity='b',
+ polymorphic_on=b.c.type,
+              with_polymorphic=('*', None)
+ )
+ mapper(C, c, inherits=B, polymorphic_identity='c')
+ mapper(D, d, inherits=A, polymorphic_identity='d')
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def insert_data(cls):
+ sess = create_session()
+ sess.add_all([
+ B(name='b1'),
+            A(name='a1', link=C(name='c1', age=3)),
+            C(name='c2', age=6),
+ A(name='a2')
+ ])
+ sess.flush()
+
+ @testing.resolve_artifact_names
+ def test_add_entity_equivalence(self):
+ sess = create_session()
+ for q in [
+            sess.query(A, B).join(A.link),
+            sess.query(A).join(A.link).add_entity(B),
+ ]:
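+            # the linked B row comes back as its polymorphic subclass C,
+            # since mapper B is polymorphic on b.type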
+ eq_(
+ q.all(),
+ [(
+ A(bid=2, id=1, name=u'a1', type=u'a'),
+ C(age=3, id=2, name=u'c1', type=u'c')
+ )]
+ )
+
+ for q in [
+            sess.query(B, A).join(B.back),
+            sess.query(B).join(B.back).add_entity(A),
+            sess.query(B).add_entity(A).join(B.back)
+ ]:
+ eq_(
+ q.all(),
+ [(
+ C(age=3, id=2, name=u'c1', type=u'c'),
+ A(bid=2, id=1, name=u'a1', type=u'a')
+ )]
+ )
class JoinTest(QueryTest, AssertsCompiledSQL):
@@ -2138,6 +2317,25 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
[(u'jack',), (u'ed',), (u'ed',), (u'ed',), (u'fred',)]
)
+ def test_from_self_resets_joinpaths(self):
+ """test a join from from_self() doesn't confuse joins inside the subquery
+ with the outside.
+ """
+ sess = create_session()
+
+ self.assert_compile(
+ sess.query(Item).join(Item.keywords).from_self(Keyword).join(Item.keywords),
+ "SELECT keywords.id AS keywords_id, keywords.name AS keywords_name FROM "
+ "(SELECT items.id AS items_id, items.description AS items_description "
+ "FROM items JOIN item_keywords AS item_keywords_1 ON items.id = "
+ "item_keywords_1.item_id JOIN keywords ON keywords.id = item_keywords_1.keyword_id) "
+ "AS anon_1 JOIN item_keywords AS item_keywords_2 ON "
+ "anon_1.items_id = item_keywords_2.item_id "
+ "JOIN keywords ON "
+ "keywords.id = item_keywords_2.keyword_id",
+ use_default_dialect=True
+ )
+
class MultiplePathTest(_base.MappedTest, AssertsCompiledSQL):
@classmethod
@@ -2372,10 +2570,10 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
q = sess.query(User)
def go():
# outerjoin to User.orders, offset 1/limit 2 so we get user 7 + second two orders.
- # then eagerload the addresses. User + Order columns go into the subquery, address
- # left outer joins to the subquery, eagerloader for User.orders applies context.adapter
+ # then joinedload the addresses. User + Order columns go into the subquery, address
+ # left outer joins to the subquery, joinedloader for User.orders applies context.adapter
# to result rows. This was [ticket:1180].
- l = q.outerjoin(User.orders).options(eagerload(User.addresses), contains_eager(User.orders)).order_by(User.id, Order.id).offset(1).limit(2).all()
+ l = q.outerjoin(User.orders).options(joinedload(User.addresses), contains_eager(User.orders)).order_by(User.id, Order.id).offset(1).limit(2).all()
eq_(l, [User(id=7,
addresses=[Address(email_address=u'jack@bean.com',user_id=7,id=1)],
name=u'jack',
@@ -2390,7 +2588,7 @@ class InstancesTest(QueryTest, AssertsCompiledSQL):
# same as above, except Order is aliased, so two adapters are applied by the
# eager loader
oalias = aliased(Order)
- l = q.outerjoin((User.orders, oalias)).options(eagerload(User.addresses), contains_eager(User.orders, alias=oalias)).order_by(User.id, oalias.id).offset(1).limit(2).all()
+ l = q.outerjoin((User.orders, oalias)).options(joinedload(User.addresses), contains_eager(User.orders, alias=oalias)).order_by(User.id, oalias.id).offset(1).limit(2).all()
eq_(l, [User(id=7,
addresses=[Address(email_address=u'jack@bean.com',user_id=7,id=1)],
name=u'jack',
@@ -2606,8 +2804,8 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
# test eager aliasing, with/without select_from aliasing
for q in [
- sess.query(User, adalias.email_address).outerjoin((User.addresses, adalias)).options(eagerload(User.addresses)).order_by(User.id, adalias.id).limit(10),
- sess.query(User, adalias.email_address, adalias.id).outerjoin((User.addresses, adalias)).from_self(User, adalias.email_address).options(eagerload(User.addresses)).order_by(User.id, adalias.id).limit(10),
+ sess.query(User, adalias.email_address).outerjoin((User.addresses, adalias)).options(joinedload(User.addresses)).order_by(User.id, adalias.id).limit(10),
+ sess.query(User, adalias.email_address, adalias.id).outerjoin((User.addresses, adalias)).from_self(User, adalias.email_address).options(joinedload(User.addresses)).order_by(User.id, adalias.id).limit(10),
]:
eq_(
@@ -2630,11 +2828,11 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
(User(addresses=[],name=u'chuck',id=10), None)]
)
- def test_column_from_limited_eagerload(self):
+ def test_column_from_limited_joinedload(self):
sess = create_session()
def go():
- results = sess.query(User).limit(1).options(eagerload('addresses')).add_column(User.name).all()
+ results = sess.query(User).limit(1).options(joinedload('addresses')).add_column(User.name).all()
eq_(results, [(User(name='jack'), 'jack')])
self.assert_sql_count(testing.db, go, 1)
@@ -2652,10 +2850,10 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
sess.query(oalias, Order).from_self().filter(oalias.user_id==Order.user_id).filter(oalias.user_id==7).filter(Order.id<oalias.id).order_by(oalias.id, Order.id),
# here we go....two layers of aliasing
- sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().order_by(Order.id, oalias.id).limit(10).options(eagerload(Order.items)),
+ sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().order_by(Order.id, oalias.id).limit(10).options(joinedload(Order.items)),
# gratuitous four layers
- sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().from_self().from_self().order_by(Order.id, oalias.id).limit(10).options(eagerload(Order.items)),
+ sess.query(Order, oalias).filter(Order.user_id==oalias.user_id).filter(Order.user_id==7).filter(Order.id>oalias.id).from_self().from_self().from_self().order_by(Order.id, oalias.id).limit(10).options(joinedload(Order.items)),
]:
@@ -2716,7 +2914,7 @@ class MixedEntitiesTest(QueryTest, AssertsCompiledSQL):
eq_(q.all(), [(user8, address3)])
sess.expunge_all()
- q = sess.query(User, address_entity).join(('addresses', address_entity)).options(eagerload('addresses')).filter_by(email_address='ed@bettyboop.com')
+ q = sess.query(User, address_entity).join(('addresses', address_entity)).options(joinedload('addresses')).filter_by(email_address='ed@bettyboop.com')
eq_(list(util.OrderedSet(q.all())), [(user8, address3)])
sess.expunge_all()
@@ -2940,7 +3138,7 @@ class ImmediateTest(_fixtures.FixtureTest):
eq_(sess.query().value(sa.literal_column('1').label('x')), 1)
-class SelectFromTest(QueryTest):
+class SelectFromTest(QueryTest, AssertsCompiledSQL):
run_setup_mappers = None
def test_replace_with_select(self):
@@ -2964,7 +3162,7 @@ class SelectFromTest(QueryTest):
User(name='ed',id=8), User(name='jack',id=7)
])
- eq_(sess.query(User).select_from(sel).options(eagerload('addresses')).first(),
+ eq_(sess.query(User).select_from(sel).options(joinedload('addresses')).first(),
User(name='jack', addresses=[Address(id=1)])
)
@@ -2982,6 +3180,64 @@ class SelectFromTest(QueryTest):
]
)
+ def test_differentiate_self_external(self):
+ """test some different combinations of joining a table to a subquery of itself."""
+
+ mapper(User, users)
+
+ sess = create_session()
+
+ sel = sess.query(User).filter(User.id.in_([7, 8])).subquery()
+ ualias = aliased(User)
+
+ self.assert_compile(
+ sess.query(User).join((sel, User.id>sel.c.id)),
+ "SELECT users.id AS users_id, users.name AS users_name FROM "
+ "users JOIN (SELECT users.id AS id, users.name AS name FROM "
+ "users WHERE users.id IN (:id_1, :id_2)) AS anon_1 ON users.id > anon_1.id",
+ use_default_dialect=True
+ )
+
+ self.assert_compile(
+ sess.query(ualias).select_from(sel).filter(ualias.id>sel.c.id),
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
+ "users AS users_1, (SELECT users.id AS id, users.name AS name FROM "
+ "users WHERE users.id IN (:id_1, :id_2)) AS anon_1 WHERE users_1.id > anon_1.id",
+ use_default_dialect=True
+ )
+
+ # these two are essentially saying, "join ualias to ualias", so an
+ # error is raised. join() deals with entities, not what's in
+ # select_from().
+ assert_raises(sa_exc.InvalidRequestError,
+ sess.query(ualias).select_from(sel).join, (ualias, ualias.id>sel.c.id)
+ )
+
+ assert_raises(sa_exc.InvalidRequestError,
+ sess.query(ualias).select_from(sel).join, (ualias, ualias.id>User.id)
+ )
+
+ salias = aliased(User, sel)
+ self.assert_compile(
+ sess.query(salias).join((ualias, ualias.id>salias.id)),
+ "SELECT anon_1.id AS anon_1_id, anon_1.name AS anon_1_name FROM "
+ "(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
+ "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
+ use_default_dialect=True
+ )
+
+        # this one uses an explicit join(left, right, onclause), so it works
+ self.assert_compile(
+ sess.query(ualias).select_from(join(sel, ualias, ualias.id>sel.c.id)),
+ "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name FROM "
+ "(SELECT users.id AS id, users.name AS name FROM users WHERE users.id "
+ "IN (:id_1, :id_2)) AS anon_1 JOIN users AS users_1 ON users_1.id > anon_1.id",
+ use_default_dialect=True
+ )
+
def test_join_no_order_by(self):
mapper(User, users)
@@ -3003,7 +3259,8 @@ class SelectFromTest(QueryTest):
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
- eq_(sess.query(User).select_from(sel).join('addresses').add_entity(Address).order_by(User.id).order_by(Address.id).all(),
+ eq_(sess.query(User).select_from(sel).join('addresses').
+ add_entity(Address).order_by(User.id).order_by(Address.id).all(),
[
(User(name='jack',id=7), Address(user_id=7,email_address='jack@bean.com',id=1)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@wood.com',id=2)),
@@ -3013,7 +3270,8 @@ class SelectFromTest(QueryTest):
)
adalias = aliased(Address)
- eq_(sess.query(User).select_from(sel).join(('addresses', adalias)).add_entity(adalias).order_by(User.id).order_by(adalias.id).all(),
+ eq_(sess.query(User).select_from(sel).join(('addresses', adalias)).
+ add_entity(adalias).order_by(User.id).order_by(adalias.id).all(),
[
(User(name='jack',id=7), Address(user_id=7,email_address='jack@bean.com',id=1)),
(User(name='ed',id=8), Address(user_id=8,email_address='ed@wood.com',id=2)),
@@ -3038,9 +3296,6 @@ class SelectFromTest(QueryTest):
sel = users.select(users.c.id.in_([7, 8]))
sess = create_session()
- # TODO: remove
- sess.query(User).select_from(sel).options(eagerload_all('orders.items.keywords')).join('orders', 'items', 'keywords', aliased=True).filter(Keyword.name.in_(['red', 'big', 'round'])).all()
-
eq_(sess.query(User).select_from(sel).join('orders', 'items', 'keywords').filter(Keyword.name.in_(['red', 'big', 'round'])).all(), [
User(name=u'jack',id=7)
])
@@ -3050,7 +3305,12 @@ class SelectFromTest(QueryTest):
])
def go():
- eq_(sess.query(User).select_from(sel).options(eagerload_all('orders.items.keywords')).join('orders', 'items', 'keywords', aliased=True).filter(Keyword.name.in_(['red', 'big', 'round'])).all(), [
+ eq_(
+ sess.query(User).select_from(sel).
+ options(joinedload_all('orders.items.keywords')).
+ join('orders', 'items', 'keywords', aliased=True).
+ filter(Keyword.name.in_(['red', 'big', 'round'])).all(),
+ [
User(name=u'jack',orders=[
Order(description=u'order 1',items=[
Item(description=u'item 1',keywords=[Keyword(name=u'red'), Keyword(name=u'big'), Keyword(name=u'round')]),
@@ -3088,7 +3348,7 @@ class SelectFromTest(QueryTest):
sess = create_session()
def go():
- eq_(sess.query(User).options(eagerload('addresses')).select_from(sel).order_by(User.id).all(),
+ eq_(sess.query(User).options(joinedload('addresses')).select_from(sel).order_by(User.id).all(),
[
User(id=7, addresses=[Address(id=1)]),
User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])
@@ -3098,14 +3358,14 @@ class SelectFromTest(QueryTest):
sess.expunge_all()
def go():
- eq_(sess.query(User).options(eagerload('addresses')).select_from(sel).filter(User.id==8).order_by(User.id).all(),
+ eq_(sess.query(User).options(joinedload('addresses')).select_from(sel).filter(User.id==8).order_by(User.id).all(),
[User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)])]
)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
- eq_(sess.query(User).options(eagerload('addresses')).select_from(sel).order_by(User.id)[1], User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)]))
+ eq_(sess.query(User).options(joinedload('addresses')).select_from(sel).order_by(User.id)[1], User(id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)]))
self.assert_sql_count(testing.db, go, 1)
class CustomJoinTest(QueryTest):
@@ -3115,19 +3375,23 @@ class CustomJoinTest(QueryTest):
"""test aliasing of joins with a custom join condition"""
mapper(Address, addresses)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items, lazy=True, order_by=items.c.id),
+ 'items':relationship(Item, secondary=order_items, lazy='select', order_by=items.c.id),
})
mapper(Item, items)
mapper(User, users, properties = dict(
- addresses = relationship(Address, lazy=True),
- open_orders = relationship(Order, primaryjoin = and_(orders.c.isopen == 1, users.c.id==orders.c.user_id), lazy=True),
- closed_orders = relationship(Order, primaryjoin = and_(orders.c.isopen == 0, users.c.id==orders.c.user_id), lazy=True)
+ addresses = relationship(Address, lazy='select'),
+ open_orders = relationship(Order, primaryjoin = and_(orders.c.isopen == 1, users.c.id==orders.c.user_id), lazy='select'),
+ closed_orders = relationship(Order, primaryjoin = and_(orders.c.isopen == 0, users.c.id==orders.c.user_id), lazy='select')
))
q = create_session().query(User)
+
+ eq_(
+ q.join('open_orders', 'items', aliased=True).filter(Item.id==4).\
+ join('closed_orders', 'items', aliased=True).filter(Item.id==3).all(),
+ [User(id=7)]
+ )
- assert [User(id=7)] == q.join('open_orders', 'items', aliased=True).filter(Item.id==4).join('closed_orders', 'items', aliased=True).filter(Item.id==3).all()
-
-class SelfReferentialTest(_base.MappedTest):
+class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
run_setup_mappers = 'once'
run_inserts = 'once'
run_deletes = None
@@ -3149,7 +3413,7 @@ class SelfReferentialTest(_base.MappedTest):
self.children.append(node)
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=True, join_depth=3,
+ 'children':relationship(Node, lazy='select', join_depth=3,
backref=backref('parent', remote_side=[nodes.c.id])
)
})
@@ -3181,7 +3445,108 @@ class SelfReferentialTest(_base.MappedTest):
node = sess.query(Node).filter_by(data='n122').join('parent', aliased=True).filter_by(data='n12').\
join('parent', aliased=True, from_joinpoint=True).filter_by(data='n1').first()
assert node.data == 'n122'
+
+ def test_string_or_prop_aliased(self):
+ """test that join('foo') behaves the same as join(Cls.foo) in a self
+ referential scenario.
+
+ """
+
+ sess = create_session()
+ nalias = aliased(Node, sess.query(Node).filter_by(data='n1').subquery())
+
+ q1 = sess.query(nalias).join(nalias.children, aliased=True).\
+ join(Node.children, from_joinpoint=True)
+ q2 = sess.query(nalias).join(nalias.children, aliased=True).\
+ join("children", from_joinpoint=True)
+
+ for q in (q1, q2):
+ self.assert_compile(
+ q,
+ "SELECT anon_1.id AS anon_1_id, anon_1.parent_id AS "
+ "anon_1_parent_id, anon_1.data AS anon_1_data FROM "
+ "(SELECT nodes.id AS id, nodes.parent_id AS parent_id, "
+ "nodes.data AS data FROM nodes WHERE nodes.data = :data_1) "
+ "AS anon_1 JOIN nodes AS nodes_1 ON anon_1.id = "
+ "nodes_1.parent_id JOIN nodes ON nodes_1.id = nodes.parent_id",
+ use_default_dialect=True
+ )
+
+ q1 = sess.query(Node).join(nalias.children, aliased=True).\
+ join(Node.children, aliased=True, from_joinpoint=True).\
+ join(Node.children, from_joinpoint=True)
+
+ q2 = sess.query(Node).join(nalias.children, aliased=True).\
+ join("children", aliased=True, from_joinpoint=True).\
+ join("children", from_joinpoint=True)
+
+ for q in (q1, q2):
+ self.assert_compile(
+ q,
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS "
+ "nodes_parent_id, nodes.data AS nodes_data FROM (SELECT "
+ "nodes.id AS id, nodes.parent_id AS parent_id, nodes.data "
+ "AS data FROM nodes WHERE nodes.data = :data_1) AS anon_1 "
+ "JOIN nodes AS nodes_1 ON anon_1.id = nodes_1.parent_id "
+ "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id "
+ "JOIN nodes ON nodes_2.id = nodes.parent_id",
+ use_default_dialect=True
+ )
+
+ def test_from_self_inside_excludes_outside(self):
+ """test the propagation of aliased() from inside to outside
+        on a from_self().
+ """
+ sess = create_session()
+
+ n1 = aliased(Node)
+
+ # n1 is not inside the from_self(), so all cols must be maintained
+ # on the outside
+ self.assert_compile(
+ sess.query(Node).filter(Node.data=='n122').from_self(n1, Node.id),
+ "SELECT nodes_1.id AS nodes_1_id, nodes_1.parent_id AS nodes_1_parent_id, "
+ "nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id "
+ "FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, "
+ "nodes.parent_id AS nodes_parent_id, nodes.data AS nodes_data FROM "
+ "nodes WHERE nodes.data = :data_1) AS anon_1",
+ use_default_dialect=True
+ )
+
+ parent = aliased(Node)
+ grandparent = aliased(Node)
+ q = sess.query(Node, parent, grandparent).\
+ join((Node.parent, parent), (parent.parent, grandparent)).\
+ filter(Node.data=='n122').filter(parent.data=='n12').\
+ filter(grandparent.data=='n1').from_self().limit(1)
+
+ # parent, grandparent *are* inside the from_self(), so they
+ # should get aliased to the outside.
+ self.assert_compile(
+ q,
+ "SELECT anon_1.nodes_id AS anon_1_nodes_id, "
+ "anon_1.nodes_parent_id AS anon_1_nodes_parent_id, "
+ "anon_1.nodes_data AS anon_1_nodes_data, "
+ "anon_1.nodes_1_id AS anon_1_nodes_1_id, "
+ "anon_1.nodes_1_parent_id AS anon_1_nodes_1_parent_id, "
+ "anon_1.nodes_1_data AS anon_1_nodes_1_data, "
+ "anon_1.nodes_2_id AS anon_1_nodes_2_id, "
+ "anon_1.nodes_2_parent_id AS anon_1_nodes_2_parent_id, "
+ "anon_1.nodes_2_data AS anon_1_nodes_2_data "
+ "FROM (SELECT nodes.id AS nodes_id, nodes.parent_id "
+ "AS nodes_parent_id, nodes.data AS nodes_data, "
+ "nodes_1.id AS nodes_1_id, nodes_1.parent_id AS nodes_1_parent_id, "
+ "nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, "
+ "nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS "
+ "nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON "
+ "nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
+ "ON nodes_2.id = nodes_1.parent_id "
+ "WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND "
+ "nodes_2.data = :data_3) AS anon_1 LIMIT 1",
+ use_default_dialect=True
+ )
+
def test_explicit_join(self):
sess = create_session()
@@ -3226,6 +3591,7 @@ class SelfReferentialTest(_base.MappedTest):
[Node(parent_id=1,data=u'n11',id=2), Node(parent_id=1,data=u'n12',id=3), Node(parent_id=1,data=u'n13',id=4)]
)
+
def test_multiple_explicit_entities(self):
sess = create_session()
@@ -3261,7 +3627,7 @@ class SelfReferentialTest(_base.MappedTest):
join((Node.parent, parent), (parent.parent, grandparent)).\
filter(Node.data=='n122').filter(parent.data=='n12').\
filter(grandparent.data=='n1').\
- options(eagerload(Node.children)).first(),
+ options(joinedload(Node.children)).first(),
(Node(data='n122'), Node(data='n12'), Node(data='n1'))
)
@@ -3270,7 +3636,7 @@ class SelfReferentialTest(_base.MappedTest):
join((Node.parent, parent), (parent.parent, grandparent)).\
filter(Node.data=='n122').filter(parent.data=='n12').\
filter(grandparent.data=='n1').from_self().\
- options(eagerload(Node.children)).first(),
+ options(joinedload(Node.children)).first(),
(Node(data='n122'), Node(data='n12'), Node(data='n1'))
)
@@ -3332,7 +3698,7 @@ class SelfReferentialM2MTest(_base.MappedTest):
pass
mapper(Node, nodes, properties={
- 'children':relationship(Node, lazy=True, secondary=node_to_nodes,
+ 'children':relationship(Node, lazy='select', secondary=node_to_nodes,
primaryjoin=nodes.c.id==node_to_nodes.c.left_node_id,
secondaryjoin=nodes.c.id==node_to_nodes.c.right_node_id,
)
@@ -3403,7 +3769,7 @@ class ExternalColumnsTest(QueryTest):
sess = create_session()
- sess.query(Address).options(eagerload('user')).all()
+ sess.query(Address).options(joinedload('user')).all()
eq_(sess.query(User).all(),
[
@@ -3427,7 +3793,7 @@ class ExternalColumnsTest(QueryTest):
for x in range(2):
sess.expunge_all()
def go():
- eq_(sess.query(Address).options(eagerload('user')).order_by(Address.id).all(), address_result)
+ eq_(sess.query(Address).options(joinedload('user')).order_by(Address.id).all(), address_result)
self.assert_sql_count(testing.db, go, 1)
ualias = aliased(User)
@@ -3460,7 +3826,7 @@ class ExternalColumnsTest(QueryTest):
ua = aliased(User)
eq_(sess.query(Address, ua.concat, ua.count).
select_from(join(Address, ua, 'user')).
- options(eagerload(Address.user)).order_by(Address.id).all(),
+ options(joinedload(Address.user)).order_by(Address.id).all(),
[
(Address(id=1, user=User(id=7, concat=14, count=1)), 14, 1),
(Address(id=2, user=User(id=8, concat=16, count=3)), 16, 3),
@@ -3478,9 +3844,9 @@ class ExternalColumnsTest(QueryTest):
[(1, 7, 14, 1), (2, 8, 16, 3), (3, 8, 16, 3), (4, 8, 16, 3), (5, 9, 18, 1)]
)
- def test_external_columns_eagerload(self):
+ def test_external_columns_joinedload(self):
# in this test, we have a subquery on User that accesses "addresses", underneath
- # an eagerload for "addresses". So the "addresses" alias adapter needs to *not* hit
+    # a joinedload for "addresses". So the "addresses" alias adapter needs to *not* hit
# the "addresses" table within the "user" subquery, but "user" still needs to be adapted.
    # therefore the long-standing practice of eager adapters being "chained" has been removed
    # since it's unnecessary and breaks this exact condition.
@@ -3496,13 +3862,13 @@ class ExternalColumnsTest(QueryTest):
sess = create_session()
def go():
- o1 = sess.query(Order).options(eagerload_all('address.user')).get(1)
+ o1 = sess.query(Order).options(joinedload_all('address.user')).get(1)
eq_(o1.address.user.count, 1)
self.assert_sql_count(testing.db, go, 1)
sess = create_session()
def go():
- o1 = sess.query(Order).options(eagerload_all('address.user')).first()
+ o1 = sess.query(Order).options(joinedload_all('address.user')).first()
eq_(o1.address.user.count, 1)
self.assert_sql_count(testing.db, go, 1)
@@ -3609,7 +3975,7 @@ class UpdateDeleteTest(_base.MappedTest):
def setup_mappers(cls):
mapper(User, users)
mapper(Document, documents, properties={
- 'user': relationship(User, lazy=False, backref=backref('documents', lazy=True))
+ 'user': relationship(User, lazy='joined', backref=backref('documents', lazy='select'))
})
@testing.resolve_artifact_names
@@ -3832,11 +4198,11 @@ class UpdateDeleteTest(_base.MappedTest):
eq_(sess.query(Document.title).order_by(Document.id).all(), zip(['foofoo','barbar', 'baz']))
@testing.resolve_artifact_names
- def test_update_with_explicit_eagerload(self):
+ def test_update_with_explicit_joinedload(self):
sess = create_session(bind=testing.db, autocommit=False)
john,jack,jill,jane = sess.query(User).order_by(User.id).all()
- sess.query(User).options(eagerload(User.documents)).filter(User.age > 29).update({'age': User.age - 10}, synchronize_session='fetch')
+ sess.query(User).options(joinedload(User.documents)).filter(User.age > 29).update({'age': User.age - 10}, synchronize_session='fetch')
eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py
index 116bc1077..7f67631a9 100644
--- a/test/orm/test_relationships.py
+++ b/test/orm/test_relationships.py
@@ -342,7 +342,7 @@ class RelationshipTest3(_base.MappedTest):
primaryjoin=sa.and_(pages.c.jobno==pageversions.c.jobno,
pages.c.pagename==pageversions.c.pagename),
order_by=pageversions.c.version,
- backref=backref('page',lazy=False)
+ backref=backref('page',lazy='joined')
)})
mapper(PageComment, pagecomments, properties={
'page': relationship(
@@ -666,7 +666,7 @@ class RelationshipTest5(_base.MappedTest):
order_by=sa.asc(container_select.c.type),
properties=dict(
lineItems=relationship(LineItem,
- lazy=True,
+ lazy='select',
cascade='all, delete-orphan',
order_by=sa.asc(items.c.id),
primaryjoin=sa.and_(
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index 831707278..fca3bf757 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -8,7 +8,7 @@ import sqlalchemy as sa
from sqlalchemy.test import engines, testing, config
from sqlalchemy import Integer, String, Sequence
from sqlalchemy.test.schema import Table, Column
-from sqlalchemy.orm import mapper, relationship, backref, eagerload
+from sqlalchemy.orm import mapper, relationship, backref, joinedload
from sqlalchemy.test.testing import eq_
from test.engine import _base as engine_base
from test.orm import _base, _fixtures
@@ -889,7 +889,7 @@ class SessionTest(_fixtures.FixtureTest):
s.add(User(name="ed", addresses=[Address(email_address="ed1")]))
s.commit()
- user = s.query(User).options(eagerload(User.addresses)).one()
+ user = s.query(User).options(joinedload(User.addresses)).one()
user.addresses[0].user # lazyload
eq_(user, User(name="ed", addresses=[Address(email_address="ed1")]))
@@ -897,7 +897,7 @@ class SessionTest(_fixtures.FixtureTest):
gc_collect()
assert len(s.identity_map) == 0
- user = s.query(User).options(eagerload(User.addresses)).one()
+ user = s.query(User).options(joinedload(User.addresses)).one()
user.addresses[0].email_address='ed2'
user.addresses[0].user # lazyload
del user
@@ -905,7 +905,7 @@ class SessionTest(_fixtures.FixtureTest):
assert len(s.identity_map) == 2
s.commit()
- user = s.query(User).options(eagerload(User.addresses)).one()
+ user = s.query(User).options(joinedload(User.addresses)).one()
eq_(user, User(name="ed", addresses=[Address(email_address="ed2")]))
@testing.resolve_artifact_names
@@ -918,7 +918,7 @@ class SessionTest(_fixtures.FixtureTest):
s.add(User(name="ed", address=Address(email_address="ed1")))
s.commit()
- user = s.query(User).options(eagerload(User.address)).one()
+ user = s.query(User).options(joinedload(User.address)).one()
user.address.user
eq_(user, User(name="ed", address=Address(email_address="ed1")))
@@ -926,7 +926,7 @@ class SessionTest(_fixtures.FixtureTest):
gc_collect()
assert len(s.identity_map) == 0
- user = s.query(User).options(eagerload(User.address)).one()
+ user = s.query(User).options(joinedload(User.address)).one()
user.address.email_address='ed2'
user.address.user # lazyload
@@ -935,7 +935,7 @@ class SessionTest(_fixtures.FixtureTest):
assert len(s.identity_map) == 2
s.commit()
- user = s.query(User).options(eagerload(User.address)).one()
+ user = s.query(User).options(joinedload(User.address)).one()
eq_(user, User(name="ed", address=Address(email_address="ed2")))
@testing.resolve_artifact_names
diff --git a/test/orm/test_subquery_relations.py b/test/orm/test_subquery_relations.py
new file mode 100644
index 000000000..5b9a46d07
--- /dev/null
+++ b/test/orm/test_subquery_relations.py
@@ -0,0 +1,784 @@
+from sqlalchemy.test.testing import eq_, is_, is_not_
+from sqlalchemy.test import testing
+from sqlalchemy.test.schema import Table, Column
+from sqlalchemy import Integer, String, ForeignKey
+from sqlalchemy.orm import backref, subqueryload, subqueryload_all, \
+ mapper, relationship, clear_mappers,\
+ create_session, lazyload, aliased, joinedload,\
+ deferred, undefer
+from sqlalchemy.test.testing import eq_, assert_raises
+from sqlalchemy.test.assertsql import CompiledSQL
+from test.orm import _base, _fixtures
+import sqlalchemy as sa
+
+class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
+ run_inserts = 'once'
+ run_deletes = None
+
+ @testing.resolve_artifact_names
+ def test_basic(self):
+ mapper(User, users, properties={
+ 'addresses':relationship(
+ mapper(Address, addresses),
+ order_by=Address.id)
+ })
+ sess = create_session()
+
+ q = sess.query(User).options(subqueryload(User.addresses))
+
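+        # two statements expected: the SELECT for users, plus one
+        # subquery-driven SELECT loading the related addresses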
+ def go():
+ eq_(
+ [User(id=7, addresses=[
+ Address(id=1, email_address='jack@bean.com')])],
+ q.filter(User.id==7).all()
+ )
+
+ self.assert_sql_count(testing.db, go, 2)
+
+ def go():
+ eq_(
+ self.static.user_address_result,
+ q.order_by(User.id).all()
+ )
+ self.assert_sql_count(testing.db, go, 2)
+
+ @testing.resolve_artifact_names
+ def test_many_to_many(self):
+ mapper(Keyword, keywords)
+ mapper(Item, items, properties = dict(
+ keywords = relationship(Keyword, secondary=item_keywords,
+ lazy='subquery', order_by=keywords.c.id)))
+
+ q = create_session().query(Item).order_by(Item.id)
+ def go():
+ eq_(self.static.item_keyword_result, q.all())
+ self.assert_sql_count(testing.db, go, 2)
+
+ def go():
+ eq_(self.static.item_keyword_result[0:2],
+ q.join('keywords').filter(Keyword.name == 'red').all())
+ self.assert_sql_count(testing.db, go, 2)
+
+ def go():
+ eq_(self.static.item_keyword_result[0:2],
+ (q.join('keywords', aliased=True).
+ filter(Keyword.name == 'red')).all())
+ self.assert_sql_count(testing.db, go, 2)
+
+ @testing.resolve_artifact_names
+ def test_orderby(self):
+ mapper(User, users, properties = {
+ 'addresses':relationship(mapper(Address, addresses),
+ lazy='subquery', order_by=addresses.c.email_address),
+ })
+ q = create_session().query(User)
+ eq_([
+ User(id=7, addresses=[
+ Address(id=1)
+ ]),
+ User(id=8, addresses=[
+ Address(id=3, email_address='ed@bettyboop.com'),
+ Address(id=4, email_address='ed@lala.com'),
+ Address(id=2, email_address='ed@wood.com')
+ ]),
+ User(id=9, addresses=[
+ Address(id=5)
+ ]),
+ User(id=10, addresses=[])
+ ], q.order_by(User.id).all())
+
+ @testing.resolve_artifact_names
+ def test_orderby_multi(self):
+ mapper(User, users, properties = {
+ 'addresses':relationship(mapper(Address, addresses),
+ lazy='subquery',
+ order_by=[
+ addresses.c.email_address,
+ addresses.c.id]),
+ })
+ q = create_session().query(User)
+ eq_([
+ User(id=7, addresses=[
+ Address(id=1)
+ ]),
+ User(id=8, addresses=[
+ Address(id=3, email_address='ed@bettyboop.com'),
+ Address(id=4, email_address='ed@lala.com'),
+ Address(id=2, email_address='ed@wood.com')
+ ]),
+ User(id=9, addresses=[
+ Address(id=5)
+ ]),
+ User(id=10, addresses=[])
+ ], q.order_by(User.id).all())
+
+ @testing.resolve_artifact_names
+ def test_orderby_related(self):
+ """A regular mapper select on a single table can
+ order by a relationship to a second table"""
+
+ mapper(Address, addresses)
+ mapper(User, users, properties = dict(
+ addresses = relationship(Address,
+ lazy='subquery',
+ order_by=addresses.c.id),
+ ))
+
+ q = create_session().query(User)
+ l = q.filter(User.id==Address.user_id).\
+ order_by(Address.email_address).all()
+
+ eq_([
+ User(id=8, addresses=[
+ Address(id=2, email_address='ed@wood.com'),
+ Address(id=3, email_address='ed@bettyboop.com'),
+ Address(id=4, email_address='ed@lala.com'),
+ ]),
+ User(id=9, addresses=[
+ Address(id=5)
+ ]),
+ User(id=7, addresses=[
+ Address(id=1)
+ ]),
+ ], l)
+
+ @testing.resolve_artifact_names
+ def test_orderby_desc(self):
+ mapper(Address, addresses)
+ mapper(User, users, properties = dict(
+ addresses = relationship(Address, lazy='subquery',
+ order_by=[
+ sa.desc(addresses.c.email_address)
+ ]),
+ ))
+ sess = create_session()
+ eq_([
+ User(id=7, addresses=[
+ Address(id=1)
+ ]),
+ User(id=8, addresses=[
+ Address(id=2, email_address='ed@wood.com'),
+ Address(id=4, email_address='ed@lala.com'),
+ Address(id=3, email_address='ed@bettyboop.com'),
+ ]),
+ User(id=9, addresses=[
+ Address(id=5)
+ ]),
+ User(id=10, addresses=[])
+ ], sess.query(User).order_by(User.id).all())
+
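+    # each run: (strategy for User.orders, Order.items, Item.keywords,
+    # expected total statement count)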
+ _pathing_runs = [
+ ( "lazyload", "lazyload", "lazyload", 15 ),
+ ("subqueryload", "lazyload", "lazyload", 12),
+ ("subqueryload", "subqueryload", "lazyload", 8),
+ ("joinedload", "subqueryload", "lazyload", 7),
+ ("lazyload", "lazyload", "subqueryload", 12),
+ ("subqueryload", "subqueryload", "subqueryload", 4),
+ ("subqueryload", "subqueryload", "joinedload", 3),
+ ]
+# _pathing_runs = [("subqueryload", "subqueryload", "joinedload", 3)]
+# _pathing_runs = [("subqueryload", "subqueryload", "subqueryload", 4)]
+
+ def test_options_pathing(self):
+ self._do_options_test(self._pathing_runs)
+
+ def test_mapper_pathing(self):
+ self._do_mapper_test(self._pathing_runs)
+
+ @testing.resolve_artifact_names
+ def _do_options_test(self, configs):
+ mapper(User, users, properties={
+ 'orders':relationship(Order, order_by=orders.c.id), # o2m, m2o
+ })
+ mapper(Order, orders, properties={
+ 'items':relationship(Item,
+ secondary=order_items, order_by=items.c.id), #m2m
+ })
+ mapper(Item, items, properties={
+ 'keywords':relationship(Keyword,
+ secondary=item_keywords,
+ order_by=keywords.c.id) #m2m
+ })
+ mapper(Keyword, keywords)
+
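+        # 'lazyload' has no entry here; when a strategy is 'lazyload',
+        # no option is applied and the relationship stays lazy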
+ callables = {
+ 'joinedload':joinedload,
+ 'subqueryload':subqueryload
+ }
+
+ for o, i, k, count in configs:
+ options = []
+ if o in callables:
+ options.append(callables[o](User.orders))
+ if i in callables:
+ options.append(callables[i](User.orders, Order.items))
+ if k in callables:
+ options.append(callables[k](User.orders, Order.items, Item.keywords))
+
+ self._do_query_tests(options, count)
+
+ @testing.resolve_artifact_names
+ def _do_mapper_test(self, configs):
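+        # translate option-style strategy names into the equivalent
+        # relationship(lazy=...) strings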
+ opts = {
+ 'lazyload':'select',
+ 'joinedload':'joined',
+ 'subqueryload':'subquery',
+ }
+
+ for o, i, k, count in configs:
+ mapper(User, users, properties={
+ 'orders':relationship(Order, lazy=opts[o], order_by=orders.c.id),
+ })
+ mapper(Order, orders, properties={
+ 'items':relationship(Item,
+ secondary=order_items, lazy=opts[i], order_by=items.c.id),
+ })
+ mapper(Item, items, properties={
+ 'keywords':relationship(Keyword,
+ lazy=opts[k],
+ secondary=item_keywords,
+ order_by=keywords.c.id)
+ })
+ mapper(Keyword, keywords)
+
+ try:
+ self._do_query_tests([], count)
+ finally:
+ clear_mappers()
+
+ @testing.resolve_artifact_names
+ def _do_query_tests(self, opts, count):
+ sess = create_session()
+ def go():
+ eq_(
+ sess.query(User).options(*opts).order_by(User.id).all(),
+ self.static.user_item_keyword_result
+ )
+ self.assert_sql_count(testing.db, go, count)
+
+ eq_(
+ sess.query(User).options(*opts).filter(User.name=='fred').
+ order_by(User.id).all(),
+ self.static.user_item_keyword_result[2:3]
+ )
+
+ sess = create_session()
+ eq_(
+ sess.query(User).options(*opts).join(User.orders).
+ filter(Order.id==3).\
+ order_by(User.id).all(),
+ self.static.user_item_keyword_result[0:1]
+ )
+
+
+ @testing.resolve_artifact_names
+ def test_cyclical(self):
+ """A circular eager relationship breaks the cycle with a lazy loader"""
+
+ mapper(Address, addresses)
+ mapper(User, users, properties = dict(
+ addresses = relationship(Address, lazy='subquery',
+ backref=sa.orm.backref('user', lazy='subquery'),
+ order_by=Address.id)
+ ))
+ is_(sa.orm.class_mapper(User).get_property('addresses').lazy, 'subquery')
+ is_(sa.orm.class_mapper(Address).get_property('user').lazy, 'subquery')
+
+ sess = create_session()
+ eq_(self.static.user_address_result, sess.query(User).order_by(User.id).all())
+
+ @testing.resolve_artifact_names
+ def test_double(self):
+ """Eager loading with two relationships simultaneously,
+ from the same table, using aliases."""
+
+ openorders = sa.alias(orders, 'openorders')
+ closedorders = sa.alias(orders, 'closedorders')
+
+ mapper(Address, addresses)
+ mapper(Order, orders)
+
+ open_mapper = mapper(Order, openorders, non_primary=True)
+ closed_mapper = mapper(Order, closedorders, non_primary=True)
+
+ mapper(User, users, properties = dict(
+ addresses = relationship(Address, lazy='subquery',
+ order_by=addresses.c.id),
+ open_orders = relationship(
+ open_mapper,
+ primaryjoin=sa.and_(openorders.c.isopen == 1,
+ users.c.id==openorders.c.user_id),
+ lazy='subquery', order_by=openorders.c.id),
+ closed_orders = relationship(
+ closed_mapper,
+ primaryjoin=sa.and_(closedorders.c.isopen == 0,
+ users.c.id==closedorders.c.user_id),
+ lazy='subquery', order_by=closedorders.c.id)))
+
+ q = create_session().query(User).order_by(User.id)
+
+ def go():
+ eq_([
+ User(
+ id=7,
+ addresses=[Address(id=1)],
+ open_orders = [Order(id=3)],
+ closed_orders = [Order(id=1), Order(id=5)]
+ ),
+ User(
+ id=8,
+ addresses=[Address(id=2), Address(id=3), Address(id=4)],
+ open_orders = [],
+ closed_orders = []
+ ),
+ User(
+ id=9,
+ addresses=[Address(id=5)],
+ open_orders = [Order(id=4)],
+ closed_orders = [Order(id=2)]
+ ),
+ User(id=10)
+
+ ], q.all())
+ self.assert_sql_count(testing.db, go, 4)
+
+ @testing.resolve_artifact_names
+ def test_double_same_mappers(self):
+ """Eager loading with two relationships simulatneously,
+ from the same table, using aliases."""
+
+ mapper(Address, addresses)
+ mapper(Order, orders, properties={
+ 'items': relationship(Item, secondary=order_items, lazy='subquery',
+ order_by=items.c.id)})
+ mapper(Item, items)
+ mapper(User, users, properties=dict(
+ addresses=relationship(Address, lazy='subquery', order_by=addresses.c.id),
+ open_orders=relationship(
+ Order,
+ primaryjoin=sa.and_(orders.c.isopen == 1,
+ users.c.id==orders.c.user_id),
+ lazy='subquery', order_by=orders.c.id),
+ closed_orders=relationship(
+ Order,
+ primaryjoin=sa.and_(orders.c.isopen == 0,
+ users.c.id==orders.c.user_id),
+ lazy='subquery', order_by=orders.c.id)))
+ q = create_session().query(User).order_by(User.id)
+
+ def go():
+ eq_([
+ User(id=7,
+ addresses=[
+ Address(id=1)],
+ open_orders=[Order(id=3,
+ items=[
+ Item(id=3),
+ Item(id=4),
+ Item(id=5)])],
+ closed_orders=[Order(id=1,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)]),
+ Order(id=5,
+ items=[
+ Item(id=5)])]),
+ User(id=8,
+ addresses=[
+ Address(id=2),
+ Address(id=3),
+ Address(id=4)],
+ open_orders = [],
+ closed_orders = []),
+ User(id=9,
+ addresses=[
+ Address(id=5)],
+ open_orders=[
+ Order(id=4,
+ items=[
+ Item(id=1),
+ Item(id=5)])],
+ closed_orders=[
+ Order(id=2,
+ items=[
+ Item(id=1),
+ Item(id=2),
+ Item(id=3)])]),
+ User(id=10)
+ ], q.all())
+ self.assert_sql_count(testing.db, go, 6)
+
+ @testing.fails_on('maxdb', 'FIXME: unknown')
+ @testing.resolve_artifact_names
+ def test_limit(self):
+ """Limit operations combined with lazy-load relationships."""
+
+ mapper(Item, items)
+ mapper(Order, orders, properties={
+ 'items':relationship(Item, secondary=order_items, lazy='subquery',
+ order_by=items.c.id)
+ })
+ mapper(User, users, properties={
+ 'addresses':relationship(mapper(Address, addresses),
+ lazy='subquery',
+ order_by=addresses.c.id),
+ 'orders':relationship(Order, lazy='select', order_by=orders.c.id)
+ })
+
+ sess = create_session()
+ q = sess.query(User)
+
+ l = q.order_by(User.id).limit(2).offset(1).all()
+ eq_(self.static.user_all_result[1:3], l)
+
+ sess = create_session()
+ l = q.order_by(sa.desc(User.id)).limit(2).offset(2).all()
+ eq_(list(reversed(self.static.user_all_result[0:2])), l)
+
+ @testing.resolve_artifact_names
+ def test_one_to_many_scalar(self):
+ mapper(User, users, properties = dict(
+ address = relationship(mapper(Address, addresses),
+ lazy='subquery', uselist=False)
+ ))
+ q = create_session().query(User)
+
+ def go():
+ l = q.filter(users.c.id == 7).all()
+ eq_([User(id=7, address=Address(id=1))], l)
+ self.assert_sql_count(testing.db, go, 2)
+
+ @testing.fails_on('maxdb', 'FIXME: unknown')
+ @testing.resolve_artifact_names
+ def test_many_to_one(self):
+ mapper(Address, addresses, properties = dict(
+ user = relationship(mapper(User, users), lazy='subquery')
+ ))
+ sess = create_session()
+ q = sess.query(Address)
+
+ def go():
+ a = q.filter(addresses.c.id==1).one()
+ is_not_(a.user, None)
+ u1 = sess.query(User).get(7)
+ is_(a.user, u1)
+ self.assert_sql_count(testing.db, go, 2)
+
+ @testing.resolve_artifact_names
+ def test_double_with_aggregate(self):
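+        # max_orders_by_user selects the highest order id per user;
+        # max_orders then selects the full rows for those orders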
+ max_orders_by_user = sa.select([sa.func.max(orders.c.id).label('order_id')],
+ group_by=[orders.c.user_id]
+ ).alias('max_orders_by_user')
+
+ max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).\
+ alias('max_orders')
+
+ mapper(Order, orders)
+ mapper(User, users, properties={
+ 'orders':relationship(Order, backref='user', lazy='subquery',
+ order_by=orders.c.id),
+ 'max_order':relationship(
+ mapper(Order, max_orders, non_primary=True),
+ lazy='subquery', uselist=False)
+ })
+
+ q = create_session().query(User)
+
+ def go():
+ eq_([
+ User(id=7, orders=[
+ Order(id=1),
+ Order(id=3),
+ Order(id=5),
+ ],
+ max_order=Order(id=5)
+ ),
+ User(id=8, orders=[]),
+ User(id=9, orders=[Order(id=2),Order(id=4)],
+ max_order=Order(id=4)
+ ),
+ User(id=10),
+ ], q.order_by(User.id).all())
+ self.assert_sql_count(testing.db, go, 3)
+
+ @testing.resolve_artifact_names
+ def test_uselist_false_warning(self):
+ """test that multiple rows received by a
+ uselist=False raises a warning."""
+
+ mapper(User, users, properties={
+ 'order':relationship(Order, uselist=False)
+ })
+ mapper(Order, orders)
+ s = create_session()
+ assert_raises(sa.exc.SAWarning,
+ s.query(User).options(subqueryload(User.order)).all)
+
+class OrderBySecondaryTest(_base.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('m2m', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('aid', Integer, ForeignKey('a.id')),
+ Column('bid', Integer, ForeignKey('b.id')))
+
+ Table('a', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(50)))
+ Table('b', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('data', String(50)))
+
+ @classmethod
+ def fixtures(cls):
+ return dict(
+ a=(('id', 'data'),
+ (1, 'a1'),
+ (2, 'a2')),
+
+ b=(('id', 'data'),
+ (1, 'b1'),
+ (2, 'b2'),
+ (3, 'b3'),
+ (4, 'b4')),
+
+ m2m=(('id', 'aid', 'bid'),
+ (2, 1, 1),
+ (4, 2, 4),
+ (1, 1, 3),
+ (6, 2, 2),
+ (3, 1, 2),
+ (5, 2, 3)))
+
+ @testing.resolve_artifact_names
+ def test_ordering(self):
+        class A(_base.ComparableEntity): pass
+        class B(_base.ComparableEntity): pass
+
+ mapper(A, a, properties={
+ 'bs':relationship(B, secondary=m2m, lazy='subquery', order_by=m2m.c.id)
+ })
+ mapper(B, b)
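+        # expected collection ordering follows m2m.c.id, not the
+        # ordering of table b itself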
+
+ sess = create_session()
+ def go():
+ eq_(sess.query(A).all(), [
+ A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
+ A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
+ ])
+ self.assert_sql_count(testing.db, go, 2)
+
+class SelfReferentialTest(_base.MappedTest):
+ @classmethod
+ def define_tables(cls, metadata):
+ Table('nodes', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id')),
+ Column('data', String(30)))
+
+ @testing.fails_on('maxdb', 'FIXME: unknown')
+ @testing.resolve_artifact_names
+ def test_basic(self):
+ class Node(_base.ComparableEntity):
+ def append(self, node):
+ self.children.append(node)
+
+ mapper(Node, nodes, properties={
+ 'children':relationship(Node,
+ lazy='subquery',
+ join_depth=3, order_by=nodes.c.id)
+ })
+ sess = create_session()
+ n1 = Node(data='n1')
+ n1.append(Node(data='n11'))
+ n1.append(Node(data='n12'))
+ n1.append(Node(data='n13'))
+ n1.children[1].append(Node(data='n121'))
+ n1.children[1].append(Node(data='n122'))
+ n1.children[1].append(Node(data='n123'))
+ n2 = Node(data='n2')
+ n2.append(Node(data='n21'))
+ n2.children[0].append(Node(data='n211'))
+ n2.children[0].append(Node(data='n212'))
+
+ sess.add(n1)
+ sess.add(n2)
+ sess.flush()
+ sess.expunge_all()
+ def go():
+ d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).\
+ order_by(Node.data).all()
+ eq_([Node(data='n1', children=[
+ Node(data='n11'),
+ Node(data='n12', children=[
+ Node(data='n121'),
+ Node(data='n122'),
+ Node(data='n123')
+ ]),
+ Node(data='n13')
+ ]),
+ Node(data='n2', children=[
+ Node(data='n21', children=[
+ Node(data='n211'),
+ Node(data='n212'),
+ ])
+ ])
+ ], d)
+ self.assert_sql_count(testing.db, go, 4)
+
+ @testing.resolve_artifact_names
+ def test_lazy_fallback_doesnt_affect_eager(self):
+ class Node(_base.ComparableEntity):
+ def append(self, node):
+ self.children.append(node)
+
+ mapper(Node, nodes, properties={
+ 'children':relationship(Node, lazy='subquery', join_depth=1,
+ order_by=nodes.c.id)
+ })
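+        # with join_depth=1, one level of children is eagerly loaded by
+        # subquery; deeper levels fall back to lazy loading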
+ sess = create_session()
+ n1 = Node(data='n1')
+ n1.append(Node(data='n11'))
+ n1.append(Node(data='n12'))
+ n1.append(Node(data='n13'))
+ n1.children[1].append(Node(data='n121'))
+ n1.children[1].append(Node(data='n122'))
+ n1.children[1].append(Node(data='n123'))
+ sess.add(n1)
+ sess.flush()
+ sess.expunge_all()
+
+ def go():
+ allnodes = sess.query(Node).order_by(Node.data).all()
+ n12 = allnodes[2]
+ eq_(n12.data, 'n12')
+ eq_([
+ Node(data='n121'),
+ Node(data='n122'),
+ Node(data='n123')
+ ], list(n12.children))
+ self.assert_sql_count(testing.db, go, 4)
+
+ @testing.resolve_artifact_names
+ def test_with_deferred(self):
+ class Node(_base.ComparableEntity):
+ def append(self, node):
+ self.children.append(node)
+
+ mapper(Node, nodes, properties={
+ 'children':relationship(Node, lazy='subquery', join_depth=3,
+ order_by=nodes.c.id),
+ 'data':deferred(nodes.c.data)
+ })
+ sess = create_session()
+ n1 = Node(data='n1')
+ n1.append(Node(data='n11'))
+ n1.append(Node(data='n12'))
+ sess.add(n1)
+ sess.flush()
+ sess.expunge_all()
+
+ def go():
+ eq_(
+ Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
+ sess.query(Node).order_by(Node.id).first(),
+ )
+ self.assert_sql_count(testing.db, go, 6)
+
+ sess.expunge_all()
+
+ def go():
+ eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
+ sess.query(Node).options(undefer('data')).order_by(Node.id).first())
+ self.assert_sql_count(testing.db, go, 5)
+
+ sess.expunge_all()
+
+ def go():
+ eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
+ sess.query(Node).options(undefer('data'),
+ undefer('children.data')).first())
+ self.assert_sql_count(testing.db, go, 3)
+
+
+ @testing.resolve_artifact_names
+ def test_options(self):
+ class Node(_base.ComparableEntity):
+ def append(self, node):
+ self.children.append(node)
+
+ mapper(Node, nodes, properties={
+ 'children':relationship(Node, order_by=nodes.c.id)
+ }, order_by=nodes.c.id)
+ sess = create_session()
+ n1 = Node(data='n1')
+ n1.append(Node(data='n11'))
+ n1.append(Node(data='n12'))
+ n1.append(Node(data='n13'))
+ n1.children[1].append(Node(data='n121'))
+ n1.children[1].append(Node(data='n122'))
+ n1.children[1].append(Node(data='n123'))
+ sess.add(n1)
+ sess.flush()
+ sess.expunge_all()
+ def go():
+ d = sess.query(Node).filter_by(data='n1').\
+ options(subqueryload_all('children.children')).first()
+ eq_(Node(data='n1', children=[
+ Node(data='n11'),
+ Node(data='n12', children=[
+ Node(data='n121'),
+ Node(data='n122'),
+ Node(data='n123')
+ ]),
+ Node(data='n13')
+ ]), d)
+ self.assert_sql_count(testing.db, go, 3)
+
+ @testing.fails_on('maxdb', 'FIXME: unknown')
+ @testing.resolve_artifact_names
+ def test_no_depth(self):
+ """no join depth is set, so no eager loading occurs."""
+ class Node(_base.ComparableEntity):
+ def append(self, node):
+ self.children.append(node)
+
+ mapper(Node, nodes, properties={
+ 'children':relationship(Node, lazy='subquery')
+ })
+ sess = create_session()
+ n1 = Node(data='n1')
+ n1.append(Node(data='n11'))
+ n1.append(Node(data='n12'))
+ n1.append(Node(data='n13'))
+ n1.children[1].append(Node(data='n121'))
+ n1.children[1].append(Node(data='n122'))
+ n1.children[1].append(Node(data='n123'))
+ n2 = Node(data='n2')
+ n2.append(Node(data='n21'))
+ sess.add(n1)
+ sess.add(n2)
+ sess.flush()
+ sess.expunge_all()
+ def go():
+ d = sess.query(Node).filter(Node.data.in_(['n1', 'n2'])).order_by(Node.data).all()
+ eq_([
+ Node(data='n1', children=[
+ Node(data='n11'),
+ Node(data='n12', children=[
+ Node(data='n121'),
+ Node(data='n122'),
+ Node(data='n123')
+ ]),
+ Node(data='n13')
+ ]),
+ Node(data='n2', children=[
+ Node(data='n21')
+ ])
+ ], d)
+ self.assert_sql_count(testing.db, go, 4)
+
+
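
The tests above exercise the new subquery eager loading strategy on a self-referential mapping: join_depth bounds how many levels of Node.children are fetched eagerly, with each level adding one additional SELECT. A minimal sketch of the pattern, assuming a nodes table with id, parent_id and data columns and a mapped Node class (names here are illustrative, not part of the commit):

    from sqlalchemy.orm import mapper, relationship, create_session

    mapper(Node, nodes, properties={
        # join_depth=2: children and grandchildren are each loaded by
        # one extra SELECT; deeper levels fall back to lazy loading.
        'children': relationship(Node, lazy='subquery', join_depth=2,
                                 order_by=nodes.c.id)
    })

    sess = create_session()
    roots = sess.query(Node).filter(nodes.c.parent_id == None).all()
    # roots[n].children (and their children) are populated without
    # further SQL being emitted on attribute access.
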
diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py
index 1a0158035..fe4fac89e 100644
--- a/test/orm/test_unitofwork.py
+++ b/test/orm/test_unitofwork.py
@@ -35,7 +35,7 @@ class HistoryTest(_fixtures.FixtureTest):
def test_backref(self):
am = mapper(Address, addresses)
m = mapper(User, users, properties=dict(
- addresses = relationship(am, backref='user', lazy=False)))
+ addresses = relationship(am, backref='user', lazy='joined')))
session = create_session(autocommit=False)
@@ -173,7 +173,7 @@ class UnicodeSchemaTest(engine_base.AltEngineTest, _base.MappedTest):
assert new_a1.t2s[0].d == b1.d
session.expunge_all()
- new_a1 = (session.query(A).options(sa.orm.eagerload('t2s')).
+ new_a1 = (session.query(A).options(sa.orm.joinedload('t2s')).
filter(t1.c.a == a1.a)).one()
assert new_a1.a == a1.a
assert new_a1.t2s[0].d == b1.d
@@ -1052,7 +1052,7 @@ class OneToManyTest(_fixtures.FixtureTest):
"""Basic save of one to many."""
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=True)
+ addresses = relationship(mapper(Address, addresses), lazy='select')
))
u = User(name= 'one2manytester')
a = Address(email_address='one2many@test.org')
@@ -1092,7 +1092,7 @@ class OneToManyTest(_fixtures.FixtureTest):
"""Modifying the child items of an object."""
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=True)))
+ addresses = relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u1.addresses = []
@@ -1140,7 +1140,7 @@ class OneToManyTest(_fixtures.FixtureTest):
"""
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=True)))
+ addresses = relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u2 = User(name='user2')
@@ -1164,7 +1164,7 @@ class OneToManyTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_child_move_2(self):
m = mapper(User, users, properties=dict(
- addresses = relationship(mapper(Address, addresses), lazy=True)))
+ addresses = relationship(mapper(Address, addresses), lazy='select')))
u1 = User(name='user1')
u2 = User(name='user2')
@@ -1188,7 +1188,7 @@ class OneToManyTest(_fixtures.FixtureTest):
def test_o2m_delete_parent(self):
m = mapper(User, users, properties=dict(
address = relationship(mapper(Address, addresses),
- lazy=True,
+ lazy='select',
uselist=False)))
u = User(name='one2onetester')
@@ -1211,7 +1211,7 @@ class OneToManyTest(_fixtures.FixtureTest):
def test_one_to_one(self):
m = mapper(User, users, properties=dict(
address = relationship(mapper(Address, addresses),
- lazy=True,
+ lazy='select',
uselist=False)))
u = User(name='one2onetester')
@@ -1231,7 +1231,7 @@ class OneToManyTest(_fixtures.FixtureTest):
def test_bidirectional(self):
m1 = mapper(User, users)
m2 = mapper(Address, addresses, properties=dict(
- user = relationship(m1, lazy=False, backref='addresses')))
+ user = relationship(m1, lazy='joined', backref='addresses')))
u = User(name='test')
@@ -1550,7 +1550,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
def test_m2o_one_to_one(self):
# TODO: put assertion in here !!!
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy=True, uselist=False)))
+ user = relationship(mapper(User, users), lazy='select', uselist=False)))
session = create_session()
@@ -1601,7 +1601,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_many_to_one_1(self):
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy=True)))
+ user = relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
u1 = User(name='user1')
@@ -1626,7 +1626,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_many_to_one_2(self):
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy=True)))
+ user = relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
a2 = Address(email_address='emailaddress2')
@@ -1657,7 +1657,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_many_to_one_3(self):
m = mapper(Address, addresses, properties=dict(
- user = relationship(mapper(User, users), lazy=True)))
+ user = relationship(mapper(User, users), lazy='select')))
a1 = Address(email_address='emailaddress1')
u1 = User(name='user1')
@@ -1685,7 +1685,7 @@ class ManyToOneTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_bidirectional_no_load(self):
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy=None)})
+ 'addresses':relationship(Address, backref='user', lazy='noload')})
mapper(Address, addresses)
# try it on unsaved objects
@@ -1717,7 +1717,7 @@ class ManyToManyTest(_fixtures.FixtureTest):
m = mapper(Item, items, properties=dict(
keywords=relationship(Keyword,
item_keywords,
- lazy=False,
+ lazy='joined',
order_by=keywords.c.name)))
data = [Item,
@@ -1823,7 +1823,7 @@ class ManyToManyTest(_fixtures.FixtureTest):
"""
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, item_keywords, lazy=False),
+ keywords = relationship(Keyword, item_keywords, lazy='joined'),
))
i = Item(description='i1')
@@ -1864,7 +1864,7 @@ class ManyToManyTest(_fixtures.FixtureTest):
mapper(Item, items, properties=dict(
keywords=relationship(Keyword,
secondary=item_keywords,
- lazy=False,
+ lazy='joined',
order_by=keywords.c.name)))
k1 = Keyword(name='keyword 1')
@@ -1904,14 +1904,14 @@ class ManyToManyTest(_fixtures.FixtureTest):
primary_key=[item_keywords.c.item_id, item_keywords.c.keyword_id],
properties=dict(
keyword=relationship(mapper(Keyword, keywords, non_primary=True),
- lazy=False,
+ lazy='joined',
uselist=False,
order_by=keywords.c.name # note: a valid place to use order_by on a
))) # scalar relationship(); it determines the eager ordering of
# the parent object within its collection.
mapper(Item, items, properties=dict(
- keywords=relationship(IKAssociation, lazy=False)))
+ keywords=relationship(IKAssociation, lazy='joined')))
session = create_session()
@@ -1946,7 +1946,7 @@ class SaveTest2(_fixtures.FixtureTest):
def test_m2o_nonmatch(self):
mapper(User, users)
mapper(Address, addresses, properties=dict(
- user = relationship(User, lazy=True, uselist=False)))
+ user = relationship(User, lazy='select', uselist=False)))
session = create_session()
@@ -2003,7 +2003,7 @@ class SaveTest3(_base.MappedTest):
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
- keywords = relationship(Keyword, secondary=assoc, lazy=False),))
+ keywords = relationship(Keyword, secondary=assoc, lazy='joined'),))
i = Item()
k1 = Keyword()
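
The substitutions throughout this file track the 0.6 change of relationship()'s lazy parameter from True/False/None to the strings 'select', 'joined' and 'noload'. The mapping, in sketch form (User/Address stand in for any mapped pair):

    # lazy=True  -> lazy='select'  (load on attribute access, the default)
    # lazy=False -> lazy='joined'  (eager load via LEFT OUTER JOIN)
    # lazy=None  -> lazy='noload'  (attribute is never loaded)
    mapper(User, users, properties={
        'addresses': relationship(Address, lazy='joined')  # was lazy=False
    })
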
diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py
index f146e57b8..07e545bd1 100644
--- a/test/orm/test_versioning.py
+++ b/test/orm/test_versioning.py
@@ -1,6 +1,6 @@
import sqlalchemy as sa
from sqlalchemy.test import engines, testing
-from sqlalchemy import Integer, String, ForeignKey, literal_column, orm
+from sqlalchemy import Integer, String, ForeignKey, literal_column, orm, exc
from sqlalchemy.test.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, column_property, sessionmaker
from sqlalchemy.test.testing import eq_, ne_, assert_raises, assert_raises_message
@@ -19,6 +19,7 @@ def make_uuid():
return _uuids.pop(0)
class VersioningTest(_base.MappedTest):
+
@classmethod
def define_tables(cls, metadata):
Table('version_table', metadata,
@@ -130,14 +131,8 @@ class VersioningTest(_base.MappedTest):
s1.query(Foo).with_lockmode('read').get, f1s1.id
)
- # load, version is wrong
- assert_raises(
- sa.orm.exc.ConcurrentModificationError,
- s1.refresh, f1s1, lockmode='read'
- )
-
- # reload it
- s1.query(Foo).populate_existing().get(f1s1.id)
+ # reload it - this expires the old version first
+ s1.refresh(f1s1, lockmode='read')
# now assert version OK
s1.query(Foo).with_lockmode('read').get(f1s1.id)
@@ -145,9 +140,36 @@ class VersioningTest(_base.MappedTest):
# assert brand new load is OK too
s1.close()
s1.query(Foo).with_lockmode('read').get(f1s1.id)
+
+
+ @testing.emits_warning(r'.*does not support updated rowcount')
+ @engines.close_open_connections
+ @testing.requires.update_nowait
+ @testing.resolve_artifact_names
+ def test_versioncheck_for_update(self):
+ """query.with_lockmode performs a 'version check' on an already loaded instance"""
+
+ s1 = create_session(autocommit=False)
+
+ mapper(Foo, version_table, version_id_col=version_table.c.version_id)
+ f1s1 = Foo(value='f1 value')
+ s1.add(f1s1)
+ s1.commit()
+
+ s2 = create_session(autocommit=False)
+ f1s2 = s2.query(Foo).get(f1s1.id)
+ s2.refresh(f1s2, lockmode='update')
+ f1s2.value = 'f1 new value'
+ assert_raises(
+ exc.DBAPIError,
+ s1.refresh, f1s1, lockmode='update_nowait'
+ )
+ s1.rollback()
-
+ s2.commit()
+ s1.refresh(f1s1, lockmode='update_nowait')
+ assert f1s1.version_id == f1s2.version_id
@testing.emits_warning(r'.*does not support updated rowcount')
@engines.close_open_connections
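
The revised version-check test reflects a behavioral change: session.refresh() with a lockmode now expires the instance before re-selecting it, so a stale version_id no longer raises ConcurrentModificationError on refresh; conflicts surface instead when another transaction holds the row, as with 'update_nowait' above. A hedged sketch of the idiom, assuming a mapped Foo with version_table.c.version_id as the counter and f1_id a known primary key (both hypothetical here):

    from sqlalchemy.orm import mapper, create_session

    mapper(Foo, version_table,
           version_id_col=version_table.c.version_id)

    sess = create_session(autocommit=False)
    f1 = sess.query(Foo).get(f1_id)

    # Expires f1 and re-SELECTs it with the given lock; a version_id
    # bumped by another committed transaction is simply loaded fresh.
    sess.refresh(f1, lockmode='update')
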
diff --git a/test/perf/masseagerload.py b/test/perf/masseagerload.py
index 3d251c816..2ed8d2803 100644
--- a/test/perf/masseagerload.py
+++ b/test/perf/masseagerload.py
@@ -17,7 +17,7 @@ subitems = Table('subitems', meta,
class Item(object):pass
class SubItem(object):pass
-mapper(Item, items, properties={'subs':relationship(SubItem, lazy=False)})
+mapper(Item, items, properties={'subs':relationship(SubItem, lazy='joined')})
mapper(SubItem, subitems)
def load():
@@ -35,8 +35,8 @@ def load():
#print l
subitems.insert().execute(*l)
-@profiling.profiled('masseagerload', always=True, sort=['cumulative'])
-def masseagerload(session):
+@profiling.profiled('massjoinedload', always=True, sort=['cumulative'])
+def massjoinedload(session):
session.begin()
query = session.query(Item)
l = query.all()
@@ -46,7 +46,7 @@ def all():
meta.create_all()
try:
load()
- masseagerload(create_session())
+ massjoinedload(create_session())
finally:
meta.drop_all()
diff --git a/test/perf/objupdatespeed.py b/test/perf/objupdatespeed.py
index f98873afd..fad22189a 100644
--- a/test/perf/objupdatespeed.py
+++ b/test/perf/objupdatespeed.py
@@ -27,7 +27,7 @@ class Email(object):
getattr(self, 'address', None))
mapper(Person, Person_table, properties={
- 'emails': relationship(Email, backref='owner', lazy=False)
+ 'emails': relationship(Email, backref='owner', lazy='joined')
})
mapper(Email, Email_table)
compile_mappers()
diff --git a/test/perf/ormsession.py b/test/perf/ormsession.py
index fc3e2e206..0b01fc5a3 100644
--- a/test/perf/ormsession.py
+++ b/test/perf/ormsession.py
@@ -61,14 +61,14 @@ def define_tables():
@profiled('mapper')
def setup_mappers():
mapper(Item, items, properties={
- 'subitems': relationship(SubItem, backref='item', lazy=True)
+ 'subitems': relationship(SubItem, backref='item', lazy='select')
})
mapper(SubItem, subitems)
mapper(Customer, customers, properties={
- 'purchases': relationship(Purchase, lazy=True, backref='customer')
+ 'purchases': relationship(Purchase, lazy='select', backref='customer')
})
mapper(Purchase, purchases, properties={
- 'items': relationship(Item, lazy=True, secondary=purchaseitems)
+ 'items': relationship(Item, lazy='select', secondary=purchaseitems)
})
@profiled('inserts')
@@ -152,8 +152,8 @@ def run_queries():
q = session.query(Purchase). \
order_by(desc(Purchase.purchase_date)). \
limit(50).\
- options(eagerload('items'), eagerload('items.subitems'),
- eagerload('customer'))
+ options(joinedload('items'), joinedload('items.subitems'),
+ joinedload('customer'))
report = []
# "write" the report. pretend it's going to a web template or something,
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index d6a3804be..5d61cbdec 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -171,9 +171,14 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
)
self.assert_compile(
- select([cast("data", Integer)], use_labels=True), # this will work with plain Integer in 0.6
+ select([cast("data", Integer)], use_labels=True),
"SELECT CAST(:param_1 AS INTEGER) AS anon_1"
)
+
+ self.assert_compile(
+ select([func.sum(func.lala(table1.c.myid).label('foo')).label('bar')]),
+ "SELECT sum(lala(mytable.myid)) AS bar FROM mytable"
+ )
def test_paramstyles(self):
stmt = text("select :foo, :bar, :bat from sometable")
@@ -1668,7 +1673,7 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
check_results(postgresql.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'TEXT', 'VARCHAR(20)'], '%(param_1)s')
# then the Oracle engine
- check_results(oracle.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'CLOB', 'VARCHAR(20)'], ':param_1')
+ check_results(oracle.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'CLOB', 'VARCHAR(20 CHAR)'], ':param_1')
# then the sqlite engine
check_results(sqlite.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'TEXT', 'VARCHAR(20)'], '?')
@@ -1755,15 +1760,129 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
else:
self.assert_compile(s1, "SELECT %s FROM (SELECT %s FROM mytable)" % (expr,expr))
+ def test_hints(self):
+ s = select([table1.c.myid]).with_hint(table1, "test hint %(name)s")
+
+ s2 = select([table1.c.myid]).\
+ with_hint(table1, "index(%(name)s idx)", 'oracle').\
+ with_hint(table1, "WITH HINT INDEX idx", 'sybase')
+
+ a1 = table1.alias()
+ s3 = select([a1.c.myid]).with_hint(a1, "index(%(name)s hint)")
+
+ subs4 = select([
+ table1, table2
+ ]).select_from(table1.join(table2, table1.c.myid==table2.c.otherid)).\
+ with_hint(table1, 'hint1')
+
+ s4 = select([table3]).select_from(
+ table3.join(
+ subs4,
+ subs4.c.othername==table3.c.otherstuff
+ )
+ ).\
+ with_hint(table3, 'hint3')
+
+ subs5 = select([
+ table1, table2
+ ]).select_from(table1.join(table2, table1.c.myid==table2.c.otherid))
+ s5 = select([table3]).select_from(
+ table3.join(
+ subs5,
+ subs5.c.othername==table3.c.otherstuff
+ )
+ ).\
+ with_hint(table3, 'hint3').\
+ with_hint(table1, 'hint1')
+
+ t1 = table('QuotedName', column('col1'))
+ s6 = select([t1.c.col1]).where(t1.c.col1>10).with_hint(t1, '%(name)s idx1')
+ a2 = t1.alias('SomeName')
+ s7 = select([a2.c.col1]).where(a2.c.col1>10).with_hint(a2, '%(name)s idx1')
+
+ mysql_d, oracle_d, sybase_d = \
+ mysql.dialect(), \
+ oracle.dialect(), \
+ sybase.dialect()
+
+ for stmt, dialect, expected in [
+ (s, mysql_d,
+ "SELECT mytable.myid FROM mytable test hint mytable"),
+ (s, oracle_d,
+ "SELECT /*+ test hint mytable */ mytable.myid FROM mytable"),
+ (s, sybase_d,
+ "SELECT mytable.myid FROM mytable test hint mytable"),
+ (s2, mysql_d,
+ "SELECT mytable.myid FROM mytable"),
+ (s2, oracle_d,
+ "SELECT /*+ index(mytable idx) */ mytable.myid FROM mytable"),
+ (s2, sybase_d,
+ "SELECT mytable.myid FROM mytable WITH HINT INDEX idx"),
+ (s3, mysql_d,
+ "SELECT mytable_1.myid FROM mytable AS mytable_1 "
+ "index(mytable_1 hint)"),
+ (s3, oracle_d,
+ "SELECT /*+ index(mytable_1 hint) */ mytable_1.myid FROM "
+ "mytable mytable_1"),
+ (s3, sybase_d,
+ "SELECT mytable_1.myid FROM mytable AS mytable_1 "
+ "index(mytable_1 hint)"),
+ (s4, mysql_d,
+ "SELECT thirdtable.userid, thirdtable.otherstuff FROM thirdtable "
+ "hint3 INNER JOIN (SELECT mytable.myid, mytable.name, "
+ "mytable.description, myothertable.otherid, "
+ "myothertable.othername FROM mytable hint1 INNER "
+ "JOIN myothertable ON mytable.myid = myothertable.otherid) "
+ "ON othername = thirdtable.otherstuff"),
+ (s4, sybase_d,
+ "SELECT thirdtable.userid, thirdtable.otherstuff FROM thirdtable "
+ "hint3 JOIN (SELECT mytable.myid, mytable.name, "
+ "mytable.description, myothertable.otherid, "
+ "myothertable.othername FROM mytable hint1 "
+ "JOIN myothertable ON mytable.myid = myothertable.otherid) "
+ "ON othername = thirdtable.otherstuff"),
+ (s4, oracle_d,
+ "SELECT /*+ hint3 */ thirdtable.userid, thirdtable.otherstuff "
+ "FROM thirdtable JOIN (SELECT /*+ hint1 */ mytable.myid,"
+ " mytable.name, mytable.description, myothertable.otherid,"
+ " myothertable.othername FROM mytable JOIN myothertable ON"
+ " mytable.myid = myothertable.otherid) ON othername ="
+ " thirdtable.otherstuff"),
+# TODO: figure out dictionary ordering solution here
+# (s5, oracle_d,
+# "SELECT /*+ hint3 */ /*+ hint1 */ thirdtable.userid, "
+# "thirdtable.otherstuff "
+# "FROM thirdtable JOIN (SELECT mytable.myid,"
+# " mytable.name, mytable.description, myothertable.otherid,"
+# " myothertable.othername FROM mytable JOIN myothertable ON"
+# " mytable.myid = myothertable.otherid) ON othername ="
+# " thirdtable.otherstuff"),
+ (s6, oracle_d,
+ """SELECT /*+ "QuotedName" idx1 */ "QuotedName".col1 """
+ """FROM "QuotedName" WHERE "QuotedName".col1 > :col1_1"""),
+ (s7, oracle_d,
+ """SELECT /*+ SomeName idx1 */ "SomeName".col1 FROM """
+ """"QuotedName" "SomeName" WHERE "SomeName".col1 > :col1_1"""),
+ ]:
+ self.assert_compile(
+ stmt,
+ expected,
+ dialect=dialect
+ )
+
class CRUDTest(TestBase, AssertsCompiledSQL):
def test_insert(self):
# generic insert, will create bind params for all columns
- self.assert_compile(insert(table1), "INSERT INTO mytable (myid, name, description) VALUES (:myid, :name, :description)")
+ self.assert_compile(insert(table1),
+ "INSERT INTO mytable (myid, name, description) "
+ "VALUES (:myid, :name, :description)")
# insert with user-supplied bind params for specific columns,
# cols provided literally
self.assert_compile(
- insert(table1, {table1.c.myid : bindparam('userid'), table1.c.name : bindparam('username')}),
+ insert(table1, {
+ table1.c.myid : bindparam('userid'),
+ table1.c.name : bindparam('username')}),
"INSERT INTO mytable (myid, name) VALUES (:userid, :username)")
# insert with user-supplied bind params for specific columns, cols
@@ -1781,33 +1900,79 @@ class CRUDTest(TestBase, AssertsCompiledSQL):
)
self.assert_compile(
- insert(table1, values={table1.c.myid : bindparam('userid')}).values({table1.c.name : bindparam('username')}),
+ insert(table1, values={
+ table1.c.myid : bindparam('userid')
+ }).values({table1.c.name : bindparam('username')}),
"INSERT INTO mytable (myid, name) VALUES (:userid, :username)"
)
- self.assert_compile(insert(table1, values=dict(myid=func.lala())), "INSERT INTO mytable (myid) VALUES (lala())")
+ self.assert_compile(
+ insert(table1, values=dict(myid=func.lala())),
+ "INSERT INTO mytable (myid) VALUES (lala())")
def test_inline_insert(self):
metadata = MetaData()
table = Table('sometable', metadata,
Column('id', Integer, primary_key=True),
Column('foo', Integer, default=func.foobar()))
- self.assert_compile(table.insert(values={}, inline=True), "INSERT INTO sometable (foo) VALUES (foobar())")
- self.assert_compile(table.insert(inline=True), "INSERT INTO sometable (foo) VALUES (foobar())", params={})
+ self.assert_compile(
+ table.insert(values={}, inline=True),
+ "INSERT INTO sometable (foo) VALUES (foobar())")
+ self.assert_compile(
+ table.insert(inline=True),
+ "INSERT INTO sometable (foo) VALUES (foobar())", params={})
def test_update(self):
- self.assert_compile(update(table1, table1.c.myid == 7), "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1", params = {table1.c.name:'fred'})
- self.assert_compile(table1.update().where(table1.c.myid==7).values({table1.c.myid:5}), "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1", checkparams={'myid':5, 'myid_1':7})
- self.assert_compile(update(table1, table1.c.myid == 7), "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1", params = {'name':'fred'})
- self.assert_compile(update(table1, values = {table1.c.name : table1.c.myid}), "UPDATE mytable SET name=mytable.myid")
- self.assert_compile(update(table1, whereclause = table1.c.name == bindparam('crit'), values = {table1.c.name : 'hi'}), "UPDATE mytable SET name=:name WHERE mytable.name = :crit", params = {'crit' : 'notthere'}, checkparams={'crit':'notthere', 'name':'hi'})
- self.assert_compile(update(table1, table1.c.myid == 12, values = {table1.c.name : table1.c.myid}), "UPDATE mytable SET name=mytable.myid, description=:description WHERE mytable.myid = :myid_1", params = {'description':'test'}, checkparams={'description':'test', 'myid_1':12})
- self.assert_compile(update(table1, table1.c.myid == 12, values = {table1.c.myid : 9}), "UPDATE mytable SET myid=:myid, description=:description WHERE mytable.myid = :myid_1", params = {'myid_1': 12, 'myid': 9, 'description': 'test'})
- self.assert_compile(update(table1, table1.c.myid ==12), "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1", params={'myid':18}, checkparams={'myid':18, 'myid_1':12})
+ self.assert_compile(
+ update(table1, table1.c.myid == 7),
+ "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1",
+ params = {table1.c.name:'fred'})
+ self.assert_compile(
+ table1.update().where(table1.c.myid==7).
+ values({table1.c.myid:5}),
+ "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1",
+ checkparams={'myid':5, 'myid_1':7})
+ self.assert_compile(
+ update(table1, table1.c.myid == 7),
+ "UPDATE mytable SET name=:name WHERE mytable.myid = :myid_1",
+ params = {'name':'fred'})
+ self.assert_compile(
+ update(table1, values = {table1.c.name : table1.c.myid}),
+ "UPDATE mytable SET name=mytable.myid")
+ self.assert_compile(
+ update(table1,
+ whereclause = table1.c.name == bindparam('crit'),
+ values = {table1.c.name : 'hi'}),
+ "UPDATE mytable SET name=:name WHERE mytable.name = :crit",
+ params = {'crit' : 'notthere'},
+ checkparams={'crit':'notthere', 'name':'hi'})
+ self.assert_compile(
+ update(table1, table1.c.myid == 12,
+ values = {table1.c.name : table1.c.myid}),
+ "UPDATE mytable SET name=mytable.myid, description="
+ ":description WHERE mytable.myid = :myid_1",
+ params = {'description':'test'},
+ checkparams={'description':'test', 'myid_1':12})
+ self.assert_compile(
+ update(table1, table1.c.myid == 12,
+ values = {table1.c.myid : 9}),
+ "UPDATE mytable SET myid=:myid, description=:description "
+ "WHERE mytable.myid = :myid_1",
+ params = {'myid_1': 12, 'myid': 9, 'description': 'test'})
+ self.assert_compile(
+ update(table1, table1.c.myid ==12),
+ "UPDATE mytable SET myid=:myid WHERE mytable.myid = :myid_1",
+ params={'myid':18}, checkparams={'myid':18, 'myid_1':12})
s = table1.update(table1.c.myid == 12, values = {table1.c.name : 'lala'})
c = s.compile(column_keys=['id', 'name'])
- self.assert_compile(update(table1, table1.c.myid == 12, values = {table1.c.name : table1.c.myid}).values({table1.c.name:table1.c.name + 'foo'}), "UPDATE mytable SET name=(mytable.name || :name_1), description=:description WHERE mytable.myid = :myid_1", params = {'description':'test'})
- self.assert_(str(s) == str(c))
+ self.assert_compile(
+ update(table1, table1.c.myid == 12,
+ values = {table1.c.name : table1.c.myid}
+ ).values({table1.c.name:table1.c.name + 'foo'}),
+ "UPDATE mytable SET name=(mytable.name || :name_1), "
+ "description=:description WHERE mytable.myid = :myid_1",
+ params = {'description':'test'})
+ eq_(str(s), str(c))
self.assert_compile(update(table1,
(table1.c.myid == func.hoho(4)) &
@@ -1815,28 +1980,45 @@ class CRUDTest(TestBase, AssertsCompiledSQL):
values = {
table1.c.name : table1.c.name + "lala",
table1.c.myid : func.do_stuff(table1.c.myid, literal('hoho'))
- }), "UPDATE mytable SET myid=do_stuff(mytable.myid, :param_1), name=(mytable.name || :name_1) "
- "WHERE mytable.myid = hoho(:hoho_1) AND mytable.name = :param_2 || mytable.name || :param_3")
+ }), "UPDATE mytable SET myid=do_stuff(mytable.myid, :param_1), "
+ "name=(mytable.name || :name_1) "
+ "WHERE mytable.myid = hoho(:hoho_1) AND mytable.name = :param_2 || "
+ "mytable.name || :param_3")
def test_correlated_update(self):
# test against a straight text subquery
- u = update(table1, values = {table1.c.name : text("(select name from mytable where id=mytable.id)")})
- self.assert_compile(u, "UPDATE mytable SET name=(select name from mytable where id=mytable.id)")
+ u = update(table1, values = {
+ table1.c.name :
+ text("(select name from mytable where id=mytable.id)")})
+ self.assert_compile(u,
+ "UPDATE mytable SET name=(select name from mytable "
+ "where id=mytable.id)")
mt = table1.alias()
- u = update(table1, values = {table1.c.name : select([mt.c.name], mt.c.myid==table1.c.myid)})
- self.assert_compile(u, "UPDATE mytable SET name=(SELECT mytable_1.name FROM mytable AS mytable_1 WHERE mytable_1.myid = mytable.myid)")
+ u = update(table1, values = {
+ table1.c.name :
+ select([mt.c.name], mt.c.myid==table1.c.myid)
+ })
+ self.assert_compile(u,
+ "UPDATE mytable SET name=(SELECT mytable_1.name FROM "
+ "mytable AS mytable_1 WHERE mytable_1.myid = mytable.myid)")
# test against a regular constructed subquery
s = select([table2], table2.c.otherid == table1.c.myid)
u = update(table1, table1.c.name == 'jack', values = {table1.c.name : s})
- self.assert_compile(u, "UPDATE mytable SET name=(SELECT myothertable.otherid, myothertable.othername FROM myothertable WHERE myothertable.otherid = mytable.myid) WHERE mytable.name = :name_1")
+ self.assert_compile(u,
+ "UPDATE mytable SET name=(SELECT myothertable.otherid, "
+ "myothertable.othername FROM myothertable WHERE "
+ "myothertable.otherid = mytable.myid) WHERE mytable.name = :name_1")
# test a non-correlated WHERE clause
s = select([table2.c.othername], table2.c.otherid == 7)
u = update(table1, table1.c.name==s)
- self.assert_compile(u, "UPDATE mytable SET myid=:myid, name=:name, description=:description WHERE mytable.name = "\
- "(SELECT myothertable.othername FROM myothertable WHERE myothertable.otherid = :otherid_1)")
+ self.assert_compile(u,
+ "UPDATE mytable SET myid=:myid, name=:name, "
+ "description=:description WHERE mytable.name = "
+ "(SELECT myothertable.othername FROM myothertable "
+ "WHERE myothertable.otherid = :otherid_1)")
# test one that is actually correlated...
s = select([table2.c.othername], table2.c.otherid == table1.c.myid)
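
The new test_hints coverage in this file exercises Select.with_hint(): the %(name)s token is replaced with the table's (possibly aliased, possibly quoted) name, and an optional third argument restricts the hint to one dialect. A short sketch against the same mytable fixture (ix_myid is a hypothetical index name):

    s = select([table1.c.myid]).\
        with_hint(table1, "index(%(name)s ix_myid)", 'oracle').\
        with_hint(table1, "WITH HINT INDEX ix_myid", 'sybase')

    # oracle:  SELECT /*+ index(mytable ix_myid) */ mytable.myid FROM mytable
    # sybase:  SELECT mytable.myid FROM mytable WITH HINT INDEX ix_myid
    # other dialects ignore both hints entirely
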
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index a6f8c5956..5457c7a79 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -207,6 +207,31 @@ class ClauseTest(TestBase, AssertsCompiledSQL):
assert c1 == str(clause)
assert str(clause2) == str(t1.join(t2, t1.c.col2==t2.c.col3))
+ def test_aliased_column_adapt(self):
+ clause = t1.select()
+
+ aliased = t1.select().alias()
+ aliased2 = t1.alias()
+
+ adapter = sql_util.ColumnAdapter(aliased)
+
+ f = select([
+ adapter.columns[c]
+ for c in aliased2.c
+ ]).select_from(aliased)
+
+ s = select([aliased2]).select_from(aliased)
+ eq_(str(s), str(f))
+
+ f = select([
+ adapter.columns[func.count(aliased2.c.col1)]
+ ]).select_from(aliased)
+ eq_(
+ str(select([func.count(aliased2.c.col1)]).select_from(aliased)),
+ str(f)
+ )
+
+
def test_text(self):
clause = text("select * from table where foo=:bar", bindparams=[bindparam('bar')])
c1 = str(clause)
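
test_aliased_column_adapt above checks sql_util.ColumnAdapter, which rewrites column expressions in terms of an equivalent aliased selectable, including columns buried inside function calls. In sketch form, assuming t1 is a Table with a col1 column as in the test module:

    from sqlalchemy import select, func
    from sqlalchemy.sql import util as sql_util

    aliased = t1.select().alias()
    adapter = sql_util.ColumnAdapter(aliased)

    # func.count(t1.c.col1) is rewritten so col1 references the alias;
    # the resulting SELECT is expressed entirely against "aliased".
    expr = adapter.columns[func.count(t1.c.col1)]
    stmt = select([expr]).select_from(aliased)
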
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 7e130ee09..089ef727d 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -42,7 +42,7 @@ class AdaptTest(TestBase):
(DATE, "DATE"),
(TIME, "TIME"),
(CLOB, "CLOB"),
- (VARCHAR(10), "VARCHAR(10)"),
+ (VARCHAR(10), ("VARCHAR(10)","VARCHAR(10 CHAR)")),
(NVARCHAR(10), ("NVARCHAR(10)", "NATIONAL VARCHAR(10)", "NVARCHAR2(10)")),
(CHAR, "CHAR"),
(NCHAR, ("NCHAR", "NATIONAL CHAR")),
@@ -249,46 +249,6 @@ class UserDefinedTest(TestBase):
def teardown_class(cls):
metadata.drop_all()
-class ColumnsTest(TestBase, AssertsExecutionResults):
-
- def testcolumns(self):
- expectedResults = { 'int_column': 'int_column INTEGER',
- 'smallint_column': 'smallint_column SMALLINT',
- 'varchar_column': 'varchar_column VARCHAR(20)',
- 'numeric_column': 'numeric_column NUMERIC(12, 3)',
- 'float_column': 'float_column FLOAT(25)',
- }
-
- db = testing.db
- if testing.against('oracle') or \
- testing.against('sqlite') or \
- testing.against('firebird'):
- expectedResults['float_column'] = 'float_column FLOAT'
-
- if testing.against('maxdb'):
- expectedResults['numeric_column'] = (
- expectedResults['numeric_column'].replace('NUMERIC', 'FIXED'))
-
- if testing.against('mssql'):
- for key, value in expectedResults.items():
- expectedResults[key] = '%s NULL' % value
-
- testTable = Table('testColumns', MetaData(db),
- Column('int_column', Integer),
- Column('smallint_column', SmallInteger),
- Column('varchar_column', String(20)),
- Column('numeric_column', Numeric(12,3)),
- Column('float_column', Float(25)),
- )
-
- for aCol in testTable.c:
- eq_(
- expectedResults[aCol.name],
- db.dialect.ddl_compiler(
- db.dialect, schema.CreateTable(testTable)).
- get_column_specification(aCol)
- )
-
class UnicodeTest(TestBase, AssertsExecutionResults):
"""tests the Unicode type. also tests the TypeDecorator with instances in the types package."""
@@ -319,7 +279,11 @@ class UnicodeTest(TestBase, AssertsExecutionResults):
testing.against('oracle+cx_oracle'):
assert testing.db.dialect.returns_unicode_strings == 'conditional'
return
-
+
+ if testing.against('mssql+pymssql'):
+ assert testing.db.dialect.returns_unicode_strings == ('charset' in testing.db.url.query)
+ return
+
assert testing.db.dialect.returns_unicode_strings == \
((testing.db.name, testing.db.driver) in \
(
@@ -1142,7 +1106,8 @@ class NumericTest(TestBase):
[15.7563],
filter_ = lambda n:n is not None and round(n, 5) or None
)
-
+
+ @testing.fails_on('mssql+pymssql', 'FIXME: improve pymssql dec handling')
def test_precision_decimal(self):
numbers = set([
decimal.Decimal("54.234246451650"),
@@ -1156,6 +1121,7 @@ class NumericTest(TestBase):
numbers,
)
+ @testing.fails_on('mssql+pymssql', 'FIXME: improve pymssql dec handling')
def test_enotation_decimal(self):
"""test exceedingly small decimals.
@@ -1209,6 +1175,7 @@ class NumericTest(TestBase):
@testing.fails_on('postgresql+pg8000', 'TODO')
@testing.fails_on("firebird", "Precision must be from 1 to 18")
@testing.fails_on("sybase+pysybase", "TODO")
+ @testing.fails_on('mssql+pymssql', 'FIXME: improve pymssql dec handling')
def test_many_significant_digits(self):
numbers = set([
decimal.Decimal("31943874831932418390.01"),
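
The VARCHAR expectation in AdaptTest becomes a tuple because Oracle now compiles String lengths with character-length semantics, VARCHAR(10 CHAR) (matching the test_compiler change above); the assertion accepts any listed per-dialect rendering. A sketch of the idiom, assuming the test module's testing and VARCHAR imports:

    expected = ("VARCHAR(10)", "VARCHAR(10 CHAR)")
    compiled = testing.db.dialect.type_compiler.process(VARCHAR(10))
    assert compiled in expected, compiled
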
diff --git a/test/zblog/mappers.py b/test/zblog/mappers.py
index 45d101512..4ad542ec1 100644
--- a/test/zblog/mappers.py
+++ b/test/zblog/mappers.py
@@ -22,7 +22,7 @@ def zblog_mappers():
# ways. this will also attach a 'blogs' property to the user mapper.
mapper(Blog, tables.blogs, properties={
'id':tables.blogs.c.blog_id,
- 'owner':relationship(user.User, lazy=False,
+ 'owner':relationship(user.User, lazy='joined',
backref=backref('blogs', cascade="all, delete-orphan")),
})
@@ -38,7 +38,7 @@ def zblog_mappers():
primary_key=[tables.topic_xref.c.post_id,
tables.topic_xref.c.topic_id],
properties={
- 'topic':relationship(Topic, lazy=False),
+ 'topic':relationship(Topic, lazy='joined'),
})
# Post mapper, these are posts within a blog.
@@ -66,11 +66,11 @@ def zblog_mappers():
mapper(Post, posts_with_ccount, properties={
'id':posts_with_ccount.c.post_id,
'body':deferred(tables.posts.c.body),
- 'user':relationship(user.User, lazy=True,
+ 'user':relationship(user.User, lazy='select',
backref=backref('posts', cascade="all, delete-orphan")),
- 'blog':relationship(Blog, lazy=True,
+ 'blog':relationship(Blog, lazy='select',
backref=backref('posts', cascade="all, delete-orphan")),
- 'topics':relationship(TopicAssociation, lazy=False,
+ 'topics':relationship(TopicAssociation, lazy='joined',
cascade="all, delete-orphan",
backref='post')
}, order_by=[desc(posts_with_ccount.c.datetime)])
@@ -81,21 +81,21 @@ def zblog_mappers():
# list of child comments.
mapper(Comment, tables.comments, properties={
'id':tables.comments.c.comment_id,
- 'post':relationship(Post, lazy=True,
+ 'post':relationship(Post, lazy='select',
backref=backref('comments',
cascade="all, delete-orphan")),
- 'user':relationship(user.User, lazy=False,
+ 'user':relationship(user.User, lazy='joined',
backref=backref('comments',
cascade="all, delete-orphan")),
'parent':relationship(Comment,
primaryjoin=(tables.comments.c.parent_comment_id ==
tables.comments.c.comment_id),
foreign_keys=[tables.comments.c.comment_id],
- lazy=True, uselist=False),
+ lazy='select', uselist=False),
'replies':relationship(Comment,
primaryjoin=(tables.comments.c.parent_comment_id ==
tables.comments.c.comment_id),
- lazy=True, uselist=True, cascade="all"),
+ lazy='select', uselist=True, cascade="all"),
})
# we define one special find-by for the comments of a post, which is going to