Diffstat (limited to 'lib/sqlalchemy/orm')
-rw-r--r--  lib/sqlalchemy/orm/__init__.py               1639
-rw-r--r--  lib/sqlalchemy/orm/attributes.py              307
-rw-r--r--  lib/sqlalchemy/orm/base.py                    453
-rw-r--r--  lib/sqlalchemy/orm/collections.py             150
-rw-r--r--  lib/sqlalchemy/orm/dependency.py                2
-rw-r--r--  lib/sqlalchemy/orm/deprecated_interfaces.py     6
-rw-r--r--  lib/sqlalchemy/orm/descriptor_props.py        216
-rw-r--r--  lib/sqlalchemy/orm/dynamic.py                  22
-rw-r--r--  lib/sqlalchemy/orm/evaluator.py                 2
-rw-r--r--  lib/sqlalchemy/orm/events.py                  311
-rw-r--r--  lib/sqlalchemy/orm/exc.py                      20
-rw-r--r--  lib/sqlalchemy/orm/identity.py                  4
-rw-r--r--  lib/sqlalchemy/orm/instrumentation.py          85
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py              452
-rw-r--r--  lib/sqlalchemy/orm/loading.py                  28
-rw-r--r--  lib/sqlalchemy/orm/mapper.py                  582
-rw-r--r--  lib/sqlalchemy/orm/path_registry.py           261
-rw-r--r--  lib/sqlalchemy/orm/persistence.py             134
-rw-r--r--  lib/sqlalchemy/orm/properties.py             1212
-rw-r--r--  lib/sqlalchemy/orm/query.py                   562
-rw-r--r--  lib/sqlalchemy/orm/relationships.py          1618
-rw-r--r--  lib/sqlalchemy/orm/scoping.py                   4
-rw-r--r--  lib/sqlalchemy/orm/session.py                 152
-rw-r--r--  lib/sqlalchemy/orm/state.py                   129
-rw-r--r--  lib/sqlalchemy/orm/strategies.py              395
-rw-r--r--  lib/sqlalchemy/orm/strategy_options.py        924
-rw-r--r--  lib/sqlalchemy/orm/sync.py                      2
-rw-r--r--  lib/sqlalchemy/orm/unitofwork.py               10
-rw-r--r--  lib/sqlalchemy/orm/util.py                    536
29 files changed, 5673 insertions, 4545 deletions
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 1173d5d09..7825a70ac 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -1,5 +1,5 @@
# orm/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -24,11 +24,13 @@ from .mapper import (
from .interfaces import (
EXT_CONTINUE,
EXT_STOP,
- MapperExtension,
PropComparator,
+ )
+from .deprecated_interfaces import (
+ MapperExtension,
SessionExtension,
AttributeExtension,
- )
+)
from .util import (
aliased,
join,
@@ -39,14 +41,13 @@ from .util import (
with_parent,
with_polymorphic,
)
-from .properties import (
- ColumnProperty,
+from .properties import ColumnProperty
+from .relationships import RelationshipProperty
+from .descriptor_props import (
ComparableProperty,
CompositeProperty,
- RelationshipProperty,
- PropertyLoader,
SynonymProperty,
- )
+ )
from .relationships import (
foreign,
remote,
@@ -61,75 +62,10 @@ from .scoping import (
scoped_session
)
from . import mapper as mapperlib
-from . import strategies
-from .query import AliasOption, Query
-from ..sql import util as sql_util
-from .. import util as sa_util
-
-from . import interfaces
-
-# here, we can establish InstrumentationManager back
-# in sqlalchemy.orm and sqlalchemy.orm.interfaces, which
-# also re-establishes the extended instrumentation system.
-#from ..ext import instrumentation as _ext_instrumentation
-#InstrumentationManager = \
-# interfaces.InstrumentationManager = \
-# _ext_instrumentation.InstrumentationManager
-
-__all__ = (
- 'EXT_CONTINUE',
- 'EXT_STOP',
- 'MapperExtension',
- 'AttributeExtension',
- 'PropComparator',
- 'Query',
- 'Session',
- 'aliased',
- 'backref',
- 'class_mapper',
- 'clear_mappers',
- 'column_property',
- 'comparable_property',
- 'compile_mappers',
- 'configure_mappers',
- 'composite',
- 'contains_alias',
- 'contains_eager',
- 'create_session',
- 'defer',
- 'deferred',
- 'dynamic_loader',
- 'eagerload',
- 'eagerload_all',
- 'foreign',
- 'immediateload',
- 'join',
- 'joinedload',
- 'joinedload_all',
- 'lazyload',
- 'mapper',
- 'make_transient',
- 'noload',
- 'object_mapper',
- 'object_session',
- 'outerjoin',
- 'polymorphic_union',
- 'reconstructor',
- 'relationship',
- 'relation',
- 'remote',
- 'scoped_session',
- 'sessionmaker',
- 'subqueryload',
- 'subqueryload_all',
- 'synonym',
- 'undefer',
- 'undefer_group',
- 'validates',
- 'was_deleted',
- 'with_polymorphic'
- )
-
+from .query import AliasOption, Query, Bundle
+from ..util.langhelpers import public_factory
+from .. import util as _sa_util
+from . import strategies as _strategies
def create_session(bind=None, **kwargs):
"""Create a new :class:`.Session`
@@ -167,501 +103,7 @@ def create_session(bind=None, **kwargs):
kwargs.setdefault('expire_on_commit', False)
return Session(bind=bind, **kwargs)
-
-def relationship(argument, secondary=None, **kwargs):
- """Provide a relationship of a primary Mapper to a secondary Mapper.
-
- This corresponds to a parent-child or associative table relationship. The
- constructed class is an instance of :class:`.RelationshipProperty`.
-
- A typical :func:`.relationship`, used in a classical mapping::
-
- mapper(Parent, properties={
- 'children': relationship(Child)
- })
-
- Some arguments accepted by :func:`.relationship` optionally accept a
- callable function, which when called produces the desired value.
- The callable is invoked by the parent :class:`.Mapper` at "mapper
- initialization" time, which happens only when mappers are first used, and
- is assumed to be after all mappings have been constructed. This can be
- used to resolve order-of-declaration and other dependency issues, such as
- if ``Child`` is declared below ``Parent`` in the same file::
-
- mapper(Parent, properties={
- "children":relationship(lambda: Child,
- order_by=lambda: Child.id)
- })
-
- When using the :ref:`declarative_toplevel` extension, the Declarative
- initializer allows string arguments to be passed to :func:`.relationship`.
- These string arguments are converted into callables that evaluate
- the string as Python code, using the Declarative
- class-registry as a namespace. This allows the lookup of related
- classes to be automatic via their string name, and removes the need to
- import related classes at all into the local module space::
-
- from sqlalchemy.ext.declarative import declarative_base
-
- Base = declarative_base()
-
- class Parent(Base):
- __tablename__ = 'parent'
- id = Column(Integer, primary_key=True)
- children = relationship("Child", order_by="Child.id")
-
- A full array of examples and reference documentation regarding
- :func:`.relationship` is at :ref:`relationship_config_toplevel`.
-
- :param argument:
- a mapped class, or actual :class:`.Mapper` instance, representing the
- target of the relationship.
-
- ``argument`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param secondary:
- for a many-to-many relationship, specifies the intermediary
- table, and is an instance of :class:`.Table`. The ``secondary`` keyword
- argument should generally only
- be used for a table that is not otherwise expressed in any class
- mapping, unless this relationship is declared as view only, otherwise
- conflicting persistence operations can occur.
-
- ``secondary`` may
- also be passed as a callable function which is evaluated at
- mapper initialization time.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- many-to-one reference should be loaded when replaced, if
- not already loaded. Normally, history tracking logic for
- simple many-to-ones only needs to be aware of the "new"
- value in order to perform a flush. This flag is available
- for applications that make use of
- :func:`.attributes.get_history` which also need to know
- the "previous" value of the attribute.
-
- :param backref:
- indicates the string name of a property to be placed on the related
- mapper's class that will handle this relationship in the other
- direction. The other property will be created automatically
- when the mappers are configured. Can also be passed as a
- :func:`backref` object to control the configuration of the
- new relationship.
-
- :param back_populates:
- Takes a string name and has the same meaning as ``backref``,
- except the complementing property is **not** created automatically,
- and instead must be configured explicitly on the other mapper. The
- complementing property should also indicate ``back_populates``
- to this relationship to ensure proper functioning.
-
- :param cascade:
- a comma-separated list of cascade rules which determines how
- Session operations should be "cascaded" from parent to child.
- This defaults to ``False``, which means the default cascade
- should be used. The default value is ``"save-update, merge"``.
-
- Available cascades are:
-
- * ``save-update`` - cascade the :meth:`.Session.add`
- operation. This cascade applies both to future and
- past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
- meaning new items added to a collection or scalar relationship
- get placed into the same session as that of the parent, and
- also applies to items which have been removed from this
- relationship but are still part of unflushed history.
-
- * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
- operation
-
- * ``expunge`` - cascade the :meth:`.Session.expunge`
- operation
-
- * ``delete`` - cascade the :meth:`.Session.delete`
- operation
-
- * ``delete-orphan`` - if an item of the child's type is
- detached from its parent, mark it for deletion.
-
- .. versionchanged:: 0.7
- This option does not prevent
- a new instance of the child object from being persisted
- without a parent to start with; to constrain against
- that case, ensure the child's foreign key column(s)
- is configured as NOT NULL
-
- * ``refresh-expire`` - cascade the :meth:`.Session.expire`
- and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
-
- * ``all`` - shorthand for "save-update,merge, refresh-expire,
- expunge, delete"
-
- See the section :ref:`unitofwork_cascades` for more background
- on configuring cascades.
-
- :param cascade_backrefs=True:
- a boolean value indicating if the ``save-update`` cascade should
- operate along an assignment event intercepted by a backref.
- When set to ``False``,
- the attribute managed by this relationship will not cascade
- an incoming transient object into the session of a
- persistent parent, if the event is received via backref.
-
- That is::
-
- mapper(A, a_table, properties={
- 'bs':relationship(B, backref="a", cascade_backrefs=False)
- })
-
- If an ``A()`` is present in the session, assigning it to
- the "a" attribute on a transient ``B()`` will not place
- the ``B()`` into the session. To set the flag in the other
- direction, i.e. so that ``A().bs.append(B())`` won't add
- a transient ``A()`` into the session for a persistent ``B()``::
-
- mapper(A, a_table, properties={
- 'bs':relationship(B,
- backref=backref("a", cascade_backrefs=False)
- )
- })
-
- See the section :ref:`unitofwork_cascades` for more background
- on configuring cascades.
-
- :param collection_class:
- a class or callable that returns a new list-holding object. will
- be used in place of a plain list for storing elements.
- Behavior of this attribute is described in detail at
- :ref:`custom_collections`.
-
- :param comparator_factory:
- a class which extends :class:`.RelationshipProperty.Comparator` which
- provides custom SQL clause generation for comparison operations.
-
- :param doc:
- docstring which will be applied to the resulting descriptor.
-
- :param extension:
- an :class:`.AttributeExtension` instance, or list of extensions,
- which will be prepended to the list of attribute listeners for
- the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- :param foreign_keys:
- a list of columns which are to be used as "foreign key" columns,
- or columns which refer to the value in a remote column, within the
- context of this :func:`.relationship` object's ``primaryjoin``
- condition. That is, if the ``primaryjoin`` condition of this
- :func:`.relationship` is ``a.id == b.a_id``, and the values in ``b.a_id``
- are required to be present in ``a.id``, then the "foreign key" column
- of this :func:`.relationship` is ``b.a_id``.
-
- In normal cases, the ``foreign_keys`` parameter is **not required.**
- :func:`.relationship` will **automatically** determine which columns
- in the ``primaryjoin`` conditition are to be considered "foreign key"
- columns based on those :class:`.Column` objects that specify
- :class:`.ForeignKey`, or are otherwise listed as referencing columns
- in a :class:`.ForeignKeyConstraint` construct. ``foreign_keys`` is only
- needed when:
-
- 1. There is more than one way to construct a join from the local
- table to the remote table, as there are multiple foreign key
- references present. Setting ``foreign_keys`` will limit the
- :func:`.relationship` to consider just those columns specified
- here as "foreign".
-
- .. versionchanged:: 0.8
- A multiple-foreign key join ambiguity can be resolved by
- setting the ``foreign_keys`` parameter alone, without the
- need to explicitly set ``primaryjoin`` as well.
-
- 2. The :class:`.Table` being mapped does not actually have
- :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
- constructs present, often because the table
- was reflected from a database that does not support foreign key
- reflection (MySQL MyISAM).
-
- 3. The ``primaryjoin`` argument is used to construct a non-standard
- join condition, which makes use of columns or expressions that do
- not normally refer to their "parent" column, such as a join condition
- expressed by a complex comparison using a SQL function.
-
- The :func:`.relationship` construct will raise informative error messages
- that suggest the use of the ``foreign_keys`` parameter when presented
- with an ambiguous condition. In typical cases, if :func:`.relationship`
- doesn't raise any exceptions, the ``foreign_keys`` parameter is usually
- not needed.
-
- ``foreign_keys`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- .. seealso::
-
- :ref:`relationship_foreign_keys`
-
- :ref:`relationship_custom_foreign`
-
- :func:`.foreign` - allows direct annotation of the "foreign" columns
- within a ``primaryjoin`` condition.
-
- .. versionadded:: 0.8
- The :func:`.foreign` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "foreign".
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param innerjoin=False:
- when ``True``, joined eager loads will use an inner join to join
- against related tables instead of an outer join. The purpose
- of this option is generally one of performance, as inner joins
- generally perform better than outer joins. Another reason can be
- the use of ``with_lockmode``, which does not support outer joins.
-
- This flag can be set to ``True`` when the relationship references an
- object via many-to-one using local foreign keys that are not nullable,
- or when the reference is one-to-one or a collection that is guaranteed
- to have one or at least one entry.
-
- :param join_depth:
- when non-``None``, an integer value indicating how many levels
- deep "eager" loaders should join on a self-referring or cyclical
- relationship. The number counts how many times the same Mapper
- shall be present in the loading condition along a particular join
- branch. When left at its default of ``None``, eager loaders
- will stop chaining when they encounter a the same target mapper
- which is already higher up in the chain. This option applies
- both to joined- and subquery- eager loaders.
-
- :param lazy='select': specifies
- how the related items should be loaded. Default value is
- ``select``. Values include:
-
- * ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement, or identity map
- fetch for simple many-to-one references.
-
- * ``immediate`` - items should be loaded as the parents are loaded,
- using a separate SELECT statement, or identity map fetch for
- simple many-to-one references.
-
- .. versionadded:: 0.6.5
-
- * ``joined`` - items should be loaded "eagerly" in the same query as
- that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
- the join is "outer" or not is determined by the ``innerjoin``
- parameter.
-
- * ``subquery`` - items should be loaded "eagerly" as the parents are
- loaded, using one additional SQL statement, which issues a JOIN to a
- subquery of the original statement, for each collection requested.
-
- * ``noload`` - no loading should occur at any time. This is to
- support "write-only" attributes, or attributes which are
- populated in some manner specific to the application.
-
- * ``dynamic`` - the attribute will return a pre-configured
- :class:`~sqlalchemy.orm.query.Query` object for all read
- operations, onto which further filtering operations can be
- applied before iterating the results. See
- the section :ref:`dynamic_relationship` for more details.
-
- * True - a synonym for 'select'
-
- * False - a synonym for 'joined'
-
- * None - a synonym for 'noload'
-
- Detailed discussion of loader strategies is at :doc:`/orm/loading`.
-
- :param load_on_pending=False:
- Indicates loading behavior for transient or pending parent objects.
-
- .. versionchanged:: 0.8
- load_on_pending is superseded by
- :meth:`.Session.enable_relationship_loading`.
-
- When set to ``True``, causes the lazy-loader to
- issue a query for a parent object that is not persistent, meaning it has
- never been flushed. This may take effect for a pending object when
- autoflush is disabled, or for a transient object that has been
- "attached" to a :class:`.Session` but is not part of its pending
- collection.
-
- The load_on_pending flag does not improve behavior
- when the ORM is used normally - object references should be constructed
- at the object level, not at the foreign key level, so that they
- are present in an ordinary way before flush() proceeds. This flag
- is not not intended for general use.
-
- .. versionadded:: 0.6.5
-
- :param order_by:
- indicates the ordering that should be applied when loading these
- items. ``order_by`` is expected to refer to one of the :class:`.Column`
- objects to which the target class is mapped, or
- the attribute itself bound to the target class which refers
- to the column.
-
- ``order_by`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param passive_deletes=False:
- Indicates loading behavior during delete operations.
-
- A value of True indicates that unloaded child items should not
- be loaded during a delete operation on the parent. Normally,
- when a parent item is deleted, all child items are loaded so
- that they can either be marked as deleted, or have their
- foreign key to the parent set to NULL. Marking this flag as
- True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
- place which will handle updating/deleting child rows on the
- database side.
-
- Additionally, setting the flag to the string value 'all' will
- disable the "nulling out" of the child foreign keys, when there
- is no delete or delete-orphan cascade enabled. This is
- typically used when a triggering or error raise scenario is in
- place on the database side. Note that the foreign key
- attributes on in-session child objects will not be changed
- after a flush occurs so this is a very special use-case
- setting.
-
- :param passive_updates=True:
- Indicates loading and INSERT/UPDATE/DELETE behavior when the
- source of a foreign key value changes (i.e. an "on update"
- cascade), which are typically the primary key columns of the
- source row.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will
- handle propagation of an UPDATE from a source column to
- dependent rows. Note that with databases which enforce
- referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
- ON UPDATE CASCADE is required for this operation. The
- relationship() will update the value of the attribute on related
- items which are locally present in the session during a flush.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The relationship() will issue the
- appropriate UPDATE statements to the database in response to the
- change of a referenced key, and items locally present in the
- session during a flush will also be refreshed.
-
- This flag should probably be set to False if primary key changes
- are expected and the database in use doesn't support CASCADE
- (i.e. SQLite, MySQL MyISAM tables).
-
- Also see the passive_updates flag on ``mapper()``.
-
- A future SQLAlchemy release will provide a "detect" feature for
- this flag.
-
- :param post_update:
- this indicates that the relationship should be handled by a
- second UPDATE statement after an INSERT or before a
- DELETE. Currently, it also will issue an UPDATE after the
- instance was UPDATEd as well, although this technically should
- be improved. This flag is used to handle saving bi-directional
- dependencies between two individual rows (i.e. each row
- references the other), where it would otherwise be impossible to
- INSERT or DELETE both rows fully since one row exists before the
- other. Use this flag when a particular mapping arrangement will
- incur two rows that are dependent on each other, such as a table
- that has a one-to-many relationship to a set of child rows, and
- also has a column that references a single child row within that
- list (i.e. both tables contain a foreign key to each other). If
- a ``flush()`` operation returns an error that a "cyclical
- dependency" was detected, this is a cue that you might want to
- use ``post_update`` to "break" the cycle.
-
- :param primaryjoin:
- a SQL expression that will be used as the primary
- join of this child object against the parent object, or in a
- many-to-many relationship the join of the primary object to the
- association table. By default, this value is computed based on the
- foreign key relationships of the parent and child tables (or association
- table).
-
- ``primaryjoin`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param remote_side:
- used for self-referential relationships, indicates the column or
- list of columns that form the "remote side" of the relationship.
-
- ``remote_side`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- .. versionchanged:: 0.8
- The :func:`.remote` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "remote".
-
- :param query_class:
- a :class:`.Query` subclass that will be used as the base of the
- "appender query" returned by a "dynamic" relationship, that
- is, a relationship that specifies ``lazy="dynamic"`` or was
- otherwise constructed using the :func:`.orm.dynamic_loader`
- function.
-
- :param secondaryjoin:
- a SQL expression that will be used as the join of
- an association table to the child object. By default, this value is
- computed based on the foreign key relationships of the association and
- child tables.
-
- ``secondaryjoin`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param single_parent=(True|False):
- when True, installs a validator which will prevent objects
- from being associated with more than one parent at a time.
- This is used for many-to-one or many-to-many relationships that
- should be treated either as one-to-one or one-to-many. Its
- usage is optional unless delete-orphan cascade is also
- set on this relationship(), in which case its required.
-
- :param uselist=(True|False):
- a boolean that indicates if this property should be loaded as a
- list or a scalar. In most cases, this value is determined
- automatically by ``relationship()``, based on the type and direction
- of the relationship - one to many forms a list, many to one
- forms a scalar, many to many is a list. If a scalar is desired
- where normally a list would be present, such as a bi-directional
- one-to-one relationship, set uselist to False.
-
- :param viewonly=False:
- when set to True, the relationship is used only for loading objects
- within the relationship, and has no effect on the unit-of-work
- flush process. Relationships with viewonly can specify any kind of
- join conditions to provide additional views of related objects
- onto a parent object. Note that the functionality of a viewonly
- relationship has its limits - complicated join conditions may
- not compile into eager or lazy loaders properly. If this is the
- case, use an alternative method.
-
- .. versionchanged:: 0.6
- :func:`relationship` was renamed from its previous name
- :func:`relation`.
-
- """
- return RelationshipProperty(argument, secondary=secondary, **kwargs)
-
+relationship = public_factory(RelationshipProperty, ".orm.relationship")
def relation(*arg, **kw):
"""A synonym for :func:`relationship`."""
@@ -689,138 +131,8 @@ def dynamic_loader(argument, **kw):
return relationship(argument, **kw)
-def column_property(*cols, **kw):
- """Provide a column-level property for use with a Mapper.
-
- Column-based properties can normally be applied to the mapper's
- ``properties`` dictionary using the :class:`.Column` element directly.
- Use this function when the given column is not directly present within the
- mapper's selectable; examples include SQL expressions, functions, and
- scalar SELECT queries.
-
- Columns that aren't present in the mapper's selectable won't be persisted
- by the mapper and are effectively "read-only" attributes.
-
- :param \*cols:
- list of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. Normally, history tracking logic for
- simple non-primary-key scalar values only needs to be
- aware of the "new" value in order to perform a flush. This
- flag is available for applications that make use of
- :func:`.attributes.get_history` or :meth:`.Session.is_modified`
- which also need to know
- the "previous" value of the attribute.
-
- .. versionadded:: 0.6.6
-
- :param comparator_factory: a class which extends
- :class:`.ColumnProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param group:
- a group name for this property when marked as deferred.
-
- :param deferred:
- when True, the column property is "deferred", meaning that
- it does not load immediately, and is instead loaded when the
- attribute is first accessed on an instance. See also
- :func:`~sqlalchemy.orm.deferred`.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param expire_on_flush=True:
- Disable expiry on flush. A column_property() which refers
- to a SQL expression (and not a single table-bound column)
- is considered to be a "read only" property; populating it
- has no effect on the state of data, and it can only return
- database state. For this reason a column_property()'s value
- is expired whenever the parent object is involved in a
- flush, that is, has any kind of "dirty" state within a flush.
- Setting this parameter to ``False`` will have the effect of
- leaving any existing value present after the flush proceeds.
- Note however that the :class:`.Session` with default expiration
- settings still expires
- all attributes after a :meth:`.Session.commit` call, however.
-
- .. versionadded:: 0.7.3
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param extension:
- an
- :class:`.AttributeExtension`
- instance, or list of extensions, which will be prepended
- to the list of attribute listeners for the resulting
- descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
-
- return ColumnProperty(*cols, **kw)
-
-
-def composite(class_, *cols, **kwargs):
- """Return a composite column-based property for use with a Mapper.
-
- See the mapping documentation section :ref:`mapper_composite` for a full
- usage example.
-
- The :class:`.MapperProperty` returned by :func:`.composite`
- is the :class:`.CompositeProperty`.
-
- :param class\_:
- The "composite type" class.
-
- :param \*cols:
- List of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. See the same flag on :func:`.column_property`.
-
- .. versionchanged:: 0.7
- This flag specifically becomes meaningful
- - previously it was a placeholder.
-
- :param group:
- A group name for this property when marked as deferred.
-
- :param deferred:
- When True, the column property is "deferred", meaning that it does not
- load immediately, and is instead loaded when the attribute is first
- accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
-
- :param comparator_factory: a class which extends
- :class:`.CompositeProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param extension:
- an :class:`.AttributeExtension` instance,
- or list of extensions, which will be prepended to the list of
- attribute listeners for the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
- return CompositeProperty(class_, *cols, **kwargs)
+column_property = public_factory(ColumnProperty, ".orm.column_property")
+composite = public_factory(CompositeProperty, ".orm.composite")
def backref(name, **kwargs):
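
``column_property`` and ``composite`` likewise become ``public_factory`` bindings for :class:`.ColumnProperty` and :class:`.CompositeProperty`, with their documented usage unchanged. A small sketch of a SQL-expression ``column_property`` (names illustrative)::

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import column_property

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        firstname = Column(String(50))
        lastname = Column(String(50))
        # read-only attribute mapped to a SQL expression rather than a table column
        fullname = column_property(firstname + " " + lastname)
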
@@ -836,488 +148,33 @@ def backref(name, **kwargs):
return (name, kwargs)
-def deferred(*columns, **kwargs):
- """Return a :class:`.DeferredColumnProperty`, which indicates this
- object attributes should only be loaded from its corresponding
- table column when first accessed.
-
- Used with the "properties" dictionary sent to :func:`mapper`.
-
- See also:
-
- :ref:`deferred`
-
- """
- return ColumnProperty(deferred=True, *columns, **kwargs)
-
-
-def mapper(class_, local_table=None, *args, **params):
- """Return a new :class:`~.Mapper` object.
-
- This function is typically used behind the scenes
- via the Declarative extension. When using Declarative,
- many of the usual :func:`.mapper` arguments are handled
- by the Declarative extension itself, including ``class_``,
- ``local_table``, ``properties``, and ``inherits``.
- Other options are passed to :func:`.mapper` using
- the ``__mapper_args__`` class variable::
+def deferred(*columns, **kw):
+ """Indicate a column-based mapped attribute that by default will
+ not load unless accessed.
- class MyClass(Base):
- __tablename__ = 'my_table'
- id = Column(Integer, primary_key=True)
- type = Column(String(50))
- alt = Column("some_alt", Integer)
+ :param \*columns: columns to be mapped. This is typically a single
+ :class:`.Column` object, however a collection is supported in order
+ to support multiple columns mapped under the same attribute.
- __mapper_args__ = {
- 'polymorphic_on' : type
- }
+ :param \**kw: additional keyword arguments passed to :class:`.ColumnProperty`.
+ .. seealso::
- Explicit use of :func:`.mapper`
- is often referred to as *classical mapping*. The above
- declarative example is equivalent in classical form to::
-
- my_table = Table("my_table", metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(50)),
- Column("some_alt", Integer)
- )
-
- class MyClass(object):
- pass
-
- mapper(MyClass, my_table,
- polymorphic_on=my_table.c.type,
- properties={
- 'alt':my_table.c.some_alt
- })
-
- See also:
-
- :ref:`classical_mapping` - discussion of direct usage of
- :func:`.mapper`
-
- :param class\_: The class to be mapped. When using Declarative,
- this argument is automatically passed as the declared class
- itself.
-
- :param local_table: The :class:`.Table` or other selectable
- to which the class is mapped. May be ``None`` if
- this mapper inherits from another mapper using single-table
- inheritance. When using Declarative, this argument is
- automatically passed by the extension, based on what
- is configured via the ``__table__`` argument or via the
- :class:`.Table` produced as a result of the ``__tablename__``
- and :class:`.Column` arguments present.
-
- :param always_refresh: If True, all query operations for this mapped
- class will overwrite all data within object instances that already
- exist within the session, erasing any in-memory changes with
- whatever information was loaded from the database. Usage of this
- flag is highly discouraged; as an alternative, see the method
- :meth:`.Query.populate_existing`.
-
- :param allow_partial_pks: Defaults to True. Indicates that a
- composite primary key with some NULL values should be considered as
- possibly existing within the database. This affects whether a
- mapper will assign an incoming row to an existing identity, as well
- as if :meth:`.Session.merge` will check the database first for a
- particular primary key value. A "partial primary key" can occur if
- one has mapped to an OUTER JOIN, for example.
-
- :param batch: Defaults to ``True``, indicating that save operations
- of multiple entities can be batched together for efficiency.
- Setting to False indicates
- that an instance will be fully saved before saving the next
- instance. This is used in the extremely rare case that a
- :class:`.MapperEvents` listener requires being called
- in between individual row persistence operations.
-
- :param column_prefix: A string which will be prepended
- to the mapped attribute name when :class:`.Column`
- objects are automatically assigned as attributes to the
- mapped class. Does not affect explicitly specified
- column-based properties.
-
- See the section :ref:`column_prefix` for an example.
-
- :param concrete: If True, indicates this mapper should use concrete
- table inheritance with its parent mapper.
-
- See the section :ref:`concrete_inheritance` for an example.
-
- :param exclude_properties: A list or set of string column names to
- be excluded from mapping.
-
- See :ref:`include_exclude_cols` for an example.
-
- :param extension: A :class:`.MapperExtension` instance or
- list of :class:`.MapperExtension` instances which will be applied
- to all operations by this :class:`.Mapper`. **Deprecated.**
- Please see :class:`.MapperEvents`.
-
- :param include_properties: An inclusive list or set of string column
- names to map.
-
- See :ref:`include_exclude_cols` for an example.
-
- :param inherits: A mapped class or the corresponding :class:`.Mapper`
- of one indicating a superclass to which this :class:`.Mapper`
- should *inherit* from. The mapped class here must be a subclass
- of the other mapper's class. When using Declarative, this argument
- is passed automatically as a result of the natural class
- hierarchy of the declared classes.
-
- See also:
-
- :ref:`inheritance_toplevel`
-
- :param inherit_condition: For joined table inheritance, a SQL
- expression which will
- define how the two tables are joined; defaults to a natural join
- between the two tables.
-
- :param inherit_foreign_keys: When ``inherit_condition`` is used and the
- columns present are missing a :class:`.ForeignKey` configuration,
- this parameter can be used to specify which columns are "foreign".
- In most cases can be left as ``None``.
-
- :param legacy_is_orphan: Boolean, defaults to ``False``.
- When ``True``, specifies that "legacy" orphan consideration
- is to be applied to objects mapped by this mapper, which means
- that a pending (that is, not persistent) object is auto-expunged
- from an owning :class:`.Session` only when it is de-associated
- from *all* parents that specify a ``delete-orphan`` cascade towards
- this mapper. The new default behavior is that the object is auto-expunged
- when it is de-associated with *any* of its parents that specify
- ``delete-orphan`` cascade. This behavior is more consistent with
- that of a persistent object, and allows behavior to be consistent
- in more scenarios independently of whether or not an orphanable
- object has been flushed yet or not.
-
- See the change note and example at :ref:`legacy_is_orphan_addition`
- for more detail on this change.
-
- .. versionadded:: 0.8 - the consideration of a pending object as
- an "orphan" has been modified to more closely match the
- behavior as that of persistent objects, which is that the object
- is expunged from the :class:`.Session` as soon as it is
- de-associated from any of its orphan-enabled parents. Previously,
- the pending object would be expunged only if de-associated
- from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
- is added to :func:`.orm.mapper` which re-establishes the
- legacy behavior.
-
- :param non_primary: Specify that this :class:`.Mapper` is in addition
- to the "primary" mapper, that is, the one used for persistence.
- The :class:`.Mapper` created here may be used for ad-hoc
- mapping of the class to an alternate selectable, for loading
- only.
-
- The ``non_primary`` feature is rarely needed with modern
- usage.
-
- :param order_by: A single :class:`.Column` or list of :class:`.Column`
- objects for which selection operations should use as the default
- ordering for entities. By default mappers have no pre-defined
- ordering.
-
- :param passive_updates: Indicates UPDATE behavior of foreign key
- columns when a primary key column changes on a joined-table
- inheritance mapping. Defaults to ``True``.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will handle
- propagation of an UPDATE from a source column to dependent columns
- on joined-table rows.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The :class:`.Mapper` here will
- emit an UPDATE statement for the dependent columns during a
- primary key change.
-
- See also:
-
- :ref:`passive_updates` - description of a similar feature as
- used with :func:`.relationship`
-
- :param polymorphic_on: Specifies the column, attribute, or
- SQL expression used to determine the target class for an
- incoming row, when inheriting classes are present.
-
- This value is commonly a :class:`.Column` object that's
- present in the mapped :class:`.Table`::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
-
- __mapper_args__ = {
- "polymorphic_on":discriminator,
- "polymorphic_identity":"employee"
- }
-
- It may also be specified
- as a SQL expression, as in this example where we
- use the :func:`.case` construct to provide a conditional
- approach::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
-
- __mapper_args__ = {
- "polymorphic_on":case([
- (discriminator == "EN", "engineer"),
- (discriminator == "MA", "manager"),
- ], else_="employee"),
- "polymorphic_identity":"employee"
- }
-
- It may also refer to any attribute
- configured with :func:`.column_property`, or to the
- string name of one::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
- employee_type = column_property(
- case([
- (discriminator == "EN", "engineer"),
- (discriminator == "MA", "manager"),
- ], else_="employee")
- )
-
- __mapper_args__ = {
- "polymorphic_on":employee_type,
- "polymorphic_identity":"employee"
- }
-
- .. versionchanged:: 0.7.4
- ``polymorphic_on`` may be specified as a SQL expression,
- or refer to any attribute configured with
- :func:`.column_property`, or to the string name of one.
-
- When setting ``polymorphic_on`` to reference an
- attribute or expression that's not present in the
- locally mapped :class:`.Table`, yet the value
- of the discriminator should be persisted to the database,
- the value of the
- discriminator is not automatically set on new
- instances; this must be handled by the user,
- either through manual means or via event listeners.
- A typical approach to establishing such a listener
- looks like::
-
- from sqlalchemy import event
- from sqlalchemy.orm import object_mapper
-
- @event.listens_for(Employee, "init", propagate=True)
- def set_identity(instance, *arg, **kw):
- mapper = object_mapper(instance)
- instance.discriminator = mapper.polymorphic_identity
-
- Where above, we assign the value of ``polymorphic_identity``
- for the mapped class to the ``discriminator`` attribute,
- thus persisting the value to the ``discriminator`` column
- in the database.
-
- See also:
-
- :ref:`inheritance_toplevel`
-
- :param polymorphic_identity: Specifies the value which
- identifies this particular class as returned by the
- column expression referred to by the ``polymorphic_on``
- setting. As rows are received, the value corresponding
- to the ``polymorphic_on`` column expression is compared
- to this value, indicating which subclass should
- be used for the newly reconstructed object.
-
- :param properties: A dictionary mapping the string names of object
- attributes to :class:`.MapperProperty` instances, which define the
- persistence behavior of that attribute. Note that :class:`.Column`
- objects present in
- the mapped :class:`.Table` are automatically placed into
- ``ColumnProperty`` instances upon mapping, unless overridden.
- When using Declarative, this argument is passed automatically,
- based on all those :class:`.MapperProperty` instances declared
- in the declared class body.
-
- :param primary_key: A list of :class:`.Column` objects which define the
- primary key to be used against this mapper's selectable unit.
- This is normally simply the primary key of the ``local_table``, but
- can be overridden here.
-
- :param version_id_col: A :class:`.Column`
- that will be used to keep a running version id of mapped entities
- in the database. This is used during save operations to ensure that
- no other thread or process has updated the instance during the
- lifetime of the entity, else a
- :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
- thrown. By default the column must be of :class:`.Integer` type,
- unless ``version_id_generator`` specifies a new generation
- algorithm.
-
- :param version_id_generator: A callable which defines the algorithm
- used to generate new version ids. Defaults to an integer
- generator. Can be replaced with one that generates timestamps,
- uuids, etc. e.g.::
-
- import uuid
-
- class MyClass(Base):
- __tablename__ = 'mytable'
- id = Column(Integer, primary_key=True)
- version_uuid = Column(String(32))
-
- __mapper_args__ = {
- 'version_id_col':version_uuid,
- 'version_id_generator':lambda version:uuid.uuid4().hex
- }
-
- The callable receives the current version identifier as its
- single argument.
-
- :param with_polymorphic: A tuple in the form ``(<classes>,
- <selectable>)`` indicating the default style of "polymorphic"
- loading, that is, which tables are queried at once. <classes> is
- any single or list of mappers and/or classes indicating the
- inherited classes that should be loaded at once. The special value
- ``'*'`` may be used to indicate all descending classes should be
- loaded immediately. The second tuple argument <selectable>
- indicates a selectable that will be used to query for multiple
- classes.
-
- See also:
-
- :ref:`concrete_inheritance` - typically uses ``with_polymorphic``
- to specify a UNION statement to select from.
-
- :ref:`with_polymorphic` - usage example of the related
- :meth:`.Query.with_polymorphic` method
+ :ref:`deferred`
"""
- return Mapper(class_, local_table, *args, **params)
-
+ return ColumnProperty(deferred=True, *columns, **kw)
-def synonym(name, map_column=False, descriptor=None,
- comparator_factory=None, doc=None):
- """Denote an attribute name as a synonym to a mapped property.
- .. versionchanged:: 0.7
- :func:`.synonym` is superseded by the :mod:`~sqlalchemy.ext.hybrid`
- extension. See the documentation for hybrids
- at :ref:`hybrids_toplevel`.
+mapper = public_factory(Mapper, ".orm.mapper")
- Used with the ``properties`` dictionary sent to
- :func:`~sqlalchemy.orm.mapper`::
+synonym = public_factory(SynonymProperty, ".orm.synonym")
- class MyClass(object):
- def _get_status(self):
- return self._status
- def _set_status(self, value):
- self._status = value
- status = property(_get_status, _set_status)
-
- mapper(MyClass, sometable, properties={
- "status":synonym("_status", map_column=True)
- })
-
- Above, the ``status`` attribute of MyClass will produce
- expression behavior against the table column named ``status``,
- using the Python attribute ``_status`` on the mapped class
- to represent the underlying value.
-
- :param name: the name of the existing mapped property, which can be
- any other ``MapperProperty`` including column-based properties and
- relationships.
-
- :param map_column: if ``True``, an additional ``ColumnProperty`` is created
- on the mapper automatically, using the synonym's name as the keyname of
- the property, and the keyname of this ``synonym()`` as the name of the
- column to map.
-
- """
- return SynonymProperty(name, map_column=map_column,
- descriptor=descriptor,
- comparator_factory=comparator_factory,
- doc=doc)
+comparable_property = public_factory(ComparableProperty,
+ ".orm.comparable_property")
-def comparable_property(comparator_factory, descriptor=None):
- """Provides a method of applying a :class:`.PropComparator`
- to any Python descriptor attribute.
-
- .. versionchanged:: 0.7
- :func:`.comparable_property` is superseded by
- the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
- at :ref:`hybrid_custom_comparators`.
-
- Allows any Python descriptor to behave like a SQL-enabled
- attribute when used at the class level in queries, allowing
- redefinition of expression operator behavior.
-
- In the example below we redefine :meth:`.PropComparator.operate`
- to wrap both sides of an expression in ``func.lower()`` to produce
- case-insensitive comparison::
-
- from sqlalchemy.orm import comparable_property
- from sqlalchemy.orm.interfaces import PropComparator
- from sqlalchemy.sql import func
- from sqlalchemy import Integer, String, Column
- from sqlalchemy.ext.declarative import declarative_base
-
- class CaseInsensitiveComparator(PropComparator):
- def __clause_element__(self):
- return self.prop
-
- def operate(self, op, other):
- return op(
- func.lower(self.__clause_element__()),
- func.lower(other)
- )
-
- Base = declarative_base()
-
- class SearchWord(Base):
- __tablename__ = 'search_word'
- id = Column(Integer, primary_key=True)
- word = Column(String)
- word_insensitive = comparable_property(lambda prop, mapper:
- CaseInsensitiveComparator(mapper.c.word, mapper)
- )
-
-
- A mapping like the above allows the ``word_insensitive`` attribute
- to render an expression like::
-
- >>> print SearchWord.word_insensitive == "Trucks"
- lower(search_word.word) = lower(:lower_1)
-
- :param comparator_factory:
- A PropComparator subclass or factory that defines operator behavior
- for this property.
-
- :param descriptor:
- Optional when used in a ``properties={}`` declaration. The Python
- descriptor or property to layer comparison behavior on top of.
-
- The like-named descriptor will be automatically retrieved from the
- mapped class if left blank in a ``properties`` declaration.
-
- """
- return ComparableProperty(comparator_factory, descriptor)
-
-
-@sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
+@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
"is renamed to :func:`.configure_mappers`")
def compile_mappers():
"""Initialize the inter-mapper relationships of all mappers that have
@@ -1359,107 +216,24 @@ def clear_mappers():
finally:
mapperlib._CONFIGURE_MUTEX.release()
-
-def joinedload(*keys, **kw):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into an joined eager load.
-
- .. versionchanged:: 0.6beta3
- This function is known as :func:`eagerload` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload` will remain available for the foreseeable
- future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- examples::
-
- # joined-load the "orders" collection on "User"
- query(User).options(joinedload(User.orders))
-
- # joined-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(joinedload(Order.items, Item.keywords))
-
- # to joined-load across both, use joinedload_all()
- query(Order).options(joinedload_all(Order.items, Item.keywords))
-
- # set the default strategy to be 'joined'
- query(Order).options(joinedload('*'))
-
- :func:`joinedload` also accepts a keyword argument `innerjoin=True` which
- indicates using an inner join instead of an outer::
-
- query(Order).options(joinedload(Order.user, innerjoin=True))
-
- .. note::
-
- The join created by :func:`joinedload` is anonymously aliased such that
- it **does not affect the query results**. An :meth:`.Query.order_by`
- or :meth:`.Query.filter` call **cannot** reference these aliased
- tables - so-called "user space" joins are constructed using
- :meth:`.Query.join`. The rationale for this is that
- :func:`joinedload` is only applied in order to affect how related
- objects or collections are loaded as an optimizing detail - it can be
- added or removed with no impact on actual results. See the section
- :ref:`zen_of_eager_loading` for a detailed description of how this is
- used, including how to use a single explicit JOIN for
- filtering/ordering and eager loading simultaneously.
-
- See also: :func:`subqueryload`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined'),
- strategies.EagerJoinOption(keys, innerjoin)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined')
-
-
-def joinedload_all(*keys, **kw):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
- into an joined eager load.
-
- .. versionchanged:: 0.6beta3
- This function is known as :func:`eagerload_all` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload_all` will remain available for the
- foreseeable future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(joinedload_all('orders.items.keywords'))...
-
- will set all of ``orders``, ``orders.items``, and
- ``orders.items.keywords`` to load in one joined eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(joinedload_all(User.orders, Order.items, Item.keywords))
-
- The keyword arguments accept a flag `innerjoin=True|False` which will
- override the value of the `innerjoin` flag specified on the
- relationship().
-
- See also: :func:`subqueryload_all`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined', chained=True),
- strategies.EagerJoinOption(keys, innerjoin, chained=True)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined', chained=True)
-
+from . import strategy_options
+
+joinedload = strategy_options.joinedload._unbound_fn
+joinedload_all = strategy_options.joinedload._unbound_all_fn
+contains_eager = strategy_options.contains_eager._unbound_fn
+defer = strategy_options.defer._unbound_fn
+undefer = strategy_options.undefer._unbound_fn
+undefer_group = strategy_options.undefer_group._unbound_fn
+load_only = strategy_options.load_only._unbound_fn
+lazyload = strategy_options.lazyload._unbound_fn
+lazyload_all = strategy_options.lazyload_all._unbound_all_fn
+subqueryload = strategy_options.subqueryload._unbound_fn
+subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn
+immediateload = strategy_options.immediateload._unbound_fn
+noload = strategy_options.noload._unbound_fn
+defaultload = strategy_options.defaultload._unbound_fn
+
+from .strategy_options import Load
def eagerload(*args, **kwargs):
"""A synonym for :func:`joinedload()`."""
@@ -1471,316 +245,23 @@ def eagerload_all(*args, **kwargs):
return joinedload_all(*args, **kwargs)
-def subqueryload(*keys):
- """Return a ``MapperOption`` that will convert the property
- of the given name or series of mapped attributes
- into an subquery eager load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-    examples::
-        # subquery-load the "orders" collection on "User"
-        query(User).options(subqueryload(User.orders))
+contains_alias = public_factory(AliasOption, ".orm.contains_alias")
-        # subquery-load the "keywords" collection on each "Item",
-        # but not the "items" collection on "Order" - those
-        # remain lazily loaded.
-        query(Order).options(subqueryload(Order.items, Item.keywords))
-        # to subquery-load across both, use subqueryload_all()
-        query(Order).options(subqueryload_all(Order.items, Item.keywords))
-        # set the default strategy to be 'subquery'
-        query(Order).options(subqueryload('*'))
-
- See also: :func:`joinedload`, :func:`lazyload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery")
+def __go(lcls):
+ global __all__
+ from .. import util as sa_util
+ from . import dynamic
+ from . import events
+ import inspect as _inspect
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
-def subqueryload_all(*keys):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
- into a subquery eager load.
+ _sa_util.dependencies.resolve_all("sqlalchemy.orm")
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(subqueryload_all('orders.items.keywords'))...
-
- will set all of ``orders``, ``orders.items``, and
- ``orders.items.keywords`` to load in one subquery eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(subqueryload_all(User.orders, Order.items,
- Item.keywords))
-
- See also: :func:`joinedload_all`, :func:`lazyload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
-
-
-def lazyload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True)
-
-
-def lazyload_all(*keys):
- """Return a ``MapperOption`` that will convert all the properties
- along the given dot-separated path or series of mapped attributes
- into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True, chained=True)
-
-
-def noload(*keys):
- """Return a ``MapperOption`` that will convert the property of the
- given name or series of mapped attributes into a non-load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`,
- :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=None)
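lazyload(), lazyload_all() and noload() above carry no inline examples; a brief sketch under the same assumed User/Order/Item mappings::

    from sqlalchemy.orm import lazyload_all, noload

    # keep the whole orders -> items chain lazily loaded for this query
    q = session.query(User).options(lazyload_all('orders.items'))

    # never load User.orders at all; the collection stays empty
    q = session.query(User).options(noload(User.orders))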
-
-
-def immediateload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into an immediate load.
-
- The "immediate" load means the attribute will be fetched
- with a separate SELECT statement per parent in the
- same way as lazy loading - except the loader is guaranteed
- to be called at load time before the parent object
- is returned in the result.
-
- The normal behavior of lazy loading applies - if
- the relationship is a simple many-to-one, and the child
- object is already present in the :class:`.Session`,
- no SELECT statement will be emitted.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
-
- .. versionadded:: 0.6.5
-
- """
- return strategies.EagerLazyOption(keys, lazy='immediate')
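immediateload() is described above only in prose; a short sketch of how it would be applied, again assuming the hypothetical User.addresses relationship::

    from sqlalchemy.orm import immediateload

    # each User row emits a second SELECT for .addresses before the
    # User object is returned in the result
    users = session.query(User).options(
        immediateload(User.addresses)).all()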
-
-
-def contains_alias(alias):
- """Return a :class:`.MapperOption` that will indicate to the query that
- the main table has been aliased.
-
- This is used in the very rare case that :func:`.contains_eager`
- is being used in conjunction with a user-defined SELECT
- statement that aliases the parent table. E.g.::
-
- # define an aliased UNION called 'ulist'
- statement = users.select(users.c.user_id==7).\\
- union(users.select(users.c.user_id>7)).\\
- alias('ulist')
-
- # add on an eager load of "addresses"
- statement = statement.outerjoin(addresses).\\
- select().apply_labels()
-
- # create query, indicating "ulist" will be an
- # alias for the main table, "addresses"
- # property should be eager loaded
- query = session.query(User).options(
- contains_alias('ulist'),
- contains_eager('addresses'))
-
- # then get results via the statement
- results = query.from_statement(statement).all()
-
- :param alias: is the string name of an alias, or a
- :class:`~.sql.expression.Alias` object representing
- the alias.
-
- """
- return AliasOption(alias)
-
-
-def contains_eager(*keys, **kwargs):
- """Return a ``MapperOption`` that will indicate to the query that
- the given attribute should be eagerly loaded from columns currently
- in the query.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- The option is used in conjunction with an explicit join that loads
- the desired rows, i.e.::
-
- sess.query(Order).\\
- join(Order.user).\\
- options(contains_eager(Order.user))
-
- The above query would join from the ``Order`` entity to its related
- ``User`` entity, and the returned ``Order`` objects would have the
- ``Order.user`` attribute pre-populated.
-
- :func:`contains_eager` also accepts an `alias` argument, which is the
- string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
- construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
- the eagerly-loaded rows are to come from an aliased table::
-
- user_alias = aliased(User)
- sess.query(Order).\\
- join((user_alias, Order.user)).\\
- options(contains_eager(Order.user, alias=user_alias))
-
- See also :func:`eagerload` for the "automatic" version of this
- functionality.
-
- For additional examples of :func:`contains_eager` see
- :ref:`contains_eager`.
-
- """
- alias = kwargs.pop('alias', None)
- if kwargs:
- raise exc.ArgumentError(
- 'Invalid kwargs for contains_eager: %r' % list(kwargs.keys()))
- return strategies.EagerLazyOption(keys, lazy='joined',
- propagate_to_loaders=False, chained=True), \
- strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
-
-
-def defer(*key):
- """Return a :class:`.MapperOption` that will convert the column property
- of the given name into a deferred load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- from sqlalchemy.orm import defer
-
- query(MyClass).options(defer("attribute_one"),
- defer("attribute_two"))
-
- A class bound descriptor is also accepted::
-
- query(MyClass).options(
- defer(MyClass.attribute_one),
- defer(MyClass.attribute_two))
-
- A "path" can be specified onto a related or collection object using a
- dotted name. The :func:`.orm.defer` option will be applied to that object
- when loaded::
-
- query(MyClass).options(
- defer("related.attribute_one"),
- defer("related.attribute_two"))
-
- To specify a path via class, send multiple arguments::
-
- query(MyClass).options(
- defer(MyClass.related, MyOtherClass.attribute_one),
- defer(MyClass.related, MyOtherClass.attribute_two))
-
- See also:
-
- :ref:`deferred`
-
- :param \*key: A key representing an individual path. Multiple entries
- are accepted to allow a multiple-token path for a single target, not
- multiple targets.
-
- """
- return strategies.DeferredOption(key, defer=True)
-
-
-def undefer(*key):
- """Return a :class:`.MapperOption` that will convert the column property
- of the given name into a non-deferred (regular column) load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- from sqlalchemy.orm import undefer
-
- query(MyClass).options(
- undefer("attribute_one"),
- undefer("attribute_two"))
-
- A class bound descriptor is also accepted::
-
- query(MyClass).options(
- undefer(MyClass.attribute_one),
- undefer(MyClass.attribute_two))
-
- A "path" can be specified onto a related or collection object using a
- dotted name. The :func:`.orm.undefer` option will be applied to that
- object when loaded::
-
- query(MyClass).options(
- undefer("related.attribute_one"),
- undefer("related.attribute_two"))
-
- To specify a path via class, send multiple arguments::
-
- query(MyClass).options(
- undefer(MyClass.related, MyOtherClass.attribute_one),
- undefer(MyClass.related, MyOtherClass.attribute_two))
-
- See also:
-
- :func:`.orm.undefer_group` as a means to "undefer" a group
- of attributes at once.
-
- :ref:`deferred`
-
- :param \*key: A key representing an individual path. Multiple entries
- are accepted to allow a multiple-token path for a single target, not
- multiple targets.
-
- """
- return strategies.DeferredOption(key, defer=False)
-
-
-def undefer_group(name):
- """Return a :class:`.MapperOption` that will convert the given group of
- deferred column properties into a non-deferred (regular column) load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- query(MyClass).options(undefer_group("group_one"))
-
- See also:
-
- :ref:`deferred`
-
- :param name: String name of the deferred group. This name is
- established using the "group" name to the :func:`.orm.deferred`
- configurational function.
-
- """
- return strategies.UndeferGroupOption(name)
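undefer_group() acts on the "group" name given to deferred(); a hedged sketch of both halves, using made-up table and column names and assuming Base is a declarative base and session is an open Session::

    from sqlalchemy import Column, Integer, Text
    from sqlalchemy.orm import deferred, undefer_group

    class Book(Base):
        __tablename__ = 'book'
        id = Column(Integer, primary_key=True)
        summary = deferred(Column(Text), group='text_data')
        excerpt = deferred(Column(Text), group='text_data')

    # load every column in the 'text_data' group up front
    books = session.query(Book).options(undefer_group('text_data')).all()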
+__go(locals())
-from sqlalchemy import util as _sa_util
-_sa_util.importlater.resolve_all()
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 13c2cf256..e5f8550ab 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -1,5 +1,5 @@
# orm/attributes.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -14,109 +14,19 @@ defines a large part of the ORM's interactivity.
"""
import operator
-from operator import itemgetter
-
from .. import util, event, inspection
-from . import interfaces, collections, events, exc as orm_exc
-from .instrumentation import instance_state, instance_dict, manager_of_class
+from . import interfaces, collections, exc as orm_exc
-orm_util = util.importlater("sqlalchemy.orm", "util")
-
-PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
-"""Symbol returned by a loader callable or other attribute/history
-retrieval operation when a value could not be determined, based
-on loader callable flags.
-"""
-)
-
-ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
-"""Symbol returned by a loader callable to indicate the
-retrieved value, or values, were assigned to their attributes
-on the target object.
-""")
-
-ATTR_EMPTY = util.symbol('ATTR_EMPTY',
-"""Symbol used internally to indicate an attribute had no callable.
-""")
-
-NO_VALUE = util.symbol('NO_VALUE',
-"""Symbol which may be placed as the 'previous' value of an attribute,
-indicating no value was loaded for an attribute when it was modified,
-and flags indicated we were not to load it.
-"""
-)
-
-NEVER_SET = util.symbol('NEVER_SET',
-"""Symbol which may be placed as the 'previous' value of an attribute
-indicating that the attribute had not been assigned to previously.
-"""
-)
-
-NO_CHANGE = util.symbol("NO_CHANGE",
-"""No callables or SQL should be emitted on attribute access
-and no state should change""", canonical=0
-)
-
-CALLABLES_OK = util.symbol("CALLABLES_OK",
-"""Loader callables can be fired off if a value
-is not present.""", canonical=1
-)
-
-SQL_OK = util.symbol("SQL_OK",
-"""Loader callables can emit SQL at least on scalar value
-attributes.""", canonical=2)
-
-RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
-"""callables can use SQL to load related objects as well
-as scalar value attributes.
-""", canonical=4
-)
-
-INIT_OK = util.symbol("INIT_OK",
-"""Attributes should be initialized with a blank
-value (None or an empty collection) upon get, if no other
-value can be obtained.
-""", canonical=8
-)
-
-NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
-"""callables can be emitted if the parent is not persistent.""",
-canonical=16
-)
-
-LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
-"""callables should use committed values as primary/foreign keys during a load
-""", canonical=32
-)
-
-# pre-packaged sets of flags used as inputs
-PASSIVE_OFF = util.symbol("PASSIVE_OFF",
- "Callables can be emitted in all cases.",
- canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
- INIT_OK | CALLABLES_OK | SQL_OK)
-)
-PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
- """PASSIVE_OFF ^ INIT_OK""",
- canonical=PASSIVE_OFF ^ INIT_OK
-)
-PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
- "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
- canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
-)
-PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
- "PASSIVE_OFF ^ SQL_OK",
- canonical=PASSIVE_OFF ^ SQL_OK
-)
-PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
- "PASSIVE_OFF ^ RELATED_OBJECT_OK",
- canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
-)
-PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
- "PASSIVE_OFF ^ NON_PERSISTENT_OK",
- canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
-)
+from .base import instance_state, instance_dict, manager_of_class
+from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\
+ NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
+ INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
+ PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
+ PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT
+from .base import state_str, instance_str
+@inspection._self_inspects
class QueryableAttribute(interfaces._MappedAttribute,
interfaces._InspectionAttr,
interfaces.PropComparator):
@@ -159,9 +69,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
if key in base:
self.dispatch._update(base[key].dispatch)
- dispatch = event.dispatcher(events.AttributeEvents)
- dispatch.dispatch_cls._active_history = False
-
@util.memoized_property
def _supports_population(self):
return self.impl.supports_population
@@ -236,6 +143,18 @@ class QueryableAttribute(interfaces._MappedAttribute,
def __clause_element__(self):
return self.comparator.__clause_element__()
+ def _query_clause_element(self):
+ """like __clause_element__(), but called specifically
+ by :class:`.Query` to allow special behavior."""
+
+ return self.comparator._query_clause_element()
+
+ def adapt_to_entity(self, adapt_to_entity):
+ assert not self._of_type
+ return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl,
+ comparator=self.comparator.adapt_to_entity(adapt_to_entity),
+ parententity=adapt_to_entity)
+
def of_type(self, cls):
return QueryableAttribute(
self.class_,
@@ -246,7 +165,7 @@ class QueryableAttribute(interfaces._MappedAttribute,
of_type=cls)
def label(self, name):
- return self.__clause_element__().label(name)
+ return self._query_clause_element().label(name)
def operate(self, op, *other, **kwargs):
return op(self.comparator, *other, **kwargs)
@@ -286,8 +205,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
"""
return self.comparator.property
-inspection._self_inspects(QueryableAttribute)
-
class InstrumentedAttribute(QueryableAttribute):
"""Class bound instrumented attribute which adds basic
@@ -359,7 +276,7 @@ def create_proxied_attribute(descriptor):
return self._comparator
def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(self.class_, self.key, self.descriptor,
+ return self.__class__(adapt_to_entity.entity, self.key, self.descriptor,
self._comparator,
adapt_to_entity)
@@ -398,6 +315,53 @@ def create_proxied_attribute(descriptor):
from_instance=descriptor)
return Proxy
+OP_REMOVE = util.symbol("REMOVE")
+OP_APPEND = util.symbol("APPEND")
+OP_REPLACE = util.symbol("REPLACE")
+
+class Event(object):
+ """A token propagated throughout the course of a chain of attribute
+ events.
+
+ Serves as an indicator of the source of the event and also provides
+ a means of controlling propagation across a chain of attribute
+ operations.
+
+ The :class:`.Event` object is sent as the ``initiator`` argument
+ when dealing with the :meth:`.AttributeEvents.append`,
+ :meth:`.AttributeEvents.set`,
+ and :meth:`.AttributeEvents.remove` events.
+
+ The :class:`.Event` object is currently interpreted by the backref
+ event handlers, and is used to control the propagation of operations
+ across two mutually-dependent attributes.
+
+ .. versionadded:: 0.9.0
+
+ """
+
+ impl = None
+ """The :class:`.AttributeImpl` which is the current event initiator.
+ """
+
+ op = None
+ """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`,
+ indicating the source operation.
+
+ """
+
+ def __init__(self, attribute_impl, op):
+ self.impl = attribute_impl
+ self.op = op
+ self.parent_token = self.impl.parent_token
+
+
+ @property
+ def key(self):
+ return self.impl.key
+
+ def hasparent(self, state):
+ return self.impl.hasparent(state)
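The Event token above is what arrives as the ``initiator`` argument of attribute events; a sketch of inspecting it from an AttributeEvents listener, assuming the hypothetical User.addresses collection from earlier examples::

    from sqlalchemy import event
    from sqlalchemy.orm import attributes

    @event.listens_for(User.addresses, 'append')
    def on_append(target, value, initiator):
        # initiator is an attributes.Event carrying the originating
        # attribute implementation and the operation symbol
        if initiator.op is attributes.OP_APPEND:
            print("append fired for", initiator.key)
        return value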
class AttributeImpl(object):
"""internal implementation for instrumented attributes."""
@@ -406,6 +370,7 @@ class AttributeImpl(object):
callable_, dispatch, trackparent=False, extension=None,
compare_function=None, active_history=False,
parent_token=None, expire_missing=True,
+ send_modified_events=True,
**kwargs):
"""Construct an AttributeImpl.
@@ -449,6 +414,10 @@ class AttributeImpl(object):
during state.expire_attributes(None), if no value is present
for this key.
+ send_modified_events
+ if False, the InstanceState._modified_event method will have no effect;
+ this means the attribute will never show up as changed in a
+ history entry.
"""
self.class_ = class_
self.key = key
@@ -456,6 +425,7 @@ class AttributeImpl(object):
self.dispatch = dispatch
self.trackparent = trackparent
self.parent_token = parent_token or self
+ self.send_modified_events = send_modified_events
if compare_function is None:
self.is_equal = operator.eq
else:
@@ -534,8 +504,8 @@ class AttributeImpl(object):
"but the parent record "
"has gone stale, can't be sure this "
"is the most recent parent." %
- (orm_util.state_str(state),
- orm_util.state_str(parent_state),
+ (state_str(state),
+ state_str(parent_state),
self.key))
return
@@ -588,7 +558,6 @@ class AttributeImpl(object):
def get(self, state, dict_, passive=PASSIVE_OFF):
"""Retrieve a value from the given object.
-
If a callable is assembled on this object's attribute, and
passive is False, the callable will be executed and the
resulting value will be set as the new value for this attribute.
@@ -683,19 +652,24 @@ class ScalarAttributeImpl(AttributeImpl):
old = dict_.get(self.key, NO_VALUE)
if self.dispatch.remove:
- self.fire_remove_event(state, dict_, old, None)
+ self.fire_remove_event(state, dict_, old, self._remove_token)
state._modified_event(dict_, self, old)
del dict_[self.key]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
- return History.from_scalar_attribute(
- self, state, dict_.get(self.key, NO_VALUE))
+ if self.key in dict_:
+ return History.from_scalar_attribute(self, state, dict_[self.key])
+ else:
+ if passive & INIT_OK:
+ passive ^= INIT_OK
+ current = self.get(state, dict_, passive=passive)
+ if current is PASSIVE_NO_RESULT:
+ return HISTORY_BLANK
+ else:
+ return History.from_scalar_attribute(self, state, current)
def set(self, state, dict_, value, initiator,
passive=PASSIVE_OFF, check_old=None, pop=False):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
if self.dispatch._active_history:
old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
else:
@@ -707,14 +681,26 @@ class ScalarAttributeImpl(AttributeImpl):
state._modified_event(dict_, self, old)
dict_[self.key] = value
+ @util.memoized_property
+ def _replace_token(self):
+ return Event(self, OP_REPLACE)
+
+ @util.memoized_property
+ def _append_token(self):
+ return Event(self, OP_REPLACE)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return Event(self, OP_REMOVE)
+
def fire_replace_event(self, state, dict_, value, previous, initiator):
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
+ value = fn(state, value, previous, initiator or self._replace_token)
return value
def fire_remove_event(self, state, dict_, value, initiator):
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
@property
def type(self):
@@ -736,7 +722,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
def delete(self, state, dict_):
old = self.get(state, dict_)
- self.fire_remove_event(state, dict_, old, self)
+ self.fire_remove_event(state, dict_, old, self._remove_token)
del dict_[self.key]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
@@ -773,14 +759,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
passive=PASSIVE_OFF, check_old=None, pop=False):
"""Set a value on the given InstanceState.
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
-
"""
- if initiator and initiator.parent_token is self.parent_token:
- return
-
if self.dispatch._active_history:
old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
else:
@@ -794,19 +773,20 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
else:
raise ValueError(
"Object %s not associated with %s on attribute '%s'" % (
- orm_util.instance_str(check_old),
- orm_util.state_str(state),
+ instance_str(check_old),
+ state_str(state),
self.key
))
value = self.fire_replace_event(state, dict_, value, old, initiator)
dict_[self.key] = value
+
def fire_remove_event(self, state, dict_, value, initiator):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
state._modified_event(dict_, self, value)
@@ -818,7 +798,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
self.sethasparent(instance_state(previous), state, False)
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
+ value = fn(state, value, previous, initiator or self._replace_token)
state._modified_event(dict_, self, previous)
@@ -902,9 +882,17 @@ class CollectionAttributeImpl(AttributeImpl):
return [(instance_state(o), o) for o in current]
+ @util.memoized_property
+ def _append_token(self):
+ return Event(self, OP_APPEND)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return Event(self, OP_REMOVE)
+
def fire_append_event(self, state, dict_, value, initiator):
for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
+ value = fn(state, value, initiator or self._append_token)
state._modified_event(dict_, self, NEVER_SET, True)
@@ -921,7 +909,7 @@ class CollectionAttributeImpl(AttributeImpl):
self.sethasparent(instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
state._modified_event(dict_, self, NEVER_SET, True)
@@ -948,8 +936,6 @@ class CollectionAttributeImpl(AttributeImpl):
self.key, state, self.collection_factory)
def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
collection = self.get_collection(state, dict_, passive=passive)
if collection is PASSIVE_NO_RESULT:
value = self.fire_append_event(state, dict_, value, initiator)
@@ -960,9 +946,6 @@ class CollectionAttributeImpl(AttributeImpl):
collection.append_with_event(value, initiator)
def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
collection = self.get_collection(state, state.dict, passive=passive)
if collection is PASSIVE_NO_RESULT:
self.fire_remove_event(state, dict_, value, initiator)
@@ -985,14 +968,8 @@ class CollectionAttributeImpl(AttributeImpl):
passive=PASSIVE_OFF, pop=False):
"""Set a value on the given object.
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
"""
- if initiator and initiator.parent_token is self.parent_token:
- return
-
self._set_iterable(
state, dict_, value,
lambda adapter, i: adapter.adapt_like_to_iterable(i))
@@ -1085,6 +1062,7 @@ def backref_listeners(attribute, key, uselist):
# use easily recognizable names for stack traces
parent_token = attribute.impl.parent_token
+ parent_impl = attribute.impl
def _acceptable_key_err(child_state, initiator, child_impl):
raise ValueError(
@@ -1092,7 +1070,7 @@ def backref_listeners(attribute, key, uselist):
'Passing object %s to attribute "%s" '
'triggers a modify event on attribute "%s" '
'via the backref "%s".' % (
- orm_util.state_str(child_state),
+ state_str(child_state),
initiator.parent_token,
child_impl.parent_token,
attribute.impl.parent_token
@@ -1108,10 +1086,14 @@ def backref_listeners(attribute, key, uselist):
old_state, old_dict = instance_state(oldchild),\
instance_dict(oldchild)
impl = old_state.manager[key].impl
- impl.pop(old_state,
- old_dict,
- state.obj(),
- initiator, passive=PASSIVE_NO_FETCH)
+
+ if initiator.impl is not impl or \
+ initiator.op not in (OP_REPLACE, OP_REMOVE):
+ impl.pop(old_state,
+ old_dict,
+ state.obj(),
+ parent_impl._append_token,
+ passive=PASSIVE_NO_FETCH)
if child is not None:
child_state, child_dict = instance_state(child),\
@@ -1120,12 +1102,14 @@ def backref_listeners(attribute, key, uselist):
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
_acceptable_key_err(state, initiator, child_impl)
- child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ elif initiator.impl is not child_impl or \
+ initiator.op not in (OP_APPEND, OP_REPLACE):
+ child_impl.append(
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_append_event(state, child, initiator):
@@ -1139,7 +1123,9 @@ def backref_listeners(attribute, key, uselist):
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
_acceptable_key_err(state, initiator, child_impl)
- child_impl.append(
+ elif initiator.impl is not child_impl or \
+ initiator.op not in (OP_APPEND, OP_REPLACE):
+ child_impl.append(
child_state,
child_dict,
state.obj(),
@@ -1152,10 +1138,9 @@ def backref_listeners(attribute, key, uselist):
child_state, child_dict = instance_state(child),\
instance_dict(child)
child_impl = child_state.manager[key].impl
- # can't think of a path that would produce an initiator
- # mismatch here, as it would require an existing collection
- # mismatch.
- child_impl.pop(
+ if initiator.impl is not child_impl or \
+ initiator.op not in (OP_REMOVE, OP_REPLACE):
+ child_impl.pop(
child_state,
child_dict,
state.obj(),
@@ -1268,7 +1253,7 @@ class History(History):
original = state.committed_state.get(attribute.key, _NO_HISTORY)
if original is _NO_HISTORY:
- if current is NO_VALUE:
+ if current is NEVER_SET:
return cls((), (), ())
else:
return cls((), [current], ())
@@ -1285,7 +1270,7 @@ class History(History):
deleted = ()
else:
deleted = [original]
- if current is NO_VALUE:
+ if current is NEVER_SET:
return cls((), (), deleted)
else:
return cls([current], (), deleted)
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
new file mode 100644
index 000000000..577f9ff76
--- /dev/null
+++ b/lib/sqlalchemy/orm/base.py
@@ -0,0 +1,453 @@
+# orm/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Constants and rudimental functions used throughout the ORM.
+
+"""
+
+from .. import util, inspection, exc as sa_exc
+from ..sql import expression
+from . import exc
+import operator
+
+PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
+"""Symbol returned by a loader callable or other attribute/history
+retrieval operation when a value could not be determined, based
+on loader callable flags.
+"""
+)
+
+ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
+"""Symbol returned by a loader callable to indicate the
+retrieved value, or values, were assigned to their attributes
+on the target object.
+""")
+
+ATTR_EMPTY = util.symbol('ATTR_EMPTY',
+"""Symbol used internally to indicate an attribute had no callable.
+""")
+
+NO_VALUE = util.symbol('NO_VALUE',
+"""Symbol which may be placed as the 'previous' value of an attribute,
+indicating no value was loaded for an attribute when it was modified,
+and flags indicated we were not to load it.
+"""
+)
+
+NEVER_SET = util.symbol('NEVER_SET',
+"""Symbol which may be placed as the 'previous' value of an attribute
+indicating that the attribute had not been assigned to previously.
+"""
+)
+
+NO_CHANGE = util.symbol("NO_CHANGE",
+"""No callables or SQL should be emitted on attribute access
+and no state should change""", canonical=0
+)
+
+CALLABLES_OK = util.symbol("CALLABLES_OK",
+"""Loader callables can be fired off if a value
+is not present.""", canonical=1
+)
+
+SQL_OK = util.symbol("SQL_OK",
+"""Loader callables can emit SQL at least on scalar value
+attributes.""", canonical=2)
+
+RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
+"""callables can use SQL to load related objects as well
+as scalar value attributes.
+""", canonical=4
+)
+
+INIT_OK = util.symbol("INIT_OK",
+"""Attributes should be initialized with a blank
+value (None or an empty collection) upon get, if no other
+value can be obtained.
+""", canonical=8
+)
+
+NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
+"""callables can be emitted if the parent is not persistent.""",
+canonical=16
+)
+
+LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
+"""callables should use committed values as primary/foreign keys during a load
+""", canonical=32
+)
+
+# pre-packaged sets of flags used as inputs
+PASSIVE_OFF = util.symbol("PASSIVE_OFF",
+ "Callables can be emitted in all cases.",
+ canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
+ INIT_OK | CALLABLES_OK | SQL_OK)
+)
+PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
+ """PASSIVE_OFF ^ INIT_OK""",
+ canonical=PASSIVE_OFF ^ INIT_OK
+)
+PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
+ "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
+ canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
+)
+PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
+ "PASSIVE_OFF ^ SQL_OK",
+ canonical=PASSIVE_OFF ^ SQL_OK
+)
+PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
+ "PASSIVE_OFF ^ RELATED_OBJECT_OK",
+ canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
+)
+PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
+ "PASSIVE_OFF ^ NON_PERSISTENT_OK",
+ canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
+)
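The PASSIVE_* symbols are plain bit flags combined from the canonical values above; a tiny worked sketch of the same arithmetic with ordinary ints (illustration only, not SQLAlchemy API)::

    CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK = 1, 2, 4
    INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED = 8, 16, 32

    PASSIVE_OFF = (RELATED_OBJECT_OK | NON_PERSISTENT_OK |
                   INIT_OK | CALLABLES_OK | SQL_OK)       # == 31
    PASSIVE_NO_FETCH = PASSIVE_OFF ^ SQL_OK               # == 29

    # the membership tests used elsewhere in attributes.py
    assert PASSIVE_OFF & INIT_OK
    assert not (PASSIVE_NO_FETCH & SQL_OK)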
+
+DEFAULT_MANAGER_ATTR = '_sa_class_manager'
+DEFAULT_STATE_ATTR = '_sa_instance_state'
+_INSTRUMENTOR = ('mapper', 'instrumentor')
+
+EXT_CONTINUE = util.symbol('EXT_CONTINUE')
+EXT_STOP = util.symbol('EXT_STOP')
+
+ONETOMANY = util.symbol('ONETOMANY',
+"""Indicates the one-to-many direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+MANYTOONE = util.symbol('MANYTOONE',
+"""Indicates the many-to-one direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+MANYTOMANY = util.symbol('MANYTOMANY',
+"""Indicates the many-to-many direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+NOT_EXTENSION = util.symbol('NOT_EXTENSION',
+"""Symbol indicating an :class:`_InspectionAttr` that's
+ not part of sqlalchemy.ext.
+
+ Is assigned to the :attr:`._InspectionAttr.extension_type`
+ attribute.
+
+""")
+
+_none_set = frozenset([None])
+
+
+def _generative(*assertions):
+ """Mark a method as generative, e.g. method-chained."""
+
+ @util.decorator
+ def generate(fn, *args, **kw):
+ self = args[0]._clone()
+ for assertion in assertions:
+ assertion(self, fn.__name__)
+ fn(self, *args[1:], **kw)
+ return self
+ return generate
+
+
+# these can be replaced by sqlalchemy.ext.instrumentation
+# if augmented class instrumentation is enabled.
+def manager_of_class(cls):
+ return cls.__dict__.get(DEFAULT_MANAGER_ATTR, None)
+
+instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
+
+instance_dict = operator.attrgetter('__dict__')
+
+def instance_str(instance):
+ """Return a string describing an instance."""
+
+ return state_str(instance_state(instance))
+
+def state_str(state):
+ """Return a string describing an instance via its InstanceState."""
+
+ if state is None:
+ return "None"
+ else:
+ return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
+
+def state_class_str(state):
+ """Return a string describing an instance's class via its InstanceState."""
+
+ if state is None:
+ return "None"
+ else:
+ return '<%s>' % (state.class_.__name__, )
+
+
+def attribute_str(instance, attribute):
+ return instance_str(instance) + "." + attribute
+
+
+def state_attribute_str(state, attribute):
+ return state_str(state) + "." + attribute
+
+def object_mapper(instance):
+ """Given an object, return the primary Mapper associated with the object
+ instance.
+
+ Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+ if no mapping is configured.
+
+ This function is available via the inspection system as::
+
+ inspect(instance).mapper
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+ not part of a mapping.
+
+ """
+ return object_state(instance).mapper
+
+
+def object_state(instance):
+ """Given an object, return the :class:`.InstanceState`
+ associated with the object.
+
+ Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+ if no mapping is configured.
+
+ Equivalent functionality is available via the :func:`.inspect`
+ function as::
+
+ inspect(instance)
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+ not part of a mapping.
+
+ """
+ state = _inspect_mapped_object(instance)
+ if state is None:
+ raise exc.UnmappedInstanceError(instance)
+ else:
+ return state
+
+
+@inspection._inspects(object)
+def _inspect_mapped_object(instance):
+ try:
+ return instance_state(instance)
+ # TODO: what's the py-2/3 syntax to catch two
+ # different kinds of exceptions at once ?
+ except exc.UnmappedClassError:
+ return None
+ except exc.NO_STATE:
+ return None
+
+
+
+def _class_to_mapper(class_or_mapper):
+ insp = inspection.inspect(class_or_mapper, False)
+ if insp is not None:
+ return insp.mapper
+ else:
+ raise exc.UnmappedClassError(class_or_mapper)
+
+
+def _mapper_or_none(entity):
+ """Return the :class:`.Mapper` for the given class or None if the
+ class is not mapped."""
+
+ insp = inspection.inspect(entity, False)
+ if insp is not None:
+ return insp.mapper
+ else:
+ return None
+
+
+def _is_mapped_class(entity):
+ """Return True if the given object is a mapped class,
+ :class:`.Mapper`, or :class:`.AliasedClass`."""
+
+ insp = inspection.inspect(entity, False)
+ return insp is not None and \
+ hasattr(insp, "mapper") and \
+ (
+ insp.is_mapper
+ or insp.is_aliased_class
+ )
+
+def _attr_as_key(attr):
+ if hasattr(attr, 'key'):
+ return attr.key
+ else:
+ return expression._column_as_key(attr)
+
+
+
+def _orm_columns(entity):
+ insp = inspection.inspect(entity, False)
+ if hasattr(insp, 'selectable'):
+ return [c for c in insp.selectable.c]
+ else:
+ return [entity]
+
+
+
+def _is_aliased_class(entity):
+ insp = inspection.inspect(entity, False)
+ return insp is not None and \
+ getattr(insp, "is_aliased_class", False)
+
+
+def _entity_descriptor(entity, key):
+ """Return a class attribute given an entity and string name.
+
+ May return :class:`.InstrumentedAttribute` or user-defined
+ attribute.
+
+ """
+ insp = inspection.inspect(entity)
+ if insp.is_selectable:
+ description = entity
+ entity = insp.c
+ elif insp.is_aliased_class:
+ entity = insp.entity
+ description = entity
+ elif hasattr(insp, "mapper"):
+ description = entity = insp.mapper.class_
+ else:
+ description = entity
+
+ try:
+ return getattr(entity, key)
+ except AttributeError:
+ raise sa_exc.InvalidRequestError(
+ "Entity '%s' has no property '%s'" %
+ (description, key)
+ )
+
+_state_mapper = util.dottedgetter('manager.mapper')
+
+@inspection._inspects(type)
+def _inspect_mapped_class(class_, configure=False):
+ try:
+ class_manager = manager_of_class(class_)
+ if not class_manager.is_mapped:
+ return None
+ mapper = class_manager.mapper
+ if configure and mapper._new_mappers:
+ mapper._configure_all()
+ return mapper
+
+ except exc.NO_STATE:
+ return None
+
+def class_mapper(class_, configure=True):
+ """Given a class, return the primary :class:`.Mapper` associated
+ with the key.
+
+ Raises :exc:`.UnmappedClassError` if no mapping is configured
+ on the given class, or :exc:`.ArgumentError` if a non-class
+ object is passed.
+
+ Equivalent functionality is available via the :func:`.inspect`
+ function as::
+
+ inspect(some_mapped_class)
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
+
+ """
+ mapper = _inspect_mapped_class(class_, configure=configure)
+ if mapper is None:
+ if not isinstance(class_, type):
+ raise sa_exc.ArgumentError(
+ "Class object expected, got '%r'." % (class_, ))
+ raise exc.UnmappedClassError(class_)
+ else:
+ return mapper
+
+
+class _InspectionAttr(object):
+ """A base class applied to all ORM objects that can be returned
+ by the :func:`.inspect` function.
+
+ The attributes defined here allow the usage of simple boolean
+ checks to test basic facts about the object returned.
+
+ While the boolean checks here are basically the same as using
+ the Python isinstance() function, the flags here can be used without
+ the need to import all of these classes, and also such that
+ the SQLAlchemy class system can change while leaving the flags
+ here intact for forwards-compatibility.
+
+ """
+
+ is_selectable = False
+ """Return True if this object is an instance of :class:`.Selectable`."""
+
+ is_aliased_class = False
+ """True if this object is an instance of :class:`.AliasedClass`."""
+
+ is_instance = False
+ """True if this object is an instance of :class:`.InstanceState`."""
+
+ is_mapper = False
+ """True if this object is an instance of :class:`.Mapper`."""
+
+ is_property = False
+ """True if this object is an instance of :class:`.MapperProperty`."""
+
+ is_attribute = False
+ """True if this object is a Python :term:`descriptor`.
+
+ This can refer to one of many types. Usually a
+ :class:`.QueryableAttribute` which handles attributes events on behalf
+ of a :class:`.MapperProperty`. But can also be an extension type
+ such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
+ The :attr:`._InspectionAttr.extension_type` will refer to a constant
+ identifying the specific subtype.
+
+ .. seealso::
+
+ :attr:`.Mapper.all_orm_descriptors`
+
+ """
+
+ is_clause_element = False
+ """True if this object is an instance of :class:`.ClauseElement`."""
+
+ extension_type = NOT_EXTENSION
+ """The extension type, if any.
+ Defaults to :data:`.interfaces.NOT_EXTENSION`
+
+ .. versionadded:: 0.8.0
+
+ .. seealso::
+
+ :data:`.HYBRID_METHOD`
+
+ :data:`.HYBRID_PROPERTY`
+
+ :data:`.ASSOCIATION_PROXY`
+
+ """
+
+class _MappedAttribute(object):
+ """Mixin for attributes which should be replaced by mapper-assigned
+ attributes.
+
+ """
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 03917d112..87e351b6c 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -1,5 +1,5 @@
# orm/collections.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -108,8 +108,7 @@ import weakref
from ..sql import expression
from .. import util, exc as sa_exc
-orm_util = util.importlater("sqlalchemy.orm", "util")
-attributes = util.importlater("sqlalchemy.orm", "attributes")
+from . import base
__all__ = ['collection', 'collection_adapter',
@@ -139,8 +138,8 @@ class _PlainColumnGetter(object):
return self.cols
def __call__(self, value):
- state = attributes.instance_state(value)
- m = orm_util._state_mapper(state)
+ state = base.instance_state(value)
+ m = base._state_mapper(state)
key = [
m._get_state_attr_by_column(state, state.dict, col)
@@ -167,8 +166,8 @@ class _SerializableColumnGetter(object):
return _SerializableColumnGetter, (self.colkeys,)
def __call__(self, value):
- state = attributes.instance_state(value)
- m = orm_util._state_mapper(state)
+ state = base.instance_state(value)
+ m = base._state_mapper(state)
key = [m._get_state_attr_by_column(
state, state.dict,
m.mapped_table.columns[k])
@@ -352,7 +351,7 @@ class collection(object):
promulgation to collection events.
"""
- setattr(fn, '_sa_instrument_role', 'appender')
+ fn._sa_instrument_role = 'appender'
return fn
@staticmethod
@@ -379,7 +378,7 @@ class collection(object):
promulgation to collection events.
"""
- setattr(fn, '_sa_instrument_role', 'remover')
+ fn._sa_instrument_role = 'remover'
return fn
@staticmethod
@@ -393,7 +392,7 @@ class collection(object):
def __iter__(self): ...
"""
- setattr(fn, '_sa_instrument_role', 'iterator')
+ fn._sa_instrument_role = 'iterator'
return fn
@staticmethod
@@ -414,7 +413,7 @@ class collection(object):
def extend(self, items): ...
"""
- setattr(fn, '_sa_instrumented', True)
+ fn._sa_instrumented = True
return fn
@staticmethod
@@ -428,7 +427,7 @@ class collection(object):
that has been linked, or None if unlinking.
"""
- setattr(fn, '_sa_instrument_role', 'linker')
+ fn._sa_instrument_role = 'linker'
return fn
link = linker
@@ -464,7 +463,7 @@ class collection(object):
validation on the values about to be assigned.
"""
- setattr(fn, '_sa_instrument_role', 'converter')
+ fn._sa_instrument_role = 'converter'
return fn
@staticmethod
@@ -484,7 +483,7 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
+ fn._sa_instrument_before = ('fire_append_event', arg)
return fn
return decorator
@@ -504,8 +503,8 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
+ fn._sa_instrument_before = ('fire_append_event', arg)
+ fn._sa_instrument_after = 'fire_remove_event'
return fn
return decorator
@@ -526,7 +525,7 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_remove_event', arg))
+ fn._sa_instrument_before = ('fire_remove_event', arg)
return fn
return decorator
@@ -546,32 +545,13 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
+ fn._sa_instrument_after = 'fire_remove_event'
return fn
return decorator
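The @collection decorators being reworked in this hunk mark which methods the ORM instruments on a custom collection class; a minimal sketch of a list-like collection using them (the class itself is illustrative only)::

    from sqlalchemy.orm.collections import collection

    class MyList(object):
        def __init__(self):
            self._items = []

        @collection.appender
        def append(self, item):
            self._items.append(item)

        @collection.remover
        def remove(self, item):
            self._items.remove(item)

        @collection.iterator
        def __iter__(self):
            return iter(self._items)

    # then used as: relationship(Address, collection_class=MyList)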
-# public instrumentation interface for 'internally instrumented'
-# implementations
-def collection_adapter(collection):
- """Fetch the :class:`.CollectionAdapter` for a collection."""
-
- return getattr(collection, '_sa_adapter', None)
-
-
-def collection_iter(collection):
- """Iterate over an object supporting the @iterator or __iter__ protocols.
-
- If the collection is an ORM collection, it need not be attached to an
- object to be iterable.
-
- """
- try:
- return getattr(collection, '_sa_iterator',
- getattr(collection, '__iter__'))()
- except AttributeError:
- raise TypeError("'%s' object is not iterable" %
- type(collection).__name__)
+collection_adapter = operator.attrgetter('_sa_adapter')
+"""Fetch the :class:`.CollectionAdapter` for a collection."""
class CollectionAdapter(object):
@@ -584,8 +564,6 @@ class CollectionAdapter(object):
The ORM uses :class:`.CollectionAdapter` exclusively for interaction with
entity collections.
- The usage of getattr()/setattr() is currently to allow injection
- of custom methods, such as to unwrap Zope security proxies.
"""
invalidated = False
@@ -609,16 +587,19 @@ class CollectionAdapter(object):
return self.owner_state.manager[self._key].impl
def link_to_self(self, data):
- """Link a collection to this adapter, and fire a link event."""
- setattr(data, '_sa_adapter', self)
- if hasattr(data, '_sa_linker'):
- getattr(data, '_sa_linker')(self)
+ """Link a collection to this adapter"""
+
+ data._sa_adapter = self
+ if data._sa_linker:
+ data._sa_linker(self)
+
def unlink(self, data):
- """Unlink a collection from any adapter, and fire a link event."""
- setattr(data, '_sa_adapter', None)
- if hasattr(data, '_sa_linker'):
- getattr(data, '_sa_linker')(None)
+ """Unlink a collection from any adapter"""
+
+ del data._sa_adapter
+ if data._sa_linker:
+ data._sa_linker(None)
def adapt_like_to_iterable(self, obj):
"""Converts collection-compatible objects to an iterable of values.
@@ -634,7 +615,7 @@ class CollectionAdapter(object):
a default duck-typing-based implementation is used.
"""
- converter = getattr(self._data(), '_sa_converter', None)
+ converter = self._data()._sa_converter
if converter is not None:
return converter(obj)
@@ -655,60 +636,60 @@ class CollectionAdapter(object):
# If the object is an adapted collection, return the (iterable)
# adapter.
if getattr(obj, '_sa_adapter', None) is not None:
- return getattr(obj, '_sa_adapter')
+ return obj._sa_adapter
elif setting_type == dict:
if util.py3k:
return obj.values()
else:
- return getattr(obj, 'itervalues', getattr(obj, 'values'))()
+ return getattr(obj, 'itervalues', obj.values)()
else:
return iter(obj)
def append_with_event(self, item, initiator=None):
"""Add an entity to the collection, firing mutation events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)
+ self._data()._sa_appender(item, _sa_initiator=initiator)
def append_without_event(self, item):
"""Add or restore an entity to the collection, firing no events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)
+ self._data()._sa_appender(item, _sa_initiator=False)
def append_multiple_without_event(self, items):
"""Add or restore an entity to the collection, firing no events."""
- appender = getattr(self._data(), '_sa_appender')
+ appender = self._data()._sa_appender
for item in items:
appender(item, _sa_initiator=False)
def remove_with_event(self, item, initiator=None):
"""Remove an entity from the collection, firing mutation events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)
+ self._data()._sa_remover(item, _sa_initiator=initiator)
def remove_without_event(self, item):
"""Remove an entity from the collection, firing no events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=False)
+ self._data()._sa_remover(item, _sa_initiator=False)
def clear_with_event(self, initiator=None):
"""Empty the collection, firing a mutation event for each entity."""
- remover = getattr(self._data(), '_sa_remover')
+ remover = self._data()._sa_remover
for item in list(self):
remover(item, _sa_initiator=initiator)
def clear_without_event(self):
"""Empty the collection, firing no events."""
- remover = getattr(self._data(), '_sa_remover')
+ remover = self._data()._sa_remover
for item in list(self):
remover(item, _sa_initiator=False)
def __iter__(self):
"""Iterate over entities in the collection."""
- return iter(getattr(self._data(), '_sa_iterator')())
+ return iter(self._data()._sa_iterator())
def __len__(self):
"""Count entities in the collection."""
- return len(list(getattr(self._data(), '_sa_iterator')()))
+ return len(list(self._data()._sa_iterator()))
def __bool__(self):
return True
@@ -960,7 +941,12 @@ def _instrument_class(cls):
for role, method_name in roles.items():
setattr(cls, '_sa_%s' % role, getattr(cls, method_name))
- setattr(cls, '_sa_instrumented', id(cls))
+ cls._sa_adapter = None
+ if not hasattr(cls, '_sa_linker'):
+ cls._sa_linker = None
+ if not hasattr(cls, '_sa_converter'):
+ cls._sa_converter = None
+ cls._sa_instrumented = id(cls)
def _instrument_membership_mutator(method, before, argument, after):
@@ -999,7 +985,7 @@ def _instrument_membership_mutator(method, before, argument, after):
if initiator is False:
executor = None
else:
- executor = getattr(args[0], '_sa_adapter', None)
+ executor = args[0]._sa_adapter
if before and executor:
getattr(executor, before)(value, initiator)
@@ -1024,33 +1010,33 @@ def __set(collection, item, _sa_initiator=None):
"""Run set events, may eventually be inlined into decorators."""
if _sa_initiator is not False:
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
+ item = executor.fire_append_event(item, _sa_initiator)
return item
def __del(collection, item, _sa_initiator=None):
"""Run del events, may eventually be inlined into decorators."""
if _sa_initiator is not False:
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- getattr(executor, 'fire_remove_event')(item, _sa_initiator)
+ executor.fire_remove_event(item, _sa_initiator)
def __before_delete(collection, _sa_initiator=None):
"""Special method to run 'commit existing value' methods"""
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- getattr(executor, 'fire_pre_remove_event')(_sa_initiator)
+ executor.fire_pre_remove_event(_sa_initiator)
def _list_decorators():
"""Tailored instrumentation wrappers for any list-like class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(list, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(list, fn.__name__).__doc__
def append(fn):
def append(self, item, _sa_initiator=None):
@@ -1089,7 +1075,10 @@ def _list_decorators():
start = index.start or 0
if start < 0:
start += len(self)
- stop = index.stop or len(self)
+ if index.stop is not None:
+ stop = index.stop
+ else:
+ stop = len(self)
if stop < 0:
stop += len(self)
@@ -1172,6 +1161,15 @@ def _list_decorators():
_tidy(pop)
return pop
+ if not util.py2k:
+ def clear(fn):
+ def clear(self, index=-1):
+ for item in self:
+ __del(self, item)
+ fn(self)
+ _tidy(clear)
+ return clear
+
# __imul__ : not wrapping this. all members of the collection are already
# present, so no need to fire appends... wrapping it with an explicit
# decorator is still possible, so events on *= can be had if they're
@@ -1186,8 +1184,8 @@ def _dict_decorators():
"""Tailored instrumentation wrappers for any dict-like mapping class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(dict, fn.__name__).__doc__
Unspecified = util.symbol('Unspecified')
@@ -1288,8 +1286,8 @@ def _set_decorators():
"""Tailored instrumentation wrappers for any set-like class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(set, fn.__name__).__doc__
Unspecified = util.symbol('Unspecified')
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 9f1e497af..4709a1821 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -1,5 +1,5 @@
# orm/dependency.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index e50967253..020b7c718 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -1,5 +1,5 @@
# orm/deprecated_interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,7 +7,7 @@
from .. import event, util
from .interfaces import EXT_CONTINUE
-
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class MapperExtension(object):
"""Base implementation for :class:`.Mapper` event hooks.
@@ -374,6 +374,7 @@ class MapperExtension(object):
return EXT_CONTINUE
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class SessionExtension(object):
"""Base implementation for :class:`.Session` event hooks.
@@ -494,6 +495,7 @@ class SessionExtension(object):
"""
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class AttributeExtension(object):
"""Base implementation for :class:`.AttributeImpl` event hooks, events
that fire upon attribute mutations in user code.
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index c58951339..24b0a15e6 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -1,5 +1,5 @@
# orm/descriptor_props.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -12,10 +12,11 @@ as actively in the load/persist ORM loop.
from .interfaces import MapperProperty, PropComparator
from .util import _none_set
-from . import attributes, strategies
+from . import attributes
from .. import util, sql, exc as sa_exc, event, schema
from ..sql import expression
-properties = util.importlater('sqlalchemy.orm', 'properties')
+from . import properties
+from . import query
class DescriptorProperty(MapperProperty):
@@ -75,6 +76,7 @@ class DescriptorProperty(MapperProperty):
mapper.class_manager.instrument_attribute(self.key, proxy_attr)
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class CompositeProperty(DescriptorProperty):
"""Defines a "composite" mapped attribute, representing a collection
of columns as one attribute.
@@ -82,12 +84,64 @@ class CompositeProperty(DescriptorProperty):
:class:`.CompositeProperty` is constructed using the :func:`.composite`
function.
- See also:
+ .. seealso::
- :ref:`mapper_composite`
+ :ref:`mapper_composite`
"""
def __init__(self, class_, *attrs, **kwargs):
+ """Return a composite column-based property for use with a Mapper.
+
+ See the mapping documentation section :ref:`mapper_composite` for a full
+ usage example.
+
+ The :class:`.MapperProperty` returned by :func:`.composite`
+ is the :class:`.CompositeProperty`.
+
+ :param class\_:
+ The "composite type" class.
+
+ :param \*cols:
+ List of Column objects to be mapped.
+
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. See the same flag on :func:`.column_property`.
+
+ .. versionchanged:: 0.7
+ This flag specifically becomes meaningful
+ - previously it was a placeholder.
+
+ :param group:
+ A group name for this property when marked as deferred.
+
+ :param deferred:
+ When True, the column property is "deferred", meaning that it does not
+ load immediately, and is instead loaded when the attribute is first
+ accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
+
+ :param comparator_factory: a class which extends
+ :class:`.CompositeProperty.Comparator` which provides custom SQL clause
+ generation for comparison operations.
+
+ :param doc:
+ optional string that will be applied as the doc on the
+ class-bound descriptor.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param extension:
+ an :class:`.AttributeExtension` instance,
+ or list of extensions, which will be prepended to the list of
+ attribute listeners for the resulting descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
+
+ """
+
self.attrs = attrs
self.composite_class = class_
self.active_history = kwargs.get('active_history', False)
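The composite() parameters documented above pair a value class with mapped columns; the canonical shape, sketched here with a hypothetical Point/Vertex mapping and a declarative Base assumed to exist::

    from sqlalchemy import Column, Integer
    from sqlalchemy.orm import composite

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __composite_values__(self):
            return self.x, self.y

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and other.y == self.y

    class Vertex(Base):
        __tablename__ = 'vertex'
        id = Column(Integer, primary_key=True)
        x1 = Column(Integer)
        y1 = Column(Integer)
        start = composite(Point, x1, y1)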
@@ -101,6 +155,7 @@ class CompositeProperty(DescriptorProperty):
util.set_creation_order(self)
self._create_descriptor()
+
def instrument_class(self, mapper):
super(CompositeProperty, self).instrument_class(mapper)
self._setup_event_handlers()
@@ -190,6 +245,11 @@ class CompositeProperty(DescriptorProperty):
prop = self.parent._columntoproperty[attr]
elif isinstance(attr, attributes.InstrumentedAttribute):
prop = attr.property
+ else:
+ raise sa_exc.ArgumentError(
+ "Composite expects Column objects or mapped "
+ "attributes/attribute names as arguments, got: %r"
+ % (attr,))
props.append(prop)
@property
@@ -205,7 +265,9 @@ class CompositeProperty(DescriptorProperty):
prop.active_history = self.active_history
if self.deferred:
prop.deferred = self.deferred
- prop.strategy_class = strategies.DeferredColumnLoader
+ prop.strategy_class = prop._strategy_lookup(
+ ("deferred", True),
+ ("instrument", True))
prop.group = self.group
def _setup_event_handlers(self):
@@ -300,6 +362,18 @@ class CompositeProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
+ class CompositeBundle(query.Bundle):
+ def __init__(self, property, expr):
+ self.property = property
+ super(CompositeProperty.CompositeBundle, self).__init__(
+ property.key, *expr)
+
+ def create_row_processor(self, query, procs, labels):
+ def proc(row, result):
+ return self.property.composite_class(*[proc(row, result) for proc in procs])
+ return proc
+
+
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.CompositeProperty` attributes.
@@ -319,10 +393,18 @@ class CompositeProperty(DescriptorProperty):
"""
+
+ __hash__ = None
+
+ @property
+ def clauses(self):
+ return self.__clause_element__()
+
def __clause_element__(self):
return expression.ClauseList(group=False, *self._comparable_elements)
- __hash__ = None
+ def _query_clause_element(self):
+ return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__())
@util.memoized_property
def _comparable_elements(self):
@@ -356,6 +438,7 @@ class CompositeProperty(DescriptorProperty):
return str(self.parent.class_.__name__) + "." + self.key
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ConcreteInheritedProperty(DescriptorProperty):
"""A 'do nothing' :class:`.MapperProperty` that disables
an attribute on a concrete subclass that is only present
@@ -404,11 +487,66 @@ class ConcreteInheritedProperty(DescriptorProperty):
self.descriptor = NoninheritedConcreteProp()
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class SynonymProperty(DescriptorProperty):
def __init__(self, name, map_column=None,
descriptor=None, comparator_factory=None,
doc=None):
+ """Denote an attribute name as a synonym to a mapped property,
+ in that the attribute will mirror the value and expression behavior
+ of another attribute.
+
+ :param name: the name of the existing mapped property. This
+ can refer to the string name of any :class:`.MapperProperty`
+ configured on the class, including column-bound attributes
+ and relationships.
+
+ :param descriptor: a Python :term:`descriptor` that will be used
+ as a getter (and potentially a setter) when this attribute is
+ accessed at the instance level.
+
+ :param map_column: if ``True``, the :func:`.synonym` construct will
+ locate the existing named :class:`.MapperProperty` based on the
+ attribute name of this :func:`.synonym`, and assign it to a new
+ attribute linked to the name of this :func:`.synonym`.
+ That is, given a mapping like::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+
+ id = Column(Integer, primary_key=True)
+ job_status = Column(String(50))
+
+ job_status = synonym("_job_status", map_column=True)
+
+ The above class ``MyClass`` will now have the ``job_status``
+ :class:`.Column` object mapped to the attribute named ``_job_status``,
+ and the attribute named ``job_status`` will refer to the synonym
+ itself. This feature is typically used in conjunction with the
+ ``descriptor`` argument in order to link a user-defined descriptor
+ as a "wrapper" for an existing column.
+
+ :param comparator_factory: A subclass of :class:`.PropComparator`
+ that will provide custom comparison behavior at the SQL expression
+ level.
+
+ .. note::
+
+ For the use case of providing an attribute which redefines both
+ Python-level and SQL-expression level behavior of an attribute,
+ please refer to the Hybrid attribute introduced at
+ :ref:`mapper_hybrids` for a more effective technique.
+
+ .. seealso::
+
+ :ref:`synonyms` - examples of functionality.
+
+ :ref:`mapper_hybrids` - Hybrids provide a better approach for
+ more complicated attribute-wrapping schemes than synonyms.
+
+ """
+
self.name = name
self.map_column = map_column
self.descriptor = descriptor
@@ -462,10 +600,72 @@ class SynonymProperty(DescriptorProperty):
self.parent = parent
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ComparableProperty(DescriptorProperty):
"""Instruments a Python property for use in query expressions."""
def __init__(self, comparator_factory, descriptor=None, doc=None):
+ """Provides a method of applying a :class:`.PropComparator`
+ to any Python descriptor attribute.
+
+ .. versionchanged:: 0.7
+ :func:`.comparable_property` is superseded by
+ the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
+ at :ref:`hybrid_custom_comparators`.
+
+ Allows any Python descriptor to behave like a SQL-enabled
+ attribute when used at the class level in queries, allowing
+ redefinition of expression operator behavior.
+
+ In the example below we redefine :meth:`.PropComparator.operate`
+ to wrap both sides of an expression in ``func.lower()`` to produce
+ case-insensitive comparison::
+
+ from sqlalchemy.orm import comparable_property
+ from sqlalchemy.orm.interfaces import PropComparator
+ from sqlalchemy.sql import func
+ from sqlalchemy import Integer, String, Column
+ from sqlalchemy.ext.declarative import declarative_base
+
+ class CaseInsensitiveComparator(PropComparator):
+ def __clause_element__(self):
+ return self.prop
+
+ def operate(self, op, other):
+ return op(
+ func.lower(self.__clause_element__()),
+ func.lower(other)
+ )
+
+ Base = declarative_base()
+
+ class SearchWord(Base):
+ __tablename__ = 'search_word'
+ id = Column(Integer, primary_key=True)
+ word = Column(String)
+ word_insensitive = comparable_property(lambda prop, mapper:
+ CaseInsensitiveComparator(mapper.c.word, mapper)
+ )
+
+
+ A mapping like the above allows the ``word_insensitive`` attribute
+ to render an expression like::
+
+ >>> print SearchWord.word_insensitive == "Trucks"
+ lower(search_word.word) = lower(:lower_1)
+
+ :param comparator_factory:
+ A PropComparator subclass or factory that defines operator behavior
+ for this property.
+
+ :param descriptor:
+ Optional when used in a ``properties={}`` declaration. The Python
+ descriptor or property to layer comparison behavior on top of.
+
+ The like-named descriptor will be automatically retrieved from the
+ mapped class if left blank in a ``properties`` declaration.
+
+ """
self.descriptor = descriptor
self.comparator_factory = comparator_factory
self.doc = doc or (descriptor and descriptor.__doc__) or None
@@ -473,3 +673,5 @@ class ComparableProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
+
+
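For orientation, a minimal sketch of the kind of composite mapping the
docstrings above describe; the ``Point`` and ``Vertex`` names are hypothetical
and not part of this change::

    from sqlalchemy import Column, Integer
    from sqlalchemy.orm import composite
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __composite_values__(self):
            # values correspond positionally to the mapped columns below
            return self.x, self.y

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and other.y == self.y

    class Vertex(Base):
        __tablename__ = 'vertex'
        id = Column(Integer, primary_key=True)
        x1 = Column(Integer)
        y1 = Column(Integer)
        # arguments must be Column objects or mapped attributes/attribute
        # names; anything else now raises the ArgumentError added above
        start = composite(Point, x1, y1)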
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 5814b47ca..bae09d32d 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -1,5 +1,5 @@
# orm/dynamic.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -15,11 +15,12 @@ from .. import log, util, exc
from ..sql import operators
from . import (
attributes, object_session, util as orm_util, strategies,
- object_mapper, exc as orm_exc
+ object_mapper, exc as orm_exc, properties
)
from .query import Query
-
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.is_class_level = True
@@ -39,9 +40,6 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
backref=self.parent_property.back_populates,
)
-log.class_logger(DynaLoader)
-
-
class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
@@ -78,6 +76,14 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
history = self._get_collection_history(state, passive)
return history.added_plus_unchanged
+ @util.memoized_property
+ def _append_token(self):
+ return attributes.Event(self, attributes.OP_APPEND)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return attributes.Event(self, attributes.OP_REMOVE)
+
def fire_append_event(self, state, dict_, value, initiator,
collection_history=None):
if collection_history is None:
@@ -86,7 +92,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
collection_history.add_added(value)
for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
+ value = fn(state, value, initiator or self._append_token)
if self.trackparent and value is not None:
self.sethasparent(attributes.instance_state(value), state, True)
@@ -102,7 +108,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
self.sethasparent(attributes.instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
def _modified_event(self, state, dict_):
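As a usage sketch (class names hypothetical), the ``DynaLoader`` registered
above is the strategy a ``relationship()`` resolves to when configured with
``lazy="dynamic"``, producing a :class:`.Query` in place of a loaded
collection::

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        email = Column(String)

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # resolved via the strategy_for(lazy="dynamic") registration above
        addresses = relationship(Address, lazy="dynamic")

    # given a persistent User instance, the attribute can be filtered
    # before any rows are loaded:
    #   some_user.addresses.filter(Address.email.like('%@example.com')).all()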
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 7a11cd450..e1dd96068 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -1,5 +1,5 @@
# orm/evaluator.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 97019bb4e..a09154dd0 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -1,5 +1,5 @@
# orm/events.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,10 +8,14 @@
"""
from .. import event, exc, util
-orm = util.importlater("sqlalchemy", "orm")
+from .base import _mapper_or_none
import inspect
import weakref
-
+from . import interfaces
+from . import mapperlib, instrumentation
+from .session import Session, sessionmaker
+from .scoping import scoped_session
+from .attributes import QueryableAttribute
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
@@ -43,17 +47,20 @@ class InstrumentationEvents(event.Events):
"""
_target_class_doc = "SomeBaseClass"
+ _dispatch_target = instrumentation.InstrumentationFactory
+
@classmethod
def _accept_with(cls, target):
- # TODO: there's no coverage for this
if isinstance(target, type):
return _InstrumentationEventsHold(target)
else:
return None
@classmethod
- def _listen(cls, target, identifier, fn, propagate=True):
+ def _listen(cls, event_key, propagate=True):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
def listen(target_cls, *arg):
listen_cls = target()
@@ -63,22 +70,21 @@ class InstrumentationEvents(event.Events):
return fn(target_cls, *arg)
def remove(ref):
- event.Events._remove(orm.instrumentation._instrumentation_factory,
- identifier, listen)
+ key = event.registry._EventKey(None, identifier, listen,
+ instrumentation._instrumentation_factory)
+ getattr(instrumentation._instrumentation_factory.dispatch,
+ identifier).remove(key)
target = weakref.ref(target.class_, remove)
- event.Events._listen(orm.instrumentation._instrumentation_factory,
- identifier, listen)
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of instrumentation events "
- "not yet implemented")
+ event_key.\
+ with_dispatch_target(instrumentation._instrumentation_factory).\
+ with_wrapper(listen).base_listen()
@classmethod
def _clear(cls):
super(InstrumentationEvents, cls)._clear()
- orm.instrumentation._instrumentation_factory.dispatch._clear()
+ instrumentation._instrumentation_factory.dispatch._clear()
def class_instrument(self, cls):
"""Called after the given class is instrumented.
@@ -100,6 +106,7 @@ class InstrumentationEvents(event.Events):
"""Called when an attribute is instrumented."""
+
class _InstrumentationEventsHold(object):
"""temporary marker object used to transfer from _accept_with() to
_listen() on the InstrumentationEvents class.
@@ -110,7 +117,6 @@ class _InstrumentationEventsHold(object):
dispatch = event.dispatcher(InstrumentationEvents)
-
class InstanceEvents(event.Events):
"""Define events specific to object lifecycle.
@@ -121,21 +127,19 @@ class InstanceEvents(event.Events):
def my_load_listener(target, context):
print "on load!"
- event.listen(SomeMappedClass, 'load', my_load_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
+ event.listen(SomeClass, 'load', my_load_listener)
- from sqlalchemy.orm import mapper
+ Available targets include:
- def some_listener(target, context):
- log.debug("Instance %s being loaded" % target)
+ * mapped classes
+ * unmapped superclasses of mapped or to-be-mapped classes
+ (using the ``propagate=True`` flag)
+ * :class:`.Mapper` objects
+    * the :class:`.Mapper` class itself and the :func:`.mapper`
+      function, which indicate listening for all mappers.
- # attach to all mappers
- event.listen(mapper, 'load', some_listener)
+ .. versionchanged:: 0.8.0 instance events can be associated with
+ unmapped superclasses of mapped classes.
Instance events are closely related to mapper events, but
are more specific to the instance and its instrumentation,
@@ -154,21 +158,28 @@ class InstanceEvents(event.Events):
"""
- _target_class_doc = "SomeMappedClass"
+ _target_class_doc = "SomeClass"
+
+ _dispatch_target = instrumentation.ClassManager
@classmethod
- def _accept_with(cls, target):
- if isinstance(target, orm.instrumentation.ClassManager):
+ def _new_classmanager_instance(cls, class_, classmanager):
+ _InstanceEventsHold.populate(class_, classmanager)
+
+ @classmethod
+ @util.dependencies("sqlalchemy.orm")
+ def _accept_with(cls, orm, target):
+ if isinstance(target, instrumentation.ClassManager):
return target
- elif isinstance(target, orm.Mapper):
+ elif isinstance(target, mapperlib.Mapper):
return target.class_manager
elif target is orm.mapper:
- return orm.instrumentation.ClassManager
+ return instrumentation.ClassManager
elif isinstance(target, type):
- if issubclass(target, orm.Mapper):
- return orm.instrumentation.ClassManager
+ if issubclass(target, mapperlib.Mapper):
+ return instrumentation.ClassManager
else:
- manager = orm.instrumentation.manager_of_class(target)
+ manager = instrumentation.manager_of_class(target)
if manager:
return manager
else:
@@ -176,23 +187,23 @@ class InstanceEvents(event.Events):
return None
@classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
+ def _listen(cls, event_key, raw=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if not raw:
orig_fn = fn
def wrap(state, *arg, **kw):
return orig_fn(state.obj(), *arg, **kw)
fn = wrap
+ event_key = event_key.with_wrapper(fn)
+
+ event_key.base_listen(propagate=propagate)
- event.Events._listen(target, identifier, fn, propagate=propagate)
if propagate:
for mgr in target.subclass_managers(True):
- event.Events._listen(mgr, identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of instance events not yet implemented"
- raise NotImplementedError(msg)
+ event_key.with_dispatch_target(mgr).base_listen(propagate=True)
@classmethod
def _clear(cls):
@@ -321,8 +332,7 @@ class InstanceEvents(event.Events):
"""
-
-class _EventsHold(object):
+class _EventsHold(event.RefCollection):
"""Hold onto listeners against unmapped, uninstrumented classes.
Establish _listen() for that class' mapper/instrumentation when
@@ -337,14 +347,20 @@ class _EventsHold(object):
cls.all_holds.clear()
class HoldEvents(object):
+ _dispatch_target = None
+
@classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
+ def _listen(cls, event_key, raw=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if target.class_ in target.all_holds:
collection = target.all_holds[target.class_]
else:
- collection = target.all_holds[target.class_] = []
+ collection = target.all_holds[target.class_] = {}
- collection.append((identifier, fn, raw, propagate))
+ event.registry._stored_in_collection(event_key, target)
+ collection[event_key._key] = (event_key, raw, propagate)
if propagate:
stack = list(target.class_.__subclasses__())
@@ -353,28 +369,37 @@ class _EventsHold(object):
stack.extend(subclass.__subclasses__())
subject = target.resolve(subclass)
if subject is not None:
- subject.dispatch._listen(subject, identifier, fn,
- raw=raw, propagate=propagate)
+ event_key.with_dispatch_target(subject).\
+ listen(raw=raw, propagate=propagate)
+
+ def remove(self, event_key):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
+ collection = target.all_holds[target.class_]
+ del collection[event_key._key]
@classmethod
def populate(cls, class_, subject):
for subclass in class_.__mro__:
if subclass in cls.all_holds:
- if subclass is class_:
- collection = cls.all_holds.pop(subclass)
- else:
- collection = cls.all_holds[subclass]
- for ident, fn, raw, propagate in collection:
+ collection = cls.all_holds[subclass]
+ for event_key, raw, propagate in collection.values():
if propagate or subclass is class_:
- subject.dispatch._listen(subject, ident,
- fn, raw, propagate)
+ # since we can't be sure in what order different classes
+ # in a hierarchy are triggered with populate(),
+ # we rely upon _EventsHold for all event
+ # assignment, instead of using the generic propagate
+ # flag.
+ event_key.with_dispatch_target(subject).\
+ listen(raw=raw, propagate=False)
class _InstanceEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
def resolve(self, class_):
- return orm.instrumentation.manager_of_class(class_)
+ return instrumentation.manager_of_class(class_)
class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents):
pass
@@ -396,24 +421,22 @@ class MapperEvents(event.Events):
"select my_special_function(%d)"
% target.special_number)
- # associate the listener function with SomeMappedClass,
+ # associate the listener function with SomeClass,
# to execute during the "before_insert" hook
event.listen(
- SomeMappedClass, 'before_insert', my_before_insert_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
+ SomeClass, 'before_insert', my_before_insert_listener)
- from sqlalchemy.orm import mapper
+ Available targets include:
- def some_listener(mapper, connection, target):
- log.debug("Instance %s being inserted" % target)
+ * mapped classes
+ * unmapped superclasses of mapped or to-be-mapped classes
+ (using the ``propagate=True`` flag)
+ * :class:`.Mapper` objects
+    * the :class:`.Mapper` class itself and the :func:`.mapper`
+      function, which indicate listening for all mappers.
- # attach to all mappers
- event.listen(mapper, 'before_insert', some_listener)
+ .. versionchanged:: 0.8.0 mapper events can be associated with
+ unmapped superclasses of mapped classes.
Mapper events provide hooks into critical sections of the
mapper, including those related to object instrumentation,
@@ -455,17 +478,23 @@ class MapperEvents(event.Events):
"""
- _target_class_doc = "SomeMappedClass"
+ _target_class_doc = "SomeClass"
+ _dispatch_target = mapperlib.Mapper
@classmethod
- def _accept_with(cls, target):
+ def _new_mapper_instance(cls, class_, mapper):
+ _MapperEventsHold.populate(class_, mapper)
+
+ @classmethod
+ @util.dependencies("sqlalchemy.orm")
+ def _accept_with(cls, orm, target):
if target is orm.mapper:
- return orm.Mapper
+ return mapperlib.Mapper
elif isinstance(target, type):
- if issubclass(target, orm.Mapper):
+ if issubclass(target, mapperlib.Mapper):
return target
else:
- mapper = orm.util._mapper_or_none(target)
+ mapper = _mapper_or_none(target)
if mapper is not None:
return mapper
else:
@@ -474,8 +503,10 @@ class MapperEvents(event.Events):
return target
@classmethod
- def _listen(cls, target, identifier, fn,
+ def _listen(cls, event_key,
raw=False, retval=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
if not raw or not retval:
if not raw:
@@ -494,16 +525,17 @@ class MapperEvents(event.Events):
arg[target_index] = arg[target_index].obj()
if not retval:
wrapped_fn(*arg, **kw)
- return orm.interfaces.EXT_CONTINUE
+ return interfaces.EXT_CONTINUE
else:
return wrapped_fn(*arg, **kw)
fn = wrap
+ event_key = event_key.with_wrapper(wrap)
if propagate:
for mapper in target.self_and_descendants:
- event.Events._listen(mapper, identifier, fn, propagate=True)
+ event_key.with_dispatch_target(mapper).base_listen(propagate=True)
else:
- event.Events._listen(target, identifier, fn)
+ event_key.base_listen()
@classmethod
def _clear(cls):
@@ -517,8 +549,15 @@ class MapperEvents(event.Events):
This event is the earliest phase of mapper construction.
Most attributes of the mapper are not yet initialized.
- This listener can generally only be applied to the :class:`.Mapper`
- class overall.
+ This listener can either be applied to the :class:`.Mapper`
+ class overall, or to any un-mapped class which serves as a base
+ for classes that will be mapped (using the ``propagate=True`` flag)::
+
+ Base = declarative_base()
+
+ @event.listens_for(Base, "instrument_class", propagate=True)
+ def on_new_class(mapper, cls_):
+ " ... "
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -1048,17 +1087,11 @@ class MapperEvents(event.Events):
"""
- @classmethod
- def _remove(cls, identifier, target, fn):
- "Removal of mapper events not yet implemented"
- raise NotImplementedError(msg)
-
-
class _MapperEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
def resolve(self, class_):
- return orm.util._mapper_or_none(class_)
+ return _mapper_or_none(class_)
class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents):
pass
@@ -1083,7 +1116,7 @@ class SessionEvents(event.Events):
The :func:`~.event.listen` function will accept
:class:`.Session` objects as well as the return result
- of :func:`.sessionmaker` and :func:`.scoped_session`.
+ of :class:`~.sessionmaker()` and :class:`~.scoped_session()`.
Additionally, it accepts the :class:`.Session` class which
will apply listeners to all :class:`.Session` instances
@@ -1093,38 +1126,35 @@ class SessionEvents(event.Events):
_target_class_doc = "SomeSessionOrFactory"
+ _dispatch_target = Session
+
@classmethod
def _accept_with(cls, target):
- if isinstance(target, orm.scoped_session):
+ if isinstance(target, scoped_session):
target = target.session_factory
- if not isinstance(target, orm.sessionmaker) and \
+ if not isinstance(target, sessionmaker) and \
(
not isinstance(target, type) or
- not issubclass(target, orm.Session)
+ not issubclass(target, Session)
):
raise exc.ArgumentError(
"Session event listen on a scoped_session "
"requires that its creation callable "
"is associated with the Session class.")
- if isinstance(target, orm.sessionmaker):
+ if isinstance(target, sessionmaker):
return target.class_
elif isinstance(target, type):
- if issubclass(target, orm.scoped_session):
- return orm.Session
- elif issubclass(target, orm.Session):
+ if issubclass(target, scoped_session):
+ return Session
+ elif issubclass(target, Session):
return target
- elif isinstance(target, orm.Session):
+ elif isinstance(target, Session):
return target
else:
return None
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of session events not yet implemented"
- raise NotImplementedError(msg)
-
def after_transaction_create(self, session, transaction):
"""Execute when a new :class:`.SessionTransaction` is created.
@@ -1173,7 +1203,7 @@ class SessionEvents(event.Events):
.. note::
- The :meth:`.before_commit` hook is *not* per-flush,
+ The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
@@ -1265,9 +1295,9 @@ class SessionEvents(event.Events):
:param session: The target :class:`.Session`.
:param previous_transaction: The :class:`.SessionTransaction`
- transactional marker object which was just closed. The current
- :class:`.SessionTransaction` for the given :class:`.Session` is
- available via the :attr:`.Session.transaction` attribute.
+ transactional marker object which was just closed. The current
+ :class:`.SessionTransaction` for the given :class:`.Session` is
+ available via the :attr:`.Session.transaction` attribute.
.. versionadded:: 0.7.3
@@ -1359,7 +1389,7 @@ class SessionEvents(event.Events):
This is called before an add, delete or merge causes
the object to be part of the session.
- .. versionadded:: 0.8. Note that :meth:`.after_attach` now
+ .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now
fires off after the item is part of the session.
:meth:`.before_attach` is provided for those cases where
the item should not yet be part of the session state.
@@ -1474,7 +1504,7 @@ class AttributeEvents(event.Events):
listen(UserContact.phone, 'set', validate_phone, retval=True)
A validation function like the above can also raise an exception
- such as :class:`.ValueError` to halt the operation.
+ such as :exc:`ValueError` to halt the operation.
Several modifiers are available to the :func:`~.event.listen` function.
@@ -1503,25 +1533,32 @@ class AttributeEvents(event.Events):
"""
_target_class_doc = "SomeClass.some_attribute"
+ _dispatch_target = QueryableAttribute
+
+ @staticmethod
+ def _set_dispatch(cls, dispatch_cls):
+ event.Events._set_dispatch(cls, dispatch_cls)
+ dispatch_cls._active_history = False
@classmethod
def _accept_with(cls, target):
# TODO: coverage
- if isinstance(target, orm.interfaces.MapperProperty):
+ if isinstance(target, interfaces.MapperProperty):
return getattr(target.parent.class_, target.key)
else:
return target
@classmethod
- def _listen(cls, target, identifier, fn, active_history=False,
+ def _listen(cls, event_key, active_history=False,
raw=False, retval=False,
propagate=False):
+
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if active_history:
target.dispatch._active_history = True
- # TODO: for removal, need to package the identity
- # of the wrapper with the original function.
-
if not raw or not retval:
orig_fn = fn
@@ -1534,19 +1571,15 @@ class AttributeEvents(event.Events):
else:
return orig_fn(target, value, *arg)
fn = wrap
+ event_key = event_key.with_wrapper(wrap)
- event.Events._listen(target, identifier, fn, propagate)
+ event_key.base_listen(propagate=propagate)
if propagate:
- manager = orm.instrumentation.manager_of_class(target.class_)
+ manager = instrumentation.manager_of_class(target.class_)
for mgr in manager.subclass_managers(True):
- event.Events._listen(mgr[target.key], identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of attribute events not yet implemented"
- raise NotImplementedError(msg)
+ event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True)
def append(self, target, value, initiator):
"""Receive a collection append event.
@@ -1558,8 +1591,15 @@ class AttributeEvents(event.Events):
is registered with ``retval=True``, the listener
function must return this value, or a new value which
replaces it.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+      from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
@@ -1572,8 +1612,15 @@ class AttributeEvents(event.Events):
If the listener is registered with ``raw=True``, this will
be the :class:`.InstanceState` object.
:param value: the value being removed.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+      from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: No return value is defined for this event.
"""
@@ -1593,9 +1640,17 @@ class AttributeEvents(event.Events):
the previous value of the attribute will be loaded from
the database if the existing value is currently unloaded
or expired.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+      from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
"""
+
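As a hedged illustration of the listener interfaces documented above
(``SomeClass`` with a mapped ``name`` attribute is assumed to exist), the same
public :func:`.event.listens_for` calls now route through the event-key
plumbing introduced in this patch::

    from sqlalchemy import event

    # instance-level event; with the changes above this can also be attached
    # to an unmapped base class using propagate=True
    @event.listens_for(SomeClass, 'load')
    def on_load(target, context):
        print("loaded %r" % target)

    # attribute event; as of 0.9 the 'initiator' argument is an
    # attributes.Event token rather than the raw attribute implementation
    @event.listens_for(SomeClass.name, 'set', retval=True)
    def trim_name(target, value, oldvalue, initiator):
        return value.strip() if value is not None else value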
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index 0faa7bd29..d1ef1ded9 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -1,13 +1,11 @@
# orm/exc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc, util
-orm_util = util.importlater('sqlalchemy.orm', 'util')
-attributes = util.importlater('sqlalchemy.orm', 'attributes')
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
@@ -65,10 +63,11 @@ class DetachedInstanceError(sa_exc.SQLAlchemyError):
class UnmappedInstanceError(UnmappedError):
"""An mapping operation was requested for an unknown instance."""
- def __init__(self, obj, msg=None):
+ @util.dependencies("sqlalchemy.orm.base")
+ def __init__(self, base, obj, msg=None):
if not msg:
try:
- mapper = orm_util.class_mapper(type(obj))
+ base.class_mapper(type(obj))
name = _safe_cls_name(type(obj))
msg = ("Class %r is mapped, but this instance lacks "
"instrumentation. This occurs when the instance"
@@ -117,10 +116,11 @@ class ObjectDeletedError(sa_exc.InvalidRequestError):
object.
"""
- def __init__(self, state, msg=None):
+ @util.dependencies("sqlalchemy.orm.base")
+ def __init__(self, base, state, msg=None):
if not msg:
msg = "Instance '%s' has been deleted, or its "\
- "row is otherwise not present." % orm_util.state_str(state)
+ "row is otherwise not present." % base.state_str(state)
sa_exc.InvalidRequestError.__init__(self, msg)
@@ -149,10 +149,10 @@ def _safe_cls_name(cls):
cls_name = repr(cls)
return cls_name
-
-def _default_unmapped(cls):
+@util.dependencies("sqlalchemy.orm.base")
+def _default_unmapped(base, cls):
try:
- mappers = attributes.manager_of_class(cls).mappers
+ mappers = base.manager_of_class(cls).mappers
except NO_STATE:
mappers = {}
except TypeError:
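The :func:`.util.dependencies` decorator used here, and elsewhere in this
patch, replaces ``util.importlater``: it resolves the named module lazily
(avoiding import cycles) and injects it as the leading argument, so callers
omit it. A hypothetical sketch of the calling convention::

    from sqlalchemy import util

    @util.dependencies("sqlalchemy.orm.base")
    def describe(base, obj):
        # "base" is the lazily-imported sqlalchemy.orm.base module
        return base.state_str(base.instance_state(obj))

    # invoked as describe(some_mapped_object); the module argument is injected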
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index d0234a1d3..a91085d28 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -1,5 +1,5 @@
# orm/identity.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -172,7 +172,7 @@ class WeakInstanceDict(IdentityMap):
if util.py2k:
return dict.values(self)
else:
- return list(dict.values(self))
+ return list(dict.values(self))
def discard(self, state):
st = dict.get(self, state.key, None)
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 368a6a0b1..68b4f0611 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -1,5 +1,5 @@
# orm/instrumentation.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -29,17 +29,15 @@ alternate instrumentation forms.
"""
-from . import exc, collections, events, interfaces
-from operator import attrgetter
-from .. import event, util
-state = util.importlater("sqlalchemy.orm", "state")
-
+from . import exc, collections, interfaces, state
+from .. import util
+from . import base
class ClassManager(dict):
"""tracks state information at the class level."""
- MANAGER_ATTR = '_sa_class_manager'
- STATE_ATTR = '_sa_instance_state'
+ MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
+ STATE_ATTR = base.DEFAULT_STATE_ATTR
deferred_scalar_loader = None
@@ -63,7 +61,8 @@ class ClassManager(dict):
for base in self._bases:
self.update(base)
- events._InstanceEventsHold.populate(class_, self)
+ self.dispatch._events._new_classmanager_instance(class_, self)
+ #events._InstanceEventsHold.populate(class_, self)
for basecls in class_.__mro__:
mgr = manager_of_class(basecls)
@@ -79,7 +78,11 @@ class ClassManager(dict):
"reference cycles. Please remove this method." %
class_)
- dispatch = event.dispatcher(events.InstanceEvents)
+ def __hash__(self):
+ return id(self)
+
+ def __eq__(self, other):
+ return other is self
@property
def is_mapped(self):
@@ -164,9 +167,7 @@ class ClassManager(dict):
@util.hybridmethod
def manager_getter(self):
- def manager_of_class(cls):
- return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
- return manager_of_class
+ return _default_manager_getter
@util.hybridmethod
def state_getter(self):
@@ -177,11 +178,12 @@ class ClassManager(dict):
instance.
"""
- return attrgetter(self.STATE_ATTR)
+ return _default_state_getter
@util.hybridmethod
def dict_getter(self):
- return attrgetter('__dict__')
+ return _default_dict_getter
+
def instrument_attribute(self, key, inst, propagated=False):
if propagated:
@@ -296,6 +298,9 @@ class ClassManager(dict):
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
+ def _serialize(self, state, state_dict):
+ return _SerializeManager(state, state_dict)
+
def _new_state_if_none(self, instance):
"""Install a default InstanceState if none is present.
@@ -335,12 +340,41 @@ class ClassManager(dict):
return '<%s of %r at %x>' % (
self.__class__.__name__, self.class_, id(self))
+class _SerializeManager(object):
+ """Provide serialization of a :class:`.ClassManager`.
+
+ The :class:`.InstanceState` uses ``__init__()`` on serialize
+ and ``__call__()`` on deserialize.
+
+ """
+ def __init__(self, state, d):
+ self.class_ = state.class_
+ manager = state.manager
+ manager.dispatch.pickle(state, d)
+
+ def __call__(self, state, inst, state_dict):
+ state.manager = manager = manager_of_class(self.class_)
+ if manager is None:
+ raise exc.UnmappedInstanceError(
+ inst,
+ "Cannot deserialize object of type %r - "
+ "no mapper() has "
+ "been configured for this class within the current "
+ "Python process!" %
+ self.class_)
+ elif manager.is_mapped and not manager.mapper.configured:
+ manager.mapper._configure_all()
+
+ # setup _sa_instance_state ahead of time so that
+ # unpickle events can access the object normally.
+ # see [ticket:2362]
+ if inst is not None:
+ manager.setup_instance(inst, state)
+ manager.dispatch.unpickle(state, state_dict)
class InstrumentationFactory(object):
"""Factory for new ClassManager instances."""
- dispatch = event.dispatcher(events.InstrumentationEvents)
-
def create_manager_for_cls(self, class_):
assert class_ is not None
assert manager_of_class(class_) is None
@@ -380,6 +414,14 @@ class InstrumentationFactory(object):
     # when imported.
_instrumentation_factory = InstrumentationFactory()
+# these attributes are replaced by sqlalchemy.ext.instrumentation
+# when a non-standard InstrumentationManager class is first
+# used to instrument a class.
+instance_state = _default_state_getter = base.instance_state
+
+instance_dict = _default_dict_getter = base.instance_dict
+
+manager_of_class = _default_manager_getter = base.manager_of_class
def register_class(class_):
"""Register class instrumentation.
@@ -411,15 +453,6 @@ def is_instrumented(instance, key):
return manager_of_class(instance.__class__).\
is_instrumented(key, search=True)
-# these attributes are replaced by sqlalchemy.ext.instrumentation
-# when a non-standard InstrumentationManager class is first
-# used to instrument a class.
-instance_state = _default_state_getter = ClassManager.state_getter()
-
-instance_dict = _default_dict_getter = ClassManager.dict_getter()
-
-manager_of_class = _default_manager_getter = ClassManager.manager_getter()
-
def _generate_init(class_, class_manager):
"""Build an __init__ decorator that triggers ClassManager events."""
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 150277be2..3d5559be9 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -1,5 +1,5 @@
# orm/interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -21,9 +21,11 @@ from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
+from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
+from .base import _InspectionAttr, _MappedAttribute
+from .path_registry import PathRegistry
+import collections
-orm_util = util.importlater('sqlalchemy.orm', 'util')
-collections = util.importlater('sqlalchemy.orm', 'collections')
__all__ = (
'AttributeExtension',
@@ -42,97 +44,6 @@ __all__ = (
'StrategizedProperty',
)
-EXT_CONTINUE = util.symbol('EXT_CONTINUE')
-EXT_STOP = util.symbol('EXT_STOP')
-
-ONETOMANY = util.symbol('ONETOMANY')
-MANYTOONE = util.symbol('MANYTOONE')
-MANYTOMANY = util.symbol('MANYTOMANY')
-
-from .deprecated_interfaces import AttributeExtension, \
- SessionExtension, \
- MapperExtension
-
-
-NOT_EXTENSION = util.symbol('NOT_EXTENSION')
-"""Symbol indicating an :class:`_InspectionAttr` that's
- not part of sqlalchemy.ext.
-
- Is assigned to the :attr:`._InspectionAttr.extension_type`
- attibute.
-
-"""
-
-class _InspectionAttr(object):
- """A base class applied to all ORM objects that can be returned
- by the :func:`.inspect` function.
-
- The attributes defined here allow the usage of simple boolean
- checks to test basic facts about the object returned.
-
- While the boolean checks here are basically the same as using
- the Python isinstance() function, the flags here can be used without
- the need to import all of these classes, and also such that
- the SQLAlchemy class system can change while leaving the flags
- here intact for forwards-compatibility.
-
- """
-
- is_selectable = False
- """Return True if this object is an instance of :class:`.Selectable`."""
-
- is_aliased_class = False
- """True if this object is an instance of :class:`.AliasedClass`."""
-
- is_instance = False
- """True if this object is an instance of :class:`.InstanceState`."""
-
- is_mapper = False
- """True if this object is an instance of :class:`.Mapper`."""
-
- is_property = False
- """True if this object is an instance of :class:`.MapperProperty`."""
-
- is_attribute = False
- """True if this object is a Python :term:`descriptor`.
-
- This can refer to one of many types. Usually a
- :class:`.QueryableAttribute` which handles attributes events on behalf
- of a :class:`.MapperProperty`. But can also be an extension type
- such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
- The :attr:`._InspectionAttr.extension_type` will refer to a constant
- identifying the specific subtype.
-
- .. seealso::
-
- :attr:`.Mapper.all_orm_descriptors`
-
- """
-
- is_clause_element = False
- """True if this object is an instance of :class:`.ClauseElement`."""
-
- extension_type = NOT_EXTENSION
- """The extension type, if any.
- Defaults to :data:`.interfaces.NOT_EXTENSION`
-
- .. versionadded:: 0.8.0
-
- .. seealso::
-
- :data:`.HYBRID_METHOD`
-
- :data:`.HYBRID_PROPERTY`
-
- :data:`.ASSOCIATION_PROXY`
-
- """
-
-class _MappedAttribute(object):
- """Mixin for attributes which should be replaced by mapper-assigned
- attributes.
-
- """
class MapperProperty(_MappedAttribute, _InspectionAttr):
@@ -235,7 +146,26 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
@property
def class_attribute(self):
"""Return the class-bound descriptor corresponding to this
- MapperProperty."""
+ :class:`.MapperProperty`.
+
+ This is basically a ``getattr()`` call::
+
+ return getattr(self.parent.class_, self.key)
+
+ I.e. if this :class:`.MapperProperty` were named ``addresses``,
+ and the class to which it is mapped is ``User``, this sequence
+ is possible::
+
+ >>> from sqlalchemy import inspect
+ >>> mapper = inspect(User)
+ >>> addresses_property = mapper.attrs.addresses
+ >>> addresses_property.class_attribute is User.addresses
+ True
+ >>> User.addresses.property is addresses_property
+ True
+
+
+ """
return getattr(self.parent.class_, self.key)
@@ -389,6 +319,9 @@ class PropComparator(operators.ColumnOperators):
def __clause_element__(self):
raise NotImplementedError("%r" % self)
+ def _query_clause_element(self):
+ return self.__clause_element__()
+
def adapt_to_entity(self, adapt_to_entity):
"""Return a copy of this PropComparator which will use the given
:class:`.AliasedInsp` to produce corresponding expressions.
@@ -490,51 +423,57 @@ class StrategizedProperty(MapperProperty):
strategy_wildcard_key = None
- @util.memoized_property
- def _wildcard_path(self):
- if self.strategy_wildcard_key:
- return ('loaderstrategy', (self.strategy_wildcard_key,))
- else:
- return None
+ def _get_context_loader(self, context, path):
+ load = None
- def _get_context_strategy(self, context, path):
- strategy_cls = path._inlined_get_for(self, context, 'loaderstrategy')
+ # use EntityRegistry.__getitem__()->PropRegistry here so
+ # that the path is stated in terms of our base
+ search_path = dict.__getitem__(path, self)
- if not strategy_cls:
- wc_key = self._wildcard_path
- if wc_key and wc_key in context.attributes:
- strategy_cls = context.attributes[wc_key]
+ # search among: exact match, "attr.*", "default" strategy
+ # if any.
+ for path_key in (
+ search_path._loader_key,
+ search_path._wildcard_path_loader_key,
+ search_path._default_path_loader_key
+ ):
+ if path_key in context.attributes:
+ load = context.attributes[path_key]
+ break
- if strategy_cls:
- try:
- return self._strategies[strategy_cls]
- except KeyError:
- return self.__init_strategy(strategy_cls)
- return self.strategy
+ return load
- def _get_strategy(self, cls):
+ def _get_strategy(self, key):
try:
- return self._strategies[cls]
+ return self._strategies[key]
except KeyError:
- return self.__init_strategy(cls)
+ cls = self._strategy_lookup(*key)
+ self._strategies[key] = self._strategies[cls] = strategy = cls(self)
+ return strategy
- def __init_strategy(self, cls):
- self._strategies[cls] = strategy = cls(self)
- return strategy
+ def _get_strategy_by_cls(self, cls):
+ return self._get_strategy(cls._strategy_keys[0])
def setup(self, context, entity, path, adapter, **kwargs):
- self._get_context_strategy(context, path).\
- setup_query(context, entity, path,
- adapter, **kwargs)
+ loader = self._get_context_loader(context, path)
+ if loader and loader.strategy:
+ strat = self._get_strategy(loader.strategy)
+ else:
+ strat = self.strategy
+ strat.setup_query(context, entity, path, loader, adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
- return self._get_context_strategy(context, path).\
- create_row_processor(context, path,
+ loader = self._get_context_loader(context, path)
+ if loader and loader.strategy:
+ strat = self._get_strategy(loader.strategy)
+ else:
+ strat = self.strategy
+ return strat.create_row_processor(context, path, loader,
mapper, row, adapter)
def do_init(self):
self._strategies = {}
- self.strategy = self.__init_strategy(self.strategy_class)
+ self.strategy = self._get_strategy_by_cls(self.strategy_class)
def post_instrument_class(self, mapper):
if self.is_primary() and \
@@ -542,6 +481,30 @@ class StrategizedProperty(MapperProperty):
self.strategy.init_class_attribute(mapper)
+ _strategies = collections.defaultdict(dict)
+
+ @classmethod
+ def strategy_for(cls, **kw):
+ def decorate(dec_cls):
+ dec_cls._strategy_keys = []
+ key = tuple(sorted(kw.items()))
+ cls._strategies[cls][key] = dec_cls
+ dec_cls._strategy_keys.append(key)
+ return dec_cls
+ return decorate
+
+ @classmethod
+ def _strategy_lookup(cls, *key):
+ for prop_cls in cls.__mro__:
+ if prop_cls in cls._strategies:
+ strategies = cls._strategies[prop_cls]
+ try:
+ return strategies[key]
+ except KeyError:
+ pass
+ raise Exception("can't locate strategy for %s %s" % (cls, key))
+
+
class MapperOption(object):
"""Describe a modification to a Query."""
@@ -563,241 +526,6 @@ class MapperOption(object):
self.process_query(query)
-class PropertyOption(MapperOption):
- """A MapperOption that is applied to a property off the mapper or
- one of its child mappers, identified by a dot-separated key
- or list of class-bound attributes. """
-
- def __init__(self, key, mapper=None):
- self.key = key
- self.mapper = mapper
-
- def process_query(self, query):
- self._process(query, True)
-
- def process_query_conditionally(self, query):
- self._process(query, False)
-
- def _process(self, query, raiseerr):
- paths = self._process_paths(query, raiseerr)
- if paths:
- self.process_query_property(query, paths)
-
- def process_query_property(self, query, paths):
- pass
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d['key'] = ret = []
- for token in util.to_list(self.key):
- if isinstance(token, PropComparator):
- ret.append((token._parentmapper.class_, token.key))
- else:
- ret.append(token)
- return d
-
- def __setstate__(self, state):
- ret = []
- for key in state['key']:
- if isinstance(key, tuple):
- cls, propkey = key
- ret.append(getattr(cls, propkey))
- else:
- ret.append(key)
- state['key'] = tuple(ret)
- self.__dict__ = state
-
- def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
- if orm_util._is_aliased_class(mapper):
- searchfor = mapper
- else:
- searchfor = orm_util._class_to_mapper(mapper)
- for ent in query._mapper_entities:
- if ent.corresponds_to(searchfor):
- return ent
- else:
- if raiseerr:
- if not list(query._mapper_entities):
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- raise sa_exc.ArgumentError(
- "Can't find property '%s' on any entity "
- "specified in this Query. Note the full path "
- "from root (%s) to target entity must be specified."
- % (token, ",".join(str(x) for
- x in query._mapper_entities))
- )
- else:
- return None
-
- def _find_entity_basestring(self, query, token, raiseerr):
- for ent in query._mapper_entities:
- # return only the first _MapperEntity when searching
- # based on string prop name. Ideally object
- # attributes are used to specify more exactly.
- return ent
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- return None
-
- def _process_paths(self, query, raiseerr):
- """reconcile the 'key' for this PropertyOption with
- the current path and entities of the query.
-
- Return a list of affected paths.
-
- """
- path = orm_util.PathRegistry.root
- entity = None
- paths = []
- no_result = []
-
- # _current_path implies we're in a
- # secondary load with an existing path
- current_path = list(query._current_path.path)
-
- tokens = deque(self.key)
- while tokens:
- token = tokens.popleft()
- if isinstance(token, str):
- # wildcard token
- if token.endswith(':*'):
- return [path.token(token)]
- sub_tokens = token.split(".", 1)
- token = sub_tokens[0]
- tokens.extendleft(sub_tokens[1:])
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[1].key == token:
- current_path = current_path[2:]
- continue
- else:
- return no_result
-
- if not entity:
- entity = self._find_entity_basestring(
- query,
- token,
- raiseerr)
- if entity is None:
- return no_result
- path_element = entity.entity_zero
- mapper = entity.mapper
-
- if hasattr(mapper.class_, token):
- prop = getattr(mapper.class_, token).property
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Can't find property named '%s' on the "
- "mapped entity %s in this Query. " % (
- token, mapper)
- )
- else:
- return no_result
- elif isinstance(token, PropComparator):
- prop = token.property
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[0:2] == \
- [token._parententity, prop]:
- current_path = current_path[2:]
- continue
- else:
- return no_result
-
- if not entity:
- entity = self._find_entity_prop_comparator(
- query,
- prop.key,
- token._parententity,
- raiseerr)
- if not entity:
- return no_result
-
- path_element = entity.entity_zero
- mapper = entity.mapper
- else:
- raise sa_exc.ArgumentError(
- "mapper option expects "
- "string key or list of attributes")
- assert prop is not None
- if raiseerr and not prop.parent.common_parent(mapper):
- raise sa_exc.ArgumentError("Attribute '%s' does not "
- "link from element '%s'" % (token, path_element))
-
- path = path[path_element][prop]
-
- paths.append(path)
-
- if getattr(token, '_of_type', None):
- ac = token._of_type
- ext_info = inspect(ac)
- path_element = mapper = ext_info.mapper
- if not ext_info.is_aliased_class:
- ac = orm_util.with_polymorphic(
- ext_info.mapper.base_mapper,
- ext_info.mapper, aliased=True,
- _use_mapper_path=True)
- ext_info = inspect(ac)
- path.set(query._attributes, "path_with_polymorphic", ext_info)
- else:
- path_element = mapper = getattr(prop, 'mapper', None)
- if mapper is None and tokens:
- raise sa_exc.ArgumentError(
- "Attribute '%s' of entity '%s' does not "
- "refer to a mapped entity" %
- (token, entity)
- )
-
- if current_path:
- # ran out of tokens before
- # current_path was exhausted.
- assert not tokens
- return no_result
-
- return paths
-
-
-class StrategizedOption(PropertyOption):
- """A MapperOption that affects which LoaderStrategy will be used
- for an operation by a StrategizedProperty.
- """
-
- chained = False
-
- def process_query_property(self, query, paths):
- strategy = self.get_strategy_class()
- if self.chained:
- for path in paths:
- path.set(
- query._attributes,
- "loaderstrategy",
- strategy
- )
- else:
- paths[-1].set(
- query._attributes,
- "loaderstrategy",
- strategy
- )
-
- def get_strategy_class(self):
- raise NotImplementedError()
class LoaderStrategy(object):
@@ -832,10 +560,10 @@ class LoaderStrategy(object):
def init_class_attribute(self, mapper):
pass
- def setup_query(self, context, entity, path, adapter, **kwargs):
+ def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
pass
- def create_row_processor(self, context, path, mapper,
+ def create_row_processor(self, context, path, loadopt, mapper,
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
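The new ``strategy_for()`` / ``_strategy_lookup()`` pair replaces direct
references to strategy classes with keyword-based keys. A hypothetical
registration with invented names, mirroring the ``DynaLoader`` registration in
the dynamic.py hunk above::

    from sqlalchemy.orm.interfaces import StrategizedProperty, LoaderStrategy

    class MyProperty(StrategizedProperty):
        pass

    # records WidgetLoader under the key (("widget", True),) for MyProperty
    @MyProperty.strategy_for(widget=True)
    class WidgetLoader(LoaderStrategy):
        pass

    # MyProperty._strategy_lookup(("widget", True)) now resolves to WidgetLoader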
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 1641f509e..af77fe3e0 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -1,5 +1,5 @@
# orm/loading.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -19,7 +19,6 @@ from .interfaces import EXT_CONTINUE
from ..sql import util as sql_util
from .util import _none_set, state_str
from .. import exc as sa_exc
-sessionlib = util.importlater("sqlalchemy.orm", "session")
_new_runid = util.counter()
@@ -34,7 +33,8 @@ def instances(query, cursor, context):
for ent in query._entities]
filtered = id in filter_fns
- single_entity = filtered and len(query._entities) == 1
+ single_entity = len(query._entities) == 1 and \
+ query._entities[0].supports_single_entity
if filtered:
if single_entity:
@@ -44,7 +44,7 @@ def instances(query, cursor, context):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
custom_rows = single_entity and \
- query._entities[0].mapper.dispatch.append_result
+ query._entities[0].custom_rows
(process, labels) = \
list(zip(*[
@@ -98,11 +98,10 @@ def instances(query, cursor, context):
break
-def merge_result(query, iterator, load=True):
+@util.dependencies("sqlalchemy.orm.query")
+def merge_result(querylib, query, iterator, load=True):
"""Merge a result into this :class:`.Query` object's Session."""
- from . import query as querylib
-
session = query.session
if load:
# flush current contents if we expect to load data
@@ -175,8 +174,6 @@ def load_on_ident(query, key,
only_load_props=None):
"""Load the given identity key from the database."""
- lockmode = lockmode or query._lockmode
-
if key is not None:
ident = key[1]
else:
@@ -214,10 +211,17 @@ def load_on_ident(query, key,
q._params = params
if lockmode is not None:
- q._lockmode = lockmode
+ version_check = True
+ q = q.with_lockmode(lockmode)
+ elif query._for_update_arg is not None:
+ version_check = True
+ q._for_update_arg = query._for_update_arg
+ else:
+ version_check = False
+
q._get_options(
populate_existing=bool(refresh_state),
- version_check=(lockmode is not None),
+ version_check=version_check,
only_load_props=only_load_props,
refresh_state=refresh_state)
q._order_by = None
@@ -547,7 +551,7 @@ def load_scalar_attributes(mapper, state, attribute_names):
"""initiate a column-based attribute refresh operation."""
#assert mapper is _state_mapper(state)
- session = sessionlib._state_session(state)
+ session = state.session
if not session:
raise orm_exc.DetachedInstanceError(
"Instance %s is not bound to a Session; "
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 5929aea6c..fc75a0cb5 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1,5 +1,5 @@
# orm/mapper.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -22,26 +22,18 @@ from collections import deque
from .. import sql, util, log, exc as sa_exc, event, schema, inspection
from ..sql import expression, visitors, operators, util as sql_util
-from . import instrumentation, attributes, \
- exc as orm_exc, events, loading, dependency
+from . import instrumentation, attributes, exc as orm_exc, loading, dependency
+from . import properties
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
-from .util import _INSTRUMENTOR, _class_to_mapper, \
- _state_mapper, class_mapper, \
- PathRegistry, state_str
+from .base import _class_to_mapper, _state_mapper, class_mapper, \
+ state_str, _INSTRUMENTOR
+from .path_registry import PathRegistry
+
import sys
-properties = util.importlater("sqlalchemy.orm", "properties")
-descriptor_props = util.importlater("sqlalchemy.orm", "descriptor_props")
-__all__ = (
- 'Mapper',
- '_mapper_registry',
- 'class_mapper',
- 'object_mapper',
- )
_mapper_registry = weakref.WeakKeyDictionary()
-_new_mappers = False
_already_compiling = False
_memoized_configured_property = util.group_expirable_memoized_property()
@@ -56,6 +48,8 @@ NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
_CONFIGURE_MUTEX = util.threading.RLock()
+@inspection._self_inspects
+@log.class_logger
class Mapper(_InspectionAttr):
"""Define the correlation of class attributes to database table
columns.
@@ -88,9 +82,12 @@ class Mapper(_InspectionAttr):
"""
+
+ _new_mappers = False
+
def __init__(self,
class_,
- local_table,
+ local_table=None,
properties=None,
primary_key=None,
non_primary=False,
@@ -118,10 +115,380 @@ class Mapper(_InspectionAttr):
legacy_is_orphan=False,
_compiled_cache_size=100,
):
- """Construct a new mapper.
+ """Return a new :class:`~.Mapper` object.
+
+ This function is typically used behind the scenes
+ via the Declarative extension. When using Declarative,
+ many of the usual :func:`.mapper` arguments are handled
+ by the Declarative extension itself, including ``class_``,
+ ``local_table``, ``properties``, and ``inherits``.
+ Other options are passed to :func:`.mapper` using
+ the ``__mapper_args__`` class variable::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ type = Column(String(50))
+ alt = Column("some_alt", Integer)
+
+ __mapper_args__ = {
+ 'polymorphic_on' : type
+ }
+
+
+ Explicit use of :func:`.mapper`
+ is often referred to as *classical mapping*. The above
+ declarative example is equivalent in classical form to::
+
+ my_table = Table("my_table", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(50)),
+ Column("some_alt", Integer)
+ )
+
+ class MyClass(object):
+ pass
+
+ mapper(MyClass, my_table,
+ polymorphic_on=my_table.c.type,
+ properties={
+ 'alt':my_table.c.some_alt
+ })
+
+ .. seealso::
+
+ :ref:`classical_mapping` - discussion of direct usage of
+ :func:`.mapper`
+
+ :param class\_: The class to be mapped. When using Declarative,
+ this argument is automatically passed as the declared class
+ itself.
+
+ :param local_table: The :class:`.Table` or other selectable
+ to which the class is mapped. May be ``None`` if
+ this mapper inherits from another mapper using single-table
+ inheritance. When using Declarative, this argument is
+ automatically passed by the extension, based on what
+ is configured via the ``__table__`` argument or via the
+ :class:`.Table` produced as a result of the ``__tablename__``
+ and :class:`.Column` arguments present.
+
+ :param always_refresh: If True, all query operations for this mapped
+ class will overwrite all data within object instances that already
+ exist within the session, erasing any in-memory changes with
+ whatever information was loaded from the database. Usage of this
+ flag is highly discouraged; as an alternative, see the method
+ :meth:`.Query.populate_existing`.
+
+ :param allow_partial_pks: Defaults to True. Indicates that a
+ composite primary key with some NULL values should be considered as
+ possibly existing within the database. This affects whether a
+ mapper will assign an incoming row to an existing identity, as well
+ as if :meth:`.Session.merge` will check the database first for a
+ particular primary key value. A "partial primary key" can occur if
+ one has mapped to an OUTER JOIN, for example.
+
+ :param batch: Defaults to ``True``, indicating that save operations
+ of multiple entities can be batched together for efficiency.
+ Setting to False indicates
+ that an instance will be fully saved before saving the next
+ instance. This is used in the extremely rare case that a
+ :class:`.MapperEvents` listener requires being called
+ in between individual row persistence operations.
+
+ :param column_prefix: A string which will be prepended
+ to the mapped attribute name when :class:`.Column`
+ objects are automatically assigned as attributes to the
+ mapped class. Does not affect explicitly specified
+ column-based properties.
+
+ See the section :ref:`column_prefix` for an example.
+
+ :param concrete: If True, indicates this mapper should use concrete
+ table inheritance with its parent mapper.
+
+ See the section :ref:`concrete_inheritance` for an example.
+
+ :param eager_defaults: if True, the ORM will immediately fetch the
+ value of server-generated default values after an INSERT or UPDATE,
+ rather than leaving them as expired to be fetched on next access.
+ This can be used for event schemes where the server-generated values
+ are needed immediately before the flush completes. By default,
+ this scheme will emit an individual ``SELECT`` statement per row
+ inserted or updated, which note can add significant performance
+ overhead. However, if the
+ target database supports :term:`RETURNING`, the default values will be
+ returned inline with the INSERT or UPDATE statement, which can
+ greatly enhance performance for an application that needs frequent
+ access to just-generated server defaults.
+
+ .. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
+ make use of :term:`RETURNING` for backends which support it.
+
+ :param exclude_properties: A list or set of string column names to
+ be excluded from mapping.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param extension: A :class:`.MapperExtension` instance or
+ list of :class:`.MapperExtension` instances which will be applied
+ to all operations by this :class:`.Mapper`. **Deprecated.**
+ Please see :class:`.MapperEvents`.
+
+ :param include_properties: An inclusive list or set of string column
+ names to map.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param inherits: A mapped class or the corresponding :class:`.Mapper`
+ of one indicating a superclass to which this :class:`.Mapper`
+ should *inherit* from. The mapped class here must be a subclass
+ of the other mapper's class. When using Declarative, this argument
+ is passed automatically as a result of the natural class
+ hierarchy of the declared classes.
+
+ .. seealso::
+
+ :ref:`inheritance_toplevel`
+
+ :param inherit_condition: For joined table inheritance, a SQL
+ expression which will
+ define how the two tables are joined; defaults to a natural join
+ between the two tables.
+
+ :param inherit_foreign_keys: When ``inherit_condition`` is used and the
+ columns present are missing a :class:`.ForeignKey` configuration,
+ this parameter can be used to specify which columns are "foreign".
+ In most cases can be left as ``None``.
+
+ :param legacy_is_orphan: Boolean, defaults to ``False``.
+ When ``True``, specifies that "legacy" orphan consideration
+ is to be applied to objects mapped by this mapper, which means
+ that a pending (that is, not persistent) object is auto-expunged
+ from an owning :class:`.Session` only when it is de-associated
+ from *all* parents that specify a ``delete-orphan`` cascade towards
+ this mapper. The new default behavior is that the object is auto-expunged
+ when it is de-associated from *any* of its parents that specify
+ ``delete-orphan`` cascade. This behavior is more consistent with
+ that of a persistent object, and allows behavior to be consistent
+ in more scenarios independently of whether or not an orphanable
+ object has been flushed yet or not.
+
+ See the change note and example at :ref:`legacy_is_orphan_addition`
+ for more detail on this change.
+
+ .. versionadded:: 0.8 - the consideration of a pending object as
+ an "orphan" has been modified to more closely match the
+ behavior as that of persistent objects, which is that the object
+ is expunged from the :class:`.Session` as soon as it is
+ de-associated from any of its orphan-enabled parents. Previously,
+ the pending object would be expunged only if de-associated
+ from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
+ is added to :func:`.orm.mapper` which re-establishes the
+ legacy behavior.
+
+ :param non_primary: Specify that this :class:`.Mapper` is in addition
+ to the "primary" mapper, that is, the one used for persistence.
+ The :class:`.Mapper` created here may be used for ad-hoc
+ mapping of the class to an alternate selectable, for loading
+ only.
+
+ The ``non_primary`` feature is rarely needed with modern
+ usage.
+
+ :param order_by: A single :class:`.Column` or list of :class:`.Column`
+ objects which selection operations should use as the default
+ ordering for entities. By default mappers have no pre-defined
+ ordering.
+
+ :param passive_updates: Indicates UPDATE behavior of foreign key
+ columns when a primary key column changes on a joined-table
+ inheritance mapping. Defaults to ``True``.
+
+ When True, it is assumed that ON UPDATE CASCADE is configured on
+ the foreign key in the database, and that the database will handle
+ propagation of an UPDATE from a source column to dependent columns
+ on joined-table rows.
+
+ When False, it is assumed that the database does not enforce
+ referential integrity and will not be issuing its own CASCADE
+ operation for an update. The :class:`.Mapper` here will
+ emit an UPDATE statement for the dependent columns during a
+ primary key change.
+
+ .. seealso::
+
+ :ref:`passive_updates` - description of a similar feature as
+ used with :func:`.relationship`
+
+ :param polymorphic_on: Specifies the column, attribute, or
+ SQL expression used to determine the target class for an
+ incoming row, when inheriting classes are present.
+
+ This value is commonly a :class:`.Column` object that's
+ present in the mapped :class:`.Table`::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+
+ __mapper_args__ = {
+ "polymorphic_on":discriminator,
+ "polymorphic_identity":"employee"
+ }
+
+ It may also be specified
+ as a SQL expression, as in this example where we
+ use the :func:`.case` construct to provide a conditional
+ approach::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+
+ __mapper_args__ = {
+ "polymorphic_on":case([
+ (discriminator == "EN", "engineer"),
+ (discriminator == "MA", "manager"),
+ ], else_="employee"),
+ "polymorphic_identity":"employee"
+ }
+
+ It may also refer to any attribute
+ configured with :func:`.column_property`, or to the
+ string name of one::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+ employee_type = column_property(
+ case([
+ (discriminator == "EN", "engineer"),
+ (discriminator == "MA", "manager"),
+ ], else_="employee")
+ )
- Mappers are normally constructed via the
- :func:`~sqlalchemy.orm.mapper` function. See for details.
+ __mapper_args__ = {
+ "polymorphic_on":employee_type,
+ "polymorphic_identity":"employee"
+ }
+
+ .. versionchanged:: 0.7.4
+ ``polymorphic_on`` may be specified as a SQL expression,
+ or refer to any attribute configured with
+ :func:`.column_property`, or to the string name of one.
+
+ When setting ``polymorphic_on`` to reference an
+ attribute or expression that's not present in the
+ locally mapped :class:`.Table`, yet the value
+ of the discriminator should be persisted to the database,
+ the value of the
+ discriminator is not automatically set on new
+ instances; this must be handled by the user,
+ either through manual means or via event listeners.
+ A typical approach to establishing such a listener
+ looks like::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import object_mapper
+
+ @event.listens_for(Employee, "init", propagate=True)
+ def set_identity(instance, *arg, **kw):
+ mapper = object_mapper(instance)
+ instance.discriminator = mapper.polymorphic_identity
+
+ Where above, we assign the value of ``polymorphic_identity``
+ for the mapped class to the ``discriminator`` attribute,
+ thus persisting the value to the ``discriminator`` column
+ in the database.
+
+ .. seealso::
+
+ :ref:`inheritance_toplevel`
+
+ :param polymorphic_identity: Specifies the value which
+ identifies this particular class as returned by the
+ column expression referred to by the ``polymorphic_on``
+ setting. As rows are received, the value corresponding
+ to the ``polymorphic_on`` column expression is compared
+ to this value, indicating which subclass should
+ be used for the newly reconstructed object.
+
+ :param properties: A dictionary mapping the string names of object
+ attributes to :class:`.MapperProperty` instances, which define the
+ persistence behavior of that attribute. Note that :class:`.Column`
+ objects present in
+ the mapped :class:`.Table` are automatically placed into
+ ``ColumnProperty`` instances upon mapping, unless overridden.
+ When using Declarative, this argument is passed automatically,
+ based on all those :class:`.MapperProperty` instances declared
+ in the declared class body.
+
+ :param primary_key: A list of :class:`.Column` objects which define the
+ primary key to be used against this mapper's selectable unit.
+ This is normally simply the primary key of the ``local_table``, but
+ can be overridden here.
+
+ :param version_id_col: A :class:`.Column`
+ that will be used to keep a running version id of rows
+ in the table. This is used to detect concurrent updates or
+ the presence of stale data in a flush. The methodology is that if
+ an UPDATE statement does not match the last known
+ version id, a
+ :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
+ raised.
+ By default, the column must be of :class:`.Integer` type,
+ unless ``version_id_generator`` specifies an alternative version
+ generator.
+
+ .. seealso::
+
+ :ref:`mapper_version_counter` - discussion of version counting
+ and rationale.
+
+ :param version_id_generator: Define how new version ids should
+ be generated. Defaults to ``None``, which indicates that
+ a simple integer counting scheme be employed. To provide a custom
+ versioning scheme, provide a callable function of the form::
+
+ def generate_version(version):
+ return next_version
+
+ Alternatively, server-side versioning functions such as triggers,
+ or programmatic versioning schemes outside of the version id generator
+ may be used, by specifying the value ``False``.
+ Please see :ref:`server_side_version_counter` for a discussion
+ of important points when using this option.
+
+ .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
+ version number generation.
+
+ .. seealso::
+
+ :ref:`custom_version_counter`
+
+ :ref:`server_side_version_counter`
+
+
+ :param with_polymorphic: A tuple in the form ``(<classes>,
+ <selectable>)`` indicating the default style of "polymorphic"
+ loading, that is, which tables are queried at once. <classes> is
+ any single or list of mappers and/or classes indicating the
+ inherited classes that should be loaded at once. The special value
+ ``'*'`` may be used to indicate all descending classes should be
+ loaded immediately. The second tuple argument <selectable>
+ indicates a selectable that will be used to query for multiple
+ classes.
+
+ .. seealso::
+
+ :ref:`with_polymorphic` - discussion of polymorphic querying techniques.
"""
@@ -138,9 +505,19 @@ class Mapper(_InspectionAttr):
self.order_by = order_by
self.always_refresh = always_refresh
- self.version_id_col = version_id_col
- self.version_id_generator = version_id_generator or \
- (lambda x: (x or 0) + 1)
+
+ if isinstance(version_id_col, MapperProperty):
+ self.version_id_prop = version_id_col
+ self.version_id_col = None
+ else:
+ self.version_id_col = version_id_col
+ if version_id_generator is False:
+ self.version_id_generator = False
+ elif version_id_generator is None:
+ self.version_id_generator = lambda x: (x or 0) + 1
+ else:
+ self.version_id_generator = version_id_generator
+
self.concrete = concrete
self.single = False
self.inherits = inherits
@@ -218,7 +595,7 @@ class Mapper(_InspectionAttr):
# configure_mappers() until construction succeeds)
_CONFIGURE_MUTEX.acquire()
try:
- events._MapperEventsHold.populate(class_, self)
+ self.dispatch._events._new_mapper_instance(class_, self)
self._configure_inheritance()
self._configure_legacy_instrument_class()
self._configure_class_instrumentation()
@@ -226,8 +603,7 @@ class Mapper(_InspectionAttr):
self._configure_properties()
self._configure_polymorphic_setter()
self._configure_pks()
- global _new_mappers
- _new_mappers = True
+ Mapper._new_mappers = True
self._log("constructed")
self._expire_memoizations()
finally:
@@ -252,7 +628,7 @@ class Mapper(_InspectionAttr):
def entity(self):
"""Part of the inspection API.
- Returns self.class_.
+ Returns self.class\_.
"""
return self.class_
@@ -272,7 +648,9 @@ class Mapper(_InspectionAttr):
this :class:`.Mapper` represents. If this mapper is a
single-table inheriting mapper, local_table will be ``None``.
- See also :attr:`~.Mapper.mapped_table`.
+ .. seealso::
+
+ :attr:`~.Mapper.mapped_table`.
"""
@@ -290,7 +668,9 @@ class Mapper(_InspectionAttr):
subclass. For single-table inheritance mappers, mapped_table
references the base table.
- See also :attr:`~.Mapper.local_table`.
+ .. seealso::
+
+ :attr:`~.Mapper.local_table`.
"""
@@ -309,7 +689,9 @@ class Mapper(_InspectionAttr):
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
- See also :func:`.configure_mappers`.
+ .. seealso::
+
+ :func:`.configure_mappers`.
"""
@@ -478,8 +860,6 @@ class Mapper(_InspectionAttr):
c = None
"""A synonym for :attr:`~.Mapper.columns`."""
- dispatch = event.dispatcher(events.MapperEvents)
-
@util.memoized_property
def _path_registry(self):
return PathRegistry.per_mapper(self)
@@ -489,7 +869,7 @@ class Mapper(_InspectionAttr):
being present."""
# a set of all mappers which inherit from this one.
- self._inheriting_mappers = util.WeakSet()
+ self._inheriting_mappers = util.WeakSequence()
if self.inherits:
if isinstance(self.inherits, type):
@@ -563,7 +943,7 @@ class Mapper(_InspectionAttr):
self.polymorphic_map = self.inherits.polymorphic_map
self.batch = self.inherits.batch
- self.inherits._inheriting_mappers.add(self)
+ self.inherits._inheriting_mappers.append(self)
self.base_mapper = self.inherits.base_mapper
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
@@ -630,7 +1010,7 @@ class Mapper(_InspectionAttr):
self.batch = self.inherits.batch
for mp in self.self_and_descendants:
mp.base_mapper = self.inherits.base_mapper
- self.inherits._inheriting_mappers.add(self)
+ self.inherits._inheriting_mappers.append(self)
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
for key, prop in mapper._props.items():
@@ -735,30 +1115,20 @@ class Mapper(_InspectionAttr):
self._reconstructor = method
event.listen(manager, 'load', _event_on_load, raw=True)
elif hasattr(method, '__sa_validators__'):
- include_removes = getattr(method,
- "__sa_include_removes__", False)
+ validation_opts = method.__sa_validation_opts__
for name in method.__sa_validators__:
self.validators = self.validators.union(
- {name: (method, include_removes)}
+ {name: (method, validation_opts)}
)
manager.info[_INSTRUMENTOR] = self
- @util.deprecated("0.7", message=":meth:`.Mapper.compile` "
- "is replaced by :func:`.configure_mappers`")
- def compile(self):
- """Initialize the inter-mapper relationships of all mappers that
- have been constructed thus far.
+ @classmethod
+ def _configure_all(cls):
+ """Class-level path to the :func:`.configure_mappers` call.
"""
configure_mappers()
- return self
-
- @property
- @util.deprecated("0.7", message=":attr:`.Mapper.compiled` "
- "is replaced by :attr:`.Mapper.configured`")
- def compiled(self):
- return self.configured
def dispose(self):
# Disable any attribute-based compilation.
@@ -956,7 +1326,7 @@ class Mapper(_InspectionAttr):
prop = self.polymorphic_on
self.polymorphic_on = prop.columns[0]
polymorphic_key = prop.key
- elif not expression.is_column(self.polymorphic_on):
+ elif not expression._is_column(self.polymorphic_on):
# polymorphic_on is not a Column and not a ColumnProperty;
# not supported right now.
raise sa_exc.ArgumentError(
@@ -1080,6 +1450,13 @@ class Mapper(_InspectionAttr):
_validate_polymorphic_identity = None
@_memoized_configured_property
+ def _version_id_prop(self):
+ if self.version_id_col is not None:
+ return self._columntoproperty[self.version_id_col]
+ else:
+ return None
+
+ @_memoized_configured_property
def _acceptable_polymorphic_identities(self):
identities = set()
@@ -1205,7 +1582,7 @@ class Mapper(_InspectionAttr):
# generate a properties.ColumnProperty
columns = util.to_list(prop)
column = columns[0]
- if not expression.is_column(column):
+ if not expression._is_column(column):
raise sa_exc.ArgumentError(
"%s=%r is not an instance of MapperProperty or Column"
% (key, prop))
@@ -1369,7 +1746,7 @@ class Mapper(_InspectionAttr):
"""return a MapperProperty associated with the given key.
"""
- if _configure_mappers and _new_mappers:
+ if _configure_mappers and Mapper._new_mappers:
configure_mappers()
try:
@@ -1387,7 +1764,7 @@ class Mapper(_InspectionAttr):
@property
def iterate_properties(self):
"""return an iterator of all MapperProperty objects."""
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return iter(self._props.values())
@@ -1461,7 +1838,7 @@ class Mapper(_InspectionAttr):
@_memoized_configured_property
def _with_polymorphic_mappers(self):
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
if not self.with_polymorphic:
return []
@@ -1493,7 +1870,7 @@ class Mapper(_InspectionAttr):
Normally, this is equivalent to :attr:`.mapped_table`, unless
the ``with_polymorphic`` feature is in use, in which case the
- full "polymoprhic" selectable is returned.
+ full "polymorphic" selectable is returned.
"""
return self._with_polymorphic_selectable
@@ -1568,7 +1945,7 @@ class Mapper(_InspectionAttr):
:attr:`.Mapper.all_orm_descriptors`
"""
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(self._props)
@@ -1611,23 +1988,23 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.SynonymProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
- return self._filter_properties(descriptor_props.SynonymProperty)
+ return self._filter_properties(properties.SynonymProperty)
@_memoized_configured_property
def column_attrs(self):
"""Return a namespace of all :class:`.ColumnProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
return self._filter_properties(properties.ColumnProperty)
@@ -1637,10 +2014,10 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.RelationshipProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
return self._filter_properties(properties.RelationshipProperty)
@@ -1650,16 +2027,16 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.CompositeProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
- return self._filter_properties(descriptor_props.CompositeProperty)
+ return self._filter_properties(properties.CompositeProperty)
def _filter_properties(self, type_):
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(util.OrderedDict(
(k, v) for k, v in self._props.items()
@@ -1805,7 +2182,7 @@ class Mapper(_InspectionAttr):
while stack:
item = stack.popleft()
descendants.append(item)
- stack.extend(sorted(item._inheriting_mappers, key=lambda m: m.class_.__name__))
+ stack.extend(item._inheriting_mappers)
return util.WeakSequence(descendants)
def polymorphic_iterator(self):
@@ -1835,10 +2212,11 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
- row
- A ``sqlalchemy.engine.RowProxy`` instance or a
- dictionary corresponding result-set ``ColumnElement``
- instances to their values within a row.
+ :param row: A :class:`.RowProxy` instance. The columns which are mapped
+ by this :class:`.Mapper` should be locatable in the row, preferably
+ via the :class:`.Column` object directly (as is the case when a
+ :func:`.select` construct is executed), or via string names of the form
+ ``<tablename>_<colname>``.
"""
pk_cols = self.primary_key
@@ -1852,8 +2230,7 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from an identity map.
- primary_key
- A list of values indicating the identifier.
+ :param primary_key: A list of values indicating the identifier.
"""
return self._identity_class, tuple(primary_key)
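
As the method above shows, an identity-map key is simply ``(class, tuple_of_primary_key_values)``. A small illustrative sketch, assuming a hypothetical mapped ``User`` class (not from the patch):

    from sqlalchemy import Column, Integer, inspect
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)

    mapper = inspect(User)
    assert mapper.identity_key_from_primary_key([5]) == (User, (5,))
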
@@ -1862,6 +2239,11 @@ class Mapper(_InspectionAttr):
"""Return the identity key for the given instance, based on
its primary key attributes.
+ If the instance's state is expired, calling this method
+ will result in a database check to see if the object has been deleted.
+ If the row no longer exists,
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
This value is typically also found on the instance state under the
attribute name `key`.
@@ -1882,6 +2264,11 @@ class Mapper(_InspectionAttr):
"""Return the list of primary key values for the given
instance.
+ If the instance's state is expired, calling this method
+ will result in a database check to see if the object has been deleted.
+ If the row no longer exists,
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
"""
state = attributes.instance_state(instance)
return self._primary_key_from_state(state)
@@ -2070,9 +2457,9 @@ class Mapper(_InspectionAttr):
dep is not None and \
dep is not parent and \
dep.inherit_condition is not None:
- cols = set(sql_util.find_columns(dep.inherit_condition))
+ cols = set(sql_util._find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
- cols = cols.union(sql_util.find_columns(
+ cols = cols.union(sql_util._find_columns(
parent.inherit_condition))
return fk.parent not in cols and fk.column not in cols
else:
@@ -2107,14 +2494,13 @@ class Mapper(_InspectionAttr):
for m in self.iterate_to_root():
if m._inherits_equated_pairs and \
cols.intersection(
- [l for l, r in m._inherits_equated_pairs]):
+ util.reduce(set.union,
+ [l.proxy_set for l, r in m._inherits_equated_pairs])
+ ):
result[table].append((m, m._inherits_equated_pairs))
return result
-inspection._self_inspects(Mapper)
-log.class_logger(Mapper)
-
def configure_mappers():
"""Initialize the inter-mapper relationships of all mappers that
@@ -2125,8 +2511,7 @@ def configure_mappers():
"""
- global _new_mappers
- if not _new_mappers:
+ if not Mapper._new_mappers:
return
_call_configured = None
@@ -2139,7 +2524,7 @@ def configure_mappers():
try:
# double-check inside mutex
- if not _new_mappers:
+ if not Mapper._new_mappers:
return
# initialize properties on all mappers
@@ -2168,7 +2553,7 @@ def configure_mappers():
mapper._configure_failed = exc
raise
- _new_mappers = False
+ Mapper._new_mappers = False
finally:
_already_compiling = False
finally:
@@ -2220,13 +2605,28 @@ def validates(*names, **kw):
argument "is_remove" which will be a boolean.
.. versionadded:: 0.7.7
+ :param include_backrefs: defaults to ``True``; if ``False``, the
+ validation function will not emit if the originator is an attribute
+ event related via a backref. This can be used for bi-directional
+ :func:`.validates` usage where only one validator should emit per
+ attribute operation.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`simple_validators` - usage examples for :func:`.validates`
"""
include_removes = kw.pop('include_removes', False)
+ include_backrefs = kw.pop('include_backrefs', True)
def wrap(fn):
fn.__sa_validators__ = names
- fn.__sa_include_removes__ = include_removes
+ fn.__sa_validation_opts__ = {
+ "include_removes": include_removes,
+ "include_backrefs": include_backrefs
+ }
return fn
return wrap
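
A hedged sketch combining ``include_removes`` with the new ``include_backrefs`` flag documented above; the ``User`` and ``Address`` classes are illustrative only. With ``include_backrefs=False`` the validator fires for direct operations on ``User.addresses`` but not when the collection is populated via the ``Address.user`` backref:

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.orm import relationship, validates
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        addresses = relationship("Address", backref="user")

        @validates('addresses', include_removes=True, include_backrefs=False)
        def validate_address(self, key, address, is_remove):
            # fires for User.addresses.append()/remove(), not for the backref
            if not is_remove and address.email is None:
                raise ValueError("address requires an email")
            return address

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey('user.id'))
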
@@ -2247,7 +2647,7 @@ def _event_on_first_init(manager, cls):
instrumenting_mapper = manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
@@ -2262,7 +2662,7 @@ def _event_on_init(state, args, kwargs):
instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
if instrumenting_mapper._set_polymorphic_identity:
instrumenting_mapper._set_polymorphic_identity(state)
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
new file mode 100644
index 000000000..3397626b8
--- /dev/null
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -0,0 +1,261 @@
+# orm/path_registry.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Path tracking utilities, representing mapper graph traversals.
+
+"""
+
+from .. import inspection
+from .. import util
+from .. import exc
+from itertools import chain
+from .base import class_mapper
+
+def _unreduce_path(path):
+ return PathRegistry.deserialize(path)
+
+
+_WILDCARD_TOKEN = "*"
+_DEFAULT_TOKEN = "_sa_default"
+
+class PathRegistry(object):
+ """Represent query load paths and registry functions.
+
+ Basically represents structures like:
+
+ (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
+
+ These structures are generated by things like
+ query options (joinedload(), subqueryload(), etc.) and are
+ used to compose keys stored in the query._attributes dictionary
+ for various options.
+
+ They are then re-composed at query compile/result row time as
+ the query is formed and as rows are fetched, where they again
+ serve to compose keys to look up options in the context.attributes
+ dictionary, which is copied from query._attributes.
+
+ The path structure has a limited amount of caching, where each
+ "root" ultimately pulls from a fixed registry associated with
+ the first mapper, that also contains elements for each of its
+ property keys. However paths longer than two elements, which
+ are the exception rather than the rule, are generated on an
+ as-needed basis.
+
+ """
+
+ def __eq__(self, other):
+ return other is not None and \
+ self.path == other.path
+
+ def set(self, attributes, key, value):
+ attributes[(key, self.path)] = value
+
+ def setdefault(self, attributes, key, value):
+ attributes.setdefault((key, self.path), value)
+
+ def get(self, attributes, key, value=None):
+ key = (key, self.path)
+ if key in attributes:
+ return attributes[key]
+ else:
+ return value
+
+ def __len__(self):
+ return len(self.path)
+
+ @property
+ def length(self):
+ return len(self.path)
+
+ def pairs(self):
+ path = self.path
+ for i in range(0, len(path), 2):
+ yield path[i], path[i + 1]
+
+ def contains_mapper(self, mapper):
+ for path_mapper in [
+ self.path[i] for i in range(0, len(self.path), 2)
+ ]:
+ if path_mapper.is_mapper and \
+ path_mapper.isa(mapper):
+ return True
+ else:
+ return False
+
+ def contains(self, attributes, key):
+ return (key, self.path) in attributes
+
+ def __reduce__(self):
+ return _unreduce_path, (self.serialize(), )
+
+ def serialize(self):
+ path = self.path
+ return list(zip(
+ [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
+ [path[i].key for i in range(1, len(path), 2)] + [None]
+ ))
+
+ @classmethod
+ def deserialize(cls, path):
+ if path is None:
+ return None
+
+ p = tuple(chain(*[(class_mapper(mcls),
+ class_mapper(mcls).attrs[key]
+ if key is not None else None)
+ for mcls, key in path]))
+ if p and p[-1] is None:
+ p = p[0:-1]
+ return cls.coerce(p)
+
+ @classmethod
+ def per_mapper(cls, mapper):
+ return EntityRegistry(
+ cls.root, mapper
+ )
+
+ @classmethod
+ def coerce(cls, raw):
+ return util.reduce(lambda prev, next: prev[next], raw, cls.root)
+
+ def token(self, token):
+ if token.endswith(':' + _WILDCARD_TOKEN):
+ return TokenRegistry(self, token)
+ elif token.endswith(":" + _DEFAULT_TOKEN):
+ return TokenRegistry(self.root, token)
+ else:
+ raise exc.ArgumentError("invalid token: %s" % token)
+
+ def __add__(self, other):
+ return util.reduce(
+ lambda prev, next: prev[next],
+ other.path, self)
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.path, )
+
+
+class RootRegistry(PathRegistry):
+ """Root registry, defers to mappers so that
+ paths are maintained per-root-mapper.
+
+ """
+ path = ()
+ has_entity = False
+ def __getitem__(self, entity):
+ return entity._path_registry
+
+PathRegistry.root = RootRegistry()
+
+class TokenRegistry(PathRegistry):
+ def __init__(self, parent, token):
+ self.token = token
+ self.parent = parent
+ self.path = parent.path + (token,)
+
+ has_entity = False
+
+ def __getitem__(self, entity):
+ raise NotImplementedError()
+
+class PropRegistry(PathRegistry):
+ def __init__(self, parent, prop):
+ # restate this path in terms of the
+ # given MapperProperty's parent.
+ insp = inspection.inspect(parent[-1])
+ if not insp.is_aliased_class or insp._use_mapper_path:
+ parent = parent.parent[prop.parent]
+ elif insp.is_aliased_class and insp.with_polymorphic_mappers:
+ if prop.parent is not insp.mapper and \
+ prop.parent in insp.with_polymorphic_mappers:
+ subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
+ parent = parent.parent[subclass_entity]
+
+ self.prop = prop
+ self.parent = parent
+ self.path = parent.path + (prop,)
+
+ @util.memoized_property
+ def has_entity(self):
+ return hasattr(self.prop, "mapper")
+
+ @util.memoized_property
+ def entity(self):
+ return self.prop.mapper
+
+ @util.memoized_property
+ def _wildcard_path_loader_key(self):
+ """Given a path (mapper A, prop X), replace the prop with the wildcard,
+ e.g. (mapper A, 'relationship:.*') or (mapper A, 'column:.*'), then
+ return it within the ("loader", path) structure.
+
+ """
+ return ("loader",
+ self.parent.token(
+ "%s:%s" % (self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
+ ).path
+ )
+
+ @util.memoized_property
+ def _default_path_loader_key(self):
+ return ("loader",
+ self.parent.token(
+ "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN)
+ ).path
+ )
+
+ @util.memoized_property
+ def _loader_key(self):
+ return ("loader", self.path)
+
+ @property
+ def mapper(self):
+ return self.entity
+
+ @property
+ def entity_path(self):
+ return self[self.entity]
+
+ def __getitem__(self, entity):
+ if isinstance(entity, (int, slice)):
+ return self.path[entity]
+ else:
+ return EntityRegistry(
+ self, entity
+ )
+
+class EntityRegistry(PathRegistry, dict):
+ is_aliased_class = False
+ has_entity = True
+
+ def __init__(self, parent, entity):
+ self.key = entity
+ self.parent = parent
+ self.is_aliased_class = entity.is_aliased_class
+ self.entity = entity
+ self.path = parent.path + (entity,)
+ self.entity_path = self
+
+ @property
+ def mapper(self):
+ return inspection.inspect(self.entity).mapper
+
+ def __bool__(self):
+ return True
+ __nonzero__ = __bool__
+
+ def __getitem__(self, entity):
+ if isinstance(entity, (int, slice)):
+ return self.path[entity]
+ else:
+ return dict.__getitem__(self, entity)
+
+ def __missing__(self, key):
+ self[key] = item = PropRegistry(self, key)
+ return item
+
+
+
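
A brief sketch of how the path structures described in the PathRegistry docstring key into an attributes dictionary. This exercises the internal API added above, which applications do not normally call directly; the mapped classes are illustrative assumptions, not part of the patch:

    from sqlalchemy import Column, ForeignKey, Integer, inspect
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm.path_registry import PathRegistry

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        orders = relationship("Order")

    class Order(Base):
        __tablename__ = 'order'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))

    user_mapper = inspect(User)
    # build the path (User mapper, 'orders' property)
    path = PathRegistry.per_mapper(user_mapper)[user_mapper.attrs['orders']]

    attributes = {}
    path.set(attributes, "loader", "joined")   # stored under ("loader", path.path)
    assert path.get(attributes, "loader") == "joined"
    assert path.contains(attributes, "loader")
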
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 1f5507edf..b0fa620e3 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -1,5 +1,5 @@
# orm/persistence.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -17,7 +17,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
-from .util import _state_mapper, state_str, _attr_as_key
+from .base import _state_mapper, state_str, _attr_as_key
from ..sql import expression
from . import loading
@@ -61,7 +61,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
if insert:
_emit_insert_statements(base_mapper, uowtransaction,
cached_connections,
- table, insert)
+ mapper, table, insert)
_finalize_insert_update_commands(base_mapper, uowtransaction,
states_to_insert, states_to_update)
@@ -246,9 +246,12 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
value_params = {}
has_all_pks = True
+ has_all_defaults = True
for col in mapper._cols_by_table[table]:
- if col is mapper.version_id_col:
- params[col.key] = mapper.version_id_generator(None)
+ if col is mapper.version_id_col and \
+ mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(None)
+ params[col.key] = val
else:
# pull straight from the dict for
# pending objects
@@ -261,6 +264,9 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
elif col.default is None and \
col.server_default is None:
params[col.key] = value
+ elif col.server_default is not None and \
+ mapper.base_mapper.eager_defaults:
+ has_all_defaults = False
elif isinstance(value, sql.ClauseElement):
value_params[col] = value
@@ -268,7 +274,8 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
params[col.key] = value
insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks))
+ connection, value_params, has_all_pks,
+ has_all_defaults))
return insert
@@ -315,19 +322,20 @@ def _collect_update_commands(base_mapper, uowtransaction,
params[col.key] = history.added[0]
hasdata = True
else:
- params[col.key] = mapper.version_id_generator(
- params[col._label])
-
- # HACK: check for history, in case the
- # history is only
- # in a different table than the one
- # where the version_id_col is.
- for prop in mapper._columntoproperty.values():
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- hasdata = True
+ if mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(params[col._label])
+ params[col.key] = val
+
+ # HACK: check for history, in case the
+ # history is only
+ # in a different table than the one
+ # where the version_id_col is.
+ for prop in mapper._columntoproperty.values():
+ history = attributes.get_state_history(
+ state, prop.key,
+ attributes.PASSIVE_NO_INITIALIZE)
+ if history.added:
+ hasdata = True
else:
prop = mapper._columntoproperty[col]
history = attributes.get_state_history(
@@ -409,6 +417,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
mapper._get_state_attr_by_column(
state,
state_dict, col)
+
elif col in post_update_cols:
prop = mapper._columntoproperty[col]
history = attributes.get_state_history(
@@ -478,7 +487,13 @@ def _emit_update_statements(base_mapper, uowtransaction,
sql.bindparam(mapper.version_id_col._label,
type_=mapper.version_id_col.type))
- return table.update(clause)
+ stmt = table.update(clause)
+ if mapper.base_mapper.eager_defaults:
+ stmt = stmt.return_defaults()
+ elif mapper.version_id_col is not None:
+ stmt = stmt.return_defaults(mapper.version_id_col)
+
+ return stmt
statement = base_mapper._memo(('update', table), update_stmt)
@@ -500,8 +515,7 @@ def _emit_update_statements(base_mapper, uowtransaction,
table,
state,
state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
+ c,
c.context.compiled_parameters[0],
value_params)
rows += c.rowcount
@@ -521,44 +535,55 @@ def _emit_update_statements(base_mapper, uowtransaction,
def _emit_insert_statements(base_mapper, uowtransaction,
- cached_connections, table, insert):
+ cached_connections, mapper, table, insert):
"""Emit INSERT statements corresponding to value lists collected
by _collect_insert_commands()."""
statement = base_mapper._memo(('insert', table), table.insert)
- for (connection, pkeys, hasvalue, has_all_pks), \
+ for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
records in groupby(insert,
lambda rec: (rec[4],
list(rec[2].keys()),
bool(rec[5]),
- rec[6])
+ rec[6], rec[7])
):
- if has_all_pks and not hasvalue:
+ if \
+ (
+ has_all_defaults
+ or not base_mapper.eager_defaults
+ or not connection.dialect.implicit_returning
+ ) and has_all_pks and not hasvalue:
+
records = list(records)
multiparams = [rec[2] for rec in records]
+
c = cached_connections[connection].\
execute(statement, multiparams)
- for (state, state_dict, params, mapper,
- conn, value_params, has_all_pks), \
+ for (state, state_dict, params, mapper_rec,
+ conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
_postfetch(
- mapper,
+ mapper_rec,
uowtransaction,
table,
state,
state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
+ c,
last_inserted_params,
value_params)
else:
- for state, state_dict, params, mapper, \
+ if not has_all_defaults and base_mapper.eager_defaults:
+ statement = statement.return_defaults()
+ elif mapper.version_id_col is not None:
+ statement = statement.return_defaults(mapper.version_id_col)
+
+ for state, state_dict, params, mapper_rec, \
connection, value_params, \
- has_all_pks in records:
+ has_all_pks, has_all_defaults in records:
if value_params:
result = connection.execute(
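
The grouping in the hunk above relies on itertools.groupby(), which only merges adjacent records whose grouping key matches; each group of compatible records can then be sent as a single executemany-style call. A conceptual sketch with illustrative data, not the patched internals:

    from itertools import groupby

    records = [
        ("conn", ["id", "x"], {"id": 1, "x": 10}),
        ("conn", ["id", "x"], {"id": 2, "x": 20}),   # same keys -> same batch
        ("conn", ["id", "y"], {"id": 3, "y": 5}),    # different keys -> new batch
    ]

    for (conn, keys), group in groupby(records, lambda rec: (rec[0], rec[1])):
        multiparams = [params for _, _, params in group]
        print(conn, keys, multiparams)
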
@@ -574,23 +599,22 @@ def _emit_insert_statements(base_mapper, uowtransaction,
# set primary key attributes
for pk, col in zip(primary_key,
mapper._pks_by_table[table]):
- prop = mapper._columntoproperty[col]
+ prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
# TODO: would rather say:
#state_dict[prop.key] = pk
- mapper._set_state_attr_by_column(
+ mapper_rec._set_state_attr_by_column(
state,
state_dict,
col, pk)
_postfetch(
- mapper,
+ mapper_rec,
uowtransaction,
table,
state,
state_dict,
- result.context.prefetch_cols,
- result.context.postfetch_cols,
+ result,
result.context.compiled_parameters[0],
value_params)
@@ -699,14 +723,25 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
if readonly:
state._expire_attributes(state.dict, readonly)
- # if eager_defaults option is enabled,
- # refresh whatever has been expired.
- if base_mapper.eager_defaults and state.unloaded:
+ # if eager_defaults option is enabled, load
+ # all expired cols. Else if we have a version_id_col, make sure
+ # it isn't expired.
+ toload_now = []
+
+ if base_mapper.eager_defaults:
+ toload_now.extend(state._unloaded_non_object)
+ elif mapper.version_id_col is not None and \
+ mapper.version_id_generator is False:
+ prop = mapper._columntoproperty[mapper.version_id_col]
+ if prop.key in state.unloaded:
+ toload_now.extend([prop.key])
+
+ if toload_now:
state.key = base_mapper._identity_key_from_state(state)
loading.load_on_ident(
uowtransaction.session.query(base_mapper),
state.key, refresh_state=state,
- only_load_props=state.unloaded)
+ only_load_props=toload_now)
# call after_XXX extensions
if not has_identity:
@@ -716,15 +751,26 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
def _postfetch(mapper, uowtransaction, table,
- state, dict_, prefetch_cols, postfetch_cols,
- params, value_params):
+ state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
+ prefetch_cols = result.context.prefetch_cols
+ postfetch_cols = result.context.postfetch_cols
+ returning_cols = result.context.returning_cols
+
if mapper.version_id_col is not None:
prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]
+ if returning_cols:
+ row = result.context.returned_defaults
+ if row is not None:
+ for col in returning_cols:
+ if col.primary_key:
+ continue
+ mapper._set_state_attr_by_column(state, dict_, col, row[col])
+
for c in prefetch_cols:
if c.key in params and c in mapper._columntoproperty:
mapper._set_state_attr_by_column(state, dict_, c, params[c.key])
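
The persistence changes above route server-generated values back into object state via ``result.context.returned_defaults`` when RETURNING is used, falling back to a post-flush SELECT otherwise. A minimal user-level sketch of the ``eager_defaults`` behavior, assuming SQLite and illustrative names (not from the patch):

    from sqlalchemy import Column, DateTime, Integer, create_engine, func
    from sqlalchemy.orm import Session
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Widget(Base):
        __tablename__ = 'widget'
        __mapper_args__ = {'eager_defaults': True}

        id = Column(Integer, primary_key=True)
        created_at = Column(DateTime, server_default=func.now())

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    session = Session(engine)
    w = Widget()
    session.add(w)
    session.flush()
    # with eager_defaults, created_at is loaded as part of the flush
    # (via RETURNING where supported, else an extra SELECT per row)
    print(w.created_at)
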
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 5986556db..a0def7d31 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -1,5 +1,5 @@
# orm/properties.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,30 +10,20 @@ This is a private module which defines the behavior of individual ORM-
mapped attributes.
"""
+from __future__ import absolute_import
-from .. import sql, util, log, exc as sa_exc, inspect
-from ..sql import operators, expression
-from . import (
- attributes, mapper,
- strategies, configure_mappers, relationships,
- dependency
- )
-from .util import CascadeOptions, \
- _orm_annotate, _orm_deannotate, _orm_full_deannotate
+from .. import util, log
+from ..sql import expression
+from . import attributes
+from .util import _orm_full_deannotate
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY,\
- PropComparator, StrategizedProperty
-
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-NoneType = type(None)
-
-from .descriptor_props import CompositeProperty, SynonymProperty, \
- ComparableProperty, ConcreteInheritedProperty
+from .interfaces import PropComparator, StrategizedProperty
__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
- 'ComparableProperty', 'RelationshipProperty', 'RelationProperty']
+ 'ComparableProperty', 'RelationshipProperty']
+@log.class_logger
class ColumnProperty(StrategizedProperty):
"""Describes an object attribute that corresponds to a table column.
@@ -41,31 +31,81 @@ class ColumnProperty(StrategizedProperty):
"""
+ strategy_wildcard_key = 'column'
+
def __init__(self, *columns, **kwargs):
- """Construct a ColumnProperty.
+ """Provide a column-level property for use with a Mapper.
- Note the public constructor is the :func:`.orm.column_property`
- function.
+ Column-based properties can normally be applied to the mapper's
+ ``properties`` dictionary using the :class:`.Column` element directly.
+ Use this function when the given column is not directly present within the
+ mapper's selectable; examples include SQL expressions, functions, and
+ scalar SELECT queries.
- :param \*columns: The list of `columns` describes a single
- object property. If there are multiple tables joined
- together for the mapper, this list represents the equivalent
- column as it appears across each table.
+ Columns that aren't present in the mapper's selectable won't be persisted
+ by the mapper and are effectively "read-only" attributes.
- :param group:
+ :param \*cols:
+ list of Column objects to be mapped.
- :param deferred:
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. Normally, history tracking logic for
+ simple non-primary-key scalar values only needs to be
+ aware of the "new" value in order to perform a flush. This
+ flag is available for applications that make use of
+ :func:`.attributes.get_history` or :meth:`.Session.is_modified`
+ which also need to know
+ the "previous" value of the attribute.
- :param comparator_factory:
+ .. versionadded:: 0.6.6
- :param descriptor:
+ :param comparator_factory: a class which extends
+ :class:`.ColumnProperty.Comparator` which provides custom SQL clause
+ generation for comparison operations.
- :param expire_on_flush:
+ :param group:
+ a group name for this property when marked as deferred.
- :param extension:
+ :param deferred:
+ when True, the column property is "deferred", meaning that
+ it does not load immediately, and is instead loaded when the
+ attribute is first accessed on an instance. See also
+ :func:`~sqlalchemy.orm.deferred`.
+
+ :param doc:
+ optional string that will be applied as the doc on the
+ class-bound descriptor.
+
+ :param expire_on_flush=True:
+ Disable expiry on flush. A column_property() which refers
+ to a SQL expression (and not a single table-bound column)
+ is considered to be a "read only" property; populating it
+ has no effect on the state of data, and it can only return
+ database state. For this reason a column_property()'s value
+ is expired whenever the parent object is involved in a
+ flush, that is, has any kind of "dirty" state within a flush.
+ Setting this parameter to ``False`` will have the effect of
+ leaving any existing value present after the flush proceeds.
+ Note however that the :class:`.Session` with default expiration
+ settings still expires
+ all attributes after a :meth:`.Session.commit` call.
+
+ .. versionadded:: 0.7.3
:param info: Optional data dictionary which will be populated into the
- :attr:`.info` attribute of this object.
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param extension:
+ an
+ :class:`.AttributeExtension`
+ instance, or list of extensions, which will be prepended
+ to the list of attribute listeners for the resulting
+ descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
"""
self._orig_columns = [expression._labeled(c) for c in columns]
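
A minimal sketch of column_property() applied to a SQL expression that is not a plain table column, as described in the docstring above; the class and column names are illustrative, not from the patch:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import column_property
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'

        id = Column(Integer, primary_key=True)
        firstname = Column(String(50))
        lastname = Column(String(50))
        # read-only attribute computed in SQL; deferred so it loads on first access
        fullname = column_property(firstname + " " + lastname, deferred=True)
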
@@ -102,12 +142,11 @@ class ColumnProperty(StrategizedProperty):
', '.join(sorted(kwargs.keys()))))
util.set_creation_order(self)
- if not self.instrument:
- self.strategy_class = strategies.UninstrumentedColumnLoader
- elif self.deferred:
- self.strategy_class = strategies.DeferredColumnLoader
- else:
- self.strategy_class = strategies.ColumnLoader
+
+ self.strategy_class = self._strategy_lookup(
+ ("deferred", self.deferred),
+ ("instrument", self.instrument)
+ )
@property
def expression(self):
@@ -215,1101 +254,6 @@ class ColumnProperty(StrategizedProperty):
col = self.__clause_element__()
return op(col._bind_param(op, other), col, **kwargs)
- # TODO: legacy..do we need this ? (0.5)
- ColumnComparator = Comparator
-
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
-log.class_logger(ColumnProperty)
-
-
-class RelationshipProperty(StrategizedProperty):
- """Describes an object property that holds a single item or list
- of items that correspond to a related database table.
-
- Public constructor is the :func:`.orm.relationship` function.
-
- See also:
-
- :ref:`relationship_config_toplevel`
-
- """
-
- strategy_wildcard_key = 'relationship:*'
-
- _dependency_processor = None
-
- def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- back_populates=None,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- single_parent=False, innerjoin=False,
- doc=None,
- active_history=False,
- cascade_backrefs=True,
- load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None,
- query_class=None,
- info=None):
-
- self.uselist = uselist
- self.argument = argument
- self.secondary = secondary
- self.primaryjoin = primaryjoin
- self.secondaryjoin = secondaryjoin
- self.post_update = post_update
- self.direction = None
- self.viewonly = viewonly
- self.lazy = lazy
- self.single_parent = single_parent
- self._user_defined_foreign_keys = foreign_keys
- self.collection_class = collection_class
- self.passive_deletes = passive_deletes
- self.cascade_backrefs = cascade_backrefs
- self.passive_updates = passive_updates
- self.remote_side = remote_side
- self.enable_typechecks = enable_typechecks
- self.query_class = query_class
- self.innerjoin = innerjoin
- self.doc = doc
- self.active_history = active_history
- self.join_depth = join_depth
- self.local_remote_pairs = _local_remote_pairs
- self.extension = extension
- self.load_on_pending = load_on_pending
- self.comparator_factory = comparator_factory or \
- RelationshipProperty.Comparator
- self.comparator = self.comparator_factory(self, None)
- util.set_creation_order(self)
-
- if info is not None:
- self.info = info
-
- if strategy_class:
- self.strategy_class = strategy_class
- elif self.lazy == 'dynamic':
- from sqlalchemy.orm import dynamic
- self.strategy_class = dynamic.DynaLoader
- else:
- self.strategy_class = strategies.factory(self.lazy)
-
- self._reverse_property = set()
-
- self.cascade = cascade if cascade is not False \
- else "save-update, merge"
-
- self.order_by = order_by
-
- self.back_populates = back_populates
-
- if self.back_populates:
- if backref:
- raise sa_exc.ArgumentError(
- "backref and back_populates keyword arguments "
- "are mutually exclusive")
- self.backref = None
- else:
- self.backref = backref
-
- def instrument_class(self, mapper):
- attributes.register_descriptor(
- mapper.class_,
- self.key,
- comparator=self.comparator_factory(self, mapper),
- parententity=mapper,
- doc=self.doc,
- )
-
- class Comparator(PropComparator):
- """Produce boolean, comparison, and other operators for
- :class:`.RelationshipProperty` attributes.
-
- See the documentation for :class:`.PropComparator` for a brief overview
- of ORM level operator definition.
-
- See also:
-
- :class:`.PropComparator`
-
- :class:`.ColumnProperty.Comparator`
-
- :class:`.ColumnOperators`
-
- :ref:`types_operators`
-
- :attr:`.TypeEngine.comparator_factory`
-
- """
-
- _of_type = None
-
- def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
- """Construction of :class:`.RelationshipProperty.Comparator`
- is internal to the ORM's attribute mechanics.
-
- """
- self.prop = prop
- self._parentmapper = parentmapper
- self._adapt_to_entity = adapt_to_entity
- if of_type:
- self._of_type = of_type
-
- def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(self.property, self._parentmapper,
- adapt_to_entity=adapt_to_entity,
- of_type=self._of_type)
-
- @util.memoized_property
- def mapper(self):
- """The target :class:`.Mapper` referred to by this
- :class:`.RelationshipProperty.Comparator.
-
- This is the "target" or "remote" side of the
- :func:`.relationship`.
-
- """
- return self.property.mapper
-
- @util.memoized_property
- def _parententity(self):
- return self.property.parent
-
- def _source_selectable(self):
- elem = self.property.parent._with_polymorphic_selectable
- if self.adapter:
- return self.adapter(elem)
- else:
- return elem
-
- def __clause_element__(self):
- adapt_from = self._source_selectable()
- if self._of_type:
- of_type = inspect(self._of_type).mapper
- else:
- of_type = None
-
- pj, sj, source, dest, \
- secondary, target_adapter = self.property._create_joins(
- source_selectable=adapt_from,
- source_polymorphic=True,
- of_type=of_type)
- if sj is not None:
- return pj & sj
- else:
- return pj
-
- def of_type(self, cls):
- """Produce a construct that represents a particular 'subtype' of
- attribute for the parent class.
-
- Currently this is usable in conjunction with :meth:`.Query.join`
- and :meth:`.Query.outerjoin`.
-
- """
- return RelationshipProperty.Comparator(
- self.property,
- self._parentmapper,
- adapt_to_entity=self._adapt_to_entity,
- of_type=cls)
-
- def in_(self, other):
- """Produce an IN clause - this is not implemented
- for :func:`~.orm.relationship`-based attributes at this time.
-
- """
- raise NotImplementedError('in_() not yet supported for '
- 'relationships. For a simple many-to-one, use '
- 'in_() against the set of foreign key values.')
-
- __hash__ = None
-
- def __eq__(self, other):
- """Implement the ``==`` operator.
-
- In a many-to-one context, such as::
-
- MyClass.some_prop == <some object>
-
- this will typically produce a
- clause such as::
-
- mytable.related_id == <some id>
-
- Where ``<some id>`` is the primary key of the given
- object.
-
- The ``==`` operator provides partial functionality for non-
- many-to-one comparisons:
-
- * Comparisons against collections are not supported.
- Use :meth:`~.RelationshipProperty.Comparator.contains`.
- * Compared to a scalar one-to-many, will produce a
- clause that compares the target columns in the parent to
- the given target.
- * Compared to a scalar many-to-many, an alias
- of the association table will be rendered as
- well, forming a natural join that is part of the
- main body of the query. This will not work for
- queries that go beyond simple AND conjunctions of
- comparisons, such as those which use OR. Use
- explicit joins, outerjoins, or
- :meth:`~.RelationshipProperty.Comparator.has` for
- more comprehensive non-many-to-one scalar
- membership tests.
- * Comparisons against ``None`` given in a one-to-many
- or many-to-many context produce a NOT EXISTS clause.
-
- """
- if isinstance(other, (NoneType, expression.Null)):
- if self.property.direction in [ONETOMANY, MANYTOMANY]:
- return ~self._criterion_exists()
- else:
- return _orm_annotate(self.property._optimized_compare(
- None, adapt_source=self.adapter))
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a colle"
- "ction to an object or collection; use "
- "contains() to test for membership.")
- else:
- return _orm_annotate(self.property._optimized_compare(other,
- adapt_source=self.adapter))
-
- def _criterion_exists(self, criterion=None, **kwargs):
- if getattr(self, '_of_type', None):
- info = inspect(self._of_type)
- target_mapper, to_selectable, is_aliased_class = \
- info.mapper, info.selectable, info.is_aliased_class
- if self.property._is_self_referential and not is_aliased_class:
- to_selectable = to_selectable.alias()
-
- single_crit = target_mapper._single_table_criterion
- if single_crit is not None:
- if criterion is not None:
- criterion = single_crit & criterion
- else:
- criterion = single_crit
- else:
- is_aliased_class = False
- to_selectable = None
-
- if self.adapter:
- source_selectable = self._source_selectable()
- else:
- source_selectable = None
-
- pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
-
- for k in kwargs:
- crit = getattr(self.property.mapper.class_, k) == kwargs[k]
- if criterion is None:
- criterion = crit
- else:
- criterion = criterion & crit
-
- # annotate the *local* side of the join condition, in the case
- # of pj + sj this is the full primaryjoin, in the case of just
- # pj its the local side of the primaryjoin.
- if sj is not None:
- j = _orm_annotate(pj) & sj
- else:
- j = _orm_annotate(pj, exclude=self.property.remote_side)
-
- if criterion is not None and target_adapter and not is_aliased_class:
- # limit this adapter to annotated only?
- criterion = target_adapter.traverse(criterion)
-
- # only have the "joined left side" of what we
- # return be subject to Query adaption. The right
- # side of it is used for an exists() subquery and
- # should not correlate or otherwise reach out
- # to anything in the enclosing query.
- if criterion is not None:
- criterion = criterion._annotate(
- {'no_replacement_traverse': True})
-
- crit = j & criterion
-
- ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest)
- if secondary is not None:
- ex = ex.correlate_except(secondary)
- return ex
-
- def any(self, criterion=None, **kwargs):
- """Produce an expression that tests a collection against
- particular criterion, using EXISTS.
-
- An expression like::
-
- session.query(MyClass).filter(
- MyClass.somereference.any(SomeRelated.x==2)
- )
-
-
- Will produce a query like::
-
- SELECT * FROM my_table WHERE
- EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
- AND related.x=2)
-
- Because :meth:`~.RelationshipProperty.Comparator.any` uses
- a correlated subquery, its performance is not nearly as
- good when compared against large target tables as that of
- using a join.
-
- :meth:`~.RelationshipProperty.Comparator.any` is particularly
- useful for testing for empty collections::
-
- session.query(MyClass).filter(
- ~MyClass.somereference.any()
- )
-
- will produce::
-
- SELECT * FROM my_table WHERE
- NOT EXISTS (SELECT 1 FROM related WHERE
- related.my_id=my_table.id)
-
- :meth:`~.RelationshipProperty.Comparator.any` is only
- valid for collections, i.e. a :func:`.relationship`
- that has ``uselist=True``. For scalar references,
- use :meth:`~.RelationshipProperty.Comparator.has`.
-
- """
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'any()' not implemented for scalar "
- "attributes. Use has()."
- )
-
- return self._criterion_exists(criterion, **kwargs)
-
- def has(self, criterion=None, **kwargs):
- """Produce an expression that tests a scalar reference against
- particular criterion, using EXISTS.
-
- An expression like::
-
- session.query(MyClass).filter(
- MyClass.somereference.has(SomeRelated.x==2)
- )
-
-
- Will produce a query like::
-
- SELECT * FROM my_table WHERE
- EXISTS (SELECT 1 FROM related WHERE
- related.id==my_table.related_id AND related.x=2)
-
- Because :meth:`~.RelationshipProperty.Comparator.has` uses
- a correlated subquery, its performance is not nearly as
- good when compared against large target tables as that of
- using a join.
-
- :meth:`~.RelationshipProperty.Comparator.has` is only
- valid for scalar references, i.e. a :func:`.relationship`
- that has ``uselist=False``. For collection references,
- use :meth:`~.RelationshipProperty.Comparator.any`.
-
- """
- if self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'has()' not implemented for collections. "
- "Use any().")
- return self._criterion_exists(criterion, **kwargs)
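# Sketches of any() and has(), assuming a tutorial-style mapping where
# User.addresses is a one-to-many collection and Address.user is the
# complementary scalar many-to-one:
#
# users having at least one matching address (collection, uses any()):
session.query(User).filter(
    User.addresses.any(Address.email_address.like('%@example.com')))
# users with no addresses at all:
session.query(User).filter(~User.addresses.any())
# addresses whose user is named 'ed' (scalar reference, uses has()):
session.query(Address).filter(Address.user.has(User.name == 'ed'))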
-
- def contains(self, other, **kwargs):
- """Return a simple expression that tests a collection for
- containment of a particular item.
-
- :meth:`~.RelationshipProperty.Comparator.contains` is
- only valid for a collection, i.e. a
- :func:`~.orm.relationship` that implements
- one-to-many or many-to-many with ``uselist=True``.
-
- When used in a simple one-to-many context, an
- expression like::
-
- MyClass.contains(other)
-
- Produces a clause like::
-
- mytable.id == <some id>
-
- Where ``<some id>`` is the value of the foreign key
- attribute on ``other`` which refers to the primary
- key of its parent object. From this it follows that
- :meth:`~.RelationshipProperty.Comparator.contains` is
- very useful when used with simple one-to-many
- operations.
-
- For many-to-many operations, the behavior of
- :meth:`~.RelationshipProperty.Comparator.contains`
- has more caveats. The association table will be
- rendered in the statement, producing an "implicit"
- join, that is, includes multiple tables in the FROM
- clause which are equated in the WHERE clause::
-
- query(MyClass).filter(MyClass.contains(other))
-
- Produces a query like::
-
- SELECT * FROM my_table, my_association_table AS
- my_association_table_1 WHERE
- my_table.id = my_association_table_1.parent_id
- AND my_association_table_1.child_id = <some id>
-
- Where ``<some id>`` would be the primary key of
- ``other``. From the above, it is clear that
- :meth:`~.RelationshipProperty.Comparator.contains`
- will **not** work with many-to-many collections when
- used in queries that move beyond simple AND
- conjunctions, such as multiple
- :meth:`~.RelationshipProperty.Comparator.contains`
- expressions joined by OR. In such cases subqueries or
- explicit "outer joins" will need to be used instead.
- See :meth:`~.RelationshipProperty.Comparator.any` for
- a less-performant alternative using EXISTS, or refer
- to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
- for more details on constructing outer joins.
-
- """
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'contains' not implemented for scalar "
- "attributes. Use ==")
- clause = self.property._optimized_compare(other,
- adapt_source=self.adapter)
-
- if self.property.secondaryjoin is not None:
- clause.negation_clause = \
- self.__negated_contains_or_equals(other)
-
- return clause
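# A sketch of contains() against a one-to-many collection, assuming the
# same User/Address mapping and an already-loaded Address instance named
# ``someaddress``; for OR-combined membership tests on many-to-many
# collections, any() or an explicit outerjoin is the safer choice:
session.query(User).filter(User.addresses.contains(someaddress))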
-
- def __negated_contains_or_equals(self, other):
- if self.property.direction == MANYTOONE:
- state = attributes.instance_state(other)
-
- def state_bindparam(x, state, col):
- o = state.obj() # strong ref
- return sql.bindparam(x, unique=True, callable_=lambda: \
- self.property.mapper._get_committed_attr_by_column(o, col))
-
- def adapt(col):
- if self.adapter:
- return self.adapter(col)
- else:
- return col
-
- if self.property._use_get:
- return sql.and_(*[
- sql.or_(
- adapt(x) != state_bindparam(adapt(x), state, y),
- adapt(x) == None)
- for (x, y) in self.property.local_remote_pairs])
-
- criterion = sql.and_(*[x == y for (x, y) in
- zip(
- self.property.mapper.primary_key,
- self.property.\
- mapper.\
- primary_key_from_instance(other))
- ])
- return ~self._criterion_exists(criterion)
-
- def __ne__(self, other):
- """Implement the ``!=`` operator.
-
- In a many-to-one context, such as::
-
- MyClass.some_prop != <some object>
-
- This will typically produce a clause such as::
-
- mytable.related_id != <some id>
-
- Where ``<some id>`` is the primary key of the
- given object.
-
- The ``!=`` operator provides partial functionality for non-
- many-to-one comparisons:
-
- * Comparisons against collections are not supported.
- Use
- :meth:`~.RelationshipProperty.Comparator.contains`
- in conjunction with :func:`~.expression.not_`.
- * Compared to a scalar one-to-many, will produce a
- clause that compares the target columns in the parent to
- the given target.
- * Compared to a scalar many-to-many, an alias
- of the association table will be rendered as
- well, forming a natural join that is part of the
- main body of the query. This will not work for
- queries that go beyond simple AND conjunctions of
- comparisons, such as those which use OR. Use
- explicit joins, outerjoins, or
- :meth:`~.RelationshipProperty.Comparator.has` in
- conjunction with :func:`~.expression.not_` for
- more comprehensive non-many-to-one scalar
- membership tests.
- * Comparisons against ``None`` given in a one-to-many
- or many-to-many context produce an EXISTS clause.
-
- """
- if isinstance(other, (NoneType, expression.Null)):
- if self.property.direction == MANYTOONE:
- return sql.or_(*[x != None for x in
- self.property._calculated_foreign_keys])
- else:
- return self._criterion_exists()
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection"
- " to an object or collection; use "
- "contains() to test for membership.")
- else:
- return self.__negated_contains_or_equals(other)
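# Sketches of the None comparisons described above, again assuming the
# User/Address mapping: the collection form renders a NOT EXISTS / EXISTS
# subquery, while the many-to-one form tests the foreign key columns:
session.query(User).filter(User.addresses == None)   # users with no addresses
session.query(Address).filter(Address.user != None)  # addresses that have a user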
-
- @util.memoized_property
- def property(self):
- if mapperlib.module._new_mappers:
- configure_mappers()
- return self.prop
-
- def compare(self, op, value,
- value_is_parent=False,
- alias_secondary=True):
- if op == operators.eq:
- if value is None:
- if self.uselist:
- return ~sql.exists([1], self.primaryjoin)
- else:
- return self._optimized_compare(None,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return self._optimized_compare(value,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return op(self.comparator, value)
-
- def _optimized_compare(self, value, value_is_parent=False,
- adapt_source=None,
- alias_secondary=True):
- if value is not None:
- value = attributes.instance_state(value)
- return self._get_strategy(strategies.LazyLoader).lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary,
- adapt_source=adapt_source)
-
- def __str__(self):
- return str(self.parent.class_.__name__) + "." + self.key
-
- def merge(self,
- session,
- source_state,
- source_dict,
- dest_state,
- dest_dict,
- load, _recursive):
-
- if load:
- for r in self._reverse_property:
- if (source_state, r) in _recursive:
- return
-
- if not "merge" in self._cascade:
- return
-
- if self.key not in source_dict:
- return
-
- if self.uselist:
- instances = source_state.get_impl(self.key).\
- get(source_state, source_dict)
- if hasattr(instances, '_sa_adapter'):
- # convert collections to adapters to get a true iterator
- instances = instances._sa_adapter
-
- if load:
- # for a full merge, pre-load the destination collection,
- # so that individual _merge of each item pulls from identity
- # map for those already present.
- # also assumes CollectionAttributeImpl behavior of loading
- # "old" list in any case
- dest_state.get_impl(self.key).get(dest_state, dest_dict)
-
- dest_list = []
- for current in instances:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- if obj is not None:
- dest_list.append(obj)
-
- if not load:
- coll = attributes.init_state_collection(dest_state,
- dest_dict, self.key)
- for c in dest_list:
- coll.append_without_event(c)
- else:
- dest_state.get_impl(self.key)._set_iterable(dest_state,
- dest_dict, dest_list)
- else:
- current = source_dict[self.key]
- if current is not None:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- else:
- obj = None
-
- if not load:
- dest_dict[self.key] = obj
- else:
- dest_state.get_impl(self.key).set(dest_state,
- dest_dict, obj, None)
-
- def _value_as_iterable(self, state, dict_, key,
- passive=attributes.PASSIVE_OFF):
- """Return a list of tuples (state, obj) for the given
- key.
-
- returns an empty list if the value is None/empty/PASSIVE_NO_RESULT
- """
-
- impl = state.manager[key].impl
- x = impl.get(state, dict_, passive=passive)
- if x is attributes.PASSIVE_NO_RESULT or x is None:
- return []
- elif hasattr(impl, 'get_collection'):
- return [
- (attributes.instance_state(o), o) for o in
- impl.get_collection(state, dict_, x, passive=passive)
- ]
- else:
- return [(attributes.instance_state(x), x)]
-
- def cascade_iterator(self, type_, state, dict_,
- visited_states, halt_on=None):
- #assert type_ in self._cascade
-
- # only actively lazy load on the 'delete' cascade
- if type_ != 'delete' or self.passive_deletes:
- passive = attributes.PASSIVE_NO_INITIALIZE
- else:
- passive = attributes.PASSIVE_OFF
-
- if type_ == 'save-update':
- tuples = state.manager[self.key].impl.\
- get_all_pending(state, dict_)
-
- else:
- tuples = self._value_as_iterable(state, dict_, self.key,
- passive=passive)
-
- skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
- not in self._cascade
-
- for instance_state, c in tuples:
- if instance_state in visited_states:
- continue
-
- if c is None:
- # would like to emit a warning here, but
- # would not be consistent with collection.append(None)
- # current behavior of silently skipping.
- # see [ticket:2229]
- continue
-
- instance_dict = attributes.instance_dict(c)
-
- if halt_on and halt_on(instance_state):
- continue
-
- if skip_pending and not instance_state.key:
- continue
-
- instance_mapper = instance_state.manager.mapper
-
- if not instance_mapper.isa(self.mapper.class_manager.mapper):
- raise AssertionError("Attribute '%s' on class '%s' "
- "doesn't handle objects "
- "of type '%s'" % (
- self.key,
- self.parent.class_,
- c.__class__
- ))
-
- visited_states.add(instance_state)
-
- yield c, instance_mapper, instance_state, instance_dict
-
- def _add_reverse_property(self, key):
- other = self.mapper.get_property(key, _configure_mappers=False)
- self._reverse_property.add(other)
- other._reverse_property.add(self)
-
- if not other.mapper.common_parent(self.parent):
- raise sa_exc.ArgumentError('reverse_property %r on '
- 'relationship %s references relationship %s, which '
- 'does not reference mapper %s' % (key, self, other,
- self.parent))
- if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
- raise sa_exc.ArgumentError('%s and back-reference %s are '
- 'both of the same direction %r. Did you mean to '
- 'set remote_side on the many-to-one side ?'
- % (other, self, self.direction))
-
- @util.memoized_property
- def mapper(self):
- """Return the targeted :class:`.Mapper` for this
- :class:`.RelationshipProperty`.
-
- This is a lazy-initializing static attribute.
-
- """
- if isinstance(self.argument, type):
- mapper_ = mapper.class_mapper(self.argument,
- configure=False)
- elif isinstance(self.argument, mapper.Mapper):
- mapper_ = self.argument
- elif util.callable(self.argument):
-
- # accept a callable to suit various deferred-
- # configurational schemes
-
- mapper_ = mapper.class_mapper(self.argument(),
- configure=False)
- else:
- raise sa_exc.ArgumentError("relationship '%s' expects "
- "a class or a mapper argument (received: %s)"
- % (self.key, type(self.argument)))
- assert isinstance(mapper_, mapper.Mapper), mapper_
- return mapper_
-
- @util.memoized_property
- @util.deprecated("0.7", "Use .target")
- def table(self):
- """Return the selectable linked to this
- :class:`.RelationshipProperty` object's target
- :class:`.Mapper`."""
- return self.target
-
- def do_init(self):
- self._check_conflicts()
- self._process_dependent_arguments()
- self._setup_join_conditions()
- self._check_cascade_settings(self._cascade)
- self._post_init()
- self._generate_backref()
- super(RelationshipProperty, self).do_init()
-
- def _process_dependent_arguments(self):
- """Convert incoming configuration arguments to their
- proper form.
-
- Callables are resolved, ORM annotations removed.
-
- """
- # accept callables for other attributes which may require
- # deferred initialization. This technique is used
- # by declarative "string configs" and some recipes.
- for attr in (
- 'order_by', 'primaryjoin', 'secondaryjoin',
- 'secondary', '_user_defined_foreign_keys', 'remote_side',
- ):
- attr_value = getattr(self, attr)
- if util.callable(attr_value):
- setattr(self, attr, attr_value())
-
- # remove "annotations" which are present if mapped class
- # descriptors are used to create the join expression.
- for attr in 'primaryjoin', 'secondaryjoin':
- val = getattr(self, attr)
- if val is not None:
- setattr(self, attr, _orm_deannotate(
- expression._only_column_elements(val, attr))
- )
-
- # ensure expressions in self.order_by, foreign_keys,
- # remote_side are all columns, not strings.
- if self.order_by is not False and self.order_by is not None:
- self.order_by = [
- expression._only_column_elements(x, "order_by")
- for x in
- util.to_list(self.order_by)]
-
- self._user_defined_foreign_keys = \
- util.column_set(
- expression._only_column_elements(x, "foreign_keys")
- for x in util.to_column_set(
- self._user_defined_foreign_keys
- ))
-
- self.remote_side = \
- util.column_set(
- expression._only_column_elements(x, "remote_side")
- for x in
- util.to_column_set(self.remote_side))
-
- self.target = self.mapper.mapped_table
-
-
- def _setup_join_conditions(self):
- self._join_condition = jc = relationships.JoinCondition(
- parent_selectable=self.parent.mapped_table,
- child_selectable=self.mapper.mapped_table,
- parent_local_selectable=self.parent.local_table,
- child_local_selectable=self.mapper.local_table,
- primaryjoin=self.primaryjoin,
- secondary=self.secondary,
- secondaryjoin=self.secondaryjoin,
- parent_equivalents=self.parent._equivalent_columns,
- child_equivalents=self.mapper._equivalent_columns,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- local_remote_pairs=self.local_remote_pairs,
- remote_side=self.remote_side,
- self_referential=self._is_self_referential,
- prop=self,
- support_sync=not self.viewonly,
- can_be_synced_fn=self._columns_are_mapped
- )
- self.primaryjoin = jc.deannotated_primaryjoin
- self.secondaryjoin = jc.deannotated_secondaryjoin
- self.direction = jc.direction
- self.local_remote_pairs = jc.local_remote_pairs
- self.remote_side = jc.remote_columns
- self.local_columns = jc.local_columns
- self.synchronize_pairs = jc.synchronize_pairs
- self._calculated_foreign_keys = jc.foreign_key_columns
- self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
-
- def _check_conflicts(self):
- """Test that this relationship is legal, warn about
- inheritance conflicts."""
-
- if not self.is_primary() \
- and not mapper.class_mapper(
- self.parent.class_,
- configure=False).has_property(self.key):
- raise sa_exc.ArgumentError("Attempting to assign a new "
- "relationship '%s' to a non-primary mapper on "
- "class '%s'. New relationships can only be added "
- "to the primary mapper, i.e. the very first mapper "
- "created for class '%s' " % (self.key,
- self.parent.class_.__name__,
- self.parent.class_.__name__))
-
- # check for conflicting relationship() on superclass
- if not self.parent.concrete:
- for inheriting in self.parent.iterate_to_root():
- if inheriting is not self.parent \
- and inheriting.has_property(self.key):
- util.warn("Warning: relationship '%s' on mapper "
- "'%s' supersedes the same relationship "
- "on inherited mapper '%s'; this can "
- "cause dependency issues during flush"
- % (self.key, self.parent, inheriting))
-
- def _get_cascade(self):
- """Return the current cascade setting for this
- :class:`.RelationshipProperty`.
- """
- return self._cascade
-
- def _set_cascade(self, cascade):
- cascade = CascadeOptions(cascade)
- if 'mapper' in self.__dict__:
- self._check_cascade_settings(cascade)
- self._cascade = cascade
-
- if self._dependency_processor:
- self._dependency_processor.cascade = cascade
-
- cascade = property(_get_cascade, _set_cascade)
-
- def _check_cascade_settings(self, cascade):
- if cascade.delete_orphan and not self.single_parent \
- and (self.direction is MANYTOMANY or self.direction
- is MANYTOONE):
- raise sa_exc.ArgumentError(
- 'On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
- if self.direction is MANYTOONE and self.passive_deletes:
- util.warn("On %s, 'passive_deletes' is normally configured "
- "on one-to-many, one-to-one, many-to-many "
- "relationships only."
- % self)
-
- if self.passive_deletes == 'all' and \
- ("delete" in cascade or
- "delete-orphan" in cascade):
- raise sa_exc.ArgumentError(
- "On %s, can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade" % self)
-
- if cascade.delete_orphan:
- self.mapper.primary_mapper()._delete_orphans.append(
- (self.key, self.parent.class_)
- )
-
- def _columns_are_mapped(self, *cols):
- """Return True if all columns in the given collection are
- mapped by the tables referenced by this :class:`.RelationshipProperty`.
-
- """
- for c in cols:
- if self.secondary is not None \
- and self.secondary.c.contains_column(c):
- continue
- if not self.parent.mapped_table.c.contains_column(c) and \
- not self.target.c.contains_column(c):
- return False
- return True
-
- def _generate_backref(self):
- """Interpret the 'backref' instruction to create a
- :func:`.relationship` complementary to this one."""
-
- if not self.is_primary():
- return
- if self.backref is not None and not self.back_populates:
- if isinstance(self.backref, str):
- backref_key, kwargs = self.backref, {}
- else:
- backref_key, kwargs = self.backref
- mapper = self.mapper.primary_mapper()
-
- check = set(mapper.iterate_to_root()).\
- union(mapper.self_and_descendants)
- for m in check:
- if m.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, m))
-
- # determine primaryjoin/secondaryjoin for the
- # backref. Use the one we had, so that
- # a custom join doesn't have to be specified in
- # both directions.
- if self.secondary is not None:
- # for many to many, just switch primaryjoin/
- # secondaryjoin. use the annotated
- # pj/sj on the _join_condition.
- pj = kwargs.pop('primaryjoin',
- self._join_condition.secondaryjoin_minus_local)
- sj = kwargs.pop('secondaryjoin',
- self._join_condition.primaryjoin_minus_local)
- else:
- pj = kwargs.pop('primaryjoin',
- self._join_condition.primaryjoin_reverse_remote)
- sj = kwargs.pop('secondaryjoin', None)
- if sj:
- raise sa_exc.InvalidRequestError(
- "Can't assign 'secondaryjoin' on a backref "
- "against a non-secondary relationship."
- )
-
- foreign_keys = kwargs.pop('foreign_keys',
- self._user_defined_foreign_keys)
- parent = self.parent.primary_mapper()
- kwargs.setdefault('viewonly', self.viewonly)
- kwargs.setdefault('post_update', self.post_update)
- kwargs.setdefault('passive_updates', self.passive_updates)
- self.back_populates = backref_key
- relationship = RelationshipProperty(
- parent, self.secondary,
- pj, sj,
- foreign_keys=foreign_keys,
- back_populates=self.key,
- **kwargs)
- mapper._configure_property(backref_key, relationship)
-
- if self.back_populates:
- self._add_reverse_property(self.back_populates)
-
- def _post_init(self):
- if self.uselist is None:
- self.uselist = self.direction is not MANYTOONE
- if not self.viewonly:
- self._dependency_processor = \
- dependency.DependencyProcessor.from_relationship(self)
-
- @util.memoized_property
- def _use_get(self):
- """memoize the 'use_get' attribute of this RelationshipLoader's
- lazyloader."""
-
- strategy = self._get_strategy(strategies.LazyLoader)
- return strategy.use_get
-
- @util.memoized_property
- def _is_self_referential(self):
- return self.mapper.common_parent(self.parent)
-
- def _create_joins(self, source_polymorphic=False,
- source_selectable=None, dest_polymorphic=False,
- dest_selectable=None, of_type=None):
- if source_selectable is None:
- if source_polymorphic and self.parent.with_polymorphic:
- source_selectable = self.parent._with_polymorphic_selectable
-
- aliased = False
- if dest_selectable is None:
- if dest_polymorphic and self.mapper.with_polymorphic:
- dest_selectable = self.mapper._with_polymorphic_selectable
- aliased = True
- else:
- dest_selectable = self.mapper.mapped_table
-
- if self._is_self_referential and source_selectable is None:
- dest_selectable = dest_selectable.alias()
- aliased = True
- else:
- aliased = True
-
- dest_mapper = of_type or self.mapper
-
- single_crit = dest_mapper._single_table_criterion
- aliased = aliased or (source_selectable is not None)
-
- primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \
- self._join_condition.join_targets(
- source_selectable, dest_selectable, aliased, single_crit
- )
- if source_selectable is None:
- source_selectable = self.parent.local_table
- if dest_selectable is None:
- dest_selectable = self.mapper.local_table
- return (primaryjoin, secondaryjoin, source_selectable,
- dest_selectable, secondary, target_adapter)
-
-
-PropertyLoader = RelationProperty = RelationshipProperty
-log.class_logger(RelationshipProperty)
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index f6fd07e61..6bd465e9c 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1,5 +1,5 @@
# orm/query.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -24,37 +24,28 @@ from . import (
attributes, interfaces, object_mapper, persistence,
exc as orm_exc, loading
)
+from .base import _entity_descriptor, _is_aliased_class, \
+ _is_mapped_class, _orm_columns, _generative
+from .path_registry import PathRegistry
from .util import (
- AliasedClass, ORMAdapter, _entity_descriptor, PathRegistry,
- _is_aliased_class, _is_mapped_class, _orm_columns,
- join as orm_join, with_parent, aliased
+ AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
)
-from .. import sql, util, log, exc as sa_exc, inspect, inspection, \
- types as sqltypes
+from .. import sql, util, log, exc as sa_exc, inspect, inspection
from ..sql.expression import _interpret_as_from
from ..sql import (
util as sql_util,
expression, visitors
)
+from ..sql.base import ColumnCollection
+from . import properties
__all__ = ['Query', 'QueryContext', 'aliased']
-def _generative(*assertions):
- """Mark a method as generative."""
-
- @util.decorator
- def generate(fn, *args, **kw):
- self = args[0]._clone()
- for assertion in assertions:
- assertion(self, fn.__name__)
- fn(self, *args[1:], **kw)
- return self
- return generate
-
_path_registry = PathRegistry.root
-
+@inspection._self_inspects
+@log.class_logger
class Query(object):
"""ORM-level SQL construction object.
@@ -77,7 +68,6 @@ class Query(object):
_with_labels = False
_criterion = None
_yield_per = None
- _lockmode = None
_order_by = False
_group_by = False
_having = None
@@ -85,6 +75,7 @@ class Query(object):
_prefixes = None
_offset = None
_limit = None
+ _for_update_arg = None
_statement = None
_correlate = frozenset()
_populate_existing = False
@@ -118,6 +109,7 @@ class Query(object):
if entity_wrapper is None:
entity_wrapper = _QueryEntity
self._entities = []
+ self._primary_entity = None
for ent in util.to_list(entities):
entity_wrapper(self, ent)
@@ -299,11 +291,8 @@ class Query(object):
@property
def _mapper_entities(self):
- # TODO: this is wrong, its hardcoded to "primary entity" when
- # for the case of __all_equivs() it should not be
- # the name of this accessor is wrong too
for ent in self._entities:
- if hasattr(ent, 'primary_entity'):
+ if isinstance(ent, _MapperEntity):
yield ent
def _joinpoint_zero(self):
@@ -313,9 +302,10 @@ class Query(object):
)
def _mapper_zero_or_none(self):
- if not getattr(self._entities[0], 'primary_entity', False):
+ if self._primary_entity:
+ return self._primary_entity.mapper
+ else:
return None
- return self._entities[0].mapper
def _only_mapper_zero(self, rationale=None):
if len(self._entities) > 1:
@@ -327,16 +317,11 @@ class Query(object):
return self._mapper_zero()
def _only_full_mapper_zero(self, methname):
- if len(self._entities) != 1:
+ if self._entities != [self._primary_entity]:
raise sa_exc.InvalidRequestError(
"%s() can only be used against "
"a single mapped class." % methname)
- entity = self._entity_zero()
- if not hasattr(entity, 'primary_entity'):
- raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
- return entity.entity_zero
+ return self._primary_entity.entity_zero
def _only_entity_zero(self, rationale=None):
if len(self._entities) > 1:
@@ -555,7 +540,7 @@ class Query(object):
:class:`.Query`, converted
to a scalar subquery with a label of the given name.
- Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.label`.
+ Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.label`.
.. versionadded:: 0.6.5
@@ -567,7 +552,7 @@ class Query(object):
"""Return the full SELECT statement represented by this
:class:`.Query`, converted to a scalar subquery.
- Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.as_scalar`.
+ Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.as_scalar`.
.. versionadded:: 0.6.5
@@ -698,7 +683,7 @@ class Query(object):
"""
- if not getattr(self._entities[0], 'primary_entity', False):
+ if not self._primary_entity:
raise sa_exc.InvalidRequestError(
"No primary mapper set up for this Query.")
entity = self._entities[0]._clone()
@@ -811,7 +796,7 @@ class Query(object):
if not self._populate_existing and \
not mapper.always_refresh and \
- self._lockmode is None:
+ self._for_update_arg is None:
instance = loading.get_from_identity(
self.session, key, attributes.PASSIVE_OFF)
@@ -903,11 +888,10 @@ class Query(object):
"""
if property is None:
- from sqlalchemy.orm import properties
mapper = object_mapper(instance)
for prop in mapper.iterate_properties:
- if isinstance(prop, properties.PropertyLoader) and \
+ if isinstance(prop, properties.RelationshipProperty) and \
prop.mapper is self._mapper_zero():
property = prop
break
@@ -936,7 +920,7 @@ class Query(object):
@_generative()
def with_session(self, session):
- """Return a :class:`Query` that will use the given :class:`.Session`.
+ """Return a :class:`.Query` that will use the given :class:`.Session`.
"""
@@ -1140,32 +1124,63 @@ class Query(object):
@_generative()
def with_lockmode(self, mode):
- """Return a new Query object with the specified locking mode.
+ """Return a new :class:`.Query` object with the specified "locking mode",
+ which essentially refers to the ``FOR UPDATE`` clause.
- :param mode: a string representing the desired locking mode. A
- corresponding value is passed to the ``for_update`` parameter of
- :meth:`~sqlalchemy.sql.expression.select` when the query is
- executed. Valid values are:
+ .. deprecated:: 0.9.0 superseded by :meth:`.Query.with_for_update`.
- ``'update'`` - passes ``for_update=True``, which translates to
- ``FOR UPDATE`` (standard SQL, supported by most dialects)
+ :param mode: a string representing the desired locking mode.
+ Valid values are:
- ``'update_nowait'`` - passes ``for_update='nowait'``, which
- translates to ``FOR UPDATE NOWAIT`` (supported by Oracle,
- PostgreSQL 8.1 upwards)
+ * ``None`` - translates to no lockmode
- ``'read'`` - passes ``for_update='read'``, which translates to
- ``LOCK IN SHARE MODE`` (for MySQL), and ``FOR SHARE`` (for
- PostgreSQL)
+ * ``'update'`` - translates to ``FOR UPDATE``
+ (standard SQL, supported by most dialects)
- ``'read_nowait'`` - passes ``for_update='read_nowait'``, which
- translates to ``FOR SHARE NOWAIT`` (supported by PostgreSQL).
+ * ``'update_nowait'`` - translates to ``FOR UPDATE NOWAIT``
+ (supported by Oracle, PostgreSQL 8.1 upwards)
+
+ * ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
+ and ``FOR SHARE`` (for PostgreSQL)
+
+ .. seealso::
+
+ :meth:`.Query.with_for_update` - improved API for
+ specifying the ``FOR UPDATE`` clause.
- .. versionadded:: 0.7.7
- ``FOR SHARE`` and ``FOR SHARE NOWAIT`` (PostgreSQL).
"""
+ self._for_update_arg = LockmodeArg.parse_legacy_query(mode)
+
+ @_generative()
+ def with_for_update(self, read=False, nowait=False, of=None):
+ """return a new :class:`.Query` with the specified options for the
+ ``FOR UPDATE`` clause.
+
+ The behavior of this method is identical to that of
+ :meth:`.SelectBase.with_for_update`. When called with no arguments,
+ the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause
+ appended. When additional arguments are specified, backend-specific
+ options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE``
+ can take effect.
+
+ E.g.::
+
+ q = sess.query(User).with_for_update(nowait=True, of=User)
+
+ The above query on a PostgreSQL backend will render like::
- self._lockmode = mode
+ SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT
+
+ .. versionadded:: 0.9.0 :meth:`.Query.with_for_update` supersedes
+ the :meth:`.Query.with_lockmode` method.
+
+ .. seealso::
+
+ :meth:`.GenerativeSelect.with_for_update` - Core level method with
+ full argument and behavioral description.
+
+ """
+ self._for_update_arg = LockmodeArg(read=read, nowait=nowait, of=of)
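# A sketch comparing the legacy and current spellings, assuming a mapped
# ``User`` class and a Session named ``sess``:
#
# 0.8-style, now deprecated:
sess.query(User).with_lockmode('update_nowait')
# 0.9-style equivalent, rendering FOR UPDATE NOWAIT:
sess.query(User).with_for_update(nowait=True)
# shared lock, rendering FOR SHARE / LOCK IN SHARE MODE:
sess.query(User).with_for_update(read=True)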
@_generative()
def params(self, *args, **kwargs):
@@ -1300,7 +1315,7 @@ class Query(object):
"""apply a HAVING criterion to the query and return the
newly resulting :class:`.Query`.
- :meth:`having` is used in conjunction with :meth:`group_by`.
+ :meth:`~.Query.having` is used in conjunction with :meth:`~.Query.group_by`.
HAVING criterion makes it possible to use filters on aggregate
functions like COUNT, SUM, AVG, MAX, and MIN, eg.::
@@ -1478,7 +1493,7 @@ class Query(object):
q = session.query(User).join(Address)
- The above calling form of :meth:`.join` will raise an error if
+ The above calling form of :meth:`~.Query.join` will raise an error if
either there are no foreign keys between the two entities, or if
there are multiple foreign key linkages between them. In the
above calling form, :meth:`~.Query.join` is called upon to
@@ -1640,14 +1655,14 @@ class Query(object):
example :ref:`examples_xmlpersistence` which illustrates
an XPath-like query system using algorithmic joins.
- :param *props: A collection of one or more join conditions,
+ :param \*props: A collection of one or more join conditions,
each consisting of a relationship-bound attribute or string
relationship name representing an "on clause", or a single
target entity, or a tuple in the form of ``(target, onclause)``.
A special two-argument calling form of the form ``target, onclause``
is also accepted.
:param aliased=False: If True, indicate that the JOIN target should be
- anonymously aliased. Subsequent calls to :class:`~.Query.filter`
+ anonymously aliased. Subsequent calls to :meth:`~.Query.filter`
and similar will adapt the incoming criterion to the target
alias, until :meth:`~.Query.reset_joinpoint` is called.
:param from_joinpoint=False: When using ``aliased=True``, a setting
@@ -1827,14 +1842,30 @@ class Query(object):
raise sa_exc.InvalidRequestError(
"Can't construct a join from %s to %s, they "
"are the same entity" %
- (left, right))
+ (left, right))
l_info = inspect(left)
r_info = inspect(right)
- overlap = not create_aliases and \
- sql_util.selectables_overlap(l_info.selectable,
- r_info.selectable)
+
+ overlap = False
+ if not create_aliases:
+ right_mapper = getattr(r_info, "mapper", None)
+ # if the target is a joined inheritance mapping,
+ # be more liberal about auto-aliasing.
+ if right_mapper and (
+ right_mapper.with_polymorphic or
+ isinstance(right_mapper.mapped_table, expression.Join)
+ ):
+ for from_obj in self._from_obj or [l_info.selectable]:
+ if sql_util.selectables_overlap(l_info.selectable, from_obj) and \
+ sql_util.selectables_overlap(from_obj, r_info.selectable):
+ overlap = True
+ break
+ elif sql_util.selectables_overlap(l_info.selectable, r_info.selectable):
+ overlap = True
+
+
if overlap and l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
"Can't join table/selectable '%s' to itself" %
@@ -2219,7 +2250,7 @@ class Query(object):
``Query``.
:param \*prefixes: optional prefixes, typically strings,
- not using any commas. In particular is useful for MySQL keywords.
+ not using any commas. This is particularly useful for MySQL keywords.
e.g.::
@@ -2414,10 +2445,10 @@ class Query(object):
"""
return [
{
- 'name':ent._label_name,
- 'type':ent.type,
- 'aliased':getattr(ent, 'is_aliased_class', False),
- 'expr':ent.expr
+ 'name': ent._label_name,
+ 'type': ent.type,
+ 'aliased': getattr(ent, 'is_aliased_class', False),
+ 'expr': ent.expr
}
for ent in self._entities
]
@@ -2500,7 +2531,7 @@ class Query(object):
.. versionadded:: 0.8.1
"""
- return sql.exists(self.with_entities('1').statement)
+ return sql.exists(self.with_labels().statement.with_only_columns(['1']))
def count(self):
"""Return a count of rows this Query would return.
@@ -2571,19 +2602,37 @@ class Query(object):
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
- Returns the number of rows deleted, excluding any cascades.
+ :return: the count of rows matched as returned by the database's
+ "row count" feature.
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON DELETE CASCADE is configured for any foreign key
- references which require it. The Session needs to be expired (occurs
- automatically after commit(), or call expire_all()) in order for the
- state of dependent objects subject to delete or delete-orphan cascade
- to be correctly represented.
+ This method has several key caveats:
- Note that the :meth:`.MapperEvents.before_delete` and
- :meth:`.MapperEvents.after_delete`
- events are **not** invoked from this method. It instead
- invokes :meth:`.SessionEvents.after_bulk_delete`.
+ * The method does **not** offer in-Python cascading of relationships - it
+ is assumed that ON DELETE CASCADE/SET NULL/etc. is configured for any foreign key
+ references which require it, otherwise the database may emit an
+ integrity violation if foreign key references are being enforced.
+
+ After the DELETE, dependent objects in the :class:`.Session` which
+ were impacted by an ON DELETE may not contain the current
+ state, or may have been deleted. This issue is resolved once the
+ :class:`.Session` is expired,
+ which normally occurs upon :meth:`.Session.commit` or can be forced
+ by using :meth:`.Session.expire_all`. Accessing an expired object
+ whose row has been deleted will invoke a SELECT to locate the
+ row; when the row is not found, an :class:`~sqlalchemy.orm.exc.ObjectDeletedError`
+ is raised.
+
+ * The :meth:`.MapperEvents.before_delete` and
+ :meth:`.MapperEvents.after_delete`
+ events are **not** invoked from this method. Instead, the
+ :meth:`.SessionEvents.after_bulk_delete` method is provided to act
+ upon a mass DELETE of entity rows.
+
+ .. seealso::
+
+ :meth:`.Query.update`
+
+ :ref:`inserts_and_updates` - Core SQL tutorial
"""
#TODO: cascades need handling.
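# A sketch of a bulk DELETE followed by expiring the Session, per the
# caveats above (assumes a mapped ``User`` class and a Session ``session``):
session.query(User).filter(User.name == 'squidward').delete()
session.commit()       # commit expires remaining objects automatically
# or, without committing:
# session.expire_all()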
@@ -2622,20 +2671,50 @@ class Query(object):
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
- Returns the number of rows matched by the update.
+ :return: the count of rows matched as returned by the database's
+ "row count" feature.
+
+ This method has several key caveats:
+
+ * The method does **not** offer in-Python cascading of relationships - it
+ is assumed that ON UPDATE CASCADE is configured for any foreign key
+ references which require it, otherwise the database may emit an
+ integrity violation if foreign key references are being enforced.
+
+ After the UPDATE, dependent objects in the :class:`.Session` which
+ were impacted by an ON UPDATE CASCADE may not contain the current
+ state; this issue is resolved once the :class:`.Session` is expired,
+ which normally occurs upon :meth:`.Session.commit` or can be forced
+ by using :meth:`.Session.expire_all`.
+
+ * As of 0.8, this method will support multiple table updates, as detailed
+ in :ref:`multi_table_updates`, and this behavior does extend to support
+ updates of joined-inheritance and other multiple table mappings. However,
+ the **join condition of an inheritance mapper is currently not
+ automatically rendered**.
+ Care must be taken in any multiple-table update to explicitly include
+ the joining condition between those tables, even in mappings where
+ this is normally automatic.
+ E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of the
+ ``Engineer`` local table using criteria against the ``Employee``
+ local table might look like::
+
+ session.query(Engineer).\\
+ filter(Engineer.id == Employee.id).\\
+ filter(Employee.name == 'dilbert').\\
+ update({"engineer_type": "programmer"})
+
+ * The :meth:`.MapperEvents.before_update` and
+ :meth:`.MapperEvents.after_update`
+ events are **not** invoked from this method. Instead, the
+ :meth:`.SessionEvents.after_bulk_update` method is provided to act
+ upon a mass UPDATE of entity rows.
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON UPDATE CASCADE is configured for any foreign key
- references which require it.
+ .. seealso::
- The Session needs to be expired (occurs automatically after commit(),
- or call expire_all()) in order for the state of dependent objects
- subject foreign key cascade to be correctly represented.
+ :meth:`.Query.delete`
- Note that the :meth:`.MapperEvents.before_update` and
- :meth:`.MapperEvents.after_update`
- events are **not** invoked from this method. It instead
- invokes :meth:`.SessionEvents.after_bulk_update`.
+ :ref:`inserts_and_updates` - Core SQL tutorial
"""
@@ -2650,13 +2729,6 @@ class Query(object):
update_op.exec_()
return update_op.rowcount
- _lockmode_lookup = {
- 'read': 'read',
- 'read_nowait': 'read_nowait',
- 'update': True,
- 'update_nowait': 'nowait',
- None: False
- }
def _compile_context(self, labels=True):
context = QueryContext(self)
@@ -2666,12 +2738,8 @@ class Query(object):
context.labels = labels
- if self._lockmode:
- try:
- context.for_update = self._lockmode_lookup[self._lockmode]
- except KeyError:
- raise sa_exc.ArgumentError(
- "Unknown lockmode %r" % self._lockmode)
+ context._for_update_arg = self._for_update_arg
+
for entity in self._entities:
entity.setup_context(self, context)
@@ -2755,9 +2823,10 @@ class Query(object):
statement = sql.select(
[inner] + context.secondary_columns,
- for_update=context.for_update,
use_labels=context.labels)
+ statement._for_update_arg = context._for_update_arg
+
from_clause = inner
for eager_join in context.eager_joins.values():
# EagerLoader places a 'stop_on' attribute on the join,
@@ -2800,11 +2869,12 @@ class Query(object):
context.whereclause,
from_obj=context.froms,
use_labels=context.labels,
- for_update=context.for_update,
order_by=context.order_by,
**self._select_args
)
+ statement._for_update_arg = context._for_update_arg
+
for hint in self._with_hints:
statement = statement.with_hint(*hint)
@@ -2832,14 +2902,34 @@ class Query(object):
if adapter:
single_crit = adapter.traverse(single_crit)
single_crit = self._adapt_clause(single_crit, False, False)
- context.whereclause = sql.and_(context.whereclause,
- single_crit)
+ context.whereclause = sql.and_(
+ sql.True_._ifnone(context.whereclause),
+ single_crit)
def __str__(self):
return str(self._compile_context().statement)
-inspection._self_inspects(Query)
+from ..sql.selectable import ForUpdateArg
+class LockmodeArg(ForUpdateArg):
+ @classmethod
+ def parse_legacy_query(self, mode):
+ if mode in (None, False):
+ return None
+
+ if mode == "read":
+ read = True
+ nowait = False
+ elif mode == "update":
+ read = nowait = False
+ elif mode == "update_nowait":
+ nowait = True
+ read = False
+ else:
+ raise sa_exc.ArgumentError(
+ "Unknown with_lockmode argument: %r" % mode)
+
+ return LockmodeArg(read=read, nowait=nowait)
class _QueryEntity(object):
"""represent an entity column returned within a Query result."""
@@ -2850,6 +2940,8 @@ class _QueryEntity(object):
if not isinstance(entity, util.string_types) and \
_is_mapped_class(entity):
cls = _MapperEntity
+ elif isinstance(entity, Bundle):
+ cls = _BundleEntity
else:
cls = _ColumnEntity
return object.__new__(cls)
@@ -2864,12 +2956,15 @@ class _MapperEntity(_QueryEntity):
"""mapper/class/AliasedClass entity"""
def __init__(self, query, entity):
- self.primary_entity = not query._entities
+ if not query._primary_entity:
+ query._primary_entity = self
query._entities.append(self)
self.entities = [entity]
self.expr = entity
+ supports_single_entity = True
+
def setup_entity(self, ext_info, aliased_adapter):
self.mapper = ext_info.mapper
self.aliased_adapter = aliased_adapter
@@ -2884,6 +2979,7 @@ class _MapperEntity(_QueryEntity):
else:
self._label_name = self.mapper.class_.__name__
self.path = self.entity_zero._path_registry
+ self.custom_rows = bool(self.mapper.dispatch.append_result)
def set_with_polymorphic(self, query, cls_or_mappers,
selectable, polymorphic_on):
@@ -2939,10 +3035,8 @@ class _MapperEntity(_QueryEntity):
return entity.common_parent(self.entity_zero)
- #_adapted_selectable = None
def adapt_to_selectable(self, query, sel):
query._entities.append(self)
- # self._adapted_selectable = sel
def _get_entity_clauses(self, query, context):
@@ -2980,7 +3074,7 @@ class _MapperEntity(_QueryEntity):
self.selectable,
self.mapper._equivalent_columns)
- if self.primary_entity:
+ if query._primary_entity is self:
_instance = loading.instance_processor(
self.mapper,
context,
@@ -3050,6 +3144,187 @@ class _MapperEntity(_QueryEntity):
def __str__(self):
return str(self.mapper)
+@inspection._self_inspects
+class Bundle(object):
+ """A grouping of SQL expressions that are returned by a :class:`.Query`
+ under one namespace.
+
+ The :class:`.Bundle` essentially allows nesting of the tuple-based
+ results returned by a column-oriented :class:`.Query` object. It also
+ is extensible via simple subclassing, where the primary capability
+ to override is that of how the set of expressions should be returned,
+ allowing post-processing as well as custom return types, without
+ involving ORM identity-mapped classes.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`bundles`
+
+ """
+
+ single_entity = False
+ """If True, queries for a single Bundle will be returned as a single
+ entity, rather than an element within a keyed tuple."""
+
+ def __init__(self, name, *exprs, **kw):
+ """Construct a new :class:`.Bundle`.
+
+ e.g.::
+
+ bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+ for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
+ print(row.mybundle.x, row.mybundle.y)
+
+ :param name: name of the bundle.
+ :param \*exprs: columns or SQL expressions comprising the bundle.
+ :param single_entity=False: if True, rows for this :class:`.Bundle`
+ can be returned as a "single entity" outside of any enclosing tuple
+ in the same manner as a mapped entity.
+
+ """
+ self.name = self._label = name
+ self.exprs = exprs
+ self.c = self.columns = ColumnCollection()
+ self.columns.update((getattr(col, "key", col._label), col)
+ for col in exprs)
+ self.single_entity = kw.pop('single_entity', self.single_entity)
+
+ columns = None
+ """A namespace of SQL expressions referred to by this :class:`.Bundle`.
+
+ e.g.::
+
+ bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+ q = sess.query(bn).filter(bn.c.x == 5)
+
+ Nesting of bundles is also supported::
+
+ b1 = Bundle("b1",
+ Bundle('b2', MyClass.a, MyClass.b),
+ Bundle('b3', MyClass.x, MyClass.y)
+ )
+
+ q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
+
+ .. seealso::
+
+ :attr:`.Bundle.c`
+
+ """
+
+ c = None
+ """An alias for :attr:`.Bundle.columns`."""
+
+ def _clone(self):
+ cloned = self.__class__.__new__(self.__class__)
+ cloned.__dict__.update(self.__dict__)
+ return cloned
+
+ def __clause_element__(self):
+ return expression.ClauseList(group=False, *self.c)
+
+ @property
+ def clauses(self):
+ return self.__clause_element__().clauses
+
+ def label(self, name):
+ """Provide a copy of this :class:`.Bundle` passing a new label."""
+
+ cloned = self._clone()
+ cloned.name = name
+ return cloned
+
+ def create_row_processor(self, query, procs, labels):
+ """Produce the "row processing" function for this :class:`.Bundle`.
+
+ May be overridden by subclasses.
+
+ .. seealso::
+
+ :ref:`bundles` - includes an example of subclassing.
+
+ """
+ def proc(row, result):
+ return util.KeyedTuple([proc(row, None) for proc in procs], labels)
+ return proc
+
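# A sketch of the subclassing hook noted above: a hypothetical DictBundle
# that returns each row's values as a plain dict keyed by column label,
# rather than the default KeyedTuple (MyClass and ``session`` are assumed,
# as in the Bundle examples above):
class DictBundle(Bundle):
    def create_row_processor(self, query, procs, labels):
        def proc(row, result):
            return dict(zip(labels, (p(row, None) for p in procs)))
        return proc

bn = DictBundle('mybundle', MyClass.x, MyClass.y)
for row in session.query(bn):
    print(row.mybundle['x'], row.mybundle['y'])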
+
+class _BundleEntity(_QueryEntity):
+ def __init__(self, query, bundle, setup_entities=True):
+ query._entities.append(self)
+ self.bundle = self.expr = bundle
+ self.type = type(bundle)
+ self._label_name = bundle.name
+ self._entities = []
+
+ if setup_entities:
+ for expr in bundle.exprs:
+ if isinstance(expr, Bundle):
+ _BundleEntity(self, expr)
+ else:
+ _ColumnEntity(self, expr, namespace=self)
+
+ self.entities = ()
+
+ self.filter_fn = lambda item: item
+
+ self.supports_single_entity = self.bundle.single_entity
+
+ custom_rows = False
+
+ @property
+ def entity_zero(self):
+ for ent in self._entities:
+ ezero = ent.entity_zero
+ if ezero is not None:
+ return ezero
+ else:
+ return None
+
+ def corresponds_to(self, entity):
+ # TODO: this seems to have no effect for
+ # _ColumnEntity either
+ return False
+
+ @property
+ def entity_zero_or_selectable(self):
+ for ent in self._entities:
+ ezero = ent.entity_zero_or_selectable
+ if ezero is not None:
+ return ezero
+ else:
+ return None
+
+ def adapt_to_selectable(self, query, sel):
+ c = _BundleEntity(query, self.bundle, setup_entities=False)
+ #c._label_name = self._label_name
+ #c.entity_zero = self.entity_zero
+ #c.entities = self.entities
+
+ for ent in self._entities:
+ ent.adapt_to_selectable(c, sel)
+
+ def setup_entity(self, ext_info, aliased_adapter):
+ for ent in self._entities:
+ ent.setup_entity(ext_info, aliased_adapter)
+
+ def setup_context(self, query, context):
+ for ent in self._entities:
+ ent.setup_context(query, context)
+
+ def row_processor(self, query, context, custom_rows):
+ procs, labels = zip(
+ *[ent.row_processor(query, context, custom_rows)
+ for ent in self._entities]
+ )
+
+ proc = self.bundle.create_row_processor(query, procs, labels)
+
+ return proc, self._label_name
class _ColumnEntity(_QueryEntity):
"""Column/expression based entity."""
@@ -3066,7 +3341,7 @@ class _ColumnEntity(_QueryEntity):
interfaces.PropComparator
)):
self._label_name = column.key
- column = column.__clause_element__()
+ column = column._query_clause_element()
else:
self._label_name = getattr(column, 'key', None)
@@ -3079,6 +3354,9 @@ class _ColumnEntity(_QueryEntity):
if c is not column:
return
+ elif isinstance(column, Bundle):
+ _BundleEntity(query, column)
+ return
if not isinstance(column, sql.ColumnElement):
raise sa_exc.InvalidRequestError(
@@ -3086,7 +3364,7 @@ class _ColumnEntity(_QueryEntity):
"expected - got '%r'" % (column, )
)
- type_ = column.type
+ self.type = type_ = column.type
if type_.hashable:
self.filter_fn = lambda item: item
else:
@@ -3129,6 +3407,9 @@ class _ColumnEntity(_QueryEntity):
else:
self.entity_zero = None
+ supports_single_entity = False
+ custom_rows = False
+
@property
def entity_zero_or_selectable(self):
if self.entity_zero is not None:
@@ -3138,10 +3419,6 @@ class _ColumnEntity(_QueryEntity):
else:
return None
- @property
- def type(self):
- return self.column.type
-
def adapt_to_selectable(self, query, sel):
c = _ColumnEntity(query, sel.corresponding_column(self.column))
c._label_name = self._label_name
@@ -3154,6 +3431,8 @@ class _ColumnEntity(_QueryEntity):
self.froms.add(ext_info.selectable)
def corresponds_to(self, entity):
+ # TODO: just returning False here,
+ # no tests fail
if self.entity_zero is None:
return False
elif _is_aliased_class(entity):
@@ -3188,14 +3467,11 @@ class _ColumnEntity(_QueryEntity):
return str(self.column)
-log.class_logger(Query)
-
-
class QueryContext(object):
multi_row_eager_loaders = False
adapter = None
froms = ()
- for_update = False
+ for_update = None
def __init__(self, query):
@@ -3230,6 +3506,38 @@ class QueryContext(object):
class AliasOption(interfaces.MapperOption):
def __init__(self, alias):
+ """Return a :class:`.MapperOption` that will indicate to the :class:`.Query`
+ that the main table has been aliased.
+
+ This is a seldom-used option to suit the
+ very rare case that :func:`.contains_eager`
+ is being used in conjunction with a user-defined SELECT
+ statement that aliases the parent table. E.g.::
+
+ # define an aliased UNION called 'ulist'
+ ulist = users.select(users.c.user_id==7).\\
+ union(users.select(users.c.user_id>7)).\\
+ alias('ulist')
+
+ # add on an eager load of "addresses"
+ statement = ulist.outerjoin(addresses).\\
+ select().apply_labels()
+
+ # create query, indicating "ulist" will be an
+ # alias for the main table, "addresses"
+ # property should be eager loaded
+ query = session.query(User).options(
+ contains_alias(ulist),
+ contains_eager(User.addresses))
+
+ # then get results via the statement
+ results = query.from_statement(statement).all()
+
+ :param alias: is the string name of an alias, or a
+ :class:`~.sql.expression.Alias` object representing
+ the alias.
+
+ """
self.alias = alias
def process_query(self, query):
@@ -3238,3 +3546,5 @@ class AliasOption(interfaces.MapperOption):
else:
alias = self.alias
query._from_obj_alias = sql_util.ColumnAdapter(alias)
+
+
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 33377d3ec..6fdedd382 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -1,5 +1,5 @@
# orm/relationships.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,15 +13,20 @@ and `secondaryjoin` aspects of :func:`.relationship`.
"""
-from .. import sql, util, exc as sa_exc, schema
+from .. import sql, util, exc as sa_exc, schema, log
+
+from .util import CascadeOptions, _orm_annotate, _orm_deannotate
+from . import dependency
+from . import attributes
from ..sql.util import (
ClauseAdapter,
join_condition, _shallow_annotate, visit_binary_product,
- _deep_deannotate, find_tables, selectables_overlap
+ _deep_deannotate, selectables_overlap
)
from ..sql import operators, expression, visitors
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY
-
+from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY, StrategizedProperty, PropComparator
+from ..inspection import inspect
+from . import mapper as mapperlib
def remote(expr):
"""Annotate a portion of a primaryjoin expression
@@ -64,6 +69,1607 @@ def foreign(expr):
{"foreign": True})
+@log.class_logger
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
+class RelationshipProperty(StrategizedProperty):
+ """Describes an object property that holds a single item or list
+ of items that correspond to a related database table.
+
+ Public constructor is the :func:`.orm.relationship` function.
+
+ See also:
+
+ :ref:`relationship_config_toplevel`
+
+ """
+
+ strategy_wildcard_key = 'relationship'
+
+ _dependency_processor = None
+
+ def __init__(self, argument,
+ secondary=None, primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ uselist=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ post_update=False,
+ cascade=False, extension=None,
+ viewonly=False, lazy=True,
+ collection_class=None, passive_deletes=False,
+ passive_updates=True, remote_side=None,
+ enable_typechecks=True, join_depth=None,
+ comparator_factory=None,
+ single_parent=False, innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=False,
+ cascade_backrefs=True,
+ load_on_pending=False,
+ strategy_class=None, _local_remote_pairs=None,
+ query_class=None,
+ info=None):
+ """Provide a relationship of a primary Mapper to a secondary Mapper.
+
+ This corresponds to a parent-child or associative table relationship. The
+ constructed class is an instance of :class:`.RelationshipProperty`.
+
+ A typical :func:`.relationship`, used in a classical mapping::
+
+ mapper(Parent, properties={
+     'children': relationship(Child)
+ })
+
+ Some arguments accepted by :func:`.relationship` optionally accept a
+ callable function, which when called produces the desired value.
+ The callable is invoked by the parent :class:`.Mapper` at "mapper
+ initialization" time, which happens only when mappers are first used, and
+ is assumed to be after all mappings have been constructed. This can be
+ used to resolve order-of-declaration and other dependency issues, such as
+ if ``Child`` is declared below ``Parent`` in the same file::
+
+ mapper(Parent, properties={
+     "children": relationship(lambda: Child,
+                              order_by=lambda: Child.id)
+ })
+
+ When using the :ref:`declarative_toplevel` extension, the Declarative
+ initializer allows string arguments to be passed to :func:`.relationship`.
+ These string arguments are converted into callables that evaluate
+ the string as Python code, using the Declarative
+ class-registry as a namespace. This allows the lookup of related
+ classes to be automatic via their string name, and removes the need to
+ import related classes at all into the local module space::
+
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class Parent(Base):
+     __tablename__ = 'parent'
+     id = Column(Integer, primary_key=True)
+     children = relationship("Child", order_by="Child.id")
+
+ A full array of examples and reference documentation regarding
+ :func:`.relationship` is at :ref:`relationship_config_toplevel`.
+
+ :param argument:
+ a mapped class, or actual :class:`.Mapper` instance, representing the
+ target of the relationship.
+
+ ``argument`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param secondary:
+ for a many-to-many relationship, specifies the intermediary
+ table, and is an instance of :class:`.Table`. The ``secondary`` keyword
+ argument should generally only
+ be used for a table that is not otherwise expressed in any class
+ mapping, unless this relationship is declared as view only;
+ otherwise, conflicting persistence operations can occur.
+
+ ``secondary`` may
+ also be passed as a callable function which is evaluated at
+ mapper initialization time.
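+
+ For example, a many-to-many configuration using a plain :class:`.Table`
+ as the association table; all names here are illustrative and the usual
+ Declarative imports are assumed::
+
+     # hypothetical association table and classes
+     association = Table('association', Base.metadata,
+         Column('parent_id', Integer, ForeignKey('parent.id')),
+         Column('child_id', Integer, ForeignKey('child.id'))
+     )
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child", secondary=association)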
+
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ many-to-one reference should be loaded when replaced, if
+ not already loaded. Normally, history tracking logic for
+ simple many-to-ones only needs to be aware of the "new"
+ value in order to perform a flush. This flag is available
+ for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute.
+
+ :param backref:
+ indicates the string name of a property to be placed on the related
+ mapper's class that will handle this relationship in the other
+ direction. The other property will be created automatically
+ when the mappers are configured. Can also be passed as a
+ :func:`backref` object to control the configuration of the
+ new relationship.
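+
+ For example, a sketch in Declarative form where a ``Child.parent``
+ attribute is generated automatically (class names are illustrative)::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         # creates "Child.parent" as the reverse attribute
+         children = relationship("Child", backref="parent")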
+
+ :param back_populates:
+ Takes a string name and has the same meaning as ``backref``,
+ except the complementing property is **not** created automatically,
+ and instead must be configured explicitly on the other mapper. The
+ complementing property should also indicate ``back_populates``
+ to this relationship to ensure proper functioning.
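+
+ For example, the same bidirectional pair expressed explicitly on both
+ classes (a sketch with illustrative names)::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child", back_populates="parent")
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('parent.id'))
+         parent = relationship("Parent", back_populates="children")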
+
+ :param cascade:
+ a comma-separated list of cascade rules which determines how
+ Session operations should be "cascaded" from parent to child.
+ This defaults to ``False``, which means the default cascade
+ should be used. The default value is ``"save-update, merge"``.
+
+ Available cascades are:
+
+ * ``save-update`` - cascade the :meth:`.Session.add`
+ operation. This cascade applies both to future and
+ past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
+ meaning new items added to a collection or scalar relationship
+ get placed into the same session as that of the parent, and
+ also applies to items which have been removed from this
+ relationship but are still part of unflushed history.
+
+ * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
+ operation
+
+ * ``expunge`` - cascade the :meth:`.Session.expunge`
+ operation
+
+ * ``delete`` - cascade the :meth:`.Session.delete`
+ operation
+
+ * ``delete-orphan`` - if an item of the child's type is
+ detached from its parent, mark it for deletion.
+
+ .. versionchanged:: 0.7
+ This option does not prevent
+ a new instance of the child object from being persisted
+ without a parent to start with; to constrain against
+ that case, ensure the child's foreign key column(s)
+ is configured as NOT NULL
+
+ * ``refresh-expire`` - cascade the :meth:`.Session.expire`
+ and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
+
+ * ``all`` - shorthand for "save-update, merge, refresh-expire,
+ expunge, delete"
+
+ See the section :ref:`unitofwork_cascades` for more background
+ on configuring cascades.
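+
+ For example, a collection whose members should follow the parent into
+ the :class:`.Session` and be deleted along with it, or when de-associated
+ from it (names are illustrative)::
+
+     children = relationship("Child", cascade="all, delete-orphan")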
+
+ :param cascade_backrefs=True:
+ a boolean value indicating if the ``save-update`` cascade should
+ operate along an assignment event intercepted by a backref.
+ When set to ``False``,
+ the attribute managed by this relationship will not cascade
+ an incoming transient object into the session of a
+ persistent parent, if the event is received via backref.
+
+ That is::
+
+ mapper(A, a_table, properties={
+     'bs': relationship(B, backref="a", cascade_backrefs=False)
+ })
+
+ If an ``A()`` is present in the session, assigning it to
+ the "a" attribute on a transient ``B()`` will not place
+ the ``B()`` into the session. To set the flag in the other
+ direction, i.e. so that ``A().bs.append(B())`` won't add
+ a transient ``A()`` into the session for a persistent ``B()``::
+
+ mapper(A, a_table, properties={
+     'bs': relationship(B,
+               backref=backref("a", cascade_backrefs=False))
+ })
+
+ See the section :ref:`unitofwork_cascades` for more background
+ on configuring cascades.
+
+ :param collection_class:
+ a class or callable that returns a new list-holding object. will
+ be used in place of a plain list for storing elements.
+ Behavior of this attribute is described in detail at
+ :ref:`custom_collections`.
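+
+ For example, using a ``set`` rather than a list for the collection
+ (a minimal sketch)::
+
+     children = relationship("Child", collection_class=set)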
+
+ :param comparator_factory:
+ a class which extends :class:`.RelationshipProperty.Comparator` which
+ provides custom SQL clause generation for comparison operations.
+
+ :param distinct_target_key=None:
+ Indicate if a "subquery" eager load should apply the DISTINCT
+ keyword to the innermost SELECT statement. When left as ``None``,
+ the DISTINCT keyword will be applied in those cases when the target
+ columns do not comprise the full primary key of the target table.
+ When set to ``True``, the DISTINCT keyword is applied to the innermost
+ SELECT unconditionally.
+
+ It may be desirable to set this flag to False when the DISTINCT
+ reduces performance of the innermost subquery more than the
+ presence of duplicate innermost rows would.
+
+ .. versionadded:: 0.8.3 - distinct_target_key allows the
+ subquery eager loader to apply a DISTINCT modifier to the
+ innermost SELECT.
+
+ .. versionchanged:: 0.9.0 - distinct_target_key now defaults to
+ ``None``, so that the feature enables itself automatically for
+ those cases where the innermost query targets a non-unique
+ key.
+
+ :param doc:
+ docstring which will be applied to the resulting descriptor.
+
+ :param extension:
+ an :class:`.AttributeExtension` instance, or list of extensions,
+ which will be prepended to the list of attribute listeners for
+ the resulting descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
+
+ :param foreign_keys:
+ a list of columns which are to be used as "foreign key" columns,
+ or columns which refer to the value in a remote column, within the
+ context of this :func:`.relationship` object's ``primaryjoin``
+ condition. That is, if the ``primaryjoin`` condition of this
+ :func:`.relationship` is ``a.id == b.a_id``, and the values in ``b.a_id``
+ are required to be present in ``a.id``, then the "foreign key" column
+ of this :func:`.relationship` is ``b.a_id``.
+
+ In normal cases, the ``foreign_keys`` parameter is **not required.**
+ :func:`.relationship` will **automatically** determine which columns
+ in the ``primaryjoin`` condition are to be considered "foreign key"
+ columns based on those :class:`.Column` objects that specify
+ :class:`.ForeignKey`, or are otherwise listed as referencing columns
+ in a :class:`.ForeignKeyConstraint` construct. ``foreign_keys`` is only
+ needed when:
+
+ 1. There is more than one way to construct a join from the local
+ table to the remote table, as there are multiple foreign key
+ references present. Setting ``foreign_keys`` will limit the
+ :func:`.relationship` to consider just those columns specified
+ here as "foreign".
+
+ .. versionchanged:: 0.8
+ A multiple-foreign key join ambiguity can be resolved by
+ setting the ``foreign_keys`` parameter alone, without the
+ need to explicitly set ``primaryjoin`` as well.
+
+ 2. The :class:`.Table` being mapped does not actually have
+ :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
+ constructs present, often because the table
+ was reflected from a database that does not support foreign key
+ reflection (MySQL MyISAM).
+
+ 3. The ``primaryjoin`` argument is used to construct a non-standard
+ join condition, which makes use of columns or expressions that do
+ not normally refer to their "parent" column, such as a join condition
+ expressed by a complex comparison using a SQL function.
+
+ The :func:`.relationship` construct will raise informative error messages
+ that suggest the use of the ``foreign_keys`` parameter when presented
+ with an ambiguous condition. In typical cases, if :func:`.relationship`
+ doesn't raise any exceptions, the ``foreign_keys`` parameter is usually
+ not needed.
+
+ ``foreign_keys`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
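+
+ For example, disambiguating two foreign key paths to the same target
+ table; the class and column names here are illustrative::
+
+     class Invoice(Base):
+         __tablename__ = 'invoice'
+         id = Column(Integer, primary_key=True)
+         billing_address_id = Column(Integer, ForeignKey('address.id'))
+         shipping_address_id = Column(Integer, ForeignKey('address.id'))
+
+         billing_address = relationship("Address",
+                                 foreign_keys=[billing_address_id])
+         shipping_address = relationship("Address",
+                                 foreign_keys=[shipping_address_id])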
+
+ .. seealso::
+
+ :ref:`relationship_foreign_keys`
+
+ :ref:`relationship_custom_foreign`
+
+ :func:`.foreign` - allows direct annotation of the "foreign" columns
+ within a ``primaryjoin`` condition.
+
+ .. versionadded:: 0.8
+ The :func:`.foreign` annotation can also be applied
+ directly to the ``primaryjoin`` expression, which is an alternate,
+ more specific system of describing which columns in a particular
+ ``primaryjoin`` should be considered "foreign".
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param innerjoin=False:
+ when ``True``, joined eager loads will use an inner join to join
+ against related tables instead of an outer join. The purpose
+ of this option is generally one of performance, as inner joins
+ generally perform better than outer joins. Another reason can be
+ the use of ``with_lockmode``, which does not support outer joins.
+
+ This flag can be set to ``True`` when the relationship references an
+ object via many-to-one using local foreign keys that are not nullable,
+ or when the reference is one-to-one or a collection that is guaranteed
+ to have at least one entry.
+
+ :param join_depth:
+ when non-``None``, an integer value indicating how many levels
+ deep "eager" loaders should join on a self-referring or cyclical
+ relationship. The number counts how many times the same Mapper
+ shall be present in the loading condition along a particular join
+ branch. When left at its default of ``None``, eager loaders
+ will stop chaining when they encounter the same target mapper
+ which is already higher up in the chain. This option applies
+ both to joined- and subquery- eager loaders.
+
+ :param lazy='select': specifies
+ how the related items should be loaded. Default value is
+ ``select``. Values include:
+
+ * ``select`` - items should be loaded lazily when the property is first
+ accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references.
+
+ .. versionadded:: 0.6.5
+
+ * ``joined`` - items should be loaded "eagerly" in the same query as
+ that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
+ the join is "outer" or not is determined by the ``innerjoin``
+ parameter.
+
+ * ``subquery`` - items should be loaded "eagerly" as the parents are
+ loaded, using one additional SQL statement, which issues a JOIN to a
+ subquery of the original statement, for each collection requested.
+
+ * ``noload`` - no loading should occur at any time. This is to
+ support "write-only" attributes, or attributes which are
+ populated in some manner specific to the application.
+
+ * ``dynamic`` - the attribute will return a pre-configured
+ :class:`~sqlalchemy.orm.query.Query` object for all read
+ operations, onto which further filtering operations can be
+ applied before iterating the results. See
+ the section :ref:`dynamic_relationship` for more details.
+
+ * True - a synonym for 'select'
+
+ * False - a synonym for 'joined'
+
+ * None - a synonym for 'noload'
+
+ Detailed discussion of loader strategies is at :doc:`/orm/loading`.
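+
+ For example, requesting joined eager loading for a collection
+ (a minimal sketch)::
+
+     children = relationship("Child", lazy="joined")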
+
+ :param load_on_pending=False:
+ Indicates loading behavior for transient or pending parent objects.
+
+ When set to ``True``, causes the lazy-loader to
+ issue a query for a parent object that is not persistent, meaning it has
+ never been flushed. This may take effect for a pending object when
+ autoflush is disabled, or for a transient object that has been
+ "attached" to a :class:`.Session` but is not part of its pending
+ collection.
+
+ The load_on_pending flag does not improve behavior
+ when the ORM is used normally - object references should be constructed
+ at the object level, not at the foreign key level, so that they
+ are present in an ordinary way before flush() proceeds. This flag
+ is not intended for general use.
+
+ .. versionadded:: 0.6.5
+
+ .. seealso::
+
+ :meth:`.Session.enable_relationship_loading` - this method establishes
+ "load on pending" behavior for the whole object, and also allows
+ loading on objects that remain transient or detached.
+
+ :param order_by:
+ indicates the ordering that should be applied when loading these
+ items. ``order_by`` is expected to refer to one of the :class:`.Column`
+ objects to which the target class is mapped, or
+ the attribute itself bound to the target class which refers
+ to the column.
+
+ ``order_by`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
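+
+ For example, ordering the collection by a column of the target class,
+ here assuming a hypothetical ``Child.name`` column::
+
+     children = relationship("Child", order_by="Child.name")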
+
+ :param passive_deletes=False:
+ Indicates loading behavior during delete operations.
+
+ A value of True indicates that unloaded child items should not
+ be loaded during a delete operation on the parent. Normally,
+ when a parent item is deleted, all child items are loaded so
+ that they can either be marked as deleted, or have their
+ foreign key to the parent set to NULL. Marking this flag as
+ True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
+ place which will handle updating/deleting child rows on the
+ database side.
+
+ Additionally, setting the flag to the string value 'all' will
+ disable the "nulling out" of the child foreign keys, when there
+ is no delete or delete-orphan cascade enabled. This is
+ typically used when a triggering or error raise scenario is in
+ place on the database side. Note that the foreign key
+ attributes on in-session child objects will not be changed
+ after a flush occurs, so this is a very special use-case
+ setting.
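+
+ For example, a sketch that relies on an ``ON DELETE CASCADE`` rule
+ declared on the child table's foreign key (names are illustrative)::
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer,
+                         ForeignKey('parent.id', ondelete='CASCADE'))
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child",
+                         cascade="all, delete-orphan",
+                         passive_deletes=True)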
+
+ :param passive_updates=True:
+ Indicates loading and INSERT/UPDATE/DELETE behavior when the
+ source of a foreign key value changes (i.e. an "on update"
+ cascade), which are typically the primary key columns of the
+ source row.
+
+ When True, it is assumed that ON UPDATE CASCADE is configured on
+ the foreign key in the database, and that the database will
+ handle propagation of an UPDATE from a source column to
+ dependent rows. Note that with databases which enforce
+ referential integrity (e.g. PostgreSQL, MySQL with InnoDB tables),
+ ON UPDATE CASCADE is required for this operation. The
+ relationship() will update the value of the attribute on related
+ items which are locally present in the session during a flush.
+
+ When False, it is assumed that the database does not enforce
+ referential integrity and will not be issuing its own CASCADE
+ operation for an update. The relationship() will issue the
+ appropriate UPDATE statements to the database in response to the
+ change of a referenced key, and items locally present in the
+ session during a flush will also be refreshed.
+
+ This flag should probably be set to False if primary key changes
+ are expected and the database in use doesn't support CASCADE
+ (e.g. SQLite, MySQL MyISAM tables).
+
+ Also see the passive_updates flag on ``mapper()``.
+
+ A future SQLAlchemy release will provide a "detect" feature for
+ this flag.
+
+ :param post_update:
+ this indicates that the relationship should be handled by a
+ second UPDATE statement after an INSERT or before a
+ DELETE. Currently, it also will issue an UPDATE after the
+ instance was UPDATEd, although this technically should
+ be improved. This flag is used to handle saving bi-directional
+ dependencies between two individual rows (i.e. each row
+ references the other), where it would otherwise be impossible to
+ INSERT or DELETE both rows fully since one row exists before the
+ other. Use this flag when a particular mapping arrangement will
+ incur two rows that are dependent on each other, such as a table
+ that has a one-to-many relationship to a set of child rows, and
+ also has a column that references a single child row within that
+ list (i.e. both tables contain a foreign key to each other). If
+ a ``flush()`` operation returns an error that a "cyclical
+ dependency" was detected, this is a cue that you might want to
+ use ``post_update`` to "break" the cycle.
+
+ :param primaryjoin:
+ a SQL expression that will be used as the primary
+ join of this child object against the parent object, or in a
+ many-to-many relationship the join of the primary object to the
+ association table. By default, this value is computed based on the
+ foreign key relationships of the parent and child tables (or association
+ table).
+
+ ``primaryjoin`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
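+
+ For example, a relationship that loads only a subset of related rows
+ by adding criteria to the join; the ``Address.city`` column is assumed
+ for illustration::
+
+     class User(Base):
+         __tablename__ = 'user'
+         id = Column(Integer, primary_key=True)
+         boston_addresses = relationship("Address",
+             primaryjoin="and_(User.id == Address.user_id, "
+                         "Address.city == 'Boston')")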
+
+ :param remote_side:
+ used for self-referential relationships, indicates the column or
+ list of columns that form the "remote side" of the relationship.
+
+ ``remote_side`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ .. versionchanged:: 0.8
+ The :func:`.remote` annotation can also be applied
+ directly to the ``primaryjoin`` expression, which is an alternate,
+ more specific system of describing which columns in a particular
+ ``primaryjoin`` should be considered "remote".
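+
+ For example, an adjacency list ("self-referential") mapping where the
+ many-to-one side is indicated via ``remote_side`` (a sketch with
+ illustrative names)::
+
+     class Node(Base):
+         __tablename__ = 'node'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('node.id'))
+         parent = relationship("Node", remote_side=[id])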
+
+ :param query_class:
+ a :class:`.Query` subclass that will be used as the base of the
+ "appender query" returned by a "dynamic" relationship, that
+ is, a relationship that specifies ``lazy="dynamic"`` or was
+ otherwise constructed using the :func:`.orm.dynamic_loader`
+ function.
+
+ :param secondaryjoin:
+ a SQL expression that will be used as the join of
+ an association table to the child object. By default, this value is
+ computed based on the foreign key relationships of the association and
+ child tables.
+
+ ``secondaryjoin`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param single_parent=(True|False):
+ when True, installs a validator which will prevent objects
+ from being associated with more than one parent at a time.
+ This is used for many-to-one or many-to-many relationships that
+ should be treated either as one-to-one or one-to-many. Its
+ usage is optional unless delete-orphan cascade is also
+ set on this relationship(), in which case it's required.
+
+ :param uselist=(True|False):
+ a boolean that indicates if this property should be loaded as a
+ list or a scalar. In most cases, this value is determined
+ automatically by ``relationship()``, based on the type and direction
+ of the relationship - one to many forms a list, many to one
+ forms a scalar, many to many is a list. If a scalar is desired
+ where normally a list would be present, such as a bi-directional
+ one-to-one relationship, set uselist to False.
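+
+ For example, forcing a scalar on the "one" side of a bidirectional
+ one-to-one (a sketch with illustrative names)::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         child = relationship("Child", uselist=False, backref="parent")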
+
+ :param viewonly=False:
+ when set to True, the relationship is used only for loading objects
+ within the relationship, and has no effect on the unit-of-work
+ flush process. Relationships with viewonly can specify any kind of
+ join conditions to provide additional views of related objects
+ onto a parent object. Note that the functionality of a viewonly
+ relationship has its limits - complicated join conditions may
+ not compile into eager or lazy loaders properly. If this is the
+ case, use an alternative method.
+
+ .. versionchanged:: 0.6
+ :func:`relationship` was renamed from its previous name
+ :func:`relation`.
+
+ """
+
+ self.uselist = uselist
+ self.argument = argument
+ self.secondary = secondary
+ self.primaryjoin = primaryjoin
+ self.secondaryjoin = secondaryjoin
+ self.post_update = post_update
+ self.direction = None
+ self.viewonly = viewonly
+ self.lazy = lazy
+ self.single_parent = single_parent
+ self._user_defined_foreign_keys = foreign_keys
+ self.collection_class = collection_class
+ self.passive_deletes = passive_deletes
+ self.cascade_backrefs = cascade_backrefs
+ self.passive_updates = passive_updates
+ self.remote_side = remote_side
+ self.enable_typechecks = enable_typechecks
+ self.query_class = query_class
+ self.innerjoin = innerjoin
+ self.distinct_target_key = distinct_target_key
+ self.doc = doc
+ self.active_history = active_history
+ self.join_depth = join_depth
+ self.local_remote_pairs = _local_remote_pairs
+ self.extension = extension
+ self.load_on_pending = load_on_pending
+ self.comparator_factory = comparator_factory or \
+ RelationshipProperty.Comparator
+ self.comparator = self.comparator_factory(self, None)
+ util.set_creation_order(self)
+
+ if info is not None:
+ self.info = info
+
+ if strategy_class:
+ self.strategy_class = strategy_class
+ else:
+ self.strategy_class = self._strategy_lookup(("lazy", self.lazy))
+
+ self._reverse_property = set()
+
+ self.cascade = cascade if cascade is not False \
+ else "save-update, merge"
+
+ self.order_by = order_by
+
+ self.back_populates = back_populates
+
+ if self.back_populates:
+ if backref:
+ raise sa_exc.ArgumentError(
+ "backref and back_populates keyword arguments "
+ "are mutually exclusive")
+ self.backref = None
+ else:
+ self.backref = backref
+
+ def instrument_class(self, mapper):
+ attributes.register_descriptor(
+ mapper.class_,
+ self.key,
+ comparator=self.comparator_factory(self, mapper),
+ parententity=mapper,
+ doc=self.doc,
+ )
+
+ class Comparator(PropComparator):
+ """Produce boolean, comparison, and other operators for
+ :class:`.RelationshipProperty` attributes.
+
+ See the documentation for :class:`.PropComparator` for a brief overview
+ of ORM level operator definition.
+
+ See also:
+
+ :class:`.PropComparator`
+
+ :class:`.ColumnProperty.Comparator`
+
+ :class:`.ColumnOperators`
+
+ :ref:`types_operators`
+
+ :attr:`.TypeEngine.comparator_factory`
+
+ """
+
+ _of_type = None
+
+ def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
+ """Construction of :class:`.RelationshipProperty.Comparator`
+ is internal to the ORM's attribute mechanics.
+
+ """
+ self.prop = prop
+ self._parentmapper = parentmapper
+ self._adapt_to_entity = adapt_to_entity
+ if of_type:
+ self._of_type = of_type
+
+ def adapt_to_entity(self, adapt_to_entity):
+ return self.__class__(self.property, self._parentmapper,
+ adapt_to_entity=adapt_to_entity,
+ of_type=self._of_type)
+
+ @util.memoized_property
+ def mapper(self):
+ """The target :class:`.Mapper` referred to by this
+ :class:`.RelationshipProperty.Comparator`.
+
+ This is the "target" or "remote" side of the
+ :func:`.relationship`.
+
+ """
+ return self.property.mapper
+
+ @util.memoized_property
+ def _parententity(self):
+ return self.property.parent
+
+ def _source_selectable(self):
+ if self._adapt_to_entity:
+ return self._adapt_to_entity.selectable
+ else:
+ return self.property.parent._with_polymorphic_selectable
+
+ def __clause_element__(self):
+ adapt_from = self._source_selectable()
+ if self._of_type:
+ of_type = inspect(self._of_type).mapper
+ else:
+ of_type = None
+
+ pj, sj, source, dest, \
+ secondary, target_adapter = self.property._create_joins(
+ source_selectable=adapt_from,
+ source_polymorphic=True,
+ of_type=of_type)
+ if sj is not None:
+ return pj & sj
+ else:
+ return pj
+
+ def of_type(self, cls):
+ """Produce a construct that represents a particular 'subtype' of
+ attribute for the parent class.
+
+ Currently this is usable in conjunction with :meth:`.Query.join`
+ and :meth:`.Query.outerjoin`.
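+
+ e.g., joining along a relationship to a particular subclass, where
+ ``SubChild`` is an assumed subclass of the relationship's target::
+
+     session.query(Parent).join(
+         Parent.children.of_type(SubChild))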
+
+ """
+ return RelationshipProperty.Comparator(
+ self.property,
+ self._parentmapper,
+ adapt_to_entity=self._adapt_to_entity,
+ of_type=cls)
+
+ def in_(self, other):
+ """Produce an IN clause - this is not implemented
+ for :func:`~.orm.relationship`-based attributes at this time.
+
+ """
+ raise NotImplementedError('in_() not yet supported for '
+ 'relationships. For a simple many-to-one, use '
+ 'in_() against the set of foreign key values.')
+
+ __hash__ = None
+
+ def __eq__(self, other):
+ """Implement the ``==`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop == <some object>
+
+ this will typically produce a
+ clause such as::
+
+ mytable.related_id == <some id>
+
+ Where ``<some id>`` is the primary key of the given
+ object.
+
+ The ``==`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use :meth:`~.RelationshipProperty.Comparator.contains`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce a NOT EXISTS clause.
+
+ """
+ if isinstance(other, (util.NoneType, expression.Null)):
+ if self.property.direction in [ONETOMANY, MANYTOMANY]:
+ return ~self._criterion_exists()
+ else:
+ return _orm_annotate(self.property._optimized_compare(
+ None, adapt_source=self.adapter))
+ elif self.property.uselist:
+ raise sa_exc.InvalidRequestError("Can't compare a colle"
+ "ction to an object or collection; use "
+ "contains() to test for membership.")
+ else:
+ return _orm_annotate(self.property._optimized_compare(other,
+ adapt_source=self.adapter))
+
+ def _criterion_exists(self, criterion=None, **kwargs):
+ if getattr(self, '_of_type', None):
+ info = inspect(self._of_type)
+ target_mapper, to_selectable, is_aliased_class = \
+ info.mapper, info.selectable, info.is_aliased_class
+ if self.property._is_self_referential and not is_aliased_class:
+ to_selectable = to_selectable.alias()
+
+ single_crit = target_mapper._single_table_criterion
+ if single_crit is not None:
+ if criterion is not None:
+ criterion = single_crit & criterion
+ else:
+ criterion = single_crit
+ else:
+ is_aliased_class = False
+ to_selectable = None
+
+ if self.adapter:
+ source_selectable = self._source_selectable()
+ else:
+ source_selectable = None
+
+ pj, sj, source, dest, secondary, target_adapter = \
+ self.property._create_joins(dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
+
+ for k in kwargs:
+ crit = getattr(self.property.mapper.class_, k) == kwargs[k]
+ if criterion is None:
+ criterion = crit
+ else:
+ criterion = criterion & crit
+
+ # annotate the *local* side of the join condition, in the case
+ # of pj + sj this is the full primaryjoin, in the case of just
+ # pj it's the local side of the primaryjoin.
+ if sj is not None:
+ j = _orm_annotate(pj) & sj
+ else:
+ j = _orm_annotate(pj, exclude=self.property.remote_side)
+
+ if criterion is not None and target_adapter and not is_aliased_class:
+ # limit this adapter to annotated only?
+ criterion = target_adapter.traverse(criterion)
+
+ # only have the "joined left side" of what we
+ # return be subject to Query adaption. The right
+ # side of it is used for an exists() subquery and
+ # should not correlate or otherwise reach out
+ # to anything in the enclosing query.
+ if criterion is not None:
+ criterion = criterion._annotate(
+ {'no_replacement_traverse': True})
+
+ crit = j & sql.True_._ifnone(criterion)
+
+ ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest)
+ if secondary is not None:
+ ex = ex.correlate_except(secondary)
+ return ex
+
+ def any(self, criterion=None, **kwargs):
+ """Produce an expression that tests a collection against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.any(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
+ AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.any` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.any` is particularly
+ useful for testing for empty collections::
+
+ session.query(MyClass).filter(
+ ~MyClass.somereference.any()
+ )
+
+ will produce::
+
+ SELECT * FROM my_table WHERE
+ NOT EXISTS (SELECT 1 FROM related WHERE
+ related.my_id=my_table.id)
+
+ :meth:`~.RelationshipProperty.Comparator.any` is only
+ valid for collections, i.e. a :func:`.relationship`
+ that has ``uselist=True``. For scalar references,
+ use :meth:`~.RelationshipProperty.Comparator.has`.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
+
+ return self._criterion_exists(criterion, **kwargs)
+
+ def has(self, criterion=None, **kwargs):
+ """Produce an expression that tests a scalar reference against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.has(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE
+ related.id==my_table.related_id AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.has` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.has` is only
+ valid for scalar references, i.e. a :func:`.relationship`
+ that has ``uselist=False``. For collection references,
+ use :meth:`~.RelationshipProperty.Comparator.any`.
+
+ """
+ if self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'has()' not implemented for collections. "
+ "Use any().")
+ return self._criterion_exists(criterion, **kwargs)
+
+ def contains(self, other, **kwargs):
+ """Return a simple expression that tests a collection for
+ containment of a particular item.
+
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ only valid for a collection, i.e. a
+ :func:`~.orm.relationship` that implements
+ one-to-many or many-to-many with ``uselist=True``.
+
+ When used in a simple one-to-many context, an
+ expression like::
+
+ MyClass.contains(other)
+
+ Produces a clause like::
+
+ mytable.id == <some id>
+
+ Where ``<some id>`` is the value of the foreign key
+ attribute on ``other`` which refers to the primary
+ key of its parent object. From this it follows that
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ very useful when used with simple one-to-many
+ operations.
+
+ For many-to-many operations, the behavior of
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ has more caveats. The association table will be
+ rendered in the statement, producing an "implicit"
+ join, that is, includes multiple tables in the FROM
+ clause which are equated in the WHERE clause::
+
+ query(MyClass).filter(MyClass.contains(other))
+
+ Produces a query like::
+
+ SELECT * FROM my_table, my_association_table AS
+ my_association_table_1 WHERE
+ my_table.id = my_association_table_1.parent_id
+ AND my_association_table_1.child_id = <some id>
+
+ Where ``<some id>`` would be the primary key of
+ ``other``. From the above, it is clear that
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ will **not** work with many-to-many collections when
+ used in queries that move beyond simple AND
+ conjunctions, such as multiple
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ expressions joined by OR. In such cases subqueries or
+ explicit "outer joins" will need to be used instead.
+ See :meth:`~.RelationshipProperty.Comparator.any` for
+ a less-performant alternative using EXISTS, or refer
+ to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
+ for more details on constructing outer joins.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
+ clause = self.property._optimized_compare(other,
+ adapt_source=self.adapter)
+
+ if self.property.secondaryjoin is not None:
+ clause.negation_clause = \
+ self.__negated_contains_or_equals(other)
+
+ return clause
+
+ def __negated_contains_or_equals(self, other):
+ if self.property.direction == MANYTOONE:
+ state = attributes.instance_state(other)
+
+ def state_bindparam(x, state, col):
+ o = state.obj() # strong ref
+ return sql.bindparam(x, unique=True, callable_=lambda: \
+ self.property.mapper._get_committed_attr_by_column(o, col))
+
+ def adapt(col):
+ if self.adapter:
+ return self.adapter(col)
+ else:
+ return col
+
+ if self.property._use_get:
+ return sql.and_(*[
+ sql.or_(
+ adapt(x) != state_bindparam(adapt(x), state, y),
+ adapt(x) == None)
+ for (x, y) in self.property.local_remote_pairs])
+
+ criterion = sql.and_(*[x == y for (x, y) in
+ zip(
+ self.property.mapper.primary_key,
+ self.property.\
+ mapper.\
+ primary_key_from_instance(other))
+ ])
+ return ~self._criterion_exists(criterion)
+
+ def __ne__(self, other):
+ """Implement the ``!=`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop != <some object>
+
+ This will typically produce a clause such as::
+
+ mytable.related_id != <some id>
+
+ Where ``<some id>`` is the primary key of the
+ given object.
+
+ The ``!=`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ in conjunction with :func:`~.expression.not_`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` in
+ conjunction with :func:`~.expression.not_` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce an EXISTS clause.
+
+ """
+ if isinstance(other, (util.NoneType, expression.Null)):
+ if self.property.direction == MANYTOONE:
+ return sql.or_(*[x != None for x in
+ self.property._calculated_foreign_keys])
+ else:
+ return self._criterion_exists()
+ elif self.property.uselist:
+ raise sa_exc.InvalidRequestError("Can't compare a collection"
+ " to an object or collection; use "
+ "contains() to test for membership.")
+ else:
+ return self.__negated_contains_or_equals(other)
+
+ @util.memoized_property
+ def property(self):
+ if mapperlib.Mapper._new_mappers:
+ mapperlib.Mapper._configure_all()
+ return self.prop
+
+ def compare(self, op, value,
+ value_is_parent=False,
+ alias_secondary=True):
+ if op == operators.eq:
+ if value is None:
+ if self.uselist:
+ return ~sql.exists([1], self.primaryjoin)
+ else:
+ return self._optimized_compare(None,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
+ else:
+ return self._optimized_compare(value,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
+ else:
+ return op(self.comparator, value)
+
+ def _optimized_compare(self, value, value_is_parent=False,
+ adapt_source=None,
+ alias_secondary=True):
+ if value is not None:
+ value = attributes.instance_state(value)
+ return self._lazy_strategy.lazy_clause(value,
+ reverse_direction=not value_is_parent,
+ alias_secondary=alias_secondary,
+ adapt_source=adapt_source)
+
+ def __str__(self):
+ return str(self.parent.class_.__name__) + "." + self.key
+
+ def merge(self,
+ session,
+ source_state,
+ source_dict,
+ dest_state,
+ dest_dict,
+ load, _recursive):
+
+ if load:
+ for r in self._reverse_property:
+ if (source_state, r) in _recursive:
+ return
+
+ if "merge" not in self._cascade:
+ return
+
+ if self.key not in source_dict:
+ return
+
+ if self.uselist:
+ instances = source_state.get_impl(self.key).\
+ get(source_state, source_dict)
+ if hasattr(instances, '_sa_adapter'):
+ # convert collections to adapters to get a true iterator
+ instances = instances._sa_adapter
+
+ if load:
+ # for a full merge, pre-load the destination collection,
+ # so that individual _merge of each item pulls from identity
+ # map for those already present.
+ # also assumes CollectionAttributeImpl behavior of loading
+ # "old" list in any case
+ dest_state.get_impl(self.key).get(dest_state, dest_dict)
+
+ dest_list = []
+ for current in instances:
+ current_state = attributes.instance_state(current)
+ current_dict = attributes.instance_dict(current)
+ _recursive[(current_state, self)] = True
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
+ if obj is not None:
+ dest_list.append(obj)
+
+ if not load:
+ coll = attributes.init_state_collection(dest_state,
+ dest_dict, self.key)
+ for c in dest_list:
+ coll.append_without_event(c)
+ else:
+ dest_state.get_impl(self.key)._set_iterable(dest_state,
+ dest_dict, dest_list)
+ else:
+ current = source_dict[self.key]
+ if current is not None:
+ current_state = attributes.instance_state(current)
+ current_dict = attributes.instance_dict(current)
+ _recursive[(current_state, self)] = True
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
+ else:
+ obj = None
+
+ if not load:
+ dest_dict[self.key] = obj
+ else:
+ dest_state.get_impl(self.key).set(dest_state,
+ dest_dict, obj, None)
+
+ def _value_as_iterable(self, state, dict_, key,
+ passive=attributes.PASSIVE_OFF):
+ """Return a list of tuples (state, obj) for the given
+ key.
+
+ returns an empty list if the value is None/empty/PASSIVE_NO_RESULT
+ """
+
+ impl = state.manager[key].impl
+ x = impl.get(state, dict_, passive=passive)
+ if x is attributes.PASSIVE_NO_RESULT or x is None:
+ return []
+ elif hasattr(impl, 'get_collection'):
+ return [
+ (attributes.instance_state(o), o) for o in
+ impl.get_collection(state, dict_, x, passive=passive)
+ ]
+ else:
+ return [(attributes.instance_state(x), x)]
+
+ def cascade_iterator(self, type_, state, dict_,
+ visited_states, halt_on=None):
+ #assert type_ in self._cascade
+
+ # only actively lazy load on the 'delete' cascade
+ if type_ != 'delete' or self.passive_deletes:
+ passive = attributes.PASSIVE_NO_INITIALIZE
+ else:
+ passive = attributes.PASSIVE_OFF
+
+ if type_ == 'save-update':
+ tuples = state.manager[self.key].impl.\
+ get_all_pending(state, dict_)
+
+ else:
+ tuples = self._value_as_iterable(state, dict_, self.key,
+ passive=passive)
+
+ skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
+ not in self._cascade
+
+ for instance_state, c in tuples:
+ if instance_state in visited_states:
+ continue
+
+ if c is None:
+ # would like to emit a warning here, but
+ # would not be consistent with collection.append(None)
+ # current behavior of silently skipping.
+ # see [ticket:2229]
+ continue
+
+ instance_dict = attributes.instance_dict(c)
+
+ if halt_on and halt_on(instance_state):
+ continue
+
+ if skip_pending and not instance_state.key:
+ continue
+
+ instance_mapper = instance_state.manager.mapper
+
+ if not instance_mapper.isa(self.mapper.class_manager.mapper):
+ raise AssertionError("Attribute '%s' on class '%s' "
+ "doesn't handle objects "
+ "of type '%s'" % (
+ self.key,
+ self.parent.class_,
+ c.__class__
+ ))
+
+ visited_states.add(instance_state)
+
+ yield c, instance_mapper, instance_state, instance_dict
+
+ def _add_reverse_property(self, key):
+ other = self.mapper.get_property(key, _configure_mappers=False)
+ self._reverse_property.add(other)
+ other._reverse_property.add(self)
+
+ if not other.mapper.common_parent(self.parent):
+ raise sa_exc.ArgumentError('reverse_property %r on '
+ 'relationship %s references relationship %s, which '
+ 'does not reference mapper %s' % (key, self, other,
+ self.parent))
+ if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
+ == other.direction:
+ raise sa_exc.ArgumentError('%s and back-reference %s are '
+ 'both of the same direction %r. Did you mean to '
+ 'set remote_side on the many-to-one side ?'
+ % (other, self, self.direction))
+
+ @util.memoized_property
+ def mapper(self):
+ """Return the targeted :class:`.Mapper` for this
+ :class:`.RelationshipProperty`.
+
+ This is a lazy-initializing static attribute.
+
+ """
+ if util.callable(self.argument) and \
+ not isinstance(self.argument, (type, mapperlib.Mapper)):
+ argument = self.argument()
+ else:
+ argument = self.argument
+
+ if isinstance(argument, type):
+ mapper_ = mapperlib.class_mapper(argument,
+ configure=False)
+ elif isinstance(self.argument, mapperlib.Mapper):
+ mapper_ = argument
+ else:
+ raise sa_exc.ArgumentError("relationship '%s' expects "
+ "a class or a mapper argument (received: %s)"
+ % (self.key, type(argument)))
+ return mapper_
+
+ @util.memoized_property
+ @util.deprecated("0.7", "Use .target")
+ def table(self):
+ """Return the selectable linked to this
+ :class:`.RelationshipProperty` object's target
+ :class:`.Mapper`.
+ """
+ return self.target
+
+ def do_init(self):
+ self._check_conflicts()
+ self._process_dependent_arguments()
+ self._setup_join_conditions()
+ self._check_cascade_settings(self._cascade)
+ self._post_init()
+ self._generate_backref()
+ super(RelationshipProperty, self).do_init()
+ self._lazy_strategy = self._get_strategy((("lazy", "select"),))
+
+
+ def _process_dependent_arguments(self):
+ """Convert incoming configuration arguments to their
+ proper form.
+
+ Callables are resolved, ORM annotations removed.
+
+ """
+ # accept callables for other attributes which may require
+ # deferred initialization. This technique is used
+ # by declarative "string configs" and some recipes.
+ for attr in (
+ 'order_by', 'primaryjoin', 'secondaryjoin',
+ 'secondary', '_user_defined_foreign_keys', 'remote_side',
+ ):
+ attr_value = getattr(self, attr)
+ if util.callable(attr_value):
+ setattr(self, attr, attr_value())
+
+ # remove "annotations" which are present if mapped class
+ # descriptors are used to create the join expression.
+ for attr in 'primaryjoin', 'secondaryjoin':
+ val = getattr(self, attr)
+ if val is not None:
+ setattr(self, attr, _orm_deannotate(
+ expression._only_column_elements(val, attr))
+ )
+
+ # ensure expressions in self.order_by, foreign_keys,
+ # remote_side are all columns, not strings.
+ if self.order_by is not False and self.order_by is not None:
+ self.order_by = [
+ expression._only_column_elements(x, "order_by")
+ for x in
+ util.to_list(self.order_by)]
+
+ self._user_defined_foreign_keys = \
+ util.column_set(
+ expression._only_column_elements(x, "foreign_keys")
+ for x in util.to_column_set(
+ self._user_defined_foreign_keys
+ ))
+
+ self.remote_side = \
+ util.column_set(
+ expression._only_column_elements(x, "remote_side")
+ for x in
+ util.to_column_set(self.remote_side))
+
+ self.target = self.mapper.mapped_table
+
+
+ def _setup_join_conditions(self):
+ self._join_condition = jc = JoinCondition(
+ parent_selectable=self.parent.mapped_table,
+ child_selectable=self.mapper.mapped_table,
+ parent_local_selectable=self.parent.local_table,
+ child_local_selectable=self.mapper.local_table,
+ primaryjoin=self.primaryjoin,
+ secondary=self.secondary,
+ secondaryjoin=self.secondaryjoin,
+ parent_equivalents=self.parent._equivalent_columns,
+ child_equivalents=self.mapper._equivalent_columns,
+ consider_as_foreign_keys=self._user_defined_foreign_keys,
+ local_remote_pairs=self.local_remote_pairs,
+ remote_side=self.remote_side,
+ self_referential=self._is_self_referential,
+ prop=self,
+ support_sync=not self.viewonly,
+ can_be_synced_fn=self._columns_are_mapped
+ )
+ self.primaryjoin = jc.deannotated_primaryjoin
+ self.secondaryjoin = jc.deannotated_secondaryjoin
+ self.direction = jc.direction
+ self.local_remote_pairs = jc.local_remote_pairs
+ self.remote_side = jc.remote_columns
+ self.local_columns = jc.local_columns
+ self.synchronize_pairs = jc.synchronize_pairs
+ self._calculated_foreign_keys = jc.foreign_key_columns
+ self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
+
+ def _check_conflicts(self):
+ """Test that this relationship is legal, warn about
+ inheritance conflicts."""
+
+ if not self.is_primary() \
+ and not mapperlib.class_mapper(
+ self.parent.class_,
+ configure=False).has_property(self.key):
+ raise sa_exc.ArgumentError("Attempting to assign a new "
+ "relationship '%s' to a non-primary mapper on "
+ "class '%s'. New relationships can only be added "
+ "to the primary mapper, i.e. the very first mapper "
+ "created for class '%s' " % (self.key,
+ self.parent.class_.__name__,
+ self.parent.class_.__name__))
+
+ # check for conflicting relationship() on superclass
+ if not self.parent.concrete:
+ for inheriting in self.parent.iterate_to_root():
+ if inheriting is not self.parent \
+ and inheriting.has_property(self.key):
+ util.warn("Warning: relationship '%s' on mapper "
+ "'%s' supersedes the same relationship "
+ "on inherited mapper '%s'; this can "
+ "cause dependency issues during flush"
+ % (self.key, self.parent, inheriting))
+
+ def _get_cascade(self):
+ """Return the current cascade setting for this
+ :class:`.RelationshipProperty`.
+ """
+ return self._cascade
+
+ def _set_cascade(self, cascade):
+ cascade = CascadeOptions(cascade)
+ if 'mapper' in self.__dict__:
+ self._check_cascade_settings(cascade)
+ self._cascade = cascade
+
+ if self._dependency_processor:
+ self._dependency_processor.cascade = cascade
+
+ cascade = property(_get_cascade, _set_cascade)
+
+ def _check_cascade_settings(self, cascade):
+ if cascade.delete_orphan and not self.single_parent \
+ and (self.direction is MANYTOMANY or self.direction
+ is MANYTOONE):
+ raise sa_exc.ArgumentError(
+ 'On %s, delete-orphan cascade is not supported '
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
+ if self.direction is MANYTOONE and self.passive_deletes:
+ util.warn("On %s, 'passive_deletes' is normally configured "
+ "on one-to-many, one-to-one, many-to-many "
+ "relationships only."
+ % self)
+
+ if self.passive_deletes == 'all' and \
+ ("delete" in cascade or
+ "delete-orphan" in cascade):
+ raise sa_exc.ArgumentError(
+ "On %s, can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade" % self)
+
+ if cascade.delete_orphan:
+ self.mapper.primary_mapper()._delete_orphans.append(
+ (self.key, self.parent.class_)
+ )
+
+ def _columns_are_mapped(self, *cols):
+ """Return True if all columns in the given collection are
+ mapped by the tables referenced by this :class:`.Relationship`.
+
+ """
+ for c in cols:
+ if self.secondary is not None \
+ and self.secondary.c.contains_column(c):
+ continue
+ if not self.parent.mapped_table.c.contains_column(c) and \
+ not self.target.c.contains_column(c):
+ return False
+ return True
+
+ def _generate_backref(self):
+ """Interpret the 'backref' instruction to create a
+ :func:`.relationship` complementary to this one."""
+
+ if not self.is_primary():
+ return
+ if self.backref is not None and not self.back_populates:
+ if isinstance(self.backref, util.string_types):
+ backref_key, kwargs = self.backref, {}
+ else:
+ backref_key, kwargs = self.backref
+ mapper = self.mapper.primary_mapper()
+
+ check = set(mapper.iterate_to_root()).\
+ union(mapper.self_and_descendants)
+ for m in check:
+ if m.has_property(backref_key):
+ raise sa_exc.ArgumentError("Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" % (backref_key,
+ self, m))
+
+ # determine primaryjoin/secondaryjoin for the
+ # backref. Use the one we had, so that
+ # a custom join doesn't have to be specified in
+ # both directions.
+ if self.secondary is not None:
+ # for many to many, just switch primaryjoin/
+ # secondaryjoin. use the annotated
+ # pj/sj on the _join_condition.
+ pj = kwargs.pop('primaryjoin',
+ self._join_condition.secondaryjoin_minus_local)
+ sj = kwargs.pop('secondaryjoin',
+ self._join_condition.primaryjoin_minus_local)
+ else:
+ pj = kwargs.pop('primaryjoin',
+ self._join_condition.primaryjoin_reverse_remote)
+ sj = kwargs.pop('secondaryjoin', None)
+ if sj:
+ raise sa_exc.InvalidRequestError(
+ "Can't assign 'secondaryjoin' on a backref "
+ "against a non-secondary relationship."
+ )
+
+ foreign_keys = kwargs.pop('foreign_keys',
+ self._user_defined_foreign_keys)
+ parent = self.parent.primary_mapper()
+ kwargs.setdefault('viewonly', self.viewonly)
+ kwargs.setdefault('post_update', self.post_update)
+ kwargs.setdefault('passive_updates', self.passive_updates)
+ self.back_populates = backref_key
+ relationship = RelationshipProperty(
+ parent, self.secondary,
+ pj, sj,
+ foreign_keys=foreign_keys,
+ back_populates=self.key,
+ **kwargs)
+ mapper._configure_property(backref_key, relationship)
+
+ if self.back_populates:
+ self._add_reverse_property(self.back_populates)
+
+ def _post_init(self):
+ if self.uselist is None:
+ self.uselist = self.direction is not MANYTOONE
+ if not self.viewonly:
+ self._dependency_processor = \
+ dependency.DependencyProcessor.from_relationship(self)
+
+ @util.memoized_property
+ def _use_get(self):
+ """memoize the 'use_get' attribute of this RelationshipLoader's
+ lazyloader."""
+
+ strategy = self._lazy_strategy
+ return strategy.use_get
+
+ @util.memoized_property
+ def _is_self_referential(self):
+ return self.mapper.common_parent(self.parent)
+
+ def _create_joins(self, source_polymorphic=False,
+ source_selectable=None, dest_polymorphic=False,
+ dest_selectable=None, of_type=None):
+ if source_selectable is None:
+ if source_polymorphic and self.parent.with_polymorphic:
+ source_selectable = self.parent._with_polymorphic_selectable
+
+ aliased = False
+ if dest_selectable is None:
+ if dest_polymorphic and self.mapper.with_polymorphic:
+ dest_selectable = self.mapper._with_polymorphic_selectable
+ aliased = True
+ else:
+ dest_selectable = self.mapper.mapped_table
+
+ if self._is_self_referential and source_selectable is None:
+ dest_selectable = dest_selectable.alias()
+ aliased = True
+ else:
+ aliased = True
+
+ dest_mapper = of_type or self.mapper
+
+ single_crit = dest_mapper._single_table_criterion
+ aliased = aliased or (source_selectable is not None)
+
+ primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \
+ self._join_condition.join_targets(
+ source_selectable, dest_selectable, aliased, single_crit
+ )
+ if source_selectable is None:
+ source_selectable = self.parent.local_table
+ if dest_selectable is None:
+ dest_selectable = self.mapper.local_table
+ return (primaryjoin, secondaryjoin, source_selectable,
+ dest_selectable, secondary, target_adapter)
+
def _annotate_columns(element, annotations):
def clone(elem):
if isinstance(elem, expression.ColumnClause):
@@ -901,4 +2507,4 @@ class _ColInAnnotations(object):
self.name = name
def __call__(self, c):
- return self.name in c._annotations \ No newline at end of file
+ return self.name in c._annotations
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index 6393b74a7..c1f8f319f 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -1,5 +1,5 @@
# orm/scoping.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -163,7 +163,7 @@ def makeprop(name):
return property(get, set)
for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
- 'is_active', 'autoflush', 'no_autoflush'):
+ 'is_active', 'autoflush', 'no_autoflush', 'info'):
setattr(scoped_session, prop, makeprop(prop))
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 5a4486eef..c10a0efc9 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -1,5 +1,5 @@
# orm/session.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,25 +8,40 @@
import weakref
-from .. import util, sql, engine, exc as sa_exc, event
+from .. import util, sql, engine, exc as sa_exc
from ..sql import util as sql_util, expression
from . import (
- SessionExtension, attributes, exc, query, util as orm_util,
+ SessionExtension, attributes, exc, query,
loading, identity
)
-from .util import (
+from ..inspection import inspect
+from .base import (
object_mapper, class_mapper,
_class_to_mapper, _state_mapper, object_state,
- _none_set
+ _none_set, state_str, instance_str
)
from .unitofwork import UOWTransaction
-from .mapper import Mapper
-from .events import SessionEvents
-statelib = util.importlater("sqlalchemy.orm", "state")
+from . import state as statelib
import sys
__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker']
+_sessions = weakref.WeakValueDictionary()
+"""Weak-referencing dictionary of :class:`.Session` objects.
+"""
+
+def _state_session(state):
+ """Given an :class:`.InstanceState`, return the :class:`.Session`
+ associated, if any.
+ """
+ if state.session_id:
+ try:
+ return _sessions[state.session_id]
+ except KeyError:
+ pass
+ return None
+
+
class _SessionClassMethods(object):
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@@ -39,7 +54,8 @@ class _SessionClassMethods(object):
sess.close()
@classmethod
- def identity_key(cls, *args, **kwargs):
+ @util.dependencies("sqlalchemy.orm.util")
+ def identity_key(cls, orm_util, *args, **kwargs):
"""Return an identity key.
This is an alias of :func:`.util.identity_key`.
@@ -469,6 +485,7 @@ class Session(_SessionClassMethods):
_enable_transaction_accounting=True,
autocommit=False, twophase=False,
weak_identity_map=True, binds=None, extension=None,
+ info=None,
query_cls=query.Query):
"""Construct a new Session.
@@ -557,6 +574,14 @@ class Session(_SessionClassMethods):
flush events, as well as a post-rollback event. **Deprecated.**
Please see :class:`.SessionEvents`.
+ :param info: optional dictionary of arbitrary data to be associated
+ with this :class:`.Session`. Is available via the :attr:`.Session.info`
+ attribute. Note the dictionary is copied at construction time so
+ that modifications to the per-:class:`.Session` dictionary will be local
+ to that :class:`.Session`.
+
+ .. versionadded:: 0.9.0
+
:param query_cls: Class which should be used to create new Query
objects, as returned by the ``query()`` method. Defaults to
:class:`~sqlalchemy.orm.query.Query`.
@@ -599,6 +624,8 @@ class Session(_SessionClassMethods):
self._enable_transaction_accounting = _enable_transaction_accounting
self.twophase = twophase
self._query_cls = query_cls
+ if info:
+ self.info.update(info)
if extension:
for ext in util.to_list(extension):
@@ -606,22 +633,39 @@ class Session(_SessionClassMethods):
if binds is not None:
for mapperortable, bind in binds.items():
- if isinstance(mapperortable, (type, Mapper)):
+ insp = inspect(mapperortable)
+ if insp.is_selectable:
+ self.bind_table(mapperortable, bind)
+ elif insp.is_mapper:
self.bind_mapper(mapperortable, bind)
else:
- self.bind_table(mapperortable, bind)
+ assert False
+
if not self.autocommit:
self.begin()
_sessions[self.hash_key] = self
- dispatch = event.dispatcher(SessionEvents)
-
connection_callable = None
transaction = None
"""The current active or inactive :class:`.SessionTransaction`."""
+ @util.memoized_property
+ def info(self):
+ """A user-modifiable dictionary.
+
+        The initial value of this dictionary can be populated using the
+ ``info`` argument to the :class:`.Session` constructor or
+ :class:`.sessionmaker` constructor or factory methods. The dictionary
+ here is always local to this :class:`.Session` and can be modified
+ independently of all other :class:`.Session` objects.
+
+ .. versionadded:: 0.9.0
+
+ """
+ return {}
+
def begin(self, subtransactions=False, nested=False):
"""Begin a transaction on this Session.
@@ -779,7 +823,7 @@ class Session(_SessionClassMethods):
etc.) which will be used to locate a bind, if a bind
cannot otherwise be identified.
- :param close_with_result: Passed to :meth:`Engine.connect`, indicating
+ :param close_with_result: Passed to :meth:`.Engine.connect`, indicating
the :class:`.Connection` should be considered "single use",
automatically closing when the first result set is closed. This
flag only has an effect if this :class:`.Session` is configured with
@@ -1136,7 +1180,18 @@ class Session(_SessionClassMethods):
def _autoflush(self):
if self.autoflush and not self._flushing:
- self.flush()
+ try:
+ self.flush()
+ except sa_exc.StatementError as e:
+ # note we are reraising StatementError as opposed to
+ # raising FlushError with "chaining" to remain compatible
+ # with code that catches StatementError, IntegrityError,
+ # etc.
+ e.add_detail(
+ "raised as a result of Query-invoked autoflush; "
+ "consider using a session.no_autoflush block if this "
+ "flush is occuring prematurely")
+ util.raise_from_cause(e)
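The added detail steers users toward the ``Session.no_autoflush`` context manager; a minimal sketch, with ``session`` and ``Thing`` standing in for any live session and mapped class::

    with session.no_autoflush:
        # queries issued inside this block will not trigger autoflush
        obj = session.query(Thing).filter_by(name="x").first()
        session.add(Thing(name="x"))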
def refresh(self, instance, attribute_names=None, lockmode=None):
"""Expire and refresh the attributes on the given instance.
@@ -1180,7 +1235,7 @@ class Session(_SessionClassMethods):
only_load_props=attribute_names) is None:
raise sa_exc.InvalidRequestError(
"Could not refresh instance '%s'" %
- orm_util.instance_str(instance))
+ instance_str(instance))
def expire_all(self):
"""Expires all persistent instances within this Session.
@@ -1291,7 +1346,7 @@ class Session(_SessionClassMethods):
if state.session_id is not self.hash_key:
raise sa_exc.InvalidRequestError(
"Instance %s is not present in this Session" %
- orm_util.state_str(state))
+ state_str(state))
cascaded = list(state.manager.mapper.cascade_iterator(
'expunge', state))
@@ -1331,7 +1386,7 @@ class Session(_SessionClassMethods):
"expect these generated values. Ensure also that "
"this flush() is not occurring at an inappropriate "
"time, such aswithin a load() event."
- % orm_util.state_str(state)
+ % state_str(state)
)
if state.key is None:
@@ -1434,7 +1489,7 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
- orm_util.state_str(state))
+ state_str(state))
if state in self._deleted:
return
@@ -1598,7 +1653,7 @@ class Session(_SessionClassMethods):
"merging to update the most recent version."
% (
existing_version,
- orm_util.state_str(merged_state),
+ state_str(merged_state),
merged_version
))
@@ -1622,13 +1677,13 @@ class Session(_SessionClassMethods):
if not self.identity_map.contains_state(state):
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persistent within this Session" %
- orm_util.state_str(state))
+ state_str(state))
def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
"Object '%s' already has an identity - it can't be registered "
- "as pending" % orm_util.state_str(state))
+ "as pending" % state_str(state))
self._before_attach(state)
if state not in self._new:
@@ -1644,13 +1699,13 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
- orm_util.state_str(state))
+ state_str(state))
if state.deleted:
raise sa_exc.InvalidRequestError(
"Instance '%s' has been deleted. Use the make_transient() "
"function to send this object back to the transient state." %
- orm_util.state_str(state)
+ state_str(state)
)
self._before_attach(state)
self._deleted.pop(state, None)
@@ -1703,8 +1758,8 @@ class Session(_SessionClassMethods):
may not fire off a backref event, if the effective value
is what was already loaded from a foreign-key-holding value.
- The :meth:`.Session.enable_relationship_loading` method supersedes
- the ``load_on_pending`` flag on :func:`.relationship`. Unlike
+ The :meth:`.Session.enable_relationship_loading` method is
+ similar to the ``load_on_pending`` flag on :func:`.relationship`. Unlike
that flag, :meth:`.Session.enable_relationship_loading` allows
an object to remain transient while still being able to load
related items.
@@ -1721,6 +1776,12 @@ class Session(_SessionClassMethods):
.. versionadded:: 0.8
+ .. seealso::
+
+ ``load_on_pending`` at :func:`.relationship` - this flag
+ allows per-relationship loading of many-to-ones on items that
+ are pending.
+
"""
state = attributes.instance_state(obj)
self._attach(state, include_before=True)
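A hedged sketch of the behavior described above, assuming hypothetical ``Order``/``User`` mappings where ``Order.user_id`` is a foreign key to ``User`` and ``sess`` is an existing :class:`.Session`::

    o = Order(user_id=5)                 # transient, FK set manually
    sess.enable_relationship_loading(o)
    print(o.user)                        # many-to-one can now lazy-load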
@@ -1738,14 +1799,14 @@ class Session(_SessionClassMethods):
raise sa_exc.InvalidRequestError("Can't attach instance "
"%s; another instance with key %s is already "
"present in this session."
- % (orm_util.state_str(state), state.key))
+ % (state_str(state), state.key))
if state.session_id and \
state.session_id is not self.hash_key and \
state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
- "(this is '%s')" % (orm_util.state_str(state),
+ "(this is '%s')" % (state_str(state),
state.session_id, self.hash_key))
if state.session_id != self.hash_key:
@@ -2090,9 +2151,10 @@ class Session(_SessionClassMethods):
access to the full set of persistent objects (i.e., those
that have row identity) currently in the session.
- See also:
+ .. seealso::
- :func:`.identity_key` - operations involving identity keys.
+ :func:`.identity_key` - helper function to produce the keys used
+ in this dictionary.
"""
@@ -2196,7 +2258,8 @@ class sessionmaker(_SessionClassMethods):
def __init__(self, bind=None, class_=Session, autoflush=True,
autocommit=False,
- expire_on_commit=True, **kw):
+ expire_on_commit=True,
+ info=None, **kw):
"""Construct a new :class:`.sessionmaker`.
All arguments here except for ``class_`` correspond to arguments
@@ -2213,6 +2276,13 @@ class sessionmaker(_SessionClassMethods):
:class:`.Session` objects.
:param expire_on_commit=True: the expire_on_commit setting to use
with newly created :class:`.Session` objects.
+ :param info: optional dictionary of information that will be available
+ via :attr:`.Session.info`. Note this dictionary is *updated*, not
+ replaced, when the ``info`` parameter is specified to the specific
+ :class:`.Session` construction operation.
+
+ .. versionadded:: 0.9.0
+
:param \**kw: all other keyword arguments are passed to the constructor
of newly created :class:`.Session` objects.
@@ -2221,6 +2291,8 @@ class sessionmaker(_SessionClassMethods):
kw['autoflush'] = autoflush
kw['autocommit'] = autocommit
kw['expire_on_commit'] = expire_on_commit
+ if info is not None:
+ kw['info'] = info
self.kw = kw
# make our own subclass of the given class, so that
# events can be associated with it specifically.
@@ -2238,7 +2310,12 @@ class sessionmaker(_SessionClassMethods):
"""
for k, v in self.kw.items():
- local_kw.setdefault(k, v)
+ if k == 'info' and 'info' in local_kw:
+ d = v.copy()
+ d.update(local_kw['info'])
+ local_kw['info'] = d
+ else:
+ local_kw.setdefault(k, v)
return self.class_(**local_kw)
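Given the merging above, factory-level ``info`` keys are retained while per-call keys take precedence; an illustrative sketch::

    from sqlalchemy.orm import sessionmaker

    maker = sessionmaker(info={"app": "billing", "env": "dev"})
    sess = maker(info={"env": "prod"})
    assert sess.info == {"app": "billing", "env": "prod"}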
def configure(self, **new_kw):
@@ -2253,13 +2330,12 @@ class sessionmaker(_SessionClassMethods):
self.kw.update(new_kw)
def __repr__(self):
- return "%s(class_=%r%s)" % (
+ return "%s(class_=%r,%s)" % (
self.__class__.__name__,
self.class_.__name__,
", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
)
-_sessions = weakref.WeakValueDictionary()
def make_transient(instance):
@@ -2304,12 +2380,4 @@ def object_session(instance):
raise exc.UnmappedInstanceError(instance)
-def _state_session(state):
- if state.session_id:
- try:
- return _sessions[state.session_id]
- except KeyError:
- pass
- return None
-
_new_sessionid = util.counter()
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index c479d880d..9712dd055 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -1,5 +1,5 @@
# orm/state.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,16 +13,11 @@ defines a large part of the ORM's interactivity.
import weakref
from .. import util
-from . import exc as orm_exc, attributes, util as orm_util, interfaces
-from .attributes import (
- PASSIVE_NO_RESULT,
- SQL_OK, NEVER_SET, ATTR_WAS_SET, NO_VALUE,\
- PASSIVE_NO_INITIALIZE
- )
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-instrumentation = util.importlater("sqlalchemy.orm", "instrumentation")
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-
+from . import exc as orm_exc, interfaces
+from .path_registry import PathRegistry
+from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
+ NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
+from . import base
class InstanceState(interfaces._InspectionAttr):
"""tracks state information at the instance level."""
@@ -89,15 +84,16 @@ class InstanceState(interfaces._InspectionAttr):
not self._attached
@property
- def _attached(self):
+ @util.dependencies("sqlalchemy.orm.session")
+ def _attached(self, sessionlib):
return self.session_id is not None and \
self.session_id in sessionlib._sessions
@property
- def session(self):
+ @util.dependencies("sqlalchemy.orm.session")
+ def session(self, sessionlib):
"""Return the owning :class:`.Session` for this instance,
or ``None`` if none available."""
-
return sessionlib._state_session(self)
@property
@@ -186,7 +182,7 @@ class InstanceState(interfaces._InspectionAttr):
def dict(self):
o = self.obj()
if o is not None:
- return attributes.instance_dict(o)
+ return base.instance_dict(o)
else:
return {}
@@ -214,8 +210,8 @@ class InstanceState(interfaces._InspectionAttr):
return self._pending_mutations[key]
def __getstate__(self):
- d = {'instance': self.obj()}
- d.update(
+ state_dict = {'instance': self.obj()}
+ state_dict.update(
(k, self.__dict__[k]) for k in (
'committed_state', '_pending_mutations', 'modified', 'expired',
'callables', 'key', 'parents', 'load_options',
@@ -223,14 +219,14 @@ class InstanceState(interfaces._InspectionAttr):
) if k in self.__dict__
)
if self.load_path:
- d['load_path'] = self.load_path.serialize()
+ state_dict['load_path'] = self.load_path.serialize()
- self.manager.dispatch.pickle(self, d)
+ state_dict['manager'] = self.manager._serialize(self, state_dict)
- return d
+ return state_dict
- def __setstate__(self, state):
- inst = state['instance']
+ def __setstate__(self, state_dict):
+ inst = state_dict['instance']
if inst is not None:
self.obj = weakref.ref(inst, self._cleanup)
self.class_ = inst.__class__
@@ -239,42 +235,26 @@ class InstanceState(interfaces._InspectionAttr):
# due to storage of state in "parents". "class_"
# also new.
self.obj = None
- self.class_ = state['class_']
- self.manager = manager = instrumentation.manager_of_class(self.class_)
- if manager is None:
- raise orm_exc.UnmappedInstanceError(
- inst,
- "Cannot deserialize object of type %r - "
- "no mapper() has "
- "been configured for this class within the current "
- "Python process!" %
- self.class_)
- elif manager.is_mapped and not manager.mapper.configured:
- mapperlib.configure_mappers()
-
- self.committed_state = state.get('committed_state', {})
- self._pending_mutations = state.get('_pending_mutations', {})
- self.parents = state.get('parents', {})
- self.modified = state.get('modified', False)
- self.expired = state.get('expired', False)
- self.callables = state.get('callables', {})
+ self.class_ = state_dict['class_']
+
+ self.committed_state = state_dict.get('committed_state', {})
+ self._pending_mutations = state_dict.get('_pending_mutations', {})
+ self.parents = state_dict.get('parents', {})
+ self.modified = state_dict.get('modified', False)
+ self.expired = state_dict.get('expired', False)
+ self.callables = state_dict.get('callables', {})
self.__dict__.update([
- (k, state[k]) for k in (
+ (k, state_dict[k]) for k in (
'key', 'load_options',
- ) if k in state
+ ) if k in state_dict
])
- if 'load_path' in state:
- self.load_path = orm_util.PathRegistry.\
- deserialize(state['load_path'])
+ if 'load_path' in state_dict:
+ self.load_path = PathRegistry.\
+ deserialize(state_dict['load_path'])
- # setup _sa_instance_state ahead of time so that
- # unpickle events can access the object normally.
- # see [ticket:2362]
- if inst is not None:
- manager.setup_instance(inst, self)
- manager.dispatch.unpickle(self, state)
+ state_dict['manager'](self, inst, state_dict)
def _initialize(self, key):
"""Set this attribute to an empty value or collection,
@@ -413,6 +393,13 @@ class InstanceState(interfaces._InspectionAttr):
difference(self.dict)
@property
+ def _unloaded_non_object(self):
+ return self.unloaded.intersection(
+ attr for attr in self.manager
+ if self.manager[attr].impl.accepts_scalar_loader
+ )
+
+ @property
def expired_attributes(self):
"""Return the set of keys which are 'expired' to be loaded by
the manager's deferred scalar loader, assuming no pending
@@ -428,6 +415,8 @@ class InstanceState(interfaces._InspectionAttr):
return None
def _modified_event(self, dict_, attr, previous, collection=False):
+ if not attr.send_modified_events:
+ return
if attr.key not in self.committed_state:
if collection:
if previous is NEVER_SET:
@@ -461,7 +450,7 @@ class InstanceState(interfaces._InspectionAttr):
"collected."
% (
self.manager[attr.key],
- orm_util.state_class_str(self)
+ base.state_class_str(self)
))
self.modified = True
@@ -527,13 +516,13 @@ class AttributeState(object):
to a particular attribute on a particular mapped object.
The :class:`.AttributeState` object is accessed
- via the :attr:`.InstanceState.attr` collection
+ via the :attr:`.InstanceState.attrs` collection
of a particular :class:`.InstanceState`::
from sqlalchemy import inspect
insp = inspect(some_mapped_object)
- attr_state = insp.attr.some_attribute
+ attr_state = insp.attrs.some_attribute
"""
@@ -568,10 +557,40 @@ class AttributeState(object):
"""Return the current pre-flush change history for
this attribute, via the :class:`.History` interface.
+ This method will **not** emit loader callables if the value of the
+ attribute is unloaded.
+
+ .. seealso::
+
+ :meth:`.AttributeState.load_history` - retrieve history
+ using loader callables if the value is not locally present.
+
+ :func:`.attributes.get_history` - underlying function
+
"""
return self.state.get_history(self.key,
PASSIVE_NO_INITIALIZE)
+ def load_history(self):
+ """Return the current pre-flush change history for
+ this attribute, via the :class:`.History` interface.
+
+ This method **will** emit loader callables if the value of the
+ attribute is unloaded.
+
+ .. seealso::
+
+ :attr:`.AttributeState.history`
+
+ :func:`.attributes.get_history` - underlying function
+
+ .. versionadded:: 0.9.0
+
+ """
+ return self.state.get_history(self.key,
+ PASSIVE_OFF ^ INIT_OK)
+
+
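The distinction between the two accessors above can be sketched as follows; ``some_object`` and ``email_address`` are placeholder names for a mapped instance and one of its attributes::

    from sqlalchemy import inspect

    insp = inspect(some_object)          # 'some_object' is hypothetical
    attr_state = insp.attrs.email_address

    # does not emit loader callables; may show nothing if unloaded
    print(attr_state.history)

    # will emit a lazy/deferred load if the value is not yet present
    print(attr_state.load_history())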
class PendingCollection(object):
"""A writable placeholder for an unloaded collection.
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index aa46d06a8..033e3d064 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -1,5 +1,5 @@
# orm/strategies.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,14 +16,13 @@ from . import (
)
from .state import InstanceState
from .util import _none_set
+from . import properties
from .interfaces import (
- LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
- StrategizedProperty
+ LoaderStrategy, StrategizedProperty
)
from .session import _state_session
import itertools
-
def _register_attribute(strategy, mapper, useobject,
compare_function=None,
typecallable=None,
@@ -45,10 +44,10 @@ def _register_attribute(strategy, mapper, useobject,
listen_hooks.append(single_parent_validator)
if prop.key in prop.parent.validators:
- fn, include_removes = prop.parent.validators[prop.key]
+ fn, opts = prop.parent.validators[prop.key]
listen_hooks.append(
lambda desc, prop: orm_util._validator_events(desc,
- prop.key, fn, include_removes)
+ prop.key, fn, **opts)
)
if useobject:
@@ -81,6 +80,7 @@ def _register_attribute(strategy, mapper, useobject,
callable_=callable_,
active_history=active_history,
impl_class=impl_class,
+ send_modified_events=not useobject or not prop.viewonly,
doc=prop.doc,
**kw
)
@@ -88,7 +88,7 @@ def _register_attribute(strategy, mapper, useobject,
for hook in listen_hooks:
hook(desc, prop)
-
+@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
class UninstrumentedColumnLoader(LoaderStrategy):
"""Represent the a non-instrumented MapperProperty.
@@ -100,17 +100,19 @@ class UninstrumentedColumnLoader(LoaderStrategy):
super(UninstrumentedColumnLoader, self).__init__(parent)
self.columns = self.parent_property.columns
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, loadopt, adapter,
column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
column_collection.append(c)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
return None, None, None
+@log.class_logger
+@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
class ColumnLoader(LoaderStrategy):
"""Provide loading behavior for a :class:`.ColumnProperty`."""
@@ -119,7 +121,7 @@ class ColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')
- def setup_query(self, context, entity, path,
+ def setup_query(self, context, entity, path, loadopt,
adapter, column_collection, **kwargs):
for c in self.columns:
if adapter:
@@ -131,7 +133,8 @@ class ColumnLoader(LoaderStrategy):
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
active_history = self.parent_property.active_history or \
- self.columns[0].primary_key
+ self.columns[0].primary_key or \
+ mapper.version_id_col in set(self.columns)
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@@ -139,7 +142,7 @@ class ColumnLoader(LoaderStrategy):
)
def create_row_processor(self, context, path,
- mapper, row, adapter):
+ loadopt, mapper, row, adapter):
key = self.key
# look through list of columns represented here
# to see which, if any, is present in the row.
@@ -156,9 +159,9 @@ class ColumnLoader(LoaderStrategy):
return expire_for_non_present_col, None, None
-log.class_logger(ColumnLoader)
-
+@log.class_logger
+@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
class DeferredColumnLoader(LoaderStrategy):
"""Provide loading behavior for a deferred :class:`.ColumnProperty`."""
@@ -170,16 +173,16 @@ class DeferredColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.group = self.parent_property.group
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
col = self.columns[0]
if adapter:
col = adapter.columns[col]
key = self.key
if col in row:
- return self.parent_property._get_strategy(ColumnLoader).\
+ return self.parent_property._get_strategy_by_cls(ColumnLoader).\
create_row_processor(
- context, path, mapper, row, adapter)
+ context, path, loadopt, mapper, row, adapter)
elif not self.is_class_level:
set_deferred_for_local_state = InstanceState._row_processor(
@@ -202,15 +205,15 @@ class DeferredColumnLoader(LoaderStrategy):
expire_missing=False
)
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, loadopt, adapter,
only_load_props=None, **kwargs):
if (
- self.group is not None and
- context.attributes.get(('undefer', self.group), False)
+ loadopt and self.group and
+ loadopt.local_opts.get('undefer_group', False) == self.group
) or (only_load_props and self.key in only_load_props):
- self.parent_property._get_strategy(ColumnLoader).\
+ self.parent_property._get_strategy_by_cls(ColumnLoader).\
setup_query(context, entity,
- path, adapter, **kwargs)
+ path, loadopt, adapter, **kwargs)
def _load_for_state(self, state, passive):
if not state.key:
@@ -251,8 +254,6 @@ class DeferredColumnLoader(LoaderStrategy):
return attributes.ATTR_WAS_SET
-log.class_logger(DeferredColumnLoader)
-
class LoadDeferredColumns(object):
"""serializable loader object used by DeferredColumnLoader"""
@@ -269,29 +270,6 @@ class LoadDeferredColumns(object):
return strategy._load_for_state(state, passive)
-class DeferredOption(StrategizedOption):
- propagate_to_loaders = True
-
- def __init__(self, key, defer=False):
- super(DeferredOption, self).__init__(key)
- self.defer = defer
-
- def get_strategy_class(self):
- if self.defer:
- return DeferredColumnLoader
- else:
- return ColumnLoader
-
-
-class UndeferGroupOption(MapperOption):
- propagate_to_loaders = True
-
- def __init__(self, group):
- self.group = group
-
- def process_query(self, query):
- query._attributes[("undefer", self.group)] = True
-
class AbstractRelationshipLoader(LoaderStrategy):
"""LoaderStratgies which deal with related objects."""
@@ -304,6 +282,9 @@ class AbstractRelationshipLoader(LoaderStrategy):
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="noload")
+@properties.RelationshipProperty.strategy_for(lazy=None)
class NoLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=None".
@@ -319,15 +300,16 @@ class NoLoader(AbstractRelationshipLoader):
typecallable=self.parent_property.collection_class,
)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
return invoke_no_load, None, None
-log.class_logger(NoLoader)
-
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy=True)
+@properties.RelationshipProperty.strategy_for(lazy="select")
class LazyLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=True", that is loads when first accessed.
@@ -350,7 +332,6 @@ class LazyLoader(AbstractRelationshipLoader):
# determine if our "lazywhere" clause is the same as the mapper's
# get() clause. then we can just use mapper.get()
- #from sqlalchemy.orm import query
self.use_get = not self.uselist and \
self.mapper._get_clause[0].compare(
self._lazywhere,
@@ -542,7 +523,8 @@ class LazyLoader(AbstractRelationshipLoader):
for pk in self.mapper.primary_key
]
- def _emit_lazyload(self, session, state, ident_key, passive):
+ @util.dependencies("sqlalchemy.orm.strategy_options")
+ def _emit_lazyload(self, strategy_options, session, state, ident_key, passive):
q = session.query(self.mapper)._adapt_all_clauses()
q = q._with_invoke_all_eagers(False)
@@ -571,7 +553,7 @@ class LazyLoader(AbstractRelationshipLoader):
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
not isinstance(rev.strategy, LazyLoader):
- q = q.options(EagerLazyOption((rev.key,), lazy='select'))
+ q = q.options(strategy_options.Load(rev.parent).lazyload(rev.key))
lazy_clause = self.lazy_clause(state, passive=passive)
@@ -598,7 +580,7 @@ class LazyLoader(AbstractRelationshipLoader):
else:
return None
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
key = self.key
if not self.is_class_level:
@@ -630,8 +612,6 @@ class LazyLoader(AbstractRelationshipLoader):
return reset_for_lazy_callable, None, None
-log.class_logger(LazyLoader)
-
class LoadLazyAttribute(object):
"""serializable loader object used by LazyLoader"""
@@ -648,18 +628,19 @@ class LoadLazyAttribute(object):
return strategy._load_for_state(state, passive)
+@properties.RelationshipProperty.strategy_for(lazy="immediate")
class ImmediateLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
init_class_attribute(mapper)
def setup_query(self, context, entity,
- path, adapter, column_collection=None,
+ path, loadopt, adapter, column_collection=None,
parentmapper=None, **kwargs):
pass
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
@@ -667,6 +648,8 @@ class ImmediateLoader(AbstractRelationshipLoader):
return None, None, load_immediate
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="subquery")
class SubqueryLoader(AbstractRelationshipLoader):
def __init__(self, parent):
super(SubqueryLoader, self).__init__(parent)
@@ -674,11 +657,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
init_class_attribute(mapper)
def setup_query(self, context, entity,
- path, adapter,
+ path, loadopt, adapter,
column_collection=None,
parentmapper=None, **kwargs):
@@ -703,14 +686,14 @@ class SubqueryLoader(AbstractRelationshipLoader):
# if not via query option, check for
# a cycle
- if not path.contains(context.attributes, "loaderstrategy"):
+ if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
elif subq_path.contains_mapper(self.mapper):
return
- subq_mapper, leftmost_mapper, leftmost_attr = \
+ subq_mapper, leftmost_mapper, leftmost_attr, leftmost_relationship = \
self._get_leftmost(subq_path)
orig_query = context.attributes.get(
@@ -721,7 +704,8 @@ class SubqueryLoader(AbstractRelationshipLoader):
# produce a subquery from it.
left_alias = self._generate_from_original_query(
orig_query, leftmost_mapper,
- leftmost_attr, entity.mapper
+ leftmost_attr, leftmost_relationship,
+ entity.mapper
)
# generate another Query that will join the
@@ -770,11 +754,12 @@ class SubqueryLoader(AbstractRelationshipLoader):
leftmost_mapper._columntoproperty[c].class_attribute
for c in leftmost_cols
]
- return subq_mapper, leftmost_mapper, leftmost_attr
+ return subq_mapper, leftmost_mapper, leftmost_attr, leftmost_prop
def _generate_from_original_query(self,
orig_query, leftmost_mapper,
- leftmost_attr, entity_mapper
+ leftmost_attr, leftmost_relationship,
+ entity_mapper
):
# reformat the original query
# to look only for significant columns
@@ -785,8 +770,22 @@ class SubqueryLoader(AbstractRelationshipLoader):
if not q._from_obj and entity_mapper.isa(leftmost_mapper):
q._set_select_from([entity_mapper], False)
+ target_cols = q._adapt_col_list(leftmost_attr)
+
# select from the identity columns of the outer
- q._set_entities(q._adapt_col_list(leftmost_attr))
+ q._set_entities(target_cols)
+
+ distinct_target_key = leftmost_relationship.distinct_target_key
+
+ if distinct_target_key is True:
+ q._distinct = True
+ elif distinct_target_key is None:
+ # if target_cols refer to a non-primary key or only
+ # part of a composite primary key, set the q as distinct
+ for t in set(c.table for c in target_cols):
+ if not set(target_cols).issuperset(t.primary_key):
+ q._distinct = True
+ break
if q._order_by is False:
q._order_by = leftmost_mapper.order_by
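The ``distinct_target_key`` value consulted above comes from the same-named :func:`.relationship` flag; a hedged declarative sketch of configuring it::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # force DISTINCT on the innermost SELECT of the subquery load
        addresses = relationship("Address", lazy="subquery",
                                 distinct_target_key=True)

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))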
@@ -916,7 +915,36 @@ class SubqueryLoader(AbstractRelationshipLoader):
q = q.order_by(*eager_order_by)
return q
- def create_row_processor(self, context, path,
+ class _SubqCollections(object):
+ """Given a :class:`.Query` used to emit the "subquery load",
+ provide a load interface that executes the query at the
+ first moment a value is needed.
+
+ """
+ _data = None
+
+ def __init__(self, subq):
+ self.subq = subq
+
+ def get(self, key, default):
+ if self._data is None:
+ self._load()
+ return self._data.get(key, default)
+
+ def _load(self):
+ self._data = dict(
+ (k, [vv[0] for vv in v])
+ for k, v in itertools.groupby(
+ self.subq,
+ lambda x: x[1:]
+ )
+ )
+
+ def loader(self, state, dict_, row):
+ if self._data is None:
+ self._load()
+
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
@@ -938,12 +966,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# call upon create_row_processor again
collections = path.get(context.attributes, "collections")
if collections is None:
- collections = dict(
- (k, [v[0] for v in v])
- for k, v in itertools.groupby(
- subq,
- lambda x: x[1:]
- ))
+ collections = self._SubqCollections(subq)
path.set(context.attributes, 'collections', collections)
if adapter:
@@ -963,7 +986,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, collection)
- return load_collection_from_subq, None, None
+ return load_collection_from_subq, None, None, collections.loader
def _create_scalar_loader(self, collections, local_cols):
def load_scalar_from_subq(state, dict_, row):
@@ -981,12 +1004,13 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
- return load_scalar_from_subq, None, None
-
+ return load_scalar_from_subq, None, None, collections.loader
-log.class_logger(SubqueryLoader)
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="joined")
+@properties.RelationshipProperty.strategy_for(lazy=False)
class JoinedLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
using joined eager loading.
@@ -998,9 +1022,9 @@ class JoinedLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).init_class_attribute(mapper)
+ _get_strategy_by_cls(LazyLoader).init_class_attribute(mapper)
- def setup_query(self, context, entity, path, adapter, \
+ def setup_query(self, context, entity, path, loadopt, adapter, \
column_collection=None, parentmapper=None,
allow_innerjoin=True,
**kwargs):
@@ -1013,19 +1037,19 @@ class JoinedLoader(AbstractRelationshipLoader):
with_polymorphic = None
- user_defined_adapter = path.get(context.attributes,
- "user_defined_eager_row_processor",
- False)
+ user_defined_adapter = self._init_user_defined_eager_proc(
+ loadopt, context) if loadopt else False
+
if user_defined_adapter is not False:
clauses, adapter, add_to_collection = \
- self._get_user_defined_adapter(
+ self._setup_query_on_user_defined_adapter(
context, entity, path, adapter,
user_defined_adapter
)
else:
# if not via query option, check for
# a cycle
- if not path.contains(context.attributes, "loaderstrategy"):
+ if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
@@ -1034,7 +1058,7 @@ class JoinedLoader(AbstractRelationshipLoader):
clauses, adapter, add_to_collection, \
allow_innerjoin = self._generate_row_adapter(
- context, entity, path, adapter,
+ context, entity, path, loadopt, adapter,
column_collection, parentmapper, allow_innerjoin
)
@@ -1069,24 +1093,74 @@ class JoinedLoader(AbstractRelationshipLoader):
"when using joined loading with with_polymorphic()."
)
- def _get_user_defined_adapter(self, context, entity,
+ def _init_user_defined_eager_proc(self, loadopt, context):
+
+ # check if the opt applies at all
+ if "eager_from_alias" not in loadopt.local_opts:
+ # nope
+ return False
+
+ path = loadopt.path.parent
+
+ # the option applies. check if the "user_defined_eager_row_processor"
+ # has been built up.
+ adapter = path.get(context.attributes,
+ "user_defined_eager_row_processor", False)
+ if adapter is not False:
+ # just return it
+ return adapter
+
+ # otherwise figure it out.
+ alias = loadopt.local_opts["eager_from_alias"]
+
+ root_mapper, prop = path[-2:]
+
+ #from .mapper import Mapper
+ #from .interfaces import MapperProperty
+ #assert isinstance(root_mapper, Mapper)
+ #assert isinstance(prop, MapperProperty)
+
+ if alias is not None:
+ if isinstance(alias, str):
+ alias = prop.target.alias(alias)
+ adapter = sql_util.ColumnAdapter(alias,
+ equivalents=prop.mapper._equivalent_columns)
+ else:
+ if path.contains(context.attributes, "path_with_polymorphic"):
+ with_poly_info = path.get(context.attributes,
+ "path_with_polymorphic")
+ adapter = orm_util.ORMAdapter(
+ with_poly_info.entity,
+ equivalents=prop.mapper._equivalent_columns)
+ else:
+ adapter = context.query._polymorphic_adapters.get(prop.mapper, None)
+ path.set(context.attributes,
+ "user_defined_eager_row_processor",
+ adapter)
+
+ return adapter
+
+ def _setup_query_on_user_defined_adapter(self, context, entity,
path, adapter, user_defined_adapter):
- adapter = entity._get_entity_clauses(context.query, context)
- if adapter and user_defined_adapter:
- user_defined_adapter = user_defined_adapter.wrap(adapter)
- path.set(context.attributes, "user_defined_eager_row_processor",
- user_defined_adapter)
- elif adapter:
- user_defined_adapter = adapter
- path.set(context.attributes, "user_defined_eager_row_processor",
- user_defined_adapter)
+ # apply some more wrapping to the "user defined adapter"
+ # if we are setting up the query for SQL render.
+ adapter = entity._get_entity_clauses(context.query, context)
+
+ if adapter and user_defined_adapter:
+ user_defined_adapter = user_defined_adapter.wrap(adapter)
+ path.set(context.attributes, "user_defined_eager_row_processor",
+ user_defined_adapter)
+ elif adapter:
+ user_defined_adapter = adapter
+ path.set(context.attributes, "user_defined_eager_row_processor",
+ user_defined_adapter)
- add_to_collection = context.primary_columns
- return user_defined_adapter, adapter, add_to_collection
+ add_to_collection = context.primary_columns
+ return user_defined_adapter, adapter, add_to_collection
def _generate_row_adapter(self,
- context, entity, path, adapter,
+ context, entity, path, loadopt, adapter,
column_collection, parentmapper, allow_innerjoin
):
with_poly_info = path.get(
@@ -1109,9 +1183,12 @@ class JoinedLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
- innerjoin = allow_innerjoin and path.get(context.attributes,
- "eager_join_type",
- self.parent_property.innerjoin)
+ innerjoin = allow_innerjoin and (
+ loadopt.local_opts.get(
+ 'innerjoin', self.parent_property.innerjoin)
+ if loadopt is not None
+ else self.parent_property.innerjoin
+ )
if not innerjoin:
# if this is an outer join, all eager joins from
# here must also be outer joins
@@ -1202,7 +1279,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# by the Query propagates those columns outward.
# This has the effect
# of "undefering" those columns.
- for col in sql_util.find_columns(
+ for col in sql_util._find_columns(
self.parent_property.primaryjoin):
if localparent.mapped_table.c.contains_column(col):
if adapter:
@@ -1218,10 +1295,10 @@ class JoinedLoader(AbstractRelationshipLoader):
)
)
- def _create_eager_adapter(self, context, row, adapter, path):
- user_defined_adapter = path.get(context.attributes,
- "user_defined_eager_row_processor",
- False)
+ def _create_eager_adapter(self, context, row, adapter, path, loadopt):
+ user_defined_adapter = self._init_user_defined_eager_proc(
+ loadopt, context) if loadopt else False
+
if user_defined_adapter is not False:
decorator = user_defined_adapter
# user defined eagerloads are part of the "primary"
@@ -1244,7 +1321,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# processor, will cause a degrade to lazy
return False
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@@ -1256,7 +1333,7 @@ class JoinedLoader(AbstractRelationshipLoader):
eager_adapter = self._create_eager_adapter(
context,
row,
- adapter, our_path)
+ adapter, our_path, loadopt)
if eager_adapter is not False:
key = self.key
@@ -1273,9 +1350,9 @@ class JoinedLoader(AbstractRelationshipLoader):
return self._create_collection_loader(context, key, _instance)
else:
return self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
create_row_processor(
- context, path,
+ context, path, loadopt,
mapper, row, adapter)
def _create_collection_loader(self, context, key, _instance):
@@ -1336,102 +1413,6 @@ class JoinedLoader(AbstractRelationshipLoader):
None, load_scalar_from_joined_exec
-log.class_logger(JoinedLoader)
-
-
-class EagerLazyOption(StrategizedOption):
- def __init__(self, key, lazy=True, chained=False,
- propagate_to_loaders=True
- ):
- if isinstance(key[0], str) and key[0] == '*':
- if len(key) != 1:
- raise sa_exc.ArgumentError(
- "Wildcard identifier '*' must "
- "be specified alone.")
- key = ("relationship:*",)
- propagate_to_loaders = False
- super(EagerLazyOption, self).__init__(key)
- self.lazy = lazy
- self.chained = chained
- self.propagate_to_loaders = propagate_to_loaders
- self.strategy_cls = factory(lazy)
-
- def get_strategy_class(self):
- return self.strategy_cls
-
-_factory = {
- False: JoinedLoader,
- "joined": JoinedLoader,
- None: NoLoader,
- "noload": NoLoader,
- "select": LazyLoader,
- True: LazyLoader,
- "subquery": SubqueryLoader,
- "immediate": ImmediateLoader
-}
-
-
-def factory(identifier):
- return _factory.get(identifier, LazyLoader)
-
-
-class EagerJoinOption(PropertyOption):
-
- def __init__(self, key, innerjoin, chained=False):
- super(EagerJoinOption, self).__init__(key)
- self.innerjoin = innerjoin
- self.chained = chained
-
- def process_query_property(self, query, paths):
- if self.chained:
- for path in paths:
- path.set(query._attributes, "eager_join_type", self.innerjoin)
- else:
- paths[-1].set(query._attributes, "eager_join_type", self.innerjoin)
-
-
-class LoadEagerFromAliasOption(PropertyOption):
-
- def __init__(self, key, alias=None, chained=False):
- super(LoadEagerFromAliasOption, self).__init__(key)
- if alias is not None:
- if not isinstance(alias, str):
- info = inspect(alias)
- alias = info.selectable
- self.alias = alias
- self.chained = chained
-
- def process_query_property(self, query, paths):
- if self.chained:
- for path in paths[0:-1]:
- (root_mapper, prop) = path.path[-2:]
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- path.setdefault(query._attributes,
- "user_defined_eager_row_processor",
- adapter)
-
- root_mapper, prop = paths[-1].path[-2:]
- if self.alias is not None:
- if isinstance(self.alias, str):
- self.alias = prop.target.alias(self.alias)
- paths[-1].set(query._attributes,
- "user_defined_eager_row_processor",
- sql_util.ColumnAdapter(self.alias,
- equivalents=prop.mapper._equivalent_columns)
- )
- else:
- if paths[-1].contains(query._attributes, "path_with_polymorphic"):
- with_poly_info = paths[-1].get(query._attributes,
- "path_with_polymorphic")
- adapter = orm_util.ORMAdapter(
- with_poly_info.entity,
- equivalents=prop.mapper._equivalent_columns)
- else:
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- paths[-1].set(query._attributes,
- "user_defined_eager_row_processor",
- adapter)
-
def single_parent_validator(desc, prop):
def _do_check(state, value, oldvalue, initiator):
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
new file mode 100644
index 000000000..6e838ccb7
--- /dev/null
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -0,0 +1,924 @@
+# orm/strategy_options.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+
+"""
+
+from .interfaces import MapperOption, PropComparator
+from .. import util
+from ..sql.base import _generative, Generative
+from .. import exc as sa_exc, inspect
+from .base import _is_aliased_class, _class_to_mapper
+from . import util as orm_util
+from .path_registry import PathRegistry, TokenRegistry, \
+ _WILDCARD_TOKEN, _DEFAULT_TOKEN
+
+class Load(Generative, MapperOption):
+ """Represents loader options which modify the state of a
+ :class:`.Query` in order to affect how various mapped attributes are loaded.
+
+ .. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for
+ the existing system of loader options, including options such as
+ :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In particular,
+ it introduces a new method-chained system that replaces the need for
+ dot-separated paths as well as "_all()" options such as :func:`.orm.joinedload_all`.
+
+ A :class:`.Load` object can be used directly or indirectly. To use one
+ directly, instantiate given the parent class. This style of usage is
+ useful when dealing with a :class:`.Query` that has multiple entities,
+ or when producing a loader option that can be applied generically to
+ any style of query::
+
+ myopt = Load(MyClass).joinedload("widgets")
+
+ The above ``myopt`` can now be used with :meth:`.Query.options`::
+
+ session.query(MyClass).options(myopt)
+
+ The :class:`.Load` construct is invoked indirectly whenever one makes use
+ of the various loader options that are present in ``sqlalchemy.orm``, including
+ options such as :func:`.orm.joinedload`, :func:`.orm.defer`, :func:`.orm.subqueryload`,
+ and all the rest. These constructs produce an "anonymous" form of the
+ :class:`.Load` object which tracks attributes and options, but is not linked
+ to a parent class until it is associated with a parent :class:`.Query`::
+
+ # produce "unbound" Load object
+ myopt = joinedload("widgets")
+
+ # when applied using options(), the option is "bound" to the
+ # class observed in the given query, e.g. MyClass
+ session.query(MyClass).options(myopt)
+
+ Whether the direct or indirect style is used, the :class:`.Load` object
+ returned now represents a specific "path" along the entities of a :class:`.Query`.
+ This path can be traversed using a standard method-chaining approach.
+ Supposing a class hierarchy such as ``User``, ``User.addresses -> Address``,
+ ``User.orders -> Order`` and ``Order.items -> Item``, we can specify a variety
+ of loader options along each element in the "path"::
+
+ session.query(User).options(
+ joinedload("addresses"),
+ subqueryload("orders").joinedload("items")
+ )
+
+ Where above, the ``addresses`` collection will be joined-loaded, the
+ ``orders`` collection will be subquery-loaded, and within that subquery load
+ the ``items`` collection will be joined-loaded.
+
+
+ """
+ def __init__(self, entity):
+ insp = inspect(entity)
+ self.path = insp._path_registry
+ self.context = {}
+ self.local_opts = {}
+
+ def _generate(self):
+ cloned = super(Load, self)._generate()
+ cloned.local_opts = {}
+ return cloned
+
+ strategy = None
+ propagate_to_loaders = False
+
+ def process_query(self, query):
+ self._process(query, True)
+
+ def process_query_conditionally(self, query):
+ self._process(query, False)
+
+ def _process(self, query, raiseerr):
+ current_path = query._current_path
+ if current_path:
+ for (token, start_path), loader in self.context.items():
+ chopped_start_path = self._chop_path(start_path, current_path)
+ if chopped_start_path is not None:
+ query._attributes[(token, chopped_start_path)] = loader
+ else:
+ query._attributes.update(self.context)
+
+ def _generate_path(self, path, attr, wildcard_key, raiseerr=True):
+ if raiseerr and not path.has_entity:
+ if isinstance(path, TokenRegistry):
+ raise sa_exc.ArgumentError(
+ "Wildcard token cannot be followed by another entity")
+ else:
+ raise sa_exc.ArgumentError(
+ "Attribute '%s' of entity '%s' does not "
+ "refer to a mapped entity" %
+ (path.prop.key, path.parent.entity)
+ )
+
+ if isinstance(attr, util.string_types):
+ default_token = attr.endswith(_DEFAULT_TOKEN)
+ if attr.endswith(_WILDCARD_TOKEN) or default_token:
+ if default_token:
+ self.propagate_to_loaders = False
+ if wildcard_key:
+ attr = "%s:%s" % (wildcard_key, attr)
+ return path.token(attr)
+
+ try:
+ # use getattr on the class to work around
+ # synonyms, hybrids, etc.
+ attr = getattr(path.entity.class_, attr)
+ except AttributeError:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Can't find property named '%s' on the "
+ "mapped entity %s in this Query. " % (
+ attr, path.entity)
+ )
+ else:
+ return None
+ else:
+ attr = attr.property
+
+ path = path[attr]
+ else:
+ prop = attr.property
+
+ if not prop.parent.common_parent(path.mapper):
+ if raiseerr:
+ raise sa_exc.ArgumentError("Attribute '%s' does not "
+ "link from element '%s'" % (attr, path.entity))
+ else:
+ return None
+
+ if getattr(attr, '_of_type', None):
+ ac = attr._of_type
+ ext_info = inspect(ac)
+
+ path_element = ext_info.mapper
+ if not ext_info.is_aliased_class:
+ ac = orm_util.with_polymorphic(
+ ext_info.mapper.base_mapper,
+ ext_info.mapper, aliased=True,
+ _use_mapper_path=True)
+ path.entity_path[prop].set(self.context,
+ "path_with_polymorphic", inspect(ac))
+ path = path[prop][path_element]
+ else:
+ path = path[prop]
+
+ if path.has_entity:
+ path = path.entity_path
+ return path
+
+ def _coerce_strat(self, strategy):
+ if strategy is not None:
+ strategy = tuple(sorted(strategy.items()))
+ return strategy
+
+ @_generative
+ def set_relationship_strategy(self, attr, strategy, propagate_to_loaders=True):
+ strategy = self._coerce_strat(strategy)
+
+ self.propagate_to_loaders = propagate_to_loaders
+ # if the path is a wildcard, this will set propagate_to_loaders=False
+ self.path = self._generate_path(self.path, attr, "relationship")
+ self.strategy = strategy
+ if strategy is not None:
+ self._set_path_strategy()
+
+ @_generative
+ def set_column_strategy(self, attrs, strategy, opts=None):
+ strategy = self._coerce_strat(strategy)
+
+ for attr in attrs:
+ path = self._generate_path(self.path, attr, "column")
+ cloned = self._generate()
+ cloned.strategy = strategy
+ cloned.path = path
+ cloned.propagate_to_loaders = True
+ if opts:
+ cloned.local_opts.update(opts)
+ cloned._set_path_strategy()
+
+ def _set_path_strategy(self):
+ if self.path.has_entity:
+ self.path.parent.set(self.context, "loader", self)
+ else:
+ self.path.set(self.context, "loader", self)
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d["path"] = self.path.serialize()
+ return d
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self.path = PathRegistry.deserialize(self.path)
+
+ def _chop_path(self, to_chop, path):
+ i = -1
+
+ for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)):
+ if isinstance(c_token, util.string_types):
+ # TODO: this is approximated from the _UnboundLoad
+ # version and probably has issues, not fully covered.
+
+ if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
+ return to_chop
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_token.key:
+ return None
+
+ if c_token is p_token:
+ continue
+ else:
+ return None
+ return to_chop[i+1:]
+
+
+class _UnboundLoad(Load):
+ """Represent a loader option that isn't tied to a root entity.
+
+ The loader option will produce an entity-linked :class:`.Load`
+ object when it is passed :meth:`.Query.options`.
+
+ This provides compatibility with the traditional system
+ of freestanding options, e.g. ``joinedload('x.y.z')``.
+
+ """
+ def __init__(self):
+ self.path = ()
+ self._to_bind = set()
+ self.local_opts = {}
+
+ _is_chain_link = False
+
+ def _set_path_strategy(self):
+ self._to_bind.add(self)
+
+ def _generate_path(self, path, attr, wildcard_key):
+ if wildcard_key and isinstance(attr, util.string_types) and \
+ attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN):
+ if attr == _DEFAULT_TOKEN:
+ self.propagate_to_loaders = False
+ attr = "%s:%s" % (wildcard_key, attr)
+
+ return path + (attr, )
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d['path'] = ret = []
+ for token in util.to_list(self.path):
+ if isinstance(token, PropComparator):
+ ret.append((token._parentmapper.class_, token.key))
+ else:
+ ret.append(token)
+ return d
+
+ def __setstate__(self, state):
+ ret = []
+ for key in state['path']:
+ if isinstance(key, tuple):
+ cls, propkey = key
+ ret.append(getattr(cls, propkey))
+ else:
+ ret.append(key)
+ state['path'] = tuple(ret)
+ self.__dict__ = state
+
+ def _process(self, query, raiseerr):
+ for val in self._to_bind:
+ val._bind_loader(query, query._attributes, raiseerr)
+
+ @classmethod
+ def _from_keys(self, meth, keys, chained, kw):
+ opt = _UnboundLoad()
+
+ def _split_key(key):
+ if isinstance(key, util.string_types):
+ # coerce fooload('*') into "default loader strategy"
+ if key == _WILDCARD_TOKEN:
+ return (_DEFAULT_TOKEN, )
+ # coerce fooload(".*") into "wildcard on default entity"
+ elif key.startswith("." + _WILDCARD_TOKEN):
+ key = key[1:]
+ return key.split(".")
+ else:
+ return (key,)
+ all_tokens = [token for key in keys for token in _split_key(key)]
+
+ for token in all_tokens[0:-1]:
+ if chained:
+ opt = meth(opt, token, **kw)
+ else:
+ opt = opt.defaultload(token)
+ opt._is_chain_link = True
+
+ opt = meth(opt, all_tokens[-1], **kw)
+ opt._is_chain_link = False
+
+ return opt
+
+
+ def _chop_path(self, to_chop, path):
+ i = -1
+ for i, (c_token, (p_mapper, p_prop)) in enumerate(zip(to_chop, path.pairs())):
+ if isinstance(c_token, util.string_types):
+ if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
+ return to_chop
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_prop.key:
+ return None
+ elif isinstance(c_token, PropComparator):
+ if c_token.property is not p_prop:
+ return None
+ else:
+ i += 1
+
+ return to_chop[i:]
+
+
+ def _bind_loader(self, query, context, raiseerr):
+ start_path = self.path
+ # _current_path implies we're in a
+ # secondary load with an existing path
+
+ current_path = query._current_path
+ if current_path:
+ start_path = self._chop_path(start_path, current_path)
+
+ if not start_path:
+ return None
+
+ token = start_path[0]
+ if isinstance(token, util.string_types):
+ entity = self._find_entity_basestring(query, token, raiseerr)
+ elif isinstance(token, PropComparator):
+ prop = token.property
+ entity = self._find_entity_prop_comparator(
+ query,
+ prop.key,
+ token._parententity,
+ raiseerr)
+
+ else:
+ raise sa_exc.ArgumentError(
+ "mapper option expects "
+ "string key or list of attributes")
+
+ if not entity:
+ return
+
+ path_element = entity.entity_zero
+
+ # transfer our entity-less state into a Load() object
+ # with a real entity path.
+ loader = Load(path_element)
+ loader.context = context
+ loader.strategy = self.strategy
+
+ path = loader.path
+ for token in start_path:
+ loader.path = path = loader._generate_path(
+ loader.path, token, None, raiseerr)
+ if path is None:
+ return
+
+ loader.local_opts.update(self.local_opts)
+
+ if loader.path.has_entity:
+ effective_path = loader.path.parent
+ else:
+ effective_path = loader.path
+
+ # prioritize "first class" options over those
+ # that were "links in the chain", e.g. "x" and "y" in someload("x.y.z")
+ # versus someload("x") / someload("x.y")
+ if self._is_chain_link:
+ effective_path.setdefault(context, "loader", loader)
+ else:
+ effective_path.set(context, "loader", loader)
+
+ def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
+ if _is_aliased_class(mapper):
+ searchfor = mapper
+ else:
+ searchfor = _class_to_mapper(mapper)
+ for ent in query._mapper_entities:
+ if ent.corresponds_to(searchfor):
+ return ent
+ else:
+ if raiseerr:
+ if not list(query._mapper_entities):
+ raise sa_exc.ArgumentError(
+ "Query has only expression-based entities - "
+ "can't find property named '%s'."
+ % (token, )
+ )
+ else:
+ raise sa_exc.ArgumentError(
+ "Can't find property '%s' on any entity "
+ "specified in this Query. Note the full path "
+ "from root (%s) to target entity must be specified."
+ % (token, ",".join(str(x) for
+ x in query._mapper_entities))
+ )
+ else:
+ return None
+
+ def _find_entity_basestring(self, query, token, raiseerr):
+ if token.endswith(':' + _WILDCARD_TOKEN):
+ if len(list(query._mapper_entities)) != 1:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Wildcard loader can only be used with exactly "
+ "one entity. Use Load(ent) to specify "
+ "specific entities.")
+
+ for ent in query._mapper_entities:
+ # return only the first _MapperEntity when searching
+ # based on string prop name. Ideally object
+ # attributes are used to specify more exactly.
+ return ent
+ else:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Query has only expression-based entities - "
+ "can't find property named '%s'."
+ % (token, )
+ )
+ else:
+ return None
+
+
+
+class loader_option(object):
+ def __init__(self):
+ pass
+
+ def __call__(self, fn):
+ self.name = name = fn.__name__
+ self.fn = fn
+ if hasattr(Load, name):
+ raise TypeError("Load class already has a %s method." % (name))
+ setattr(Load, name, fn)
+
+ return self
+
+ def _add_unbound_fn(self, fn):
+ self._unbound_fn = fn
+ fn_doc = self.fn.__doc__
+ self.fn.__doc__ = """Produce a new :class:`.Load` object with the
+:func:`.orm.%(name)s` option applied.
+
+See :func:`.orm.%(name)s` for usage examples.
+
+""" % {"name": self.name}
+
+ fn.__doc__ = fn_doc
+ return self
+
+ def _add_unbound_all_fn(self, fn):
+ self._unbound_all_fn = fn
+ fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`.
+
+.. deprecated:: 0.9.0
+
+ The "_all()" style is replaced by method chaining, e.g.::
+
+ session.query(MyClass).options(
+ %(name)s("someattribute").%(name)s("anotherattribute")
+ )
+
+""" % {"name": self.name}
+ return self
+
+@loader_option()
+def contains_eager(loadopt, attr, alias=None):
+ """Indicate that the given attribute should be eagerly loaded from
+ columns stated manually in the query.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ The option is used in conjunction with an explicit join that loads
+ the desired rows, i.e.::
+
+ sess.query(Order).\\
+ join(Order.user).\\
+ options(contains_eager(Order.user))
+
+ The above query would join from the ``Order`` entity to its related
+ ``User`` entity, and the returned ``Order`` objects would have the
+ ``Order.user`` attribute pre-populated.
+
+ :func:`contains_eager` also accepts an `alias` argument, which is the
+ string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
+ construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
+ the eagerly-loaded rows are to come from an aliased table::
+
+ user_alias = aliased(User)
+ sess.query(Order).\\
+ join((user_alias, Order.user)).\\
+ options(contains_eager(Order.user, alias=user_alias))
+
+ .. seealso::
+
+ :ref:`contains_eager`
+
+ """
+ if alias is not None:
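+ # a string alias name is passed through unchanged; an aliased()
+ # construct or Alias object is resolved to its underlying selectable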
+ if not isinstance(alias, str):
+ info = inspect(alias)
+ alias = info.selectable
+
+ cloned = loadopt.set_relationship_strategy(
+ attr,
+ {"lazy": "joined"},
+ propagate_to_loaders=False
+ )
+ cloned.local_opts['eager_from_alias'] = alias
+ return cloned
+
+@contains_eager._add_unbound_fn
+def contains_eager(*keys, **kw):
+ return _UnboundLoad()._from_keys(_UnboundLoad.contains_eager, keys, True, kw)
+
+@loader_option()
+def load_only(loadopt, *attrs):
+ """Indicate that for a particular entity, only the given list
+ of column-based attribute names should be loaded; all others will be
+ deferred.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ Example - given a class ``User``, load only the ``name`` and ``fullname``
+ attributes::
+
+ session.query(User).options(load_only("name", "fullname"))
+
+ Example - given a relationship ``User.addresses -> Address``, specify
+ subquery loading for the ``User.addresses`` collection, but on each ``Address``
+ object load only the ``email_address`` attribute::
+
+ session.query(User).options(
+ subqueryload("addreses").load_only("email_address")
+ )
+
+ For a :class:`.Query` that has multiple entities, the lead entity can be
+ specifically referred to using the :class:`.Load` constructor::
+
+ session.query(User, Address).join(User.addresses).options(
+ Load(User).load_only("name", "fullname"),
+ Load(Address).load_only("email_addres")
+ )
+
+
+ .. versionadded:: 0.9.0
+
+ """
+ cloned = loadopt.set_column_strategy(
+ attrs,
+ {"deferred": False, "instrument": True}
+ )
+ cloned.set_column_strategy("*",
+ {"deferred": True, "instrument": True})
+ return cloned
+
+@load_only._add_unbound_fn
+def load_only(*attrs):
+ return _UnboundLoad().load_only(*attrs)
+
+@loader_option()
+def joinedload(loadopt, attr, innerjoin=None):
+ """Indicate that the given attribute should be loaded using joined
+ eager loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ examples::
+
+ # joined-load the "orders" collection on "User"
+ query(User).options(joinedload(User.orders))
+
+ # joined-load Order.items and then Item.keywords
+ query(Order).options(joinedload(Order.items).joinedload(Item.keywords))
+
+ # lazily load Order.items, but when Items are loaded,
+ # joined-load the keywords collection
+ query(Order).options(lazyload(Order.items).joinedload(Item.keywords))
+
+ :func:`.orm.joinedload` also accepts a keyword argument ``innerjoin=True`` which
+ indicates using an inner join instead of an outer::
+
+ query(Order).options(joinedload(Order.user, innerjoin=True))
+
+ .. note::
+
+ The joins produced by :func:`.orm.joinedload` are **anonymously aliased**.
+ The criteria by which the join proceeds cannot be modified, nor can the
+ :class:`.Query` refer to these joins in any way, including ordering.
+
+ To produce a specific SQL JOIN which is explicitly available, use
+ :meth:`.Query.join`. To combine explicit JOINs with eager loading
+ of collections, use :func:`.orm.contains_eager`; see :ref:`contains_eager`.
+
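+ For example, assuming ``User.addresses`` and the ``Address`` class, a
+ query that needs to both eagerly load the ``Address`` rows and order by
+ one of their columns would use :meth:`.Query.join` together with
+ :func:`.orm.contains_eager`, rather than :func:`.orm.joinedload`::
+
+ query(User).join(User.addresses).\\
+ options(contains_eager(User.addresses)).\\
+ order_by(Address.email_address)
+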
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :ref:`contains_eager`
+
+ :func:`.orm.subqueryload`
+
+ :func:`.orm.lazyload`
+
+ """
+ loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
+ if innerjoin is not None:
+ loader.local_opts['innerjoin'] = innerjoin
+ return loader
+
+@joinedload._add_unbound_fn
+def joinedload(*keys, **kw):
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.joinedload, keys, False, kw)
+
+@joinedload._add_unbound_all_fn
+def joinedload_all(*keys, **kw):
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.joinedload, keys, True, kw)
+
+
+@loader_option()
+def subqueryload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using
+ subquery eager loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ examples::
+
+ # subquery-load the "orders" collection on "User"
+ query(User).options(subqueryload(User.orders))
+
+ # subquery-load Order.items and then Item.keywords
+ query(Order).options(subqueryload(Order.items).subqueryload(Item.keywords))
+
+ # lazily load Order.items, but when Items are loaded,
+ # subquery-load the keywords collection
+ query(Order).options(lazyload(Order.items).subqueryload(Item.keywords))
+
+
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :func:`.orm.joinedload`
+
+ :func:`.orm.lazyload`
+
+ """
+ return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"})
+
+@subqueryload._add_unbound_fn
+def subqueryload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {})
+
+@subqueryload._add_unbound_all_fn
+def subqueryload_all(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
+
+@loader_option()
+def lazyload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using "lazy"
+ loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
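+ e.g., assuming a mapped ``User`` class whose ``orders`` relationship is
+ configured for eager loading by default, the option below overrides it
+ with ordinary lazy "select" loading for this query::
+
+ session.query(User).options(lazyload(User.orders))
+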
+ """
+ return loadopt.set_relationship_strategy(attr, {"lazy": "select"})
+
+@lazyload._add_unbound_fn
+def lazyload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {})
+
+@lazyload._add_unbound_all_fn
+def lazyload_all(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {})
+
+@loader_option()
+def immediateload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using
+ an immediate load with a per-attribute SELECT statement.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
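+ e.g., assuming a mapped ``User`` class with an ``orders`` relationship,
+ each ``User`` returned by the query below emits an additional SELECT
+ for its ``orders`` collection as the instance is loaded::
+
+ session.query(User).options(immediateload(User.orders))
+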
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :func:`.orm.joinedload`
+
+ :func:`.orm.lazyload`
+
+ """
+ loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
+ return loader
+
+@immediateload._add_unbound_fn
+def immediateload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, False, {})
+
+
+@loader_option()
+def noload(loadopt, attr):
+ """Indicate that the given relationship attribute should remain unloaded.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ :func:`.orm.noload` applies to :func:`.relationship` attributes; for
+ column-based attributes, see :func:`.orm.defer`.
+
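+ e.g., assuming a mapped ``User`` class with an ``orders`` relationship,
+ the ``orders`` collection on each ``User`` loaded below is left empty,
+ and no SQL is emitted for it when the attribute is accessed::
+
+ session.query(User).options(noload(User.orders))
+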
+ """
+
+ return loadopt.set_relationship_strategy(attr, {"lazy": "noload"})
+
+@noload._add_unbound_fn
+def noload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {})
+
+@loader_option()
+def defaultload(loadopt, attr):
+ """Indicate an attribute should load using its default loader style.
+
+ This method is used to link to other loader options, such as
+ setting the :func:`.orm.defer` option on a class that is linked to
+ a relationship of the parent class being loaded.  :func:`.orm.defaultload`
+ can be used to navigate this path without changing the loading style
+ of the relationship::
+
+ session.query(MyClass).options(defaultload("someattr").defer("some_column"))
+
+ .. seealso::
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_relationship_strategy(
+ attr,
+ None
+ )
+
+@defaultload._add_unbound_fn
+def defaultload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {})
+
+@loader_option()
+def defer(loadopt, key):
+ """Indicate that the given column-oriented attribute should be deferred, e.g.
+ not loaded until accessed.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ e.g.::
+
+ from sqlalchemy.orm import defer
+
+ session.query(MyClass).options(
+ defer("attribute_one"),
+ defer("attribute_two"))
+
+ session.query(MyClass).options(
+ defer(MyClass.attribute_one),
+ defer(MyClass.attribute_two))
+
+ To specify a deferred load of an attribute on a related class,
+ the path can be specified one token at a time, specifying the loading
+ style for each link along the chain. To leave the loading style
+ for a link unchanged, use :func:`.orm.defaultload`::
+
+ session.query(MyClass).options(defaultload("someattr").defer("some_column"))
+
+ A :class:`.Load` object that is present on a certain path can have
+ :meth:`.Load.defer` called multiple times, each will operate on the same
+ parent entity::
+
+
+ session.query(MyClass).options(
+ defaultload("someattr").
+ defer("some_column").
+ defer("some_other_column").
+ defer("another_column")
+ )
+
+ :param key: Attribute to be deferred.
+
+ :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+ of specifying a path as a series of attributes, which is now superseded
+ by the method-chained style.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": True, "instrument": True}
+ )
+
+
+@defer._add_unbound_fn
+def defer(key, *addl_attrs):
+ return _UnboundLoad._from_keys(_UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+
+@loader_option()
+def undefer(loadopt, key):
+ """Indicate that the given column-oriented attribute should be undeferred, e.g.
+ specified within the SELECT statement of the entity as a whole.
+
+ The column being undeferred is typically set up on the mapping as a
+ :func:`.deferred` attribute.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ Examples::
+
+ # undefer two columns
+ session.query(MyClass).options(undefer("col1"), undefer("col2"))
+
+ # undefer all columns specific to a single class using Load + *
+ session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))
+
+ :param key: Attribute to be undeferred.
+
+ :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+ of specifying a path as a series of attributes, which is now superseded
+ by the method-chained style.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer_group`
+
+ """
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": False, "instrument": True}
+ )
+
+@undefer._add_unbound_fn
+def undefer(key, *addl_attrs):
+ return _UnboundLoad._from_keys(_UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+
+@loader_option()
+def undefer_group(loadopt, name):
+ """Indicate that columns within the given deferred group name should be undeferred.
+
+ The columns being undeferred are set up on the mapping as
+ :func:`.deferred` attributes and include a "group" name.
+
+ E.g.::
+
+ session.query(MyClass).options(undefer_group("large_attrs"))
+
+ To undefer a group of attributes on a related entity, the path can be
+ spelled out using relationship loader options, such as :func:`.orm.defaultload`::
+
+ session.query(MyClass).options(defaultload("someattr").undefer_group("large_attrs"))
+
+ .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a
+ particular entity load path.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_column_strategy(
+ "*",
+ None,
+ {"undefer_group": name}
+ )
+
+@undefer_group._add_unbound_fn
+def undefer_group(name):
+ return _UnboundLoad().undefer_group(name)
+
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index 6524ab27a..cf735fc53 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -1,5 +1,5 @@
# orm/sync.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 1e8d3e4dc..8c0c0d40e 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -1,5 +1,5 @@
# orm/unitofwork.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,8 +16,6 @@ from .. import util, event, exc
from ..util import topological
from . import attributes, persistence, util as orm_util, exc as orm_exc
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-
def track_cascade_events(descriptor, prop):
"""Establish event listeners on object attributes which handle
@@ -33,7 +31,7 @@ def track_cascade_events(descriptor, prop):
if item is None:
return
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
if sess._warn_on_events:
sess._flush_warning("collection append")
@@ -50,7 +48,7 @@ def track_cascade_events(descriptor, prop):
if item is None:
return
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
prop = state.manager.mapper._props[key]
@@ -74,7 +72,7 @@ def track_cascade_events(descriptor, prop):
if oldvalue is newvalue:
return newvalue
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
if sess._warn_on_events:
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index ae1ca2013..dd85f2ef1 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -1,5 +1,5 @@
# orm/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,21 +7,20 @@
from .. import sql, util, event, exc as sa_exc, inspection
from ..sql import expression, util as sql_util, operators
-from .interfaces import PropComparator, MapperProperty, _InspectionAttr
-from itertools import chain
-from . import attributes, exc
+from .interfaces import PropComparator, MapperProperty
+from . import attributes
import re
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
+from .base import instance_str, state_str, state_class_str, attribute_str, \
+ state_attribute_str, object_mapper, object_state, _none_set
+from .base import class_mapper, _class_to_mapper
+from .base import _InspectionAttr
+from .path_registry import PathRegistry
all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
"expunge", "save-update", "refresh-expire",
"none"))
-_INSTRUMENTOR = ('mapper', 'instrumentor')
-
-_none_set = frozenset([None])
-
class CascadeOptions(frozenset):
"""Keeps track of the options sent to relationship().cascade"""
@@ -71,24 +70,43 @@ class CascadeOptions(frozenset):
)
-def _validator_events(desc, key, validator, include_removes):
+def _validator_events(desc, key, validator, include_removes, include_backrefs):
"""Runs a validation method on an attribute value to be set or appended."""
+ if not include_backrefs:
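+ # an event fired via a backref carries the initiator of the opposite
+ # attribute; e.g. assuming a bidirectional User.addresses / Address.user
+ # relationship with @validates('addresses', include_backrefs=False),
+ # assigning address.user = someuser populates user.addresses through
+ # the backref without invoking the validator.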
+ def detect_is_backref(state, initiator):
+ impl = state.manager[key].impl
+ return initiator.impl is not impl
+
if include_removes:
def append(state, value, initiator):
- return validator(state.obj(), key, value, False)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value, False)
+ else:
+ return value
def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value, False)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value, False)
+ else:
+ return value
def remove(state, value, initiator):
- validator(state.obj(), key, value, True)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ validator(state.obj(), key, value, True)
+
else:
def append(state, value, initiator):
- return validator(state.obj(), key, value)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value)
+ else:
+ return value
def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value)
+ else:
+ return value
event.listen(desc, 'append', append, raw=True, retval=True)
event.listen(desc, 'set', set_, raw=True, retval=True)
@@ -160,31 +178,59 @@ def polymorphic_union(table_map, typecolname,
def identity_key(*args, **kwargs):
- """Get an identity key.
+ """Generate "identity key" tuples, as are used as keys in the
+ :attr:`.Session.identity_map` dictionary.
- Valid call signatures:
+ This function has several call styles:
* ``identity_key(class, ident)``
- class
- mapped class (must be a positional argument)
+ This form receives a mapped class and a primary key scalar or
+ tuple as an argument.
+
+ E.g.::
+
+ >>> identity_key(MyClass, (1, 2))
+ (<class '__main__.MyClass'>, (1, 2))
- ident
- primary key, if the key is composite this is a tuple
+ :param class: mapped class (must be a positional argument)
+ :param ident: primary key, may be a scalar or tuple argument.
* ``identity_key(instance=instance)``
- instance
- object instance (must be given as a keyword arg)
+ This form will produce the identity key for a given instance. The
+ instance need not be persistent; it only needs to have its primary
+ key attributes populated (otherwise the key will contain ``None``
+ for those missing values).
+
+ E.g.::
+
+ >>> instance = MyClass(1, 2)
+ >>> identity_key(instance=instance)
+ (<class '__main__.MyClass'>, (1, 2))
+
+ In this form, the given instance is ultimately run through
+ :meth:`.Mapper.identity_key_from_instance`, which will have the
+ effect of performing a database check for the corresponding row
+ if the object is expired.
+
+ :param instance: object instance (must be given as a keyword arg)
* ``identity_key(class, row=row)``
- class
- mapped class (must be a positional argument)
+ This form is similar to the class/tuple form, except it is passed a
+ database result row as a :class:`.RowProxy` object.
+
+ E.g.::
- row
- result proxy row (must be given as a keyword arg)
+ >>> row = engine.execute("select * from table where a=1 and b=2").first()
+ >>> identity_key(MyClass, row=row)
+ (<class '__main__.MyClass'>, (1, 2))
+
+ :param class: mapped class (must be a positional argument)
+ :param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy`
+ (must be given as a keyword arg)
"""
if args:
@@ -245,212 +291,6 @@ class ORMAdapter(sql_util.ColumnAdapter):
else:
return None
-def _unreduce_path(path):
- return PathRegistry.deserialize(path)
-
-class PathRegistry(object):
- """Represent query load paths and registry functions.
-
- Basically represents structures like:
-
- (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
-
- These structures are generated by things like
- query options (joinedload(), subqueryload(), etc.) and are
- used to compose keys stored in the query._attributes dictionary
- for various options.
-
- They are then re-composed at query compile/result row time as
- the query is formed and as rows are fetched, where they again
- serve to compose keys to look up options in the context.attributes
- dictionary, which is copied from query._attributes.
-
- The path structure has a limited amount of caching, where each
- "root" ultimately pulls from a fixed registry associated with
- the first mapper, that also contains elements for each of its
- property keys. However paths longer than two elements, which
- are the exception rather than the rule, are generated on an
- as-needed basis.
-
- """
-
- def __eq__(self, other):
- return other is not None and \
- self.path == other.path
-
- def set(self, attributes, key, value):
- attributes[(key, self.path)] = value
-
- def setdefault(self, attributes, key, value):
- attributes.setdefault((key, self.path), value)
-
- def get(self, attributes, key, value=None):
- key = (key, self.path)
- if key in attributes:
- return attributes[key]
- else:
- return value
-
- def __len__(self):
- return len(self.path)
-
- @property
- def length(self):
- return len(self.path)
-
- def pairs(self):
- path = self.path
- for i in range(0, len(path), 2):
- yield path[i], path[i + 1]
-
- def contains_mapper(self, mapper):
- for path_mapper in [
- self.path[i] for i in range(0, len(self.path), 2)
- ]:
- if isinstance(path_mapper, mapperlib.Mapper) and \
- path_mapper.isa(mapper):
- return True
- else:
- return False
-
- def contains(self, attributes, key):
- return (key, self.path) in attributes
-
- def __reduce__(self):
- return _unreduce_path, (self.serialize(), )
-
- def serialize(self):
- path = self.path
- return list(zip(
- [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
- [path[i].key for i in range(1, len(path), 2)] + [None]
- ))
-
- @classmethod
- def deserialize(cls, path):
- if path is None:
- return None
-
- p = tuple(chain(*[(class_mapper(mcls),
- class_mapper(mcls).attrs[key]
- if key is not None else None)
- for mcls, key in path]))
- if p and p[-1] is None:
- p = p[0:-1]
- return cls.coerce(p)
-
- @classmethod
- def per_mapper(cls, mapper):
- return EntityRegistry(
- cls.root, mapper
- )
-
- @classmethod
- def coerce(cls, raw):
- return util.reduce(lambda prev, next: prev[next], raw, cls.root)
-
- @classmethod
- def token(cls, token):
- return TokenRegistry(cls.root, token)
-
- def __add__(self, other):
- return util.reduce(
- lambda prev, next: prev[next],
- other.path, self)
-
- def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.path, )
-
-
-class RootRegistry(PathRegistry):
- """Root registry, defers to mappers so that
- paths are maintained per-root-mapper.
-
- """
- path = ()
-
- def __getitem__(self, entity):
- return entity._path_registry
-PathRegistry.root = RootRegistry()
-
-class TokenRegistry(PathRegistry):
- def __init__(self, parent, token):
- self.token = token
- self.parent = parent
- self.path = parent.path + (token,)
-
- def __getitem__(self, entity):
- raise NotImplementedError()
-
-class PropRegistry(PathRegistry):
- def __init__(self, parent, prop):
- # restate this path in terms of the
- # given MapperProperty's parent.
- insp = inspection.inspect(parent[-1])
- if not insp.is_aliased_class or insp._use_mapper_path:
- parent = parent.parent[prop.parent]
- elif insp.is_aliased_class and insp.with_polymorphic_mappers:
- if prop.parent is not insp.mapper and \
- prop.parent in insp.with_polymorphic_mappers:
- subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
- parent = parent.parent[subclass_entity]
-
- self.prop = prop
- self.parent = parent
- self.path = parent.path + (prop,)
-
- def __getitem__(self, entity):
- if isinstance(entity, (int, slice)):
- return self.path[entity]
- else:
- return EntityRegistry(
- self, entity
- )
-
-
-class EntityRegistry(PathRegistry, dict):
- is_aliased_class = False
-
- def __init__(self, parent, entity):
- self.key = entity
- self.parent = parent
- self.is_aliased_class = entity.is_aliased_class
-
- self.path = parent.path + (entity,)
-
- def __bool__(self):
- return True
- __nonzero__ = __bool__
-
- def __getitem__(self, entity):
- if isinstance(entity, (int, slice)):
- return self.path[entity]
- else:
- return dict.__getitem__(self, entity)
-
- def _inlined_get_for(self, prop, context, key):
- """an inlined version of:
-
- cls = path[mapperproperty].get(context, key)
-
- Skips the isinstance() check in __getitem__
- and the extra method call for get().
- Used by StrategizedProperty for its
- very frequent lookup.
-
- """
- path = dict.__getitem__(self, prop)
- path_key = (key, path.path)
- if path_key in context.attributes:
- return context.attributes[path_key]
- else:
- return None
-
- def __missing__(self, key):
- self[key] = item = PropRegistry(self, key)
- return item
-
-
class AliasedClass(object):
"""Represents an "aliased" form of a mapped class for usage with Query.
@@ -538,8 +378,10 @@ class AliasedClass(object):
else:
raise AttributeError(key)
- if isinstance(attr, attributes.QueryableAttribute):
- return _aliased_insp._adapt_prop(attr, key)
+ if isinstance(attr, PropComparator):
+ ret = attr.adapt_to_entity(_aliased_insp)
+ setattr(self, key, ret)
+ return ret
elif hasattr(attr, 'func_code'):
is_method = getattr(_aliased_insp._target, key, None)
if is_method and is_method.__self__ is not None:
@@ -550,7 +392,8 @@ class AliasedClass(object):
ret = attr.__get__(None, self)
if isinstance(ret, PropComparator):
return ret.adapt_to_entity(_aliased_insp)
- return ret
+ else:
+ return ret
else:
return attr
@@ -672,17 +515,6 @@ class AliasedInsp(_InspectionAttr):
'parentmapper': self.mapper}
)
- def _adapt_prop(self, existing, key):
- comparator = existing.comparator.adapt_to_entity(self)
- queryattr = attributes.QueryableAttribute(
- self.entity, key,
- impl=existing.impl,
- parententity=self,
- comparator=comparator)
- setattr(self.entity, key, queryattr)
- return queryattr
-
-
def _entity_for_mapper(self, mapper):
self_poly = self.with_polymorphic_mappers
if mapper in self_poly:
@@ -1053,186 +885,6 @@ def with_parent(instance, prop):
value_is_parent=True)
-def _attr_as_key(attr):
- if hasattr(attr, 'key'):
- return attr.key
- else:
- return expression._column_as_key(attr)
-
-
-_state_mapper = util.dottedgetter('manager.mapper')
-
-
-@inspection._inspects(object)
-def _inspect_mapped_object(instance):
- try:
- return attributes.instance_state(instance)
- # TODO: whats the py-2/3 syntax to catch two
- # different kinds of exceptions at once ?
- except exc.UnmappedClassError:
- return None
- except exc.NO_STATE:
- return None
-
-
-@inspection._inspects(type)
-def _inspect_mapped_class(class_, configure=False):
- try:
- class_manager = attributes.manager_of_class(class_)
- if not class_manager.is_mapped:
- return None
- mapper = class_manager.mapper
- if configure and mapperlib.module._new_mappers:
- mapperlib.configure_mappers()
- return mapper
-
- except exc.NO_STATE:
- return None
-
-
-def object_mapper(instance):
- """Given an object, return the primary Mapper associated with the object
- instance.
-
- Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
- if no mapping is configured.
-
- This function is available via the inspection system as::
-
- inspect(instance).mapper
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
- not part of a mapping.
-
- """
- return object_state(instance).mapper
-
-
-def object_state(instance):
- """Given an object, return the :class:`.InstanceState`
- associated with the object.
-
- Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
- if no mapping is configured.
-
- Equivalent functionality is available via the :func:`.inspect`
- function as::
-
- inspect(instance)
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
- not part of a mapping.
-
- """
- state = _inspect_mapped_object(instance)
- if state is None:
- raise exc.UnmappedInstanceError(instance)
- else:
- return state
-
-
-def class_mapper(class_, configure=True):
- """Given a class, return the primary :class:`.Mapper` associated
- with the key.
-
- Raises :class:`.UnmappedClassError` if no mapping is configured
- on the given class, or :class:`.ArgumentError` if a non-class
- object is passed.
-
- Equivalent functionality is available via the :func:`.inspect`
- function as::
-
- inspect(some_mapped_class)
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
-
- """
- mapper = _inspect_mapped_class(class_, configure=configure)
- if mapper is None:
- if not isinstance(class_, type):
- raise sa_exc.ArgumentError(
- "Class object expected, got '%r'." % class_)
- raise exc.UnmappedClassError(class_)
- else:
- return mapper
-
-
-def _class_to_mapper(class_or_mapper):
- insp = inspection.inspect(class_or_mapper, False)
- if insp is not None:
- return insp.mapper
- else:
- raise exc.UnmappedClassError(class_or_mapper)
-
-
-def _mapper_or_none(entity):
- """Return the :class:`.Mapper` for the given class or None if the
- class is not mapped."""
-
- insp = inspection.inspect(entity, False)
- if insp is not None:
- return insp.mapper
- else:
- return None
-
-
-def _is_mapped_class(entity):
- """Return True if the given object is a mapped class,
- :class:`.Mapper`, or :class:`.AliasedClass`."""
-
- insp = inspection.inspect(entity, False)
- return insp is not None and \
- hasattr(insp, "mapper") and \
- (
- insp.is_mapper
- or insp.is_aliased_class
- )
-
-
-def _is_aliased_class(entity):
- insp = inspection.inspect(entity, False)
- return insp is not None and \
- getattr(insp, "is_aliased_class", False)
-
-
-def _entity_descriptor(entity, key):
- """Return a class attribute given an entity and string name.
-
- May return :class:`.InstrumentedAttribute` or user-defined
- attribute.
-
- """
- insp = inspection.inspect(entity)
- if insp.is_selectable:
- description = entity
- entity = insp.c
- elif insp.is_aliased_class:
- entity = insp.entity
- description = entity
- elif hasattr(insp, "mapper"):
- description = entity = insp.mapper.class_
- else:
- description = entity
-
- try:
- return getattr(entity, key)
- except AttributeError:
- raise sa_exc.InvalidRequestError(
- "Entity '%s' has no property '%s'" %
- (description, key)
- )
-
-
-def _orm_columns(entity):
- insp = inspection.inspect(entity, False)
- if hasattr(insp, 'selectable'):
- return [c for c in insp.selectable.c]
- else:
- return [entity]
-
def has_identity(object):
"""Return True if the given object has a database
@@ -1260,37 +912,8 @@ def was_deleted(object):
state = attributes.instance_state(object)
return state.deleted
-def instance_str(instance):
- """Return a string describing an instance."""
-
- return state_str(attributes.instance_state(instance))
-
-
-def state_str(state):
- """Return a string describing an instance via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
-
-
-def state_class_str(state):
- """Return a string describing an instance's class via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s>' % (state.class_.__name__, )
-def attribute_str(instance, attribute):
- return instance_str(instance) + "." + attribute
-
-
-def state_attribute_str(state, attribute):
- return state_str(state) + "." + attribute
-
def randomize_unitofwork():
"""Use random-ordering sets within the unit of work in order
@@ -1327,3 +950,4 @@ def randomize_unitofwork():
from sqlalchemy.testing.util import RandomSet
topological.set = unitofwork.set = session.set = mapper.set = \
dependency.set = RandomSet
+