summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/orm
diff options
context:
space:
mode:
authorBrian Jarrett <celttechie@gmail.com>2014-07-20 12:44:40 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2014-07-20 12:44:40 -0400
commitcca03097f47f22783d42d1853faac6cf84607c5a (patch)
tree4fe1a63d03a2d88d1cf37e1167759dfaf84f4ce7 /lib/sqlalchemy/orm
parent827329a0cca5351094a1a86b6b2be2b9182f0ae2 (diff)
downloadsqlalchemy-cca03097f47f22783d42d1853faac6cf84607c5a.tar.gz
- apply pep8 formatting to sqlalchemy/sql, sqlalchemy/util, sqlalchemy/dialects,
sqlalchemy/orm, sqlalchemy/event, sqlalchemy/testing
Diffstat (limited to 'lib/sqlalchemy/orm')
-rw-r--r--lib/sqlalchemy/orm/__init__.py89
-rw-r--r--lib/sqlalchemy/orm/attributes.py310
-rw-r--r--lib/sqlalchemy/orm/base.py193
-rw-r--r--lib/sqlalchemy/orm/collections.py73
-rw-r--r--lib/sqlalchemy/orm/dependency.py570
-rw-r--r--lib/sqlalchemy/orm/deprecated_interfaces.py64
-rw-r--r--lib/sqlalchemy/orm/descriptor_props.py135
-rw-r--r--lib/sqlalchemy/orm/dynamic.py80
-rw-r--r--lib/sqlalchemy/orm/evaluator.py22
-rw-r--r--lib/sqlalchemy/orm/events.py127
-rw-r--r--lib/sqlalchemy/orm/exc.py3
-rw-r--r--lib/sqlalchemy/orm/identity.py6
-rw-r--r--lib/sqlalchemy/orm/instrumentation.py56
-rw-r--r--lib/sqlalchemy/orm/interfaces.py34
-rw-r--r--lib/sqlalchemy/orm/loading.py192
-rw-r--r--lib/sqlalchemy/orm/mapper.py525
-rw-r--r--lib/sqlalchemy/orm/path_registry.py37
-rw-r--r--lib/sqlalchemy/orm/persistence.py385
-rw-r--r--lib/sqlalchemy/orm/properties.py41
-rw-r--r--lib/sqlalchemy/orm/query.py594
-rw-r--r--lib/sqlalchemy/orm/relationships.py1029
-rw-r--r--lib/sqlalchemy/orm/scoping.py8
-rw-r--r--lib/sqlalchemy/orm/session.py323
-rw-r--r--lib/sqlalchemy/orm/state.py52
-rw-r--r--lib/sqlalchemy/orm/strategy_options.py229
-rw-r--r--lib/sqlalchemy/orm/sync.py39
-rw-r--r--lib/sqlalchemy/orm/unitofwork.py126
-rw-r--r--lib/sqlalchemy/orm/util.py184
28 files changed, 2867 insertions, 2659 deletions
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 6cd9dfcb1..741e79b9d 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -15,40 +15,40 @@ documentation for an overview of how this module is used.
from . import exc
from .mapper import (
- Mapper,
- _mapper_registry,
- class_mapper,
- configure_mappers,
- reconstructor,
- validates
- )
+ Mapper,
+ _mapper_registry,
+ class_mapper,
+ configure_mappers,
+ reconstructor,
+ validates
+)
from .interfaces import (
- EXT_CONTINUE,
- EXT_STOP,
- PropComparator,
- )
+ EXT_CONTINUE,
+ EXT_STOP,
+ PropComparator,
+)
from .deprecated_interfaces import (
- MapperExtension,
- SessionExtension,
- AttributeExtension,
+ MapperExtension,
+ SessionExtension,
+ AttributeExtension,
)
from .util import (
- aliased,
- join,
- object_mapper,
- outerjoin,
- polymorphic_union,
- was_deleted,
- with_parent,
- with_polymorphic,
- )
+ aliased,
+ join,
+ object_mapper,
+ outerjoin,
+ polymorphic_union,
+ was_deleted,
+ with_parent,
+ with_polymorphic,
+)
from .properties import ColumnProperty
from .relationships import RelationshipProperty
from .descriptor_props import (
- ComparableProperty,
- CompositeProperty,
- SynonymProperty,
- )
+ ComparableProperty,
+ CompositeProperty,
+ SynonymProperty,
+)
from .relationships import (
foreign,
remote,
@@ -69,6 +69,7 @@ from ..util.langhelpers import public_factory
from .. import util as _sa_util
from . import strategies as _strategies
+
def create_session(bind=None, **kwargs):
"""Create a new :class:`.Session`
with no automation enabled by default.
@@ -107,6 +108,7 @@ def create_session(bind=None, **kwargs):
relationship = public_factory(RelationshipProperty, ".orm.relationship")
+
def relation(*arg, **kw):
"""A synonym for :func:`relationship`."""
@@ -144,7 +146,8 @@ def backref(name, **kwargs):
Used with the ``backref`` keyword argument to :func:`relationship` in
place of a string argument, e.g.::
- 'items':relationship(SomeItem, backref=backref('parent', lazy='subquery'))
+ 'items':relationship(
+ SomeItem, backref=backref('parent', lazy='subquery'))
"""
return (name, kwargs)
@@ -158,7 +161,8 @@ def deferred(*columns, **kw):
:class:`.Column` object, however a collection is supported in order
to support multiple columns mapped under the same attribute.
- :param \**kw: additional keyword arguments passed to :class:`.ColumnProperty`.
+ :param \**kw: additional keyword arguments passed to
+ :class:`.ColumnProperty`.
.. seealso::
@@ -173,11 +177,11 @@ mapper = public_factory(Mapper, ".orm.mapper")
synonym = public_factory(SynonymProperty, ".orm.synonym")
comparable_property = public_factory(ComparableProperty,
- ".orm.comparable_property")
+ ".orm.comparable_property")
@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
- "is renamed to :func:`.configure_mappers`")
+ "is renamed to :func:`.configure_mappers`")
def compile_mappers():
"""Initialize the inter-mapper relationships of all mappers that have
been defined.
@@ -196,14 +200,14 @@ def clear_mappers():
:func:`.clear_mappers` is *not* for normal use, as there is literally no
valid usage for it outside of very specific testing scenarios. Normally,
mappers are permanent structural components of user-defined classes, and
- are never discarded independently of their class. If a mapped class itself
- is garbage collected, its mapper is automatically disposed of as well. As
- such, :func:`.clear_mappers` is only for usage in test suites that re-use
- the same classes with different mappings, which is itself an extremely rare
- use case - the only such use case is in fact SQLAlchemy's own test suite,
- and possibly the test suites of other ORM extension libraries which
- intend to test various combinations of mapper construction upon a fixed
- set of classes.
+ are never discarded independently of their class. If a mapped class
+ itself is garbage collected, its mapper is automatically disposed of as
+ well. As such, :func:`.clear_mappers` is only for usage in test suites
+ that re-use the same classes with different mappings, which is itself an
+ extremely rare use case - the only such use case is in fact SQLAlchemy's
+ own test suite, and possibly the test suites of other ORM extension
+ libraries which intend to test various combinations of mapper construction
+ upon a fixed set of classes.
"""
mapperlib._CONFIGURE_MUTEX.acquire()
@@ -237,6 +241,7 @@ defaultload = strategy_options.defaultload._unbound_fn
from .strategy_options import Load
+
def eagerload(*args, **kwargs):
"""A synonym for :func:`joinedload()`."""
return joinedload(*args, **kwargs)
@@ -247,12 +252,9 @@ def eagerload_all(*args, **kwargs):
return joinedload_all(*args, **kwargs)
-
-
contains_alias = public_factory(AliasOption, ".orm.contains_alias")
-
def __go(lcls):
global __all__
from .. import util as sa_util
@@ -261,9 +263,8 @@ def __go(lcls):
import inspect as _inspect
__all__ = sorted(name for name, obj in lcls.items()
- if not (name.startswith('_') or _inspect.ismodule(obj)))
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
_sa_util.dependencies.resolve_all("sqlalchemy.orm")
__go(locals())
-
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 329367473..67e4dca9b 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -21,16 +21,17 @@ from . import interfaces, collections, exc as orm_exc
from .base import instance_state, instance_dict, manager_of_class
from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\
- NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
- INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
- PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
- PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH
+ NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
+ INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
+ PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
+ PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH
from .base import state_str, instance_str
+
@inspection._self_inspects
class QueryableAttribute(interfaces._MappedAttribute,
- interfaces._InspectionAttr,
- interfaces.PropComparator):
+ interfaces._InspectionAttr,
+ interfaces.PropComparator):
"""Base class for :term:`descriptor` objects that intercept
attribute events on behalf of a :class:`.MapperProperty`
object. The actual :class:`.MapperProperty` is accessible
@@ -52,8 +53,8 @@ class QueryableAttribute(interfaces._MappedAttribute,
is_attribute = True
def __init__(self, class_, key, impl=None,
- comparator=None, parententity=None,
- of_type=None):
+ comparator=None, parententity=None,
+ of_type=None):
self.class_ = class_
self.key = key
self.impl = impl
@@ -76,13 +77,12 @@ class QueryableAttribute(interfaces._MappedAttribute,
def get_history(self, instance, passive=PASSIVE_OFF):
return self.impl.get_history(instance_state(instance),
- instance_dict(instance), passive)
+ instance_dict(instance), passive)
def __selectable__(self):
# TODO: conditionally attach this method based on clause_element ?
return self
-
@util.memoized_property
def info(self):
"""Return the 'info' dictionary for the underlying SQL element.
@@ -97,22 +97,23 @@ class QueryableAttribute(interfaces._MappedAttribute,
* If the attribute is a :class:`.ColumnProperty` but is mapped to
any other kind of SQL expression other than a :class:`.Column`,
- the attribute will refer to the :attr:`.MapperProperty.info` dictionary
- associated directly with the :class:`.ColumnProperty`, assuming the SQL
- expression itself does not have its own ``.info`` attribute
- (which should be the case, unless a user-defined SQL construct
- has defined one).
-
- * If the attribute refers to any other kind of :class:`.MapperProperty`,
- including :class:`.RelationshipProperty`, the attribute will refer
- to the :attr:`.MapperProperty.info` dictionary associated with
- that :class:`.MapperProperty`.
-
- * To access the :attr:`.MapperProperty.info` dictionary of the :class:`.MapperProperty`
- unconditionally, including for a :class:`.ColumnProperty` that's
- associated directly with a :class:`.schema.Column`, the attribute
- can be referred to using :attr:`.QueryableAttribute.property`
- attribute, as ``MyClass.someattribute.property.info``.
+ the attribute will refer to the :attr:`.MapperProperty.info`
+ dictionary associated directly with the :class:`.ColumnProperty`,
+ assuming the SQL expression itself does not have its own ``.info``
+ attribute (which should be the case, unless a user-defined SQL
+ construct has defined one).
+
+ * If the attribute refers to any other kind of
+ :class:`.MapperProperty`, including :class:`.RelationshipProperty`,
+ the attribute will refer to the :attr:`.MapperProperty.info`
+ dictionary associated with that :class:`.MapperProperty`.
+
+ * To access the :attr:`.MapperProperty.info` dictionary of the
+ :class:`.MapperProperty` unconditionally, including for a
+ :class:`.ColumnProperty` that's associated directly with a
+ :class:`.schema.Column`, the attribute can be referred to using
+ :attr:`.QueryableAttribute.property` attribute, as
+ ``MyClass.someattribute.property.info``.
.. versionadded:: 0.8.0
@@ -152,18 +153,20 @@ class QueryableAttribute(interfaces._MappedAttribute,
def adapt_to_entity(self, adapt_to_entity):
assert not self._of_type
- return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl,
- comparator=self.comparator.adapt_to_entity(adapt_to_entity),
- parententity=adapt_to_entity)
+ return self.__class__(adapt_to_entity.entity,
+ self.key, impl=self.impl,
+ comparator=self.comparator.adapt_to_entity(
+ adapt_to_entity),
+ parententity=adapt_to_entity)
def of_type(self, cls):
return QueryableAttribute(
- self.class_,
- self.key,
- self.impl,
- self.comparator.of_type(cls),
- self._parententity,
- of_type=cls)
+ self.class_,
+ self.key,
+ self.impl,
+ self.comparator.of_type(cls),
+ self._parententity,
+ of_type=cls)
def label(self, name):
return self._query_clause_element().label(name)
@@ -182,8 +185,8 @@ class QueryableAttribute(interfaces._MappedAttribute,
return getattr(self.comparator, key)
except AttributeError:
raise AttributeError(
- 'Neither %r object nor %r object associated with %s '
- 'has an attribute %r' % (
+ 'Neither %r object nor %r object associated with %s '
+ 'has an attribute %r' % (
type(self).__name__,
type(self.comparator).__name__,
self,
@@ -218,7 +221,7 @@ class InstrumentedAttribute(QueryableAttribute):
def __set__(self, instance, value):
self.impl.set(instance_state(instance),
- instance_dict(instance), value, None)
+ instance_dict(instance), value, None)
def __delete__(self, instance):
self.impl.delete(instance_state(instance), instance_dict(instance))
@@ -252,9 +255,9 @@ def create_proxied_attribute(descriptor):
"""
def __init__(self, class_, key, descriptor,
- comparator,
- adapt_to_entity=None, doc=None,
- original_property=None):
+ comparator,
+ adapt_to_entity=None, doc=None,
+ original_property=None):
self.class_ = class_
self.key = key
self.descriptor = descriptor
@@ -273,13 +276,15 @@ def create_proxied_attribute(descriptor):
self._comparator = self._comparator()
if self._adapt_to_entity:
self._comparator = self._comparator.adapt_to_entity(
- self._adapt_to_entity)
+ self._adapt_to_entity)
return self._comparator
def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(adapt_to_entity.entity, self.key, self.descriptor,
- self._comparator,
- adapt_to_entity)
+ return self.__class__(adapt_to_entity.entity,
+ self.key,
+ self.descriptor,
+ self._comparator,
+ adapt_to_entity)
def __get__(self, instance, owner):
if instance is None:
@@ -303,10 +308,10 @@ def create_proxied_attribute(descriptor):
raise AttributeError(
'Neither %r object nor %r object associated with %s '
'has an attribute %r' % (
- type(descriptor).__name__,
- type(self.comparator).__name__,
- self,
- attribute)
+ type(descriptor).__name__,
+ type(self.comparator).__name__,
+ self,
+ attribute)
)
Proxy.__name__ = type(descriptor).__name__ + 'Proxy'
@@ -320,6 +325,7 @@ OP_REMOVE = util.symbol("REMOVE")
OP_APPEND = util.symbol("APPEND")
OP_REPLACE = util.symbol("REPLACE")
+
class Event(object):
"""A token propagated throughout the course of a chain of attribute
events.
@@ -360,6 +366,7 @@ class Event(object):
return isinstance(other, Event) and \
other.impl is self.impl and \
other.op == self.op
+
@property
def key(self):
return self.impl.key
@@ -367,15 +374,16 @@ class Event(object):
def hasparent(self, state):
return self.impl.hasparent(state)
+
class AttributeImpl(object):
"""internal implementation for instrumented attributes."""
def __init__(self, class_, key,
- callable_, dispatch, trackparent=False, extension=None,
- compare_function=None, active_history=False,
- parent_token=None, expire_missing=True,
- send_modified_events=True,
- **kwargs):
+ callable_, dispatch, trackparent=False, extension=None,
+ compare_function=None, active_history=False,
+ parent_token=None, expire_missing=True,
+ send_modified_events=True,
+ **kwargs):
"""Construct an AttributeImpl.
\class_
@@ -419,8 +427,8 @@ class AttributeImpl(object):
for this key.
send_modified_events
- if False, the InstanceState._modified_event method will have no effect;
- this means the attribute will never show up as changed in a
+ if False, the InstanceState._modified_event method will have no
+ effect; this means the attribute will never show up as changed in a
history entry.
"""
self.class_ = class_
@@ -480,7 +488,7 @@ class AttributeImpl(object):
assert self.trackparent, msg
return state.parents.get(id(self.parent_token), optimistic) \
- is not False
+ is not False
def sethasparent(self, state, parent_state, value):
"""Set a boolean flag on the given item corresponding to
@@ -499,7 +507,7 @@ class AttributeImpl(object):
last_parent = state.parents[id_]
if last_parent is not False and \
- last_parent.key != parent_state.key:
+ last_parent.key != parent_state.key:
if last_parent.obj() is None:
raise orm_exc.StaleDataError(
@@ -509,8 +517,8 @@ class AttributeImpl(object):
"has gone stale, can't be sure this "
"is the most recent parent." %
(state_str(state),
- state_str(parent_state),
- self.key))
+ state_str(parent_state),
+ self.key))
return
@@ -591,9 +599,9 @@ class AttributeImpl(object):
except KeyError:
# TODO: no test coverage here.
raise KeyError(
- "Deferred loader for attribute "
- "%r failed to populate "
- "correctly" % key)
+ "Deferred loader for attribute "
+ "%r failed to populate "
+ "correctly" % key)
elif value is not ATTR_EMPTY:
return self.set_committed_value(state, dict_, value)
@@ -608,14 +616,14 @@ class AttributeImpl(object):
def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
self.set(state, dict_, None, initiator,
- passive=passive, check_old=value)
+ passive=passive, check_old=value)
def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
self.set(state, dict_, None, initiator,
- passive=passive, check_old=value, pop=True)
+ passive=passive, check_old=value, pop=True)
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
+ passive=PASSIVE_OFF, check_old=None, pop=False):
raise NotImplementedError()
def get_committed_value(self, state, dict_, passive=PASSIVE_OFF):
@@ -672,7 +680,7 @@ class ScalarAttributeImpl(AttributeImpl):
return History.from_scalar_attribute(self, state, current)
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
+ passive=PASSIVE_OFF, check_old=None, pop=False):
if self.dispatch._active_history:
old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
else:
@@ -680,7 +688,7 @@ class ScalarAttributeImpl(AttributeImpl):
if self.dispatch.set:
value = self.fire_replace_event(state, dict_,
- value, old, initiator)
+ value, old, initiator)
state._modified_event(dict_, self, old)
dict_[self.key] = value
@@ -698,7 +706,8 @@ class ScalarAttributeImpl(AttributeImpl):
def fire_replace_event(self, state, dict_, value, previous, initiator):
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self._replace_token)
+ value = fn(
+ state, value, previous, initiator or self._replace_token)
return value
def fire_remove_event(self, state, dict_, value, initiator):
@@ -767,32 +776,32 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
return ret
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
+ passive=PASSIVE_OFF, check_old=None, pop=False):
"""Set a value on the given InstanceState.
"""
if self.dispatch._active_history:
- old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH)
+ old = self.get(
+ state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH)
else:
old = self.get(state, dict_, passive=PASSIVE_NO_FETCH ^ INIT_OK)
if check_old is not None and \
- old is not PASSIVE_NO_RESULT and \
- check_old is not old:
+ old is not PASSIVE_NO_RESULT and \
+ check_old is not old:
if pop:
return
else:
raise ValueError(
"Object %s not associated with %s on attribute '%s'" % (
- instance_str(check_old),
- state_str(state),
- self.key
- ))
+ instance_str(check_old),
+ state_str(state),
+ self.key
+ ))
value = self.fire_replace_event(state, dict_, value, old, initiator)
dict_[self.key] = value
-
def fire_remove_event(self, state, dict_, value, initiator):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), state, False)
@@ -809,7 +818,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
self.sethasparent(instance_state(previous), state, False)
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self._replace_token)
+ value = fn(
+ state, value, previous, initiator or self._replace_token)
state._modified_event(dict_, self, previous)
@@ -837,16 +847,16 @@ class CollectionAttributeImpl(AttributeImpl):
collection = True
def __init__(self, class_, key, callable_, dispatch,
- typecallable=None, trackparent=False, extension=None,
- copy_function=None, compare_function=None, **kwargs):
+ typecallable=None, trackparent=False, extension=None,
+ copy_function=None, compare_function=None, **kwargs):
super(CollectionAttributeImpl, self).__init__(
- class_,
- key,
- callable_, dispatch,
- trackparent=trackparent,
- extension=extension,
- compare_function=compare_function,
- **kwargs)
+ class_,
+ key,
+ callable_, dispatch,
+ trackparent=trackparent,
+ extension=extension,
+ compare_function=compare_function,
+ **kwargs)
if copy_function is None:
copy_function = self.__copy
@@ -876,11 +886,11 @@ class CollectionAttributeImpl(AttributeImpl):
original = state.committed_state[self.key]
if original not in (NO_VALUE, NEVER_SET):
current_states = [((c is not None) and
- instance_state(c) or None, c)
- for c in current]
+ instance_state(c) or None, c)
+ for c in current]
original_states = [((c is not None) and
instance_state(c) or None, c)
- for c in original]
+ for c in original]
current_set = dict(current_states)
original_set = dict(original_states)
@@ -953,7 +963,7 @@ class CollectionAttributeImpl(AttributeImpl):
if collection is PASSIVE_NO_RESULT:
value = self.fire_append_event(state, dict_, value, initiator)
assert self.key not in dict_, \
- "Collection was loaded during event handling."
+ "Collection was loaded during event handling."
state._get_pending_mutation(self.key).append(value)
else:
collection.append_with_event(value, initiator)
@@ -963,7 +973,7 @@ class CollectionAttributeImpl(AttributeImpl):
if collection is PASSIVE_NO_RESULT:
self.fire_remove_event(state, dict_, value, initiator)
assert self.key not in dict_, \
- "Collection was loaded during event handling."
+ "Collection was loaded during event handling."
state._get_pending_mutation(self.key).remove(value)
else:
collection.remove_with_event(value, initiator)
@@ -978,7 +988,7 @@ class CollectionAttributeImpl(AttributeImpl):
pass
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, pop=False):
+ passive=PASSIVE_OFF, pop=False):
"""Set a value on the given object.
"""
@@ -1055,7 +1065,7 @@ class CollectionAttributeImpl(AttributeImpl):
return user_data
def get_collection(self, state, dict_,
- user_data=None, passive=PASSIVE_OFF):
+ user_data=None, passive=PASSIVE_OFF):
"""Retrieve the CollectionAdapter associated with the given state.
Creates a new CollectionAdapter if one does not exist.
@@ -1099,20 +1109,20 @@ def backref_listeners(attribute, key, uselist):
# With lazy=None, there's no guarantee that the full collection is
# present when updating via a backref.
old_state, old_dict = instance_state(oldchild),\
- instance_dict(oldchild)
+ instance_dict(oldchild)
impl = old_state.manager[key].impl
if initiator.impl is not impl or \
initiator.op not in (OP_REPLACE, OP_REMOVE):
impl.pop(old_state,
- old_dict,
- state.obj(),
- parent_impl._append_token,
- passive=PASSIVE_NO_FETCH)
+ old_dict,
+ state.obj(),
+ parent_impl._append_token,
+ passive=PASSIVE_NO_FETCH)
if child is not None:
child_state, child_dict = instance_state(child),\
- instance_dict(child)
+ instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
@@ -1120,11 +1130,11 @@ def backref_listeners(attribute, key, uselist):
elif initiator.impl is not child_impl or \
initiator.op not in (OP_APPEND, OP_REPLACE):
child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_append_event(state, child, initiator):
@@ -1132,7 +1142,7 @@ def backref_listeners(attribute, key, uselist):
return
child_state, child_dict = instance_state(child), \
- instance_dict(child)
+ instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.parent_token is not parent_token and \
@@ -1141,48 +1151,48 @@ def backref_listeners(attribute, key, uselist):
elif initiator.impl is not child_impl or \
initiator.op not in (OP_APPEND, OP_REPLACE):
child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_remove_event(state, child, initiator):
if child is not None:
child_state, child_dict = instance_state(child),\
- instance_dict(child)
+ instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.impl is not child_impl or \
initiator.op not in (OP_REMOVE, OP_REPLACE):
child_impl.pop(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
if uselist:
event.listen(attribute, "append",
- emit_backref_from_collection_append_event,
- retval=True, raw=True)
+ emit_backref_from_collection_append_event,
+ retval=True, raw=True)
else:
event.listen(attribute, "set",
- emit_backref_from_scalar_set_event,
- retval=True, raw=True)
+ emit_backref_from_scalar_set_event,
+ retval=True, raw=True)
# TODO: need coverage in test/orm/ of remove event
event.listen(attribute, "remove",
- emit_backref_from_collection_remove_event,
- retval=True, raw=True)
+ emit_backref_from_collection_remove_event,
+ retval=True, raw=True)
_NO_HISTORY = util.symbol('NO_HISTORY')
_NO_STATE_SYMBOLS = frozenset([
- id(PASSIVE_NO_RESULT),
- id(NO_VALUE),
- id(NEVER_SET)])
+ id(PASSIVE_NO_RESULT),
+ id(NO_VALUE),
+ id(NEVER_SET)])
History = util.namedtuple("History", [
- "added", "unchanged", "deleted"
+ "added", "unchanged", "deleted"
])
@@ -1222,28 +1232,28 @@ class History(History):
"""
return not bool(
- (self.added or self.deleted)
- or self.unchanged
- )
+ (self.added or self.deleted)
+ or self.unchanged
+ )
def sum(self):
"""Return a collection of added + unchanged + deleted."""
return (self.added or []) +\
- (self.unchanged or []) +\
- (self.deleted or [])
+ (self.unchanged or []) +\
+ (self.deleted or [])
def non_deleted(self):
"""Return a collection of added + unchanged."""
return (self.added or []) +\
- (self.unchanged or [])
+ (self.unchanged or [])
def non_added(self):
"""Return a collection of unchanged + deleted."""
return (self.unchanged or []) +\
- (self.deleted or [])
+ (self.deleted or [])
def has_changes(self):
"""Return True if this :class:`.History` has changes."""
@@ -1261,7 +1271,7 @@ class History(History):
[(c is not None)
and instance_state(c) or None
for c in self.deleted],
- )
+ )
@classmethod
def from_scalar_attribute(cls, attribute, state, current):
@@ -1331,13 +1341,13 @@ class History(History):
else:
current_states = [((c is not None) and instance_state(c)
- or None, c)
- for c in current
- ]
+ or None, c)
+ for c in current
+ ]
original_states = [((c is not None) and instance_state(c)
or None, c)
- for c in original
- ]
+ for c in original
+ ]
current_set = dict(current_states)
original_set = dict(original_states)
@@ -1369,11 +1379,11 @@ def get_history(obj, key, passive=PASSIVE_OFF):
"""
if passive is True:
util.warn_deprecated("Passing True for 'passive' is deprecated. "
- "Use attributes.PASSIVE_NO_INITIALIZE")
+ "Use attributes.PASSIVE_NO_INITIALIZE")
passive = PASSIVE_NO_INITIALIZE
elif passive is False:
util.warn_deprecated("Passing False for 'passive' is "
- "deprecated. Use attributes.PASSIVE_OFF")
+ "deprecated. Use attributes.PASSIVE_OFF")
passive = PASSIVE_OFF
return get_state_history(instance_state(obj), key, passive)
@@ -1395,15 +1405,15 @@ def register_attribute(class_, key, **kw):
parententity = kw.pop('parententity', None)
doc = kw.pop('doc', None)
desc = register_descriptor(class_, key,
- comparator, parententity, doc=doc)
+ comparator, parententity, doc=doc)
register_attribute_impl(class_, key, **kw)
return desc
def register_attribute_impl(class_, key,
- uselist=False, callable_=None,
- useobject=False,
- impl_class=None, backref=None, **kw):
+ uselist=False, callable_=None,
+ useobject=False,
+ impl_class=None, backref=None, **kw):
manager = manager_of_class(class_)
if uselist:
@@ -1422,7 +1432,7 @@ def register_attribute_impl(class_, key,
typecallable=typecallable, **kw)
elif useobject:
impl = ScalarObjectAttributeImpl(class_, key, callable_,
- dispatch, **kw)
+ dispatch, **kw)
else:
impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
@@ -1436,11 +1446,11 @@ def register_attribute_impl(class_, key,
def register_descriptor(class_, key, comparator=None,
- parententity=None, doc=None):
+ parententity=None, doc=None):
manager = manager_of_class(class_)
descriptor = InstrumentedAttribute(class_, key, comparator=comparator,
- parententity=parententity)
+ parententity=parententity)
descriptor.__doc__ = doc
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
index de103bf71..a85f59f37 100644
--- a/lib/sqlalchemy/orm/base.py
+++ b/lib/sqlalchemy/orm/base.py
@@ -14,102 +14,127 @@ from ..sql import expression
from . import exc
import operator
-PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
-"""Symbol returned by a loader callable or other attribute/history
-retrieval operation when a value could not be determined, based
-on loader callable flags.
-"""
+PASSIVE_NO_RESULT = util.symbol(
+ 'PASSIVE_NO_RESULT',
+ """Symbol returned by a loader callable or other attribute/history
+ retrieval operation when a value could not be determined, based
+ on loader callable flags.
+ """
)
-ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
-"""Symbol returned by a loader callable to indicate the
-retrieved value, or values, were assigned to their attributes
-on the target object.
-""")
+ATTR_WAS_SET = util.symbol(
+ 'ATTR_WAS_SET',
+ """Symbol returned by a loader callable to indicate the
+ retrieved value, or values, were assigned to their attributes
+ on the target object.
+ """
+)
-ATTR_EMPTY = util.symbol('ATTR_EMPTY',
-"""Symbol used internally to indicate an attribute had no callable.
-""")
+ATTR_EMPTY = util.symbol(
+ 'ATTR_EMPTY',
+ """Symbol used internally to indicate an attribute had no callable."""
+)
-NO_VALUE = util.symbol('NO_VALUE',
-"""Symbol which may be placed as the 'previous' value of an attribute,
-indicating no value was loaded for an attribute when it was modified,
-and flags indicated we were not to load it.
-"""
+NO_VALUE = util.symbol(
+ 'NO_VALUE',
+ """Symbol which may be placed as the 'previous' value of an attribute,
+ indicating no value was loaded for an attribute when it was modified,
+ and flags indicated we were not to load it.
+ """
)
-NEVER_SET = util.symbol('NEVER_SET',
-"""Symbol which may be placed as the 'previous' value of an attribute
-indicating that the attribute had not been assigned to previously.
-"""
+NEVER_SET = util.symbol(
+ 'NEVER_SET',
+ """Symbol which may be placed as the 'previous' value of an attribute
+ indicating that the attribute had not been assigned to previously.
+ """
)
-NO_CHANGE = util.symbol("NO_CHANGE",
-"""No callables or SQL should be emitted on attribute access
-and no state should change""", canonical=0
+NO_CHANGE = util.symbol(
+ "NO_CHANGE",
+ """No callables or SQL should be emitted on attribute access
+ and no state should change
+ """, canonical=0
)
-CALLABLES_OK = util.symbol("CALLABLES_OK",
-"""Loader callables can be fired off if a value
-is not present.""", canonical=1
+CALLABLES_OK = util.symbol(
+ "CALLABLES_OK",
+ """Loader callables can be fired off if a value
+ is not present.
+ """, canonical=1
)
-SQL_OK = util.symbol("SQL_OK",
-"""Loader callables can emit SQL at least on scalar value
-attributes.""", canonical=2)
+SQL_OK = util.symbol(
+ "SQL_OK",
+ """Loader callables can emit SQL at least on scalar value attributes.""",
+ canonical=2
+)
-RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
-"""callables can use SQL to load related objects as well
-as scalar value attributes.
-""", canonical=4
+RELATED_OBJECT_OK = util.symbol(
+ "RELATED_OBJECT_OK",
+ """Callables can use SQL to load related objects as well
+ as scalar value attributes.
+ """, canonical=4
)
-INIT_OK = util.symbol("INIT_OK",
-"""Attributes should be initialized with a blank
-value (None or an empty collection) upon get, if no other
-value can be obtained.
-""", canonical=8
+INIT_OK = util.symbol(
+ "INIT_OK",
+ """Attributes should be initialized with a blank
+ value (None or an empty collection) upon get, if no other
+ value can be obtained.
+ """, canonical=8
)
-NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
-"""callables can be emitted if the parent is not persistent.""",
-canonical=16
+NON_PERSISTENT_OK = util.symbol(
+ "NON_PERSISTENT_OK",
+ """Callables can be emitted if the parent is not persistent.""",
+ canonical=16
)
-LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
-"""callables should use committed values as primary/foreign keys during a load
-""", canonical=32
+LOAD_AGAINST_COMMITTED = util.symbol(
+ "LOAD_AGAINST_COMMITTED",
+ """Callables should use committed values as primary/foreign keys during a
+ load.
+ """, canonical=32
)
-NO_AUTOFLUSH = util.symbol("NO_AUTOFLUSH",
-"""loader callables should disable autoflush.
-""", canonical=64)
+NO_AUTOFLUSH = util.symbol(
+ "NO_AUTOFLUSH",
+ """Loader callables should disable autoflush.""",
+ canonical=64
+)
# pre-packaged sets of flags used as inputs
-PASSIVE_OFF = util.symbol("PASSIVE_OFF",
+PASSIVE_OFF = util.symbol(
+ "PASSIVE_OFF",
"Callables can be emitted in all cases.",
canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
- INIT_OK | CALLABLES_OK | SQL_OK)
+ INIT_OK | CALLABLES_OK | SQL_OK)
)
-PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
- """PASSIVE_OFF ^ INIT_OK""",
- canonical=PASSIVE_OFF ^ INIT_OK
+PASSIVE_RETURN_NEVER_SET = util.symbol(
+ "PASSIVE_RETURN_NEVER_SET",
+ """PASSIVE_OFF ^ INIT_OK""",
+ canonical=PASSIVE_OFF ^ INIT_OK
)
-PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
- "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
- canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
+PASSIVE_NO_INITIALIZE = util.symbol(
+ "PASSIVE_NO_INITIALIZE",
+ "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
+ canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
)
-PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
- "PASSIVE_OFF ^ SQL_OK",
- canonical=PASSIVE_OFF ^ SQL_OK
+PASSIVE_NO_FETCH = util.symbol(
+ "PASSIVE_NO_FETCH",
+ "PASSIVE_OFF ^ SQL_OK",
+ canonical=PASSIVE_OFF ^ SQL_OK
)
-PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
- "PASSIVE_OFF ^ RELATED_OBJECT_OK",
- canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
+PASSIVE_NO_FETCH_RELATED = util.symbol(
+ "PASSIVE_NO_FETCH_RELATED",
+ "PASSIVE_OFF ^ RELATED_OBJECT_OK",
+ canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
)
-PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
- "PASSIVE_OFF ^ NON_PERSISTENT_OK",
- canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
+PASSIVE_ONLY_PERSISTENT = util.symbol(
+ "PASSIVE_ONLY_PERSISTENT",
+ "PASSIVE_OFF ^ NON_PERSISTENT_OK",
+ canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
)
DEFAULT_MANAGER_ATTR = '_sa_class_manager'
@@ -120,7 +145,7 @@ EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')
ONETOMANY = util.symbol('ONETOMANY',
-"""Indicates the one-to-many direction for a :func:`.relationship`.
+ """Indicates the one-to-many direction for a :func:`.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
@@ -128,7 +153,7 @@ certain API features.
""")
MANYTOONE = util.symbol('MANYTOONE',
-"""Indicates the many-to-one direction for a :func:`.relationship`.
+ """Indicates the many-to-one direction for a :func:`.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
@@ -136,7 +161,7 @@ certain API features.
""")
MANYTOMANY = util.symbol('MANYTOMANY',
-"""Indicates the many-to-many direction for a :func:`.relationship`.
+ """Indicates the many-to-many direction for a :func:`.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
@@ -144,7 +169,7 @@ certain API features.
""")
NOT_EXTENSION = util.symbol('NOT_EXTENSION',
-"""Symbol indicating an :class:`_InspectionAttr` that's
+ """Symbol indicating an :class:`_InspectionAttr` that's
not part of sqlalchemy.ext.
Is assigned to the :attr:`._InspectionAttr.extension_type`
@@ -177,11 +202,13 @@ instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
instance_dict = operator.attrgetter('__dict__')
+
def instance_str(instance):
"""Return a string describing an instance."""
return state_str(instance_state(instance))
+
def state_str(state):
"""Return a string describing an instance via its InstanceState."""
@@ -190,8 +217,11 @@ def state_str(state):
else:
return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
+
def state_class_str(state):
- """Return a string describing an instance's class via its InstanceState."""
+ """Return a string describing an instance's class via its
+ InstanceState.
+ """
if state is None:
return "None"
@@ -206,6 +236,7 @@ def attribute_str(instance, attribute):
def state_attribute_str(state, attribute):
return state_str(state) + "." + attribute
+
def object_mapper(instance):
"""Given an object, return the primary Mapper associated with the object
instance.
@@ -261,7 +292,6 @@ def _inspect_mapped_object(instance):
return None
-
def _class_to_mapper(class_or_mapper):
insp = inspection.inspect(class_or_mapper, False)
if insp is not None:
@@ -272,7 +302,8 @@ def _class_to_mapper(class_or_mapper):
def _mapper_or_none(entity):
"""Return the :class:`.Mapper` for the given class or None if the
- class is not mapped."""
+ class is not mapped.
+ """
insp = inspection.inspect(entity, False)
if insp is not None:
@@ -283,7 +314,8 @@ def _mapper_or_none(entity):
def _is_mapped_class(entity):
"""Return True if the given object is a mapped class,
- :class:`.Mapper`, or :class:`.AliasedClass`."""
+ :class:`.Mapper`, or :class:`.AliasedClass`.
+ """
insp = inspection.inspect(entity, False)
return insp is not None and \
@@ -293,6 +325,7 @@ def _is_mapped_class(entity):
or insp.is_aliased_class
)
+
def _attr_as_key(attr):
if hasattr(attr, 'key'):
return attr.key
@@ -300,7 +333,6 @@ def _attr_as_key(attr):
return expression._column_as_key(attr)
-
def _orm_columns(entity):
insp = inspection.inspect(entity, False)
if hasattr(insp, 'selectable'):
@@ -309,7 +341,6 @@ def _orm_columns(entity):
return [entity]
-
def _is_aliased_class(entity):
insp = inspection.inspect(entity, False)
return insp is not None and \
@@ -339,12 +370,13 @@ def _entity_descriptor(entity, key):
return getattr(entity, key)
except AttributeError:
raise sa_exc.InvalidRequestError(
- "Entity '%s' has no property '%s'" %
- (description, key)
- )
+ "Entity '%s' has no property '%s'" %
+ (description, key)
+ )
_state_mapper = util.dottedgetter('manager.mapper')
+
@inspection._inspects(type)
def _inspect_mapped_class(class_, configure=False):
try:
@@ -381,7 +413,7 @@ def class_mapper(class_, configure=True):
if mapper is None:
if not isinstance(class_, type):
raise sa_exc.ArgumentError(
- "Class object expected, got '%r'." % (class_, ))
+ "Class object expected, got '%r'." % (class_, ))
raise exc.UnmappedClassError(class_)
else:
return mapper
@@ -452,6 +484,7 @@ class _InspectionAttr(object):
"""
+
class _MappedAttribute(object):
"""Mixin for attributes which should be replaced by mapper-assigned
attributes.
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 9741895db..698677a0b 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -128,6 +128,7 @@ class _PlainColumnGetter(object):
and some rare caveats.
"""
+
def __init__(self, cols):
self.cols = cols
self.composite = len(cols) > 1
@@ -159,6 +160,7 @@ class _SerializableColumnGetter(object):
Remains here for pickle compatibility with 0.7.6.
"""
+
def __init__(self, colkeys):
self.colkeys = colkeys
self.composite = len(colkeys) > 1
@@ -170,9 +172,9 @@ class _SerializableColumnGetter(object):
state = base.instance_state(value)
m = base._state_mapper(state)
key = [m._get_state_attr_by_column(
- state, state.dict,
- m.mapped_table.columns[k])
- for k in self.colkeys]
+ state, state.dict,
+ m.mapped_table.columns[k])
+ for k in self.colkeys]
if self.composite:
return tuple(key)
else:
@@ -213,8 +215,8 @@ class _SerializableColumnGetterV2(_PlainColumnGetter):
metadata = getattr(mapper.local_table, 'metadata', None)
for (ckey, tkey) in self.colkeys:
if tkey is None or \
- metadata is None or \
- tkey not in metadata:
+ metadata is None or \
+ tkey not in metadata:
cols.append(mapper.local_table.c[ckey])
else:
cols.append(metadata.tables[tkey].c[ckey])
@@ -235,7 +237,7 @@ def column_mapped_collection(mapping_spec):
"""
cols = [expression._only_column_elements(q, "mapping_spec")
- for q in util.to_list(mapping_spec)
+ for q in util.to_list(mapping_spec)
]
keyfunc = _PlainColumnGetter(cols)
return lambda: MappedCollection(keyfunc)
@@ -534,9 +536,9 @@ class collection(object):
def removes_return():
"""Mark the method as removing an entity in the collection.
- Adds "remove from collection" handling to the method. The return value
- of the method, if any, is considered the value to remove. The method
- arguments are not inspected::
+ Adds "remove from collection" handling to the method. The return
+ value of the method, if any, is considered the value to remove. The
+ method arguments are not inspected::
@collection.removes_return()
def pop(self): ...
@@ -594,7 +596,6 @@ class CollectionAdapter(object):
if data._sa_linker:
data._sa_linker(self)
-
def unlink(self, data):
"""Unlink a collection from any adapter"""
@@ -632,7 +633,7 @@ class CollectionAdapter(object):
raise TypeError(
"Incompatible collection type: %s is not %s-like" % (
- given, wanted))
+ given, wanted))
# If the object is an adapted collection, return the (iterable)
# adapter.
@@ -710,9 +711,9 @@ class CollectionAdapter(object):
if self.invalidated:
self._warn_invalidated()
return self.attr.fire_append_event(
- self.owner_state,
- self.owner_state.dict,
- item, initiator)
+ self.owner_state,
+ self.owner_state.dict,
+ item, initiator)
else:
return item
@@ -728,9 +729,9 @@ class CollectionAdapter(object):
if self.invalidated:
self._warn_invalidated()
self.attr.fire_remove_event(
- self.owner_state,
- self.owner_state.dict,
- item, initiator)
+ self.owner_state,
+ self.owner_state.dict,
+ item, initiator)
def fire_pre_remove_event(self, initiator=None):
"""Notify that an entity is about to be removed from the collection.
@@ -742,9 +743,9 @@ class CollectionAdapter(object):
if self.invalidated:
self._warn_invalidated()
self.attr.fire_pre_remove_event(
- self.owner_state,
- self.owner_state.dict,
- initiator=initiator)
+ self.owner_state,
+ self.owner_state.dict,
+ initiator=initiator)
def __getstate__(self):
return {'key': self._key,
@@ -848,6 +849,7 @@ def __converting_factory(specimen_cls, original_factory):
return wrapper
+
def _instrument_class(cls):
"""Modify methods in a class and install instrumentation."""
@@ -906,7 +908,7 @@ def _instrument_class(cls):
for method, decorator in decorators.items():
fn = getattr(cls, method, None)
if (fn and method not in methods and
- not hasattr(fn, '_sa_instrumented')):
+ not hasattr(fn, '_sa_instrumented')):
setattr(cls, method, decorator(fn))
# ensure all roles are present, and apply implicit instrumentation if
@@ -951,11 +953,12 @@ def _instrument_class(cls):
def _instrument_membership_mutator(method, before, argument, after):
- """Route method args and/or return value through the collection adapter."""
+ """Route method args and/or return value through the collection
+ adapter."""
# This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))'
if before:
fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0]))
- if type(argument) is int:
+ if isinstance(argument, int):
pos_arg = argument
named_arg = len(fn_args) > argument and fn_args[argument] or None
else:
@@ -1145,8 +1148,8 @@ def _list_decorators():
def __iadd__(fn):
def __iadd__(self, iterable):
- # list.__iadd__ takes any iterable and seems to let TypeError raise
- # as-is instead of returning NotImplemented
+ # list.__iadd__ takes any iterable and seems to let TypeError
+ # raise as-is instead of returning NotImplemented
for value in iterable:
self.append(value)
return self
@@ -1251,7 +1254,7 @@ def _dict_decorators():
if hasattr(__other, 'keys'):
for key in list(__other):
if (key not in self or
- self[key] is not __other[key]):
+ self[key] is not __other[key]):
self[key] = __other[key]
else:
for key, value in __other:
@@ -1447,23 +1450,23 @@ __canned_instrumentation = {
list: InstrumentedList,
set: InstrumentedSet,
dict: InstrumentedDict,
- }
+}
__interfaces = {
list: (
{'appender': 'append', 'remover': 'remove',
- 'iterator': '__iter__'}, _list_decorators()
- ),
+ 'iterator': '__iter__'}, _list_decorators()
+ ),
set: ({'appender': 'add',
- 'remover': 'remove',
- 'iterator': '__iter__'}, _set_decorators()
- ),
+ 'remover': 'remove',
+ 'iterator': '__iter__'}, _set_decorators()
+ ),
# decorators are required for dicts and object collections.
dict: ({'iterator': 'values'}, _dict_decorators()) if util.py3k
- else ({'iterator': 'itervalues'}, _dict_decorators()),
- }
+ else ({'iterator': 'itervalues'}, _dict_decorators()),
+}
class MappedCollection(dict):
@@ -1538,7 +1541,7 @@ class MappedCollection(dict):
"Found incompatible key %r for value %r; this "
"collection's "
"keying function requires a key of %r for this value." % (
- incoming_key, value, new_key))
+ incoming_key, value, new_key))
yield value
# ensure instrumentation is associated with
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 3864eb1bd..c1cf66f14 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -11,7 +11,7 @@
from .. import sql, util, exc as sa_exc
from . import attributes, exc, sync, unitofwork, \
- util as mapperutil
+ util as mapperutil
from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
@@ -39,10 +39,10 @@ class DependencyProcessor(object):
self.key = prop.key
if not self.prop.synchronize_pairs:
raise sa_exc.ArgumentError(
- "Can't build a DependencyProcessor for relationship %s. "
- "No target attributes to populate between parent and "
- "child are present" %
- self.prop)
+ "Can't build a DependencyProcessor for relationship %s. "
+ "No target attributes to populate between parent and "
+ "child are present" %
+ self.prop)
@classmethod
def from_relationship(cls, prop):
@@ -70,31 +70,31 @@ class DependencyProcessor(object):
before_delete = unitofwork.ProcessAll(uow, self, True, True)
parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.primary_base_mapper
- )
+ uow,
+ self.parent.primary_base_mapper
+ )
child_saves = unitofwork.SaveUpdateAll(
- uow,
- self.mapper.primary_base_mapper
- )
+ uow,
+ self.mapper.primary_base_mapper
+ )
parent_deletes = unitofwork.DeleteAll(
- uow,
- self.parent.primary_base_mapper
- )
+ uow,
+ self.parent.primary_base_mapper
+ )
child_deletes = unitofwork.DeleteAll(
- uow,
- self.mapper.primary_base_mapper
- )
+ uow,
+ self.mapper.primary_base_mapper
+ )
self.per_property_dependencies(uow,
- parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete
- )
+ parent_saves,
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete
+ )
def per_state_flush_actions(self, uow, states, isdelete):
"""establish actions and dependencies related to a flush.
@@ -141,15 +141,15 @@ class DependencyProcessor(object):
# check if the "parent" side is part of the cycle
if not isdelete:
parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.base_mapper)
+ uow,
+ self.parent.base_mapper)
parent_deletes = before_delete = None
if parent_saves in uow.cycles:
parent_in_cycles = True
else:
parent_deletes = unitofwork.DeleteAll(
- uow,
- self.parent.base_mapper)
+ uow,
+ self.parent.base_mapper)
parent_saves = after_save = None
if parent_deletes in uow.cycles:
parent_in_cycles = True
@@ -162,28 +162,28 @@ class DependencyProcessor(object):
# case of deletes we may try to load missing items here as well.
sum_ = state.manager[self.key].impl.get_all_pending(
state, state.dict,
- self._passive_delete_flag
- if isdelete
- else attributes.PASSIVE_NO_INITIALIZE)
+ self._passive_delete_flag
+ if isdelete
+ else attributes.PASSIVE_NO_INITIALIZE)
if not sum_:
continue
if isdelete:
before_delete = unitofwork.ProcessState(uow,
- self, True, state)
+ self, True, state)
if parent_in_cycles:
parent_deletes = unitofwork.DeleteState(
- uow,
- state,
- parent_base_mapper)
+ uow,
+ state,
+ parent_base_mapper)
else:
after_save = unitofwork.ProcessState(uow, self, False, state)
if parent_in_cycles:
parent_saves = unitofwork.SaveUpdateState(
- uow,
- state,
- parent_base_mapper)
+ uow,
+ state,
+ parent_base_mapper)
if child_in_cycles:
child_actions = []
@@ -194,26 +194,26 @@ class DependencyProcessor(object):
(deleted, listonly) = uow.states[child_state]
if deleted:
child_action = (
- unitofwork.DeleteState(
- uow, child_state,
- child_base_mapper),
- True)
+ unitofwork.DeleteState(
+ uow, child_state,
+ child_base_mapper),
+ True)
else:
child_action = (
- unitofwork.SaveUpdateState(
- uow, child_state,
- child_base_mapper),
- False)
+ unitofwork.SaveUpdateState(
+ uow, child_state,
+ child_base_mapper),
+ False)
child_actions.append(child_action)
# establish dependencies between our possibly per-state
# parent action and our possibly per-state child action.
for child_action, childisdelete in child_actions:
self.per_state_dependencies(uow, parent_saves,
- parent_deletes,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete)
+ parent_deletes,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete)
def presort_deletes(self, uowcommit, states):
return False
@@ -240,9 +240,9 @@ class DependencyProcessor(object):
# to InstanceState which returns: attribute
# has a non-None value, or had one
history = uowcommit.get_attribute_history(
- s,
- self.key,
- passive)
+ s,
+ self.key,
+ passive)
if history and not history.empty():
return True
else:
@@ -253,27 +253,27 @@ class DependencyProcessor(object):
def _verify_canload(self, state):
if self.prop.uselist and state is None:
raise exc.FlushError(
- "Can't flush None value found in "
- "collection %s" % (self.prop, ))
+ "Can't flush None value found in "
+ "collection %s" % (self.prop, ))
elif state is not None and \
- not self.mapper._canload(state,
- allow_subtypes=not self.enable_typechecks):
+ not self.mapper._canload(
+ state, allow_subtypes=not self.enable_typechecks):
if self.mapper._canload(state, allow_subtypes=True):
raise exc.FlushError('Attempting to flush an item of type '
- '%(x)s as a member of collection '
- '"%(y)s". Expected an object of type '
- '%(z)s or a polymorphic subclass of '
- 'this type. If %(x)s is a subclass of '
- '%(z)s, configure mapper "%(zm)s" to '
- 'load this subtype polymorphically, or '
- 'set enable_typechecks=False to allow '
- 'any subtype to be accepted for flush. '
- % {
- 'x': state.class_,
- 'y': self.prop,
- 'z': self.mapper.class_,
- 'zm': self.mapper,
- })
+ '%(x)s as a member of collection '
+ '"%(y)s". Expected an object of type '
+ '%(z)s or a polymorphic subclass of '
+ 'this type. If %(x)s is a subclass of '
+ '%(z)s, configure mapper "%(zm)s" to '
+ 'load this subtype polymorphically, or '
+ 'set enable_typechecks=False to allow '
+ 'any subtype to be accepted for flush. '
+ % {
+ 'x': state.class_,
+ 'y': self.prop,
+ 'z': self.mapper.class_,
+ 'zm': self.mapper,
+ })
else:
raise exc.FlushError(
'Attempting to flush an item of type '
@@ -287,7 +287,7 @@ class DependencyProcessor(object):
})
def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit):
+ clearkeys, uowcommit):
raise NotImplementedError()
def _get_reversed_processed_set(self, uow):
@@ -295,20 +295,20 @@ class DependencyProcessor(object):
return None
process_key = tuple(sorted(
- [self.key] +
- [p.key for p in self.prop._reverse_property]
- ))
+ [self.key] +
+ [p.key for p in self.prop._reverse_property]
+ ))
return uow.memo(
- ('reverse_key', process_key),
- set
- )
+ ('reverse_key', process_key),
+ set
+ )
def _post_update(self, state, uowcommit, related):
for x in related:
if x is not None:
uowcommit.issue_post_update(
- state,
- [r for l, r in self.prop.synchronize_pairs]
+ state,
+ [r for l, r in self.prop.synchronize_pairs]
)
break
@@ -322,21 +322,21 @@ class DependencyProcessor(object):
class OneToManyDP(DependencyProcessor):
def per_property_dependencies(self, uow, parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete,
- ):
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete,
+ ):
if self.post_update:
child_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- False)
+ uow,
+ self.mapper.primary_base_mapper,
+ False)
child_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- True)
+ uow,
+ self.mapper.primary_base_mapper,
+ True)
uow.dependencies.update([
(child_saves, after_save),
@@ -362,22 +362,22 @@ class OneToManyDP(DependencyProcessor):
])
def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
+ save_parent,
+ delete_parent,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete):
if self.post_update:
child_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- False)
+ uow,
+ self.mapper.primary_base_mapper,
+ False)
child_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- True)
+ uow,
+ self.mapper.primary_base_mapper,
+ True)
# TODO: this whole block is not covered
# by any tests
@@ -421,13 +421,13 @@ class OneToManyDP(DependencyProcessor):
# child objects the child objects have to have their
# foreign key to the parent set to NULL
should_null_fks = not self.cascade.delete and \
- not self.passive_deletes == 'all'
+ not self.passive_deletes == 'all'
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.deleted:
if child is not None and self.hasparent(child) is False:
@@ -439,8 +439,8 @@ class OneToManyDP(DependencyProcessor):
if should_null_fks:
for child in history.unchanged:
if child is not None:
- uowcommit.register_object(child,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, operation="delete", prop=self.prop)
def presort_saves(self, uowcommit, states):
children_added = uowcommit.memo(('children_added', self), set)
@@ -454,28 +454,29 @@ class OneToManyDP(DependencyProcessor):
passive = attributes.PASSIVE_OFF
history = uowcommit.get_attribute_history(
- state,
- self.key,
- passive)
+ state,
+ self.key,
+ passive)
if history:
for child in history.added:
if child is not None:
uowcommit.register_object(child, cancel_delete=True,
- operation="add",
- prop=self.prop)
+ operation="add",
+ prop=self.prop)
children_added.update(history.added)
for child in history.deleted:
if not self.cascade.delete_orphan:
uowcommit.register_object(child, isdelete=False,
- operation='delete',
- prop=self.prop)
+ operation='delete',
+ prop=self.prop)
elif self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete', child):
+ 'delete', child):
uowcommit.register_object(
st_,
isdelete=True)
@@ -485,11 +486,11 @@ class OneToManyDP(DependencyProcessor):
for child in history.unchanged:
if child is not None:
uowcommit.register_object(
- child,
- False,
- self.passive_updates,
- operation="pk change",
- prop=self.prop)
+ child,
+ False,
+ self.passive_updates,
+ operation="pk change",
+ prop=self.prop)
def process_deletes(self, uowcommit, states):
# head object is being deleted, and we manage its list of
@@ -503,67 +504,67 @@ class OneToManyDP(DependencyProcessor):
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.deleted:
if child is not None and \
- self.hasparent(child) is False:
+ self.hasparent(child) is False:
self._synchronize(
- state,
- child,
- None, True,
- uowcommit, False)
+ state,
+ child,
+ None, True,
+ uowcommit, False)
if self.post_update and child:
self._post_update(child, uowcommit, [state])
if self.post_update or not self.cascade.delete:
for child in set(history.unchanged).\
- difference(children_added):
+ difference(children_added):
if child is not None:
self._synchronize(
- state,
- child,
- None, True,
- uowcommit, False)
+ state,
+ child,
+ None, True,
+ uowcommit, False)
if self.post_update and child:
self._post_update(child,
- uowcommit,
- [state])
+ uowcommit,
+ [state])
# technically, we can even remove each child from the
# collection here too. but this would be a somewhat
# inconsistent behavior since it wouldn't happen
- #if the old parent wasn't deleted but child was moved.
+ # if the old parent wasn't deleted but child was moved.
def process_saves(self, uowcommit, states):
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ self.key,
+ attributes.PASSIVE_NO_INITIALIZE)
if history:
for child in history.added:
self._synchronize(state, child, None,
- False, uowcommit, False)
+ False, uowcommit, False)
if child is not None and self.post_update:
self._post_update(child, uowcommit, [state])
for child in history.deleted:
if not self.cascade.delete_orphan and \
- not self.hasparent(child):
+ not self.hasparent(child):
self._synchronize(state, child, None, True,
- uowcommit, False)
+ uowcommit, False)
if self._pks_changed(uowcommit, state):
for child in history.unchanged:
self._synchronize(state, child, None,
- False, uowcommit, True)
+ False, uowcommit, True)
def _synchronize(self, state, child,
- associationrow, clearkeys, uowcommit,
- pks_changed):
+ associationrow, clearkeys, uowcommit,
+ pks_changed):
source = state
dest = child
self._verify_canload(child)
@@ -574,15 +575,15 @@ class OneToManyDP(DependencyProcessor):
sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
else:
sync.populate(source, self.parent, dest, self.mapper,
- self.prop.synchronize_pairs, uowcommit,
- self.passive_updates and pks_changed)
+ self.prop.synchronize_pairs, uowcommit,
+ self.passive_updates and pks_changed)
def _pks_changed(self, uowcommit, state):
return sync.source_modified(
- uowcommit,
- state,
- self.parent,
- self.prop.synchronize_pairs)
+ uowcommit,
+ state,
+ self.parent,
+ self.prop.synchronize_pairs)
class ManyToOneDP(DependencyProcessor):
@@ -591,22 +592,22 @@ class ManyToOneDP(DependencyProcessor):
self.mapper._dependency_processors.append(DetectKeySwitch(prop))
def per_property_dependencies(self, uow,
- parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete):
+ parent_saves,
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete):
if self.post_update:
parent_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- False)
+ uow,
+ self.parent.primary_base_mapper,
+ False)
parent_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- True)
+ uow,
+ self.parent.primary_base_mapper,
+ True)
uow.dependencies.update([
(child_saves, after_save),
@@ -627,19 +628,19 @@ class ManyToOneDP(DependencyProcessor):
])
def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
+ save_parent,
+ delete_parent,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete):
if self.post_update:
if not isdelete:
parent_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- False)
+ uow,
+ self.parent.primary_base_mapper,
+ False)
if childisdelete:
uow.dependencies.update([
(after_save, parent_post_updates),
@@ -654,9 +655,9 @@ class ManyToOneDP(DependencyProcessor):
])
else:
parent_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- True)
+ uow,
+ self.parent.primary_base_mapper,
+ True)
uow.dependencies.update([
(before_delete, parent_pre_updates),
@@ -685,9 +686,9 @@ class ManyToOneDP(DependencyProcessor):
if self.cascade.delete or self.cascade.delete_orphan:
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
if self.cascade.delete_orphan:
todelete = history.sum()
@@ -696,8 +697,9 @@ class ManyToOneDP(DependencyProcessor):
for child in todelete:
if child is None:
continue
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
t = self.mapper.cascade_iterator('delete', child)
for c, m, st_, dct_ in t:
uowcommit.register_object(
@@ -708,14 +710,15 @@ class ManyToOneDP(DependencyProcessor):
uowcommit.register_object(state, operation="add", prop=self.prop)
if self.cascade.delete_orphan:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.deleted:
if self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
t = self.mapper.cascade_iterator('delete', child)
for c, m, st_, dct_ in t:
@@ -733,35 +736,35 @@ class ManyToOneDP(DependencyProcessor):
self._synchronize(state, None, None, True, uowcommit)
if state and self.post_update:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
self._post_update(state, uowcommit, history.sum())
def process_saves(self, uowcommit, states):
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ self.key,
+ attributes.PASSIVE_NO_INITIALIZE)
if history:
if history.added:
for child in history.added:
self._synchronize(state, child, None, False,
- uowcommit, "add")
+ uowcommit, "add")
if self.post_update:
self._post_update(state, uowcommit, history.sum())
def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit, operation=None):
+ clearkeys, uowcommit, operation=None):
if state is None or \
- (not self.post_update and uowcommit.is_deleted(state)):
+ (not self.post_update and uowcommit.is_deleted(state)):
return
if operation is not None and \
- child is not None and \
- not uowcommit.session._contains_state(child):
+ child is not None and \
+ not uowcommit.session._contains_state(child):
util.warn(
"Object of type %s not in session, %s "
"operation along '%s' won't proceed" %
@@ -773,10 +776,10 @@ class ManyToOneDP(DependencyProcessor):
else:
self._verify_canload(child)
sync.populate(child, self.mapper, state,
- self.parent,
- self.prop.synchronize_pairs,
- uowcommit,
- False)
+ self.parent,
+ self.prop.synchronize_pairs,
+ uowcommit,
+ False)
class DetectKeySwitch(DependencyProcessor):
@@ -796,16 +799,16 @@ class DetectKeySwitch(DependencyProcessor):
if self.passive_updates:
return
else:
- if False in (prop.passive_updates for \
- prop in self.prop._reverse_property):
+ if False in (prop.passive_updates for
+ prop in self.prop._reverse_property):
return
uow.register_preprocessor(self, False)
def per_property_flush_actions(self, uow):
parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.base_mapper)
+ uow,
+ self.parent.base_mapper)
after_save = unitofwork.ProcessAll(uow, self, False, False)
uow.dependencies.update([
(parent_saves, after_save)
@@ -844,8 +847,8 @@ class DetectKeySwitch(DependencyProcessor):
def _key_switchers(self, uow, states):
switched, notswitched = uow.memo(
('pk_switchers', self),
- lambda: (set(), set())
- )
+ lambda: (set(), set())
+ )
allstates = switched.union(notswitched)
for s in states:
@@ -865,37 +868,35 @@ class DetectKeySwitch(DependencyProcessor):
if not issubclass(state.class_, self.parent.class_):
continue
dict_ = state.dict
- related = state.get_impl(self.key).get(state, dict_,
- passive=self._passive_update_flag)
+ related = state.get_impl(self.key).get(
+ state, dict_, passive=self._passive_update_flag)
if related is not attributes.PASSIVE_NO_RESULT and \
related is not None:
related_state = attributes.instance_state(dict_[self.key])
if related_state in switchers:
uowcommit.register_object(state,
- False,
- self.passive_updates)
+ False,
+ self.passive_updates)
sync.populate(
- related_state,
- self.mapper, state,
- self.parent, self.prop.synchronize_pairs,
- uowcommit, self.passive_updates)
+ related_state,
+ self.mapper, state,
+ self.parent, self.prop.synchronize_pairs,
+ uowcommit, self.passive_updates)
def _pks_changed(self, uowcommit, state):
- return bool(state.key) and sync.source_modified(uowcommit,
- state,
- self.mapper,
- self.prop.synchronize_pairs)
+ return bool(state.key) and sync.source_modified(
+ uowcommit, state, self.mapper, self.prop.synchronize_pairs)
class ManyToManyDP(DependencyProcessor):
def per_property_dependencies(self, uow, parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete
- ):
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete
+ ):
uow.dependencies.update([
(parent_saves, after_save),
@@ -915,11 +916,11 @@ class ManyToManyDP(DependencyProcessor):
])
def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
+ save_parent,
+ delete_parent,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete):
if not isdelete:
if childisdelete:
uow.dependencies.update([
@@ -946,9 +947,9 @@ class ManyToManyDP(DependencyProcessor):
# returns True
for state in states:
uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
def presort_saves(self, uowcommit, states):
if not self.passive_updates:
@@ -958,9 +959,9 @@ class ManyToManyDP(DependencyProcessor):
for state in states:
if self._pks_changed(uowcommit, state):
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_OFF)
+ state,
+ self.key,
+ attributes.PASSIVE_OFF)
if not self.cascade.delete_orphan:
return
@@ -969,17 +970,18 @@ class ManyToManyDP(DependencyProcessor):
# if delete_orphan check is turned on.
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ self.key,
+ attributes.PASSIVE_NO_INITIALIZE)
if history:
for child in history.deleted:
if self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete',
- child):
+ 'delete',
+ child):
uowcommit.register_object(
st_, isdelete=True)
@@ -994,9 +996,9 @@ class ManyToManyDP(DependencyProcessor):
# this history should be cached already, as
# we loaded it in preprocess_deletes
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.non_added():
if child is None or \
@@ -1005,10 +1007,10 @@ class ManyToManyDP(DependencyProcessor):
continue
associationrow = {}
if not self._synchronize(
- state,
- child,
- associationrow,
- False, uowcommit, "delete"):
+ state,
+ child,
+ associationrow,
+ False, uowcommit, "delete"):
continue
secondary_delete.append(associationrow)
@@ -1018,7 +1020,7 @@ class ManyToManyDP(DependencyProcessor):
processed.update(tmp)
self._run_crud(uowcommit, secondary_insert,
- secondary_update, secondary_delete)
+ secondary_update, secondary_delete)
def process_saves(self, uowcommit, states):
secondary_delete = []
@@ -1030,23 +1032,23 @@ class ManyToManyDP(DependencyProcessor):
for state in states:
need_cascade_pks = not self.passive_updates and \
- self._pks_changed(uowcommit, state)
+ self._pks_changed(uowcommit, state)
if need_cascade_pks:
passive = attributes.PASSIVE_OFF
else:
passive = attributes.PASSIVE_NO_INITIALIZE
history = uowcommit.get_attribute_history(state, self.key,
- passive)
+ passive)
if history:
for child in history.added:
if (processed is not None and
- (state, child) in processed):
+ (state, child) in processed):
continue
associationrow = {}
if not self._synchronize(state,
- child,
- associationrow,
- False, uowcommit, "add"):
+ child,
+ associationrow,
+ False, uowcommit, "add"):
continue
secondary_insert.append(associationrow)
for child in history.deleted:
@@ -1055,14 +1057,14 @@ class ManyToManyDP(DependencyProcessor):
continue
associationrow = {}
if not self._synchronize(state,
- child,
- associationrow,
- False, uowcommit, "delete"):
+ child,
+ associationrow,
+ False, uowcommit, "delete"):
continue
secondary_delete.append(associationrow)
tmp.update((c, state)
- for c in history.added + history.deleted)
+ for c in history.added + history.deleted)
if need_cascade_pks:
@@ -1085,45 +1087,45 @@ class ManyToManyDP(DependencyProcessor):
processed.update(tmp)
self._run_crud(uowcommit, secondary_insert,
- secondary_update, secondary_delete)
+ secondary_update, secondary_delete)
def _run_crud(self, uowcommit, secondary_insert,
- secondary_update, secondary_delete):
+ secondary_update, secondary_delete):
connection = uowcommit.transaction.connection(self.mapper)
if secondary_delete:
associationrow = secondary_delete[0]
statement = self.secondary.delete(sql.and_(*[
- c == sql.bindparam(c.key, type_=c.type)
- for c in self.secondary.c
- if c.key in associationrow
- ]))
+ c == sql.bindparam(c.key, type_=c.type)
+ for c in self.secondary.c
+ if c.key in associationrow
+ ]))
result = connection.execute(statement, secondary_delete)
if result.supports_sane_multi_rowcount() and \
- result.rowcount != len(secondary_delete):
+ result.rowcount != len(secondary_delete):
raise exc.StaleDataError(
"DELETE statement on table '%s' expected to delete "
"%d row(s); Only %d were matched." %
(self.secondary.description, len(secondary_delete),
- result.rowcount)
+ result.rowcount)
)
if secondary_update:
associationrow = secondary_update[0]
statement = self.secondary.update(sql.and_(*[
- c == sql.bindparam("old_" + c.key, type_=c.type)
- for c in self.secondary.c
- if c.key in associationrow
- ]))
+ c == sql.bindparam("old_" + c.key, type_=c.type)
+ for c in self.secondary.c
+ if c.key in associationrow
+ ]))
result = connection.execute(statement, secondary_update)
if result.supports_sane_multi_rowcount() and \
- result.rowcount != len(secondary_update):
+ result.rowcount != len(secondary_update):
raise exc.StaleDataError(
"UPDATE statement on table '%s' expected to update "
"%d row(s); Only %d were matched." %
(self.secondary.description, len(secondary_update),
- result.rowcount)
+ result.rowcount)
)
if secondary_insert:
@@ -1131,7 +1133,7 @@ class ManyToManyDP(DependencyProcessor):
connection.execute(statement, secondary_insert)
def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit, operation):
+ clearkeys, uowcommit, operation):
# this checks for None if uselist=True
self._verify_canload(child)
@@ -1150,18 +1152,18 @@ class ManyToManyDP(DependencyProcessor):
return False
sync.populate_dict(state, self.parent, associationrow,
- self.prop.synchronize_pairs)
+ self.prop.synchronize_pairs)
sync.populate_dict(child, self.mapper, associationrow,
- self.prop.secondary_synchronize_pairs)
+ self.prop.secondary_synchronize_pairs)
return True
def _pks_changed(self, uowcommit, state):
return sync.source_modified(
- uowcommit,
- state,
- self.parent,
- self.prop.synchronize_pairs)
+ uowcommit,
+ state,
+ self.parent,
+ self.prop.synchronize_pairs)
_direction_to_processor = {
ONETOMANY: OneToManyDP,
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index cd918cafe..fa693c968 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -8,6 +8,7 @@
from .. import event, util
from .interfaces import EXT_CONTINUE
+
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class MapperExtension(object):
"""Base implementation for :class:`.Mapper` event hooks.
@@ -64,20 +65,20 @@ class MapperExtension(object):
cls._adapt_listener_methods(
self, listener,
(
- 'init_instance',
- 'init_failed',
- 'translate_row',
- 'create_instance',
- 'append_result',
- 'populate_instance',
- 'reconstruct_instance',
- 'before_insert',
- 'after_insert',
- 'before_update',
- 'after_update',
- 'before_delete',
- 'after_delete'
- ))
+ 'init_instance',
+ 'init_failed',
+ 'translate_row',
+ 'create_instance',
+ 'append_result',
+ 'populate_instance',
+ 'reconstruct_instance',
+ 'before_insert',
+ 'after_insert',
+ 'before_update',
+ 'after_update',
+ 'before_delete',
+ 'after_delete'
+ ))
@classmethod
def _adapt_listener_methods(cls, self, listener, methods):
@@ -93,29 +94,30 @@ class MapperExtension(object):
ls_meth(self, instance)
return reconstruct
event.listen(self.class_manager, 'load',
- go(ls_meth), raw=False, propagate=True)
+ go(ls_meth), raw=False, propagate=True)
elif meth == 'init_instance':
def go(ls_meth):
def init_instance(instance, args, kwargs):
ls_meth(self, self.class_,
- self.class_manager.original_init,
- instance, args, kwargs)
+ self.class_manager.original_init,
+ instance, args, kwargs)
return init_instance
event.listen(self.class_manager, 'init',
- go(ls_meth), raw=False, propagate=True)
+ go(ls_meth), raw=False, propagate=True)
elif meth == 'init_failed':
def go(ls_meth):
def init_failed(instance, args, kwargs):
- util.warn_exception(ls_meth, self, self.class_,
- self.class_manager.original_init,
- instance, args, kwargs)
+ util.warn_exception(
+ ls_meth, self, self.class_,
+ self.class_manager.original_init,
+ instance, args, kwargs)
return init_failed
event.listen(self.class_manager, 'init_failure',
- go(ls_meth), raw=False, propagate=True)
+ go(ls_meth), raw=False, propagate=True)
else:
event.listen(self, "%s" % meth, ls_meth,
- raw=False, retval=True, propagate=True)
+ raw=False, retval=True, propagate=True)
def instrument_class(self, mapper, class_):
"""Receive a class when the mapper is first constructed, and has
@@ -198,7 +200,7 @@ class MapperExtension(object):
return EXT_CONTINUE
def append_result(self, mapper, selectcontext, row, instance,
- result, **flags):
+ result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
@@ -232,7 +234,7 @@ class MapperExtension(object):
return EXT_CONTINUE
def populate_instance(self, mapper, selectcontext, row,
- instance, **flags):
+ instance, **flags):
"""Receive an instance before that instance has
its attributes populated.
@@ -555,14 +557,14 @@ class AttributeExtension(object):
@classmethod
def _adapt_listener(cls, self, listener):
event.listen(self, 'append', listener.append,
- active_history=listener.active_history,
- raw=True, retval=True)
+ active_history=listener.active_history,
+ raw=True, retval=True)
event.listen(self, 'remove', listener.remove,
- active_history=listener.active_history,
- raw=True, retval=True)
+ active_history=listener.active_history,
+ raw=True, retval=True)
event.listen(self, 'set', listener.set,
- active_history=listener.active_history,
- raw=True, retval=True)
+ active_history=listener.active_history,
+ raw=True, retval=True)
def append(self, state, value, initiator):
"""Receive a collection append event.
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index 4c335a71c..5ed24b8c0 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -39,7 +39,7 @@ class DescriptorProperty(MapperProperty):
if hasattr(prop, 'get_history'):
def get_history(self, state, dict_,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
return prop.get_history(state, dict_, passive)
if self.descriptor is None:
@@ -63,16 +63,15 @@ class DescriptorProperty(MapperProperty):
fdel=fdel,
)
- proxy_attr = attributes.\
- create_proxied_attribute(self.descriptor)\
- (
- self.parent.class_,
- self.key,
- self.descriptor,
- lambda: self._comparator_factory(mapper),
- doc=self.doc,
- original_property=self
- )
+ proxy_attr = attributes.create_proxied_attribute(
+ self.descriptor)(
+ self.parent.class_,
+ self.key,
+ self.descriptor,
+ lambda: self._comparator_factory(mapper),
+ doc=self.doc,
+ original_property=self
+ )
proxy_attr.impl = _ProxyImpl(self.key)
mapper.class_manager.instrument_attribute(self.key, proxy_attr)
@@ -90,11 +89,12 @@ class CompositeProperty(DescriptorProperty):
:ref:`mapper_composite`
"""
+
def __init__(self, class_, *attrs, **kwargs):
"""Return a composite column-based property for use with a Mapper.
- See the mapping documentation section :ref:`mapper_composite` for a full
- usage example.
+ See the mapping documentation section :ref:`mapper_composite` for a
+ full usage example.
The :class:`.MapperProperty` returned by :func:`.composite`
is the :class:`.CompositeProperty`.
@@ -118,13 +118,14 @@ class CompositeProperty(DescriptorProperty):
A group name for this property when marked as deferred.
:param deferred:
- When True, the column property is "deferred", meaning that it does not
- load immediately, and is instead loaded when the attribute is first
- accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
+ When True, the column property is "deferred", meaning that it does
+ not load immediately, and is instead loaded when the attribute is
+ first accessed on an instance. See also
+ :func:`~sqlalchemy.orm.deferred`.
:param comparator_factory: a class which extends
- :class:`.CompositeProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
+ :class:`.CompositeProperty.Comparator` which provides custom SQL
+ clause generation for comparison operations.
:param doc:
optional string that will be applied as the doc on the
@@ -138,8 +139,8 @@ class CompositeProperty(DescriptorProperty):
:param extension:
an :class:`.AttributeExtension` instance,
or list of extensions, which will be prepended to the list of
- attribute listeners for the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
+ attribute listeners for the resulting descriptor placed on the
+ class. **Deprecated.** Please see :class:`.AttributeEvents`.
"""
@@ -149,14 +150,13 @@ class CompositeProperty(DescriptorProperty):
self.deferred = kwargs.get('deferred', False)
self.group = kwargs.get('group', None)
self.comparator_factory = kwargs.pop('comparator_factory',
- self.__class__.Comparator)
+ self.__class__.Comparator)
if 'info' in kwargs:
self.info = kwargs.pop('info')
util.set_creation_order(self)
self._create_descriptor()
-
def instrument_class(self, mapper):
super(CompositeProperty, self).instrument_class(mapper)
self._setup_event_handlers()
@@ -241,16 +241,17 @@ class CompositeProperty(DescriptorProperty):
props = []
for attr in self.attrs:
if isinstance(attr, str):
- prop = self.parent.get_property(attr, _configure_mappers=False)
+ prop = self.parent.get_property(
+ attr, _configure_mappers=False)
elif isinstance(attr, schema.Column):
prop = self.parent._columntoproperty[attr]
elif isinstance(attr, attributes.InstrumentedAttribute):
prop = attr.property
else:
raise sa_exc.ArgumentError(
- "Composite expects Column objects or mapped "
- "attributes/attribute names as arguments, got: %r"
- % (attr,))
+ "Composite expects Column objects or mapped "
+ "attributes/attribute names as arguments, got: %r"
+ % (attr,))
props.append(prop)
return props
@@ -268,8 +269,8 @@ class CompositeProperty(DescriptorProperty):
if self.deferred:
prop.deferred = self.deferred
prop.strategy_class = prop._strategy_lookup(
- ("deferred", True),
- ("instrument", True))
+ ("deferred", True),
+ ("instrument", True))
prop.group = self.group
def _setup_event_handlers(self):
@@ -288,11 +289,11 @@ class CompositeProperty(DescriptorProperty):
if k not in dict_:
return
- #assert self.key not in dict_
+ # assert self.key not in dict_
dict_[self.key] = self.composite_class(
- *[state.dict[key] for key in
- self._attribute_keys]
- )
+ *[state.dict[key] for key in
+ self._attribute_keys]
+ )
def expire_handler(state, keys):
if keys is None or set(self._attribute_keys).intersection(keys):
@@ -309,15 +310,15 @@ class CompositeProperty(DescriptorProperty):
state.dict.pop(self.key, None)
event.listen(self.parent, 'after_insert',
- insert_update_handler, raw=True)
+ insert_update_handler, raw=True)
event.listen(self.parent, 'after_update',
- insert_update_handler, raw=True)
+ insert_update_handler, raw=True)
event.listen(self.parent, 'load',
- load_handler, raw=True, propagate=True)
+ load_handler, raw=True, propagate=True)
event.listen(self.parent, 'refresh',
- load_handler, raw=True, propagate=True)
+ load_handler, raw=True, propagate=True)
event.listen(self.parent, 'expire',
- expire_handler, raw=True, propagate=True)
+ expire_handler, raw=True, propagate=True)
# TODO: need a deserialize hook here
@@ -368,14 +369,14 @@ class CompositeProperty(DescriptorProperty):
def __init__(self, property, expr):
self.property = property
super(CompositeProperty.CompositeBundle, self).__init__(
- property.key, *expr)
+ property.key, *expr)
def create_row_processor(self, query, procs, labels):
def proc(row, result):
- return self.property.composite_class(*[proc(row, result) for proc in procs])
+ return self.property.composite_class(
+ *[proc(row, result) for proc in procs])
return proc
-
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.CompositeProperty` attributes.
@@ -395,7 +396,6 @@ class CompositeProperty(DescriptorProperty):
"""
-
__hash__ = None
@property
@@ -403,20 +403,22 @@ class CompositeProperty(DescriptorProperty):
return self.__clause_element__()
def __clause_element__(self):
- return expression.ClauseList(group=False, *self._comparable_elements)
+ return expression.ClauseList(
+ group=False, *self._comparable_elements)
def _query_clause_element(self):
- return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__())
+ return CompositeProperty.CompositeBundle(
+ self.prop, self.__clause_element__())
@util.memoized_property
def _comparable_elements(self):
if self._adapt_to_entity:
return [
- getattr(
- self._adapt_to_entity.entity,
- prop.key
- ) for prop in self.prop._comparable_elements
- ]
+ getattr(
+ self._adapt_to_entity.entity,
+ prop.key
+ ) for prop in self.prop._comparable_elements
+ ]
else:
return self.prop._comparable_elements
@@ -471,9 +473,9 @@ class ConcreteInheritedProperty(DescriptorProperty):
def __init__(self):
def warn():
raise AttributeError("Concrete %s does not implement "
- "attribute %r at the instance level. Add this "
- "property explicitly to %s." %
- (self.parent, self.key, self.parent))
+ "attribute %r at the instance level. Add "
+ "this property explicitly to %s." %
+ (self.parent, self.key, self.parent))
class NoninheritedConcreteProp(object):
def __set__(s, obj, value):
@@ -493,8 +495,8 @@ class ConcreteInheritedProperty(DescriptorProperty):
class SynonymProperty(DescriptorProperty):
def __init__(self, name, map_column=None,
- descriptor=None, comparator_factory=None,
- doc=None):
+ descriptor=None, comparator_factory=None,
+ doc=None):
"""Denote an attribute name as a synonym to a mapped property,
in that the attribute will mirror the value and expression behavior
of another attribute.
@@ -523,11 +525,11 @@ class SynonymProperty(DescriptorProperty):
job_status = synonym("_job_status", map_column=True)
The above class ``MyClass`` will now have the ``job_status``
- :class:`.Column` object mapped to the attribute named ``_job_status``,
- and the attribute named ``job_status`` will refer to the synonym
- itself. This feature is typically used in conjunction with the
- ``descriptor`` argument in order to link a user-defined descriptor
- as a "wrapper" for an existing column.
+ :class:`.Column` object mapped to the attribute named
+ ``_job_status``, and the attribute named ``job_status`` will refer
+ to the synonym itself. This feature is typically used in
+ conjunction with the ``descriptor`` argument in order to link a
+ user-defined descriptor as a "wrapper" for an existing column.
:param comparator_factory: A subclass of :class:`.PropComparator`
that will provide custom comparison behavior at the SQL expression
@@ -580,12 +582,12 @@ class SynonymProperty(DescriptorProperty):
raise sa_exc.ArgumentError(
"Can't compile synonym '%s': no column on table "
"'%s' named '%s'"
- % (self.name, parent.mapped_table.description, self.key))
+ % (self.name, parent.mapped_table.description, self.key))
elif parent.mapped_table.c[self.key] in \
parent._columntoproperty and \
parent._columntoproperty[
- parent.mapped_table.c[self.key]
- ].key == self.name:
+ parent.mapped_table.c[self.key]
+ ].key == self.name:
raise sa_exc.ArgumentError(
"Can't call map_column=True for synonym %r=%r, "
"a ColumnProperty already exists keyed to the name "
@@ -594,9 +596,9 @@ class SynonymProperty(DescriptorProperty):
)
p = properties.ColumnProperty(parent.mapped_table.c[self.key])
parent._configure_property(
- self.name, p,
- init=init,
- setparent=True)
+ self.name, p,
+ init=init,
+ setparent=True)
p._mapped_by_synonym = self.key
self.parent = parent
@@ -646,7 +648,8 @@ class ComparableProperty(DescriptorProperty):
id = Column(Integer, primary_key=True)
word = Column(String)
word_insensitive = comparable_property(lambda prop, mapper:
- CaseInsensitiveComparator(mapper.c.word, mapper)
+ CaseInsensitiveComparator(
+ mapper.c.word, mapper)
)
@@ -675,5 +678,3 @@ class ComparableProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
-
-
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 68a09ff8c..51db1b107 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -17,9 +17,10 @@ from ..sql import operators
from . import (
attributes, object_session, util as orm_util, strategies,
object_mapper, exc as orm_exc, properties
- )
+)
from .query import Query
+
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
@@ -30,7 +31,8 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
"On relationship %s, 'dynamic' loaders cannot be used with "
"many-to-one/one-to-one relationships and/or "
"uselist=False." % self.parent_property)
- strategies._register_attribute(self,
+ strategies._register_attribute(
+ self,
mapper,
useobject=True,
uselist=True,
@@ -41,6 +43,7 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
backref=self.parent_property.back_populates,
)
+
class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
@@ -48,10 +51,10 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
collection = False
def __init__(self, class_, key, typecallable,
- dispatch,
- target_mapper, order_by, query_class=None, **kw):
+ dispatch,
+ target_mapper, order_by, query_class=None, **kw):
super(DynamicAttributeImpl, self).\
- __init__(class_, key, typecallable, dispatch, **kw)
+ __init__(class_, key, typecallable, dispatch, **kw)
self.target_mapper = target_mapper
self.order_by = order_by
if not query_class:
@@ -63,16 +66,16 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
if not passive & attributes.SQL_OK:
- return self._get_collection_history(state,
- attributes.PASSIVE_NO_INITIALIZE).added_items
+ return self._get_collection_history(
+ state, attributes.PASSIVE_NO_INITIALIZE).added_items
else:
return self.query_class(self, state)
def get_collection(self, state, dict_, user_data=None,
- passive=attributes.PASSIVE_NO_INITIALIZE):
+ passive=attributes.PASSIVE_NO_INITIALIZE):
if not passive & attributes.SQL_OK:
return self._get_collection_history(state,
- passive).added_items
+ passive).added_items
else:
history = self._get_collection_history(state, passive)
return history.added_plus_unchanged
@@ -86,7 +89,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return attributes.Event(self, attributes.OP_REMOVE)
def fire_append_event(self, state, dict_, value, initiator,
- collection_history=None):
+ collection_history=None):
if collection_history is None:
collection_history = self._modified_event(state, dict_)
@@ -99,7 +102,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
self.sethasparent(attributes.instance_state(value), state, True)
def fire_remove_event(self, state, dict_, value, initiator,
- collection_history=None):
+ collection_history=None):
if collection_history is None:
collection_history = self._modified_event(state, dict_)
@@ -117,8 +120,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
state.committed_state[self.key] = CollectionHistory(self, state)
state._modified_event(dict_,
- self,
- attributes.NEVER_SET)
+ self,
+ attributes.NEVER_SET)
# this is a hack to allow the fixtures.ComparableEntity fixture
# to work
@@ -126,8 +129,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return state.committed_state[self.key]
def set(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF,
- check_old=None, pop=False):
+ passive=attributes.PASSIVE_OFF,
+ check_old=None, pop=False):
if initiator and initiator.parent_token is self.parent_token:
return
@@ -145,7 +148,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
old_collection = collection_history.added_items
else:
old_collection = old_collection.union(
- collection_history.added_items)
+ collection_history.added_items)
idset = util.IdentitySet
constants = old_collection.intersection(new_values)
@@ -155,11 +158,11 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
for member in new_values:
if member in additions:
self.fire_append_event(state, dict_, member, None,
- collection_history=collection_history)
+ collection_history=collection_history)
for member in removals:
self.fire_remove_event(state, dict_, member, None,
- collection_history=collection_history)
+ collection_history=collection_history)
def delete(self, *args, **kwargs):
raise NotImplementedError()
@@ -173,14 +176,14 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return c.as_history()
def get_all_pending(self, state, dict_,
- passive=attributes.PASSIVE_NO_INITIALIZE):
+ passive=attributes.PASSIVE_NO_INITIALIZE):
c = self._get_collection_history(
state, passive)
return [
- (attributes.instance_state(x), x)
- for x in
- c.all_items
- ]
+ (attributes.instance_state(x), x)
+ for x in
+ c.all_items
+ ]
def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
if self.key in state.committed_state:
@@ -194,17 +197,17 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return c
def append(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
if initiator is not self:
self.fire_append_event(state, dict_, value, initiator)
def remove(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
if initiator is not self:
self.fire_remove_event(state, dict_, value, initiator)
def pop(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
self.remove(state, dict_, value, initiator, passive=passive)
@@ -219,10 +222,10 @@ class AppenderMixin(object):
mapper = object_mapper(instance)
prop = mapper._props[self.attr.key]
self._criterion = prop.compare(
- operators.eq,
- instance,
- value_is_parent=True,
- alias_secondary=False)
+ operators.eq,
+ instance,
+ value_is_parent=True,
+ alias_secondary=False)
if self.attr.order_by:
self._order_by = self.attr.order_by
@@ -230,7 +233,7 @@ class AppenderMixin(object):
def session(self):
sess = object_session(self.instance)
if sess is not None and self.autoflush and sess.autoflush \
- and self.instance in sess:
+ and self.instance in sess:
sess.flush()
if not orm_util.has_identity(self.instance):
return None
@@ -339,7 +342,7 @@ class CollectionHistory(object):
@property
def all_items(self):
return list(self.added_items.union(
- self.unchanged_items).union(self.deleted_items))
+ self.unchanged_items).union(self.deleted_items))
def as_history(self):
if self._reconcile_collection:
@@ -348,13 +351,13 @@ class CollectionHistory(object):
unchanged = self.unchanged_items.difference(deleted)
else:
added, unchanged, deleted = self.added_items,\
- self.unchanged_items,\
- self.deleted_items
+ self.unchanged_items,\
+ self.deleted_items
return attributes.History(
- list(added),
- list(unchanged),
- list(deleted),
- )
+ list(added),
+ list(unchanged),
+ list(deleted),
+ )
def indexed(self, index):
return list(self.added_items)[index]
@@ -367,4 +370,3 @@ class CollectionHistory(object):
self.added_items.remove(value)
else:
self.deleted_items.add(value)
-
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 3b56ff55a..2026e5d0a 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -14,15 +14,15 @@ class UnevaluatableError(Exception):
_straight_ops = set(getattr(operators, op)
for op in ('add', 'mul', 'sub',
- 'div',
- 'mod', 'truediv',
+ 'div',
+ 'mod', 'truediv',
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
_notimplemented_ops = set(getattr(operators, op)
- for op in ('like_op', 'notlike_op', 'ilike_op',
- 'notilike_op', 'between_op', 'in_op',
- 'notin_op', 'endswith_op', 'concat_op'))
+ for op in ('like_op', 'notlike_op', 'ilike_op',
+ 'notilike_op', 'between_op', 'in_op',
+ 'notin_op', 'endswith_op', 'concat_op'))
class EvaluatorCompiler(object):
@@ -55,7 +55,7 @@ class EvaluatorCompiler(object):
self.target_cls, parentmapper.class_):
raise UnevaluatableError(
"Can't evaluate criteria against alternate class %s" %
- parentmapper.class_
+ parentmapper.class_
)
key = parentmapper._columntoproperty[clause].key
else:
@@ -95,7 +95,7 @@ class EvaluatorCompiler(object):
def visit_binary(self, clause):
eval_left, eval_right = list(map(self.process,
- [clause.left, clause.right]))
+ [clause.left, clause.right]))
operator = clause.operator
if operator is operators.is_:
def evaluate(obj):
@@ -112,8 +112,8 @@ class EvaluatorCompiler(object):
return operator(eval_left(obj), eval_right(obj))
else:
raise UnevaluatableError(
- "Cannot evaluate %s with operator %s" %
- (type(clause).__name__, clause.operator))
+ "Cannot evaluate %s with operator %s" %
+ (type(clause).__name__, clause.operator))
return evaluate
def visit_unary(self, clause):
@@ -126,8 +126,8 @@ class EvaluatorCompiler(object):
return not value
return evaluate
raise UnevaluatableError(
- "Cannot evaluate %s with operator %s" %
- (type(clause).__name__, clause.operator))
+ "Cannot evaluate %s with operator %s" %
+ (type(clause).__name__, clause.operator))
def visit_bindparam(self, clause):
val = clause.value
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 0e08a0898..aa99673ba 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -18,6 +18,7 @@ from .session import Session, sessionmaker
from .scoping import scoped_session
from .attributes import QueryableAttribute
+
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
@@ -50,7 +51,6 @@ class InstrumentationEvents(event.Events):
_target_class_doc = "SomeBaseClass"
_dispatch_target = instrumentation.InstrumentationFactory
-
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
@@ -71,10 +71,11 @@ class InstrumentationEvents(event.Events):
return fn(target_cls, *arg)
def remove(ref):
- key = event.registry._EventKey(None, identifier, listen,
- instrumentation._instrumentation_factory)
+ key = event.registry._EventKey(
+ None, identifier, listen,
+ instrumentation._instrumentation_factory)
getattr(instrumentation._instrumentation_factory.dispatch,
- identifier).remove(key)
+ identifier).remove(key)
target = weakref.ref(target.class_, remove)
@@ -107,17 +108,18 @@ class InstrumentationEvents(event.Events):
"""Called when an attribute is instrumented."""
-
class _InstrumentationEventsHold(object):
"""temporary marker object used to transfer from _accept_with() to
_listen() on the InstrumentationEvents class.
"""
+
def __init__(self, class_):
self.class_ = class_
dispatch = event.dispatcher(InstrumentationEvents)
+
class InstanceEvents(event.Events):
"""Define events specific to object lifecycle.
@@ -201,7 +203,8 @@ class InstanceEvents(event.Events):
if propagate:
for mgr in target.subclass_managers(True):
- event_key.with_dispatch_target(mgr).base_listen(propagate=True)
+ event_key.with_dispatch_target(mgr).base_listen(
+ propagate=True)
@classmethod
def _clear(cls):
@@ -330,6 +333,7 @@ class InstanceEvents(event.Events):
"""
+
class _EventsHold(event.RefCollection):
"""Hold onto listeners against unmapped, uninstrumented classes.
@@ -337,6 +341,7 @@ class _EventsHold(event.RefCollection):
those objects are created for that class.
"""
+
def __init__(self, class_):
self.class_ = class_
@@ -387,9 +392,9 @@ class _EventsHold(event.RefCollection):
collection = cls.all_holds[subclass]
for event_key, raw, propagate in collection.values():
if propagate or subclass is class_:
- # since we can't be sure in what order different classes
- # in a hierarchy are triggered with populate(),
- # we rely upon _EventsHold for all event
+ # since we can't be sure in what order different
+ # classes in a hierarchy are triggered with
+ # populate(), we rely upon _EventsHold for all event
# assignment, instead of using the generic propagate
# flag.
event_key.with_dispatch_target(subject).\
@@ -502,16 +507,17 @@ class MapperEvents(event.Events):
return target
@classmethod
- def _listen(cls, event_key, raw=False, retval=False, propagate=False, **kw):
+ def _listen(
+ cls, event_key, raw=False, retval=False, propagate=False, **kw):
target, identifier, fn = \
event_key.dispatch_target, event_key.identifier, event_key.fn
if identifier in ("before_configured", "after_configured") and \
- target is not mapperlib.Mapper:
+ target is not mapperlib.Mapper:
util.warn(
- "'before_configured' and 'after_configured' ORM events "
- "only invoke with the mapper() function or Mapper class "
- "as the target.")
+ "'before_configured' and 'after_configured' ORM events "
+ "only invoke with the mapper() function or Mapper class "
+ "as the target.")
if not raw or not retval:
if not raw:
@@ -536,7 +542,7 @@ class MapperEvents(event.Events):
if propagate:
for mapper in target.self_and_descendants:
event_key.with_dispatch_target(mapper).base_listen(
- propagate=True, **kw)
+ propagate=True, **kw)
else:
event_key.base_listen(**kw)
@@ -722,7 +728,7 @@ class MapperEvents(event.Events):
"""
def append_result(self, mapper, context, row, target,
- result, **flags):
+ result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
@@ -759,7 +765,7 @@ class MapperEvents(event.Events):
"""
def populate_instance(self, mapper, context, row,
- target, **flags):
+ target, **flags):
"""Receive an instance before that instance has
its attributes populated.
@@ -1165,6 +1171,7 @@ class MapperEvents(event.Events):
"""
+
class _MapperEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
@@ -1215,11 +1222,11 @@ class SessionEvents(event.Events):
(
not isinstance(target, type) or
not issubclass(target, Session)
- ):
+ ):
raise exc.ArgumentError(
- "Session event listen on a scoped_session "
- "requires that its creation callable "
- "is associated with the Session class.")
+ "Session event listen on a scoped_session "
+ "requires that its creation callable "
+ "is associated with the Session class.")
if isinstance(target, sessionmaker):
return target.class_
@@ -1284,8 +1291,10 @@ class SessionEvents(event.Events):
The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
- For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
- :meth:`~.SessionEvents.after_flush`, or :meth:`~.SessionEvents.after_flush_postexec`
+ For interception of these events, use the
+ :meth:`~.SessionEvents.before_flush`,
+ :meth:`~.SessionEvents.after_flush`, or
+ :meth:`~.SessionEvents.after_flush_postexec`
events.
:param session: The target :class:`.Session`.
@@ -1310,16 +1319,19 @@ class SessionEvents(event.Events):
The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
- For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
- :meth:`~.SessionEvents.after_flush`, or :meth:`~.SessionEvents.after_flush_postexec`
+ For interception of these events, use the
+ :meth:`~.SessionEvents.before_flush`,
+ :meth:`~.SessionEvents.after_flush`, or
+ :meth:`~.SessionEvents.after_flush_postexec`
events.
.. note::
The :class:`.Session` is not in an active transaction
- when the :meth:`~.SessionEvents.after_commit` event is invoked, and therefore
- can not emit SQL. To emit SQL corresponding to every transaction,
- use the :meth:`~.SessionEvents.before_commit` event.
+ when the :meth:`~.SessionEvents.after_commit` event is invoked,
+ and therefore can not emit SQL. To emit SQL corresponding to
+ every transaction, use the :meth:`~.SessionEvents.before_commit`
+ event.
:param session: The target :class:`.Session`.
@@ -1467,8 +1479,8 @@ class SessionEvents(event.Events):
This is called before an add, delete or merge causes
the object to be part of the session.
- .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now
- fires off after the item is part of the session.
+ .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach`
+ now fires off after the item is part of the session.
:meth:`.before_attach` is provided for those cases where
the item should not yet be part of the session state.
@@ -1501,12 +1513,12 @@ class SessionEvents(event.Events):
"""
@event._legacy_signature("0.9",
- ["session", "query", "query_context", "result"],
- lambda update_context: (
- update_context.session,
- update_context.query,
- update_context.context,
- update_context.result))
+ ["session", "query", "query_context", "result"],
+ lambda update_context: (
+ update_context.session,
+ update_context.query,
+ update_context.context,
+ update_context.result))
def after_bulk_update(self, update_context):
"""Execute after a bulk update operation to the session.
@@ -1516,8 +1528,8 @@ class SessionEvents(event.Events):
details about the update, including these attributes:
* ``session`` - the :class:`.Session` involved
- * ``query`` -the :class:`.Query` object that this update operation was
- called upon.
+ * ``query`` - the :class:`.Query` object that this update operation
+ was called upon.
* ``context`` The :class:`.QueryContext` object, corresponding
to the invocation of an ORM query.
* ``result`` the :class:`.ResultProxy` returned as a result of the
@@ -1527,12 +1539,12 @@ class SessionEvents(event.Events):
"""
@event._legacy_signature("0.9",
- ["session", "query", "query_context", "result"],
- lambda delete_context: (
- delete_context.session,
- delete_context.query,
- delete_context.context,
- delete_context.result))
+ ["session", "query", "query_context", "result"],
+ lambda delete_context: (
+ delete_context.session,
+ delete_context.query,
+ delete_context.context,
+ delete_context.result))
def after_bulk_delete(self, delete_context):
"""Execute after a bulk delete operation to the session.
@@ -1542,8 +1554,8 @@ class SessionEvents(event.Events):
details about the update, including these attributes:
* ``session`` - the :class:`.Session` involved
- * ``query`` -the :class:`.Query` object that this update operation was
- called upon.
+ * ``query`` - the :class:`.Query` object that this update operation
+ was called upon.
* ``context`` The :class:`.QueryContext` object, corresponding
to the invocation of an ORM query.
* ``result`` the :class:`.ResultProxy` returned as a result of the
@@ -1628,8 +1640,8 @@ class AttributeEvents(event.Events):
@classmethod
def _listen(cls, event_key, active_history=False,
- raw=False, retval=False,
- propagate=False):
+ raw=False, retval=False,
+ propagate=False):
target, identifier, fn = \
event_key.dispatch_target, event_key.identifier, event_key.fn
@@ -1654,7 +1666,8 @@ class AttributeEvents(event.Events):
manager = instrumentation.manager_of_class(target.class_)
for mgr in manager.subclass_managers(True):
- event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True)
+ event_key.with_dispatch_target(
+ mgr[target.key]).base_listen(propagate=True)
def append(self, target, value, initiator):
"""Receive a collection append event.
@@ -1672,8 +1685,9 @@ class AttributeEvents(event.Events):
chained event propagation.
.. versionchanged:: 0.9.0 the ``initiator`` argument is now
- passed as a :class:`.attributes.Event` object, and may be modified
- by backref handlers within a chain of backref-linked events.
+ passed as a :class:`.attributes.Event` object, and may be
+ modified by backref handlers within a chain of backref-linked
+ events.
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
@@ -1693,8 +1707,9 @@ class AttributeEvents(event.Events):
chained event propagation.
.. versionchanged:: 0.9.0 the ``initiator`` argument is now
- passed as a :class:`.attributes.Event` object, and may be modified
- by backref handlers within a chain of backref-linked events.
+ passed as a :class:`.attributes.Event` object, and may be
+ modified by backref handlers within a chain of backref-linked
+ events.
:return: No return value is defined for this event.
"""
@@ -1721,11 +1736,11 @@ class AttributeEvents(event.Events):
chained event propagation.
.. versionchanged:: 0.9.0 the ``initiator`` argument is now
- passed as a :class:`.attributes.Event` object, and may be modified
- by backref handlers within a chain of backref-linked events.
+ passed as a :class:`.attributes.Event` object, and may be
+ modified by backref handlers within a chain of backref-linked
+ events.
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
"""
-
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index 11e69d221..ff0ece411 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -121,7 +121,7 @@ class ObjectDeletedError(sa_exc.InvalidRequestError):
def __init__(self, base, state, msg=None):
if not msg:
msg = "Instance '%s' has been deleted, or its "\
- "row is otherwise not present." % base.state_str(state)
+ "row is otherwise not present." % base.state_str(state)
sa_exc.InvalidRequestError.__init__(self, msg)
@@ -150,6 +150,7 @@ def _safe_cls_name(cls):
cls_name = repr(cls)
return cls_name
+
@util.dependencies("sqlalchemy.orm.base")
def _default_unmapped(base, cls):
try:
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 745b9d569..d9cdd791f 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -9,6 +9,7 @@ import weakref
from . import attributes
from .. import util
+
class IdentityMap(object):
def __init__(self):
self._dict = {}
@@ -237,7 +238,6 @@ class StrongInstanceDict(IdentityMap):
def items(self):
return self._dict.items()
-
def all_states(self):
return [attributes.instance_state(o) for o in self.values()]
@@ -262,8 +262,8 @@ class StrongInstanceDict(IdentityMap):
if state.key in self:
if attributes.instance_state(self._dict[state.key]) is not state:
raise AssertionError('A conflicting state is already '
- 'present in the identity map for key %r'
- % (state.key, ))
+ 'present in the identity map for key %r'
+ % (state.key, ))
else:
self._dict[state.key] = state.obj()
self._manage_incoming_state(state)
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index fd74704df..f58b8807f 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -34,6 +34,7 @@ from . import exc, collections, interfaces, state
from .. import util
from . import base
+
class ClassManager(dict):
"""tracks state information at the class level."""
@@ -54,16 +55,16 @@ class ClassManager(dict):
self.originals = {}
self._bases = [mgr for mgr in [
- manager_of_class(base)
- for base in self.class_.__bases__
- if isinstance(base, type)
- ] if mgr is not None]
+ manager_of_class(base)
+ for base in self.class_.__bases__
+ if isinstance(base, type)
+ ] if mgr is not None]
for base in self._bases:
self.update(base)
self.dispatch._events._new_classmanager_instance(class_, self)
- #events._InstanceEventsHold.populate(class_, self)
+ # events._InstanceEventsHold.populate(class_, self)
for basecls in class_.__mro__:
mgr = manager_of_class(basecls)
@@ -74,10 +75,10 @@ class ClassManager(dict):
if '__del__' in class_.__dict__:
util.warn("__del__() method on class %s will "
- "cause unreachable cycles and memory leaks, "
- "as SQLAlchemy instrumentation often creates "
- "reference cycles. Please remove this method." %
- class_)
+ "cause unreachable cycles and memory leaks, "
+ "as SQLAlchemy instrumentation often creates "
+ "reference cycles. Please remove this method." %
+ class_)
def __hash__(self):
return id(self)
@@ -99,7 +100,8 @@ class ClassManager(dict):
implement :class:`._InspectionAttr`.
This includes :class:`.QueryableAttribute` as well as extension
- types such as :class:`.hybrid_property` and :class:`.AssociationProxy`.
+ types such as :class:`.hybrid_property` and
+ :class:`.AssociationProxy`.
"""
if exclude is None:
@@ -111,7 +113,6 @@ class ClassManager(dict):
if isinstance(val, interfaces._InspectionAttr):
yield key, val
-
def _attr_has_impl(self, key):
"""Return True if the given attribute is fully initialized.
@@ -185,7 +186,6 @@ class ClassManager(dict):
def dict_getter(self):
return _default_dict_getter
-
def instrument_attribute(self, key, inst, propagated=False):
if propagated:
if key in self.local_attrs:
@@ -210,7 +210,7 @@ class ClassManager(dict):
def post_configure_attribute(self, key):
_instrumentation_factory.dispatch.\
- attribute_instrument(self.class_, key, self[key])
+ attribute_instrument(self.class_, key, self[key])
def uninstrument_attribute(self, key, propagated=False):
if key not in self:
@@ -284,19 +284,19 @@ class ClassManager(dict):
def attributes(self):
return iter(self.values())
- ## InstanceState management
+ # InstanceState management
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
setattr(instance, self.STATE_ATTR,
- self._state_constructor(instance, self)
- if not state else state)
+ self._state_constructor(instance, self)
+ if not state else state)
return instance
def setup_instance(self, instance, state=None):
setattr(instance, self.STATE_ATTR,
- self._state_constructor(instance, self)
- if not state else state)
+ self._state_constructor(instance, self)
+ if not state else state)
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
@@ -320,7 +320,7 @@ class ClassManager(dict):
# to be constructed, so that it is usable
# in a non-ORM context at least.
return self._subclass_manager(instance.__class__).\
- _new_state_if_none(instance)
+ _new_state_if_none(instance)
else:
state = self._state_constructor(instance, self)
setattr(instance, self.STATE_ATTR, state)
@@ -343,6 +343,7 @@ class ClassManager(dict):
return '<%s of %r at %x>' % (
self.__class__.__name__, self.class_, id(self))
+
class _SerializeManager(object):
"""Provide serialization of a :class:`.ClassManager`.
@@ -350,6 +351,7 @@ class _SerializeManager(object):
and ``__call__()`` on deserialize.
"""
+
def __init__(self, state, d):
self.class_ = state.class_
manager = state.manager
@@ -359,12 +361,12 @@ class _SerializeManager(object):
state.manager = manager = manager_of_class(self.class_)
if manager is None:
raise exc.UnmappedInstanceError(
- inst,
- "Cannot deserialize object of type %r - "
- "no mapper() has "
- "been configured for this class within the current "
- "Python process!" %
- self.class_)
+ inst,
+ "Cannot deserialize object of type %r - "
+ "no mapper() has "
+ "been configured for this class within the current "
+ "Python process!" %
+ self.class_)
elif manager.is_mapped and not manager.mapper.configured:
manager.mapper._configure_all()
@@ -375,6 +377,7 @@ class _SerializeManager(object):
manager.setup_instance(inst, state)
manager.dispatch.unpickle(state, state_dict)
+
class InstrumentationFactory(object):
"""Factory for new ClassManager instances."""
@@ -426,6 +429,7 @@ instance_dict = _default_dict_getter = base.instance_dict
manager_of_class = _default_manager_getter = base.manager_of_class
+
def register_class(class_):
"""Register class instrumentation.
@@ -454,7 +458,7 @@ def is_instrumented(instance, key):
"""
return manager_of_class(instance.__class__).\
- is_instrumented(key, search=True)
+ is_instrumented(key, search=True)
def _generate_init(class_, class_manager):
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index d5e430506..9bc1c3dd0 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -22,7 +22,8 @@ from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
-from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
+from .base import (ONETOMANY, MANYTOONE, MANYTOMANY,
+ EXT_CONTINUE, EXT_STOP, NOT_EXTENSION)
from .base import _InspectionAttr, _MappedAttribute
from .path_registry import PathRegistry
import collections
@@ -43,8 +44,7 @@ __all__ = (
'PropComparator',
'SessionExtension',
'StrategizedProperty',
- )
-
+)
class MapperProperty(_MappedAttribute, _InspectionAttr):
@@ -82,14 +82,14 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
pass
def create_row_processor(self, context, path,
- mapper, row, adapter):
+ mapper, row, adapter):
"""Return a 3-tuple consisting of three row processing functions.
"""
return None, None, None
def cascade_iterator(self, type_, state, visited_instances=None,
- halt_on=None):
+ halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
@@ -200,7 +200,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
return not self.parent.non_primary
def merge(self, session, source_state, source_dict, dest_state,
- dest_dict, load, _recursive):
+ dest_dict, load, _recursive):
"""Merge the attribute represented by this ``MapperProperty``
from source to destination object"""
@@ -223,6 +223,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
self.__class__.__name__,
id(self), getattr(self, 'key', 'no key'))
+
class PropComparator(operators.ColumnOperators):
"""Defines boolean, comparison, and other operators for
:class:`.MapperProperty` objects.
@@ -434,10 +435,10 @@ class StrategizedProperty(MapperProperty):
# search among: exact match, "attr.*", "default" strategy
# if any.
for path_key in (
- search_path._loader_key,
- search_path._wildcard_path_loader_key,
- search_path._default_path_loader_key
- ):
+ search_path._loader_key,
+ search_path._wildcard_path_loader_key,
+ search_path._default_path_loader_key
+ ):
if path_key in context.attributes:
load = context.attributes[path_key]
break
@@ -449,7 +450,8 @@ class StrategizedProperty(MapperProperty):
return self._strategies[key]
except KeyError:
cls = self._strategy_lookup(*key)
- self._strategies[key] = self._strategies[cls] = strategy = cls(self)
+ self._strategies[key] = self._strategies[
+ cls] = strategy = cls(self)
return strategy
def _get_strategy_by_cls(self, cls):
@@ -470,7 +472,7 @@ class StrategizedProperty(MapperProperty):
else:
strat = self.strategy
return strat.create_row_processor(context, path, loader,
- mapper, row, adapter)
+ mapper, row, adapter)
def do_init(self):
self._strategies = {}
@@ -478,10 +480,9 @@ class StrategizedProperty(MapperProperty):
def post_instrument_class(self, mapper):
if self.is_primary() and \
- not mapper.class_manager._attr_has_impl(self.key):
+ not mapper.class_manager._attr_has_impl(self.key):
self.strategy.init_class_attribute(mapper)
-
_strategies = collections.defaultdict(dict)
@classmethod
@@ -527,8 +528,6 @@ class MapperOption(object):
self.process_query(query)
-
-
class LoaderStrategy(object):
"""Describe the loading behavior of a StrategizedProperty object.
@@ -552,6 +551,7 @@ class LoaderStrategy(object):
on a particular mapped instance.
"""
+
def __init__(self, parent):
self.parent_property = parent
self.is_class_level = False
@@ -565,7 +565,7 @@ class LoaderStrategy(object):
pass
def create_row_processor(self, context, path, loadopt, mapper,
- row, adapter):
+ row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 3c152717c..232eb89de 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -31,11 +31,11 @@ def instances(query, cursor, context):
context.runid = _new_runid()
filter_fns = [ent.filter_fn
- for ent in query._entities]
+ for ent in query._entities]
filtered = id in filter_fns
single_entity = len(query._entities) == 1 and \
- query._entities[0].supports_single_entity
+ query._entities[0].supports_single_entity
if filtered:
if single_entity:
@@ -45,14 +45,14 @@ def instances(query, cursor, context):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
custom_rows = single_entity and \
- query._entities[0].custom_rows
+ query._entities[0].custom_rows
(process, labels) = \
- list(zip(*[
- query_entity.row_processor(query,
- context, custom_rows)
- for query_entity in query._entities
- ]))
+ list(zip(*[
+ query_entity.row_processor(query,
+ context, custom_rows)
+ for query_entity in query._entities
+ ]))
while True:
context.progress = {}
@@ -79,9 +79,9 @@ def instances(query, cursor, context):
rows = util.unique_list(rows, filter_fn)
if context.refresh_state and query._only_load_props \
- and context.refresh_state in context.progress:
+ and context.refresh_state in context.progress:
context.refresh_state._commit(
- context.refresh_state.dict, query._only_load_props)
+ context.refresh_state.dict, query._only_load_props)
context.progress.pop(context.refresh_state)
statelib.InstanceState._commit_all_states(
@@ -115,15 +115,15 @@ def merge_result(querylib, query, iterator, load=True):
if single_entity:
if isinstance(query._entities[0], querylib._MapperEntity):
result = [session._merge(
- attributes.instance_state(instance),
- attributes.instance_dict(instance),
- load=load, _recursive={})
- for instance in iterator]
+ attributes.instance_state(instance),
+ attributes.instance_dict(instance),
+ load=load, _recursive={})
+ for instance in iterator]
else:
result = list(iterator)
else:
mapped_entities = [i for i, e in enumerate(query._entities)
- if isinstance(e, querylib._MapperEntity)]
+ if isinstance(e, querylib._MapperEntity)]
result = []
keys = [ent._label_name for ent in query._entities]
for row in iterator:
@@ -131,9 +131,9 @@ def merge_result(querylib, query, iterator, load=True):
for i in mapped_entities:
if newrow[i] is not None:
newrow[i] = session._merge(
- attributes.instance_state(newrow[i]),
- attributes.instance_dict(newrow[i]),
- load=load, _recursive={})
+ attributes.instance_state(newrow[i]),
+ attributes.instance_dict(newrow[i]),
+ load=load, _recursive={})
result.append(util.KeyedTuple(newrow, keys))
return iter(result)
@@ -171,8 +171,8 @@ def get_from_identity(session, key, passive):
def load_on_ident(query, key,
- refresh_state=None, lockmode=None,
- only_load_props=None):
+ refresh_state=None, lockmode=None,
+ only_load_props=None):
"""Load the given identity key from the database."""
if key is not None:
@@ -196,10 +196,10 @@ def load_on_ident(query, key,
if None in ident:
nones = set([
_get_params[col].key for col, value in
- zip(mapper.primary_key, ident) if value is None
+ zip(mapper.primary_key, ident) if value is None
])
_get_clause = sql_util.adapt_criterion_to_null(
- _get_clause, nones)
+ _get_clause, nones)
_get_clause = q._adapt_clause(_get_clause, True, False)
q._criterion = _get_clause
@@ -234,11 +234,10 @@ def load_on_ident(query, key,
def instance_processor(mapper, context, path, adapter,
- polymorphic_from=None,
- only_load_props=None,
- refresh_state=None,
- polymorphic_discriminator=None):
-
+ polymorphic_from=None,
+ only_load_props=None,
+ refresh_state=None,
+ polymorphic_discriminator=None):
"""Produce a mapper level row processor callable
which processes rows into mapped instances."""
@@ -258,10 +257,10 @@ def instance_processor(mapper, context, path, adapter,
else:
polymorphic_on = mapper.polymorphic_on
polymorphic_instances = util.PopulateDict(
- _configure_subclass_mapper(
- mapper,
- context, path, adapter)
- )
+ _configure_subclass_mapper(
+ mapper,
+ context, path, adapter)
+ )
version_id_col = mapper.version_id_col
@@ -279,8 +278,8 @@ def instance_processor(mapper, context, path, adapter,
eager_populators = []
load_path = context.query._current_path + path \
- if context.query._current_path.path \
- else path
+ if context.query._current_path.path \
+ else path
def populate_state(state, dict_, row, isnew, only_load_props):
if isnew:
@@ -291,10 +290,10 @@ def instance_processor(mapper, context, path, adapter,
if not new_populators:
_populators(mapper, context, path, row, adapter,
- new_populators,
- existing_populators,
- eager_populators
- )
+ new_populators,
+ existing_populators,
+ eager_populators
+ )
if isnew:
populators = new_populators
@@ -313,7 +312,7 @@ def instance_processor(mapper, context, path, adapter,
listeners = mapper.dispatch
- ### legacy events - I'd very much like to yank these totally
+ # legacy events - I'd very much like to yank these totally
translate_row = listeners.translate_row or None
create_instance = listeners.create_instance or None
populate_instance = listeners.populate_instance or None
@@ -335,9 +334,9 @@ def instance_processor(mapper, context, path, adapter,
def _instance(row, result):
if not new_populators and invoke_all_eagers:
_populators(mapper, context, path, row, adapter,
- new_populators,
- existing_populators,
- eager_populators)
+ new_populators,
+ existing_populators,
+ eager_populators)
if translate_row:
for fn in translate_row:
@@ -363,9 +362,9 @@ def instance_processor(mapper, context, path, adapter,
identitykey = mapper._identity_key_from_state(refresh_state)
else:
identitykey = (
- identity_class,
- tuple([row[column] for column in pk_cols])
- )
+ identity_class,
+ tuple([row[column] for column in pk_cols])
+ )
instance = session_identity_map.get(identitykey)
@@ -381,19 +380,19 @@ def instance_processor(mapper, context, path, adapter,
version_id_col is not None and \
context.version_check and \
mapper._get_state_attr_by_column(
- state,
- dict_,
- mapper.version_id_col) != \
- row[version_id_col]:
+ state,
+ dict_,
+ mapper.version_id_col) != \
+ row[version_id_col]:
raise orm_exc.StaleDataError(
- "Instance '%s' has version id '%s' which "
- "does not match database-loaded version id '%s'."
- % (state_str(state),
- mapper._get_state_attr_by_column(
- state, dict_,
- mapper.version_id_col),
- row[version_id_col]))
+ "Instance '%s' has version id '%s' which "
+ "does not match database-loaded version id '%s'."
+ % (state_str(state),
+ mapper._get_state_attr_by_column(
+ state, dict_,
+ mapper.version_id_col),
+ row[version_id_col]))
elif refresh_state:
# out of band refresh_state detected (i.e. its not in the
# session.identity_map) honor it anyway. this can happen
@@ -418,10 +417,10 @@ def instance_processor(mapper, context, path, adapter,
if create_instance:
for fn in create_instance:
instance = fn(mapper, context,
- row, mapper.class_)
+ row, mapper.class_)
if instance is not EXT_CONTINUE:
manager = attributes.manager_of_class(
- instance.__class__)
+ instance.__class__)
# TODO: if manager is None, raise a friendly error
# about returning instances of unmapped types
manager.setup_instance(instance)
@@ -449,8 +448,8 @@ def instance_processor(mapper, context, path, adapter,
if populate_instance:
for fn in populate_instance:
ret = fn(mapper, context, row, state,
- only_load_props=only_load_props,
- instancekey=identitykey, isnew=isnew)
+ only_load_props=only_load_props,
+ instancekey=identitykey, isnew=isnew)
if ret is not EXT_CONTINUE:
break
else:
@@ -461,7 +460,8 @@ def instance_processor(mapper, context, path, adapter,
if loaded_instance and load_evt:
state.manager.dispatch.load(state, context)
elif isnew and refresh_evt:
- state.manager.dispatch.refresh(state, context, only_load_props)
+ state.manager.dispatch.refresh(
+ state, context, only_load_props)
elif state in context.partials or state.unloaded or eager_populators:
# state is having a partial set of its attributes
@@ -478,8 +478,8 @@ def instance_processor(mapper, context, path, adapter,
if populate_instance:
for fn in populate_instance:
ret = fn(mapper, context, row, state,
- only_load_props=attrs,
- instancekey=identitykey, isnew=isnew)
+ only_load_props=attrs,
+ instancekey=identitykey, isnew=isnew)
if ret is not EXT_CONTINUE:
break
else:
@@ -498,8 +498,8 @@ def instance_processor(mapper, context, path, adapter,
if append_result:
for fn in append_result:
if fn(mapper, context, row, state,
- result, instancekey=identitykey,
- isnew=isnew) is not EXT_CONTINUE:
+ result, instancekey=identitykey,
+ isnew=isnew) is not EXT_CONTINUE:
break
else:
result.append(instance)
@@ -511,20 +511,20 @@ def instance_processor(mapper, context, path, adapter,
def _populators(mapper, context, path, row, adapter,
- new_populators, existing_populators, eager_populators):
+ new_populators, existing_populators, eager_populators):
"""Produce a collection of attribute level row processor
callables."""
delayed_populators = []
pops = (new_populators, existing_populators, delayed_populators,
- eager_populators)
+ eager_populators)
for prop in mapper._props.values():
for i, pop in enumerate(prop.create_row_processor(
- context,
- path,
- mapper, row, adapter)):
+ context,
+ path,
+ mapper, row, adapter)):
if pop is not None:
pops[i].append((prop.key, pop))
@@ -541,30 +541,30 @@ def _configure_subclass_mapper(mapper, context, path, adapter):
sub_mapper = mapper.polymorphic_map[discriminator]
except KeyError:
raise AssertionError(
- "No such polymorphic_identity %r is defined" %
- discriminator)
+ "No such polymorphic_identity %r is defined" %
+ discriminator)
if sub_mapper is mapper:
return None
return instance_processor(
- sub_mapper,
- context,
- path,
- adapter,
- polymorphic_from=mapper)
+ sub_mapper,
+ context,
+ path,
+ adapter,
+ polymorphic_from=mapper)
return configure_subclass_mapper
def load_scalar_attributes(mapper, state, attribute_names):
"""initiate a column-based attribute refresh operation."""
- #assert mapper is _state_mapper(state)
+ # assert mapper is _state_mapper(state)
session = state.session
if not session:
raise orm_exc.DetachedInstanceError(
- "Instance %s is not bound to a Session; "
- "attribute refresh operation cannot proceed" %
- (state_str(state)))
+ "Instance %s is not bound to a Session; "
+ "attribute refresh operation cannot proceed" %
+ (state_str(state)))
has_key = bool(state.key)
@@ -574,11 +574,11 @@ def load_scalar_attributes(mapper, state, attribute_names):
statement = mapper._optimized_get_statement(state, attribute_names)
if statement is not None:
result = load_on_ident(
- session.query(mapper).from_statement(statement),
- None,
- only_load_props=attribute_names,
- refresh_state=state
- )
+ session.query(mapper).from_statement(statement),
+ None,
+ only_load_props=attribute_names,
+ refresh_state=state
+ )
if result is False:
if has_key:
@@ -592,25 +592,25 @@ def load_scalar_attributes(mapper, state, attribute_names):
for col in mapper.primary_key]
if state.expired_attributes.intersection(pk_attrs):
raise sa_exc.InvalidRequestError(
- "Instance %s cannot be refreshed - it's not "
- " persistent and does not "
- "contain a full primary key." % state_str(state))
+ "Instance %s cannot be refreshed - it's not "
+ " persistent and does not "
+ "contain a full primary key." % state_str(state))
identity_key = mapper._identity_key_from_state(state)
- if (_none_set.issubset(identity_key) and \
+ if (_none_set.issubset(identity_key) and
not mapper.allow_partial_pks) or \
_none_set.issuperset(identity_key):
util.warn("Instance %s to be refreshed doesn't "
- "contain a full primary key - can't be refreshed "
- "(and shouldn't be expired, either)."
- % state_str(state))
+ "contain a full primary key - can't be refreshed "
+ "(and shouldn't be expired, either)."
+ % state_str(state))
return
result = load_on_ident(
- session.query(mapper),
- identity_key,
- refresh_state=state,
- only_load_props=attribute_names)
+ session.query(mapper),
+ identity_key,
+ refresh_state=state,
+ only_load_props=attribute_names)
# if instance is pending, a refresh operation
# may not complete (even if PK attributes are assigned)
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 6c1b149bb..7e5166393 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -29,7 +29,7 @@ from . import util as orm_util
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
from .base import _class_to_mapper, _state_mapper, class_mapper, \
- state_str, _INSTRUMENTOR
+ state_str, _INSTRUMENTOR
from .path_registry import PathRegistry
import sys
@@ -214,13 +214,14 @@ class Mapper(_InspectionAttr):
:param confirm_deleted_rows: defaults to True; when a DELETE occurs
of one more rows based on specific primary keys, a warning is
emitted when the number of rows matched does not equal the number
- of rows expected. This parameter may be set to False to handle the case
- where database ON DELETE CASCADE rules may be deleting some of those
- rows automatically. The warning may be changed to an exception
- in a future release.
+ of rows expected. This parameter may be set to False to handle the
+ case where database ON DELETE CASCADE rules may be deleting some of
+ those rows automatically. The warning may be changed to an
+ exception in a future release.
- .. versionadded:: 0.9.4 - added :paramref:`.mapper.confirm_deleted_rows`
- as well as conditional matched row checking on delete.
+ .. versionadded:: 0.9.4 - added
+ :paramref:`.mapper.confirm_deleted_rows` as well as conditional
+ matched row checking on delete.
:param eager_defaults: if True, the ORM will immediately fetch the
value of server-generated default values after an INSERT or UPDATE,
@@ -230,8 +231,8 @@ class Mapper(_InspectionAttr):
this scheme will emit an individual ``SELECT`` statement per row
inserted or updated, which note can add significant performance
overhead. However, if the
- target database supports :term:`RETURNING`, the default values will be
- returned inline with the INSERT or UPDATE statement, which can
+ target database supports :term:`RETURNING`, the default values will
+ be returned inline with the INSERT or UPDATE statement, which can
greatly enhance performance for an application that needs frequent
access to just-generated server defaults.
@@ -269,10 +270,10 @@ class Mapper(_InspectionAttr):
define how the two tables are joined; defaults to a natural join
between the two tables.
- :param inherit_foreign_keys: When ``inherit_condition`` is used and the
- columns present are missing a :class:`.ForeignKey` configuration,
- this parameter can be used to specify which columns are "foreign".
- In most cases can be left as ``None``.
+ :param inherit_foreign_keys: When ``inherit_condition`` is used and
+ the columns present are missing a :class:`.ForeignKey`
+ configuration, this parameter can be used to specify which columns
+ are "foreign". In most cases can be left as ``None``.
:param legacy_is_orphan: Boolean, defaults to ``False``.
When ``True``, specifies that "legacy" orphan consideration
@@ -280,12 +281,12 @@ class Mapper(_InspectionAttr):
that a pending (that is, not persistent) object is auto-expunged
from an owning :class:`.Session` only when it is de-associated
from *all* parents that specify a ``delete-orphan`` cascade towards
- this mapper. The new default behavior is that the object is auto-expunged
- when it is de-associated with *any* of its parents that specify
- ``delete-orphan`` cascade. This behavior is more consistent with
- that of a persistent object, and allows behavior to be consistent
- in more scenarios independently of whether or not an orphanable
- object has been flushed yet or not.
+ this mapper. The new default behavior is that the object is
+ auto-expunged when it is de-associated with *any* of its parents
+ that specify ``delete-orphan`` cascade. This behavior is more
+ consistent with that of a persistent object, and allows behavior to
+ be consistent in more scenarios independently of whether or not an
+ orphanable object has been flushed yet or not.
See the change note and example at :ref:`legacy_is_orphan_addition`
for more detail on this change.
@@ -296,9 +297,9 @@ class Mapper(_InspectionAttr):
is expunged from the :class:`.Session` as soon as it is
de-associated from any of its orphan-enabled parents. Previously,
the pending object would be expunged only if de-associated
- from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
- is added to :func:`.orm.mapper` which re-establishes the
- legacy behavior.
+ from all of its orphan-enabled parents. The new flag
+ ``legacy_is_orphan`` is added to :func:`.orm.mapper` which
+ re-establishes the legacy behavior.
:param non_primary: Specify that this :class:`.Mapper` is in addition
to the "primary" mapper, that is, the one used for persistence.
@@ -447,8 +448,8 @@ class Mapper(_InspectionAttr):
based on all those :class:`.MapperProperty` instances declared
in the declared class body.
- :param primary_key: A list of :class:`.Column` objects which define the
- primary key to be used against this mapper's selectable unit.
+ :param primary_key: A list of :class:`.Column` objects which define
+ the primary key to be used against this mapper's selectable unit.
This is normally simply the primary key of the ``local_table``, but
can be overridden here.
@@ -478,13 +479,13 @@ class Mapper(_InspectionAttr):
return next_version
Alternatively, server-side versioning functions such as triggers,
- or programmatic versioning schemes outside of the version id generator
- may be used, by specifying the value ``False``.
+ or programmatic versioning schemes outside of the version id
+ generator may be used, by specifying the value ``False``.
Please see :ref:`server_side_version_counter` for a discussion
of important points when using this option.
- .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
- version number generation.
+ .. versionadded:: 0.9.0 ``version_id_generator`` supports
+ server-side version number generation.
.. seealso::
@@ -505,7 +506,8 @@ class Mapper(_InspectionAttr):
.. seealso::
- :ref:`with_polymorphic` - discussion of polymorphic querying techniques.
+ :ref:`with_polymorphic` - discussion of polymorphic querying
+ techniques.
"""
@@ -547,7 +549,7 @@ class Mapper(_InspectionAttr):
self.eager_defaults = eager_defaults
self.column_prefix = column_prefix
self.polymorphic_on = expression._clause_element_as_expr(
- polymorphic_on)
+ polymorphic_on)
self._dependency_processors = []
self.validators = util.immutabledict()
self.passive_updates = passive_updates
@@ -574,13 +576,13 @@ class Mapper(_InspectionAttr):
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
- )
+ )
if self.with_polymorphic and \
- isinstance(self.with_polymorphic[1],
- expression.SelectBase):
+ isinstance(self.with_polymorphic[1],
+ expression.SelectBase):
self.with_polymorphic = (self.with_polymorphic[0],
- self.with_polymorphic[1].alias())
+ self.with_polymorphic[1].alias())
# our 'polymorphic identity', a string name that when located in a
# result set row indicates this Mapper should be used to construct
@@ -892,20 +894,20 @@ class Mapper(_InspectionAttr):
self.inherits = class_mapper(self.inherits, configure=False)
if not issubclass(self.class_, self.inherits.class_):
raise sa_exc.ArgumentError(
- "Class '%s' does not inherit from '%s'" %
- (self.class_.__name__, self.inherits.class_.__name__))
+ "Class '%s' does not inherit from '%s'" %
+ (self.class_.__name__, self.inherits.class_.__name__))
if self.non_primary != self.inherits.non_primary:
np = not self.non_primary and "primary" or "non-primary"
raise sa_exc.ArgumentError(
- "Inheritance of %s mapper for class '%s' is "
- "only allowed from a %s mapper" %
- (np, self.class_.__name__, np))
+ "Inheritance of %s mapper for class '%s' is "
+ "only allowed from a %s mapper" %
+ (np, self.class_.__name__, np))
# inherit_condition is optional.
if self.local_table is None:
self.local_table = self.inherits.local_table
self.mapped_table = self.inherits.mapped_table
self.single = True
- elif not self.local_table is self.inherits.local_table:
+ elif self.local_table is not self.inherits.local_table:
if self.concrete:
self.mapped_table = self.local_table
for mapper in self.iterate_to_root():
@@ -918,17 +920,18 @@ class Mapper(_InspectionAttr):
# full table which could pull in other stuff we don't
# want (allows test/inheritance.InheritTest4 to pass)
self.inherit_condition = sql_util.join_condition(
- self.inherits.local_table,
- self.local_table)
+ self.inherits.local_table,
+ self.local_table)
self.mapped_table = sql.join(
- self.inherits.mapped_table,
- self.local_table,
- self.inherit_condition)
+ self.inherits.mapped_table,
+ self.local_table,
+ self.inherit_condition)
fks = util.to_set(self.inherit_foreign_keys)
- self._inherits_equated_pairs = sql_util.criterion_as_pairs(
- self.mapped_table.onclause,
- consider_as_foreign_keys=fks)
+ self._inherits_equated_pairs = \
+ sql_util.criterion_as_pairs(
+ self.mapped_table.onclause,
+ consider_as_foreign_keys=fks)
else:
self.mapped_table = self.local_table
@@ -941,7 +944,7 @@ class Mapper(_InspectionAttr):
self.version_id_col = self.inherits.version_id_col
self.version_id_generator = self.inherits.version_id_generator
elif self.inherits.version_id_col is not None and \
- self.version_id_col is not self.inherits.version_id_col:
+ self.version_id_col is not self.inherits.version_id_col:
util.warn(
"Inheriting version_id_col '%s' does not match inherited "
"version_id_col '%s' and will not automatically populate "
@@ -949,12 +952,12 @@ class Mapper(_InspectionAttr):
"version_id_col should only be specified on "
"the base-most mapper that includes versioning." %
(self.version_id_col.description,
- self.inherits.version_id_col.description)
+ self.inherits.version_id_col.description)
)
if self.order_by is False and \
- not self.concrete and \
- self.inherits.order_by is not False:
+ not self.concrete and \
+ self.inherits.order_by is not False:
self.order_by = self.inherits.order_by
self.polymorphic_map = self.inherits.polymorphic_map
@@ -977,14 +980,15 @@ class Mapper(_InspectionAttr):
if self.mapped_table is None:
raise sa_exc.ArgumentError(
- "Mapper '%s' does not have a mapped_table specified."
- % self)
+ "Mapper '%s' does not have a mapped_table specified."
+ % self)
def _set_with_polymorphic(self, with_polymorphic):
if with_polymorphic == '*':
self.with_polymorphic = ('*', None)
elif isinstance(with_polymorphic, (tuple, list)):
- if isinstance(with_polymorphic[0], util.string_types + (tuple, list)):
+ if isinstance(
+ with_polymorphic[0], util.string_types + (tuple, list)):
self.with_polymorphic = with_polymorphic
else:
self.with_polymorphic = (with_polymorphic, None)
@@ -999,13 +1003,13 @@ class Mapper(_InspectionAttr):
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
- )
+ )
if self.with_polymorphic and \
- isinstance(self.with_polymorphic[1],
- expression.SelectBase):
+ isinstance(self.with_polymorphic[1],
+ expression.SelectBase):
self.with_polymorphic = (self.with_polymorphic[0],
- self.with_polymorphic[1].alias())
+ self.with_polymorphic[1].alias())
if self.configured:
self._expire_memoizations()
@@ -1032,7 +1036,7 @@ class Mapper(_InspectionAttr):
for key, prop in mapper._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
- column=None):
+ column=None):
self._adapt_inherited_property(key, prop, False)
def _set_polymorphic_on(self, polymorphic_on):
@@ -1044,7 +1048,7 @@ class Mapper(_InspectionAttr):
if self.inherits:
self.dispatch._update(self.inherits.dispatch)
super_extensions = set(
- chain(*[m._deprecated_extensions
+ chain(*[m._deprecated_extensions
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
@@ -1056,7 +1060,7 @@ class Mapper(_InspectionAttr):
def _configure_listeners(self):
if self.inherits:
super_extensions = set(
- chain(*[m._deprecated_extensions
+ chain(*[m._deprecated_extensions
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
@@ -1098,7 +1102,7 @@ class Mapper(_InspectionAttr):
"create a non primary Mapper. clear_mappers() will "
"remove *all* current mappers from all classes." %
self.class_)
- #else:
+ # else:
# a ClassManager may already exist as
# ClassManager.instrument_attribute() creates
# new managers for each subclass if they don't yet exist.
@@ -1114,7 +1118,7 @@ class Mapper(_InspectionAttr):
manager.mapper = self
manager.deferred_scalar_loader = util.partial(
- loading.load_scalar_attributes, self)
+ loading.load_scalar_attributes, self)
# The remaining members can be added by any mapper,
# e_name None or not.
@@ -1139,7 +1143,6 @@ class Mapper(_InspectionAttr):
manager.info[_INSTRUMENTOR] = self
-
@classmethod
def _configure_all(cls):
"""Class-level path to the :func:`.configure_mappers` call.
@@ -1167,8 +1170,8 @@ class Mapper(_InspectionAttr):
self._cols_by_table = {}
all_cols = util.column_set(chain(*[
- col.proxy_set for col in
- self._columntoproperty]))
+ col.proxy_set for col in
+ self._columntoproperty]))
pk_cols = util.column_set(c for c in all_cols if c.primary_key)
@@ -1180,11 +1183,11 @@ class Mapper(_InspectionAttr):
# ordering is important since it determines the ordering of
# mapper.primary_key (and therefore query.get())
self._pks_by_table[t] = \
- util.ordered_column_set(t.primary_key).\
- intersection(pk_cols)
+ util.ordered_column_set(t.primary_key).\
+ intersection(pk_cols)
self._cols_by_table[t] = \
- util.ordered_column_set(t.c).\
- intersection(all_cols)
+ util.ordered_column_set(t.c).\
+ intersection(all_cols)
# determine cols that aren't expressed within our tables; mark these
# as "read only" properties which are refreshed upon INSERT/UPDATE
@@ -1204,17 +1207,17 @@ class Mapper(_InspectionAttr):
# otherwise, see that we got a full PK for the mapped table
elif self.mapped_table not in self._pks_by_table or \
- len(self._pks_by_table[self.mapped_table]) == 0:
- raise sa_exc.ArgumentError(
- "Mapper %s could not assemble any primary "
- "key columns for mapped table '%s'" %
- (self, self.mapped_table.description))
+ len(self._pks_by_table[self.mapped_table]) == 0:
+ raise sa_exc.ArgumentError(
+ "Mapper %s could not assemble any primary "
+ "key columns for mapped table '%s'" %
+ (self, self.mapped_table.description))
elif self.local_table not in self._pks_by_table and \
- isinstance(self.local_table, schema.Table):
+ isinstance(self.local_table, schema.Table):
util.warn("Could not assemble any primary "
- "keys for locally mapped table '%s' - "
- "no rows will be persisted in this Table."
- % self.local_table.description)
+ "keys for locally mapped table '%s' - "
+ "no rows will be persisted in this Table."
+ % self.local_table.description)
if self.inherits and \
not self.concrete and \
@@ -1228,12 +1231,12 @@ class Mapper(_InspectionAttr):
if self._primary_key_argument:
primary_key = sql_util.reduce_columns(
[self.mapped_table.corresponding_column(c) for c in
- self._primary_key_argument],
+ self._primary_key_argument],
ignore_nonexistent_tables=True)
else:
primary_key = sql_util.reduce_columns(
- self._pks_by_table[self.mapped_table],
- ignore_nonexistent_tables=True)
+ self._pks_by_table[self.mapped_table],
+ ignore_nonexistent_tables=True)
if len(primary_key) == 0:
raise sa_exc.ArgumentError(
@@ -1267,7 +1270,7 @@ class Mapper(_InspectionAttr):
for key, prop in self.inherits._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
- column=None):
+ column=None):
self._adapt_inherited_property(key, prop, False)
# create properties for each column in the mapped table,
@@ -1279,10 +1282,10 @@ class Mapper(_InspectionAttr):
column_key = (self.column_prefix or '') + column.key
if self._should_exclude(
- column.key, column_key,
- local=self.local_table.c.contains_column(column),
- column=column
- ):
+ column.key, column_key,
+ local=self.local_table.c.contains_column(column),
+ column=column
+ ):
continue
# adjust the "key" used for this column to that
@@ -1292,9 +1295,9 @@ class Mapper(_InspectionAttr):
column_key = mapper._columntoproperty[column].key
self._configure_property(column_key,
- column,
- init=False,
- setparent=True)
+ column,
+ init=False,
+ setparent=True)
def _configure_polymorphic_setter(self, init=False):
"""Configure an attribute on the mapper representing the
@@ -1319,9 +1322,9 @@ class Mapper(_InspectionAttr):
self.polymorphic_on = self._props[self.polymorphic_on]
except KeyError:
raise sa_exc.ArgumentError(
- "Can't determine polymorphic_on "
- "value '%s' - no attribute is "
- "mapped to this name." % self.polymorphic_on)
+ "Can't determine polymorphic_on "
+ "value '%s' - no attribute is "
+ "mapped to this name." % self.polymorphic_on)
if self.polymorphic_on in self._columntoproperty:
# polymorphic_on is a column that is already mapped
@@ -1334,11 +1337,11 @@ class Mapper(_InspectionAttr):
# polymorphic_on is directly a MapperProperty,
# ensure it's a ColumnProperty
if not isinstance(self.polymorphic_on,
- properties.ColumnProperty):
+ properties.ColumnProperty):
raise sa_exc.ArgumentError(
- "Only direct column-mapped "
- "property or SQL expression "
- "can be passed for polymorphic_on")
+ "Only direct column-mapped "
+ "property or SQL expression "
+ "can be passed for polymorphic_on")
prop = self.polymorphic_on
self.polymorphic_on = prop.columns[0]
polymorphic_key = prop.key
@@ -1357,7 +1360,7 @@ class Mapper(_InspectionAttr):
# 2. a totally standalone SQL expression which we'd
# hope is compatible with this mapper's mapped_table
col = self.mapped_table.corresponding_column(
- self.polymorphic_on)
+ self.polymorphic_on)
if col is None:
# polymorphic_on doesn't derive from any
# column/expression isn't present in the mapped
@@ -1373,15 +1376,14 @@ class Mapper(_InspectionAttr):
instrument = False
col = self.polymorphic_on
if isinstance(col, schema.Column) and (
- self.with_polymorphic is None or \
- self.with_polymorphic[1].\
- corresponding_column(col) is None
- ):
+ self.with_polymorphic is None or
+ self.with_polymorphic[1].
+ corresponding_column(col) is None):
raise sa_exc.InvalidRequestError(
"Could not map polymorphic_on column "
"'%s' to the mapped table - polymorphic "
"loads will not function properly"
- % col.description)
+ % col.description)
else:
# column/expression that polymorphic_on derives from
# is present in our mapped table
@@ -1396,19 +1398,19 @@ class Mapper(_InspectionAttr):
if key:
if self._should_exclude(col.key, col.key, False, col):
raise sa_exc.InvalidRequestError(
- "Cannot exclude or override the "
- "discriminator column %r" %
- col.key)
+ "Cannot exclude or override the "
+ "discriminator column %r" %
+ col.key)
else:
self.polymorphic_on = col = \
- col.label("_sa_polymorphic_on")
+ col.label("_sa_polymorphic_on")
key = col.key
self._configure_property(
- key,
- properties.ColumnProperty(col,
- _instrument=instrument),
- init=init, setparent=True)
+ key,
+ properties.ColumnProperty(col,
+ _instrument=instrument),
+ init=init, setparent=True)
polymorphic_key = key
else:
# no polymorphic_on was set.
@@ -1424,8 +1426,8 @@ class Mapper(_InspectionAttr):
self.polymorphic_on = mapper.polymorphic_on
else:
self.polymorphic_on = \
- self.mapped_table.corresponding_column(
- mapper.polymorphic_on)
+ self.mapped_table.corresponding_column(
+ mapper.polymorphic_on)
# we can use the parent mapper's _set_polymorphic_identity
# directly; it ensures the polymorphic_identity of the
# instance's mapper is used so is portable to subclasses.
@@ -1441,28 +1443,30 @@ class Mapper(_InspectionAttr):
if setter:
def _set_polymorphic_identity(state):
dict_ = state.dict
- state.get_impl(polymorphic_key).set(state, dict_,
- state.manager.mapper.polymorphic_identity, None)
+ state.get_impl(polymorphic_key).set(
+ state, dict_,
+ state.manager.mapper.polymorphic_identity,
+ None)
def _validate_polymorphic_identity(mapper, state, dict_):
if polymorphic_key in dict_ and \
- dict_[polymorphic_key] not in \
- mapper._acceptable_polymorphic_identities:
+ dict_[polymorphic_key] not in \
+ mapper._acceptable_polymorphic_identities:
util.warn(
- "Flushing object %s with "
- "incompatible polymorphic identity %r; the "
- "object may not refresh and/or load correctly" % (
- state_str(state),
- dict_[polymorphic_key]
- )
- )
+ "Flushing object %s with "
+ "incompatible polymorphic identity %r; the "
+ "object may not refresh and/or load correctly" % (
+ state_str(state),
+ dict_[polymorphic_key]
+ )
+ )
self._set_polymorphic_identity = _set_polymorphic_identity
- self._validate_polymorphic_identity = _validate_polymorphic_identity
+ self._validate_polymorphic_identity = \
+ _validate_polymorphic_identity
else:
self._set_polymorphic_identity = None
-
_validate_polymorphic_identity = None
@_memoized_configured_property
@@ -1490,9 +1494,9 @@ class Mapper(_InspectionAttr):
self._configure_property(key, prop, init=False, setparent=False)
elif key not in self._props:
self._configure_property(
- key,
- properties.ConcreteInheritedProperty(),
- init=init, setparent=True)
+ key,
+ properties.ConcreteInheritedProperty(),
+ init=init, setparent=True)
def _configure_property(self, key, prop, init=True, setparent=True):
self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
@@ -1514,7 +1518,7 @@ class Mapper(_InspectionAttr):
for m2 in path:
m2.mapped_table._reset_exported()
col = self.mapped_table.corresponding_column(
- prop.columns[0])
+ prop.columns[0])
break
path.append(m)
@@ -1528,14 +1532,14 @@ class Mapper(_InspectionAttr):
if hasattr(self, '_readonly_props') and \
(not hasattr(col, 'table') or
col.table not in self._cols_by_table):
- self._readonly_props.add(prop)
+ self._readonly_props.add(prop)
else:
# if column is coming in after _cols_by_table was
# initialized, ensure the col is in the right set
if hasattr(self, '_cols_by_table') and \
- col.table in self._cols_by_table and \
- col not in self._cols_by_table[col.table]:
+ col.table in self._cols_by_table and \
+ col not in self._cols_by_table[col.table]:
self._cols_by_table[col.table].add(col)
# if this properties.ColumnProperty represents the "polymorphic
@@ -1543,8 +1547,8 @@ class Mapper(_InspectionAttr):
# columns in SELECT statements.
if not hasattr(prop, '_is_polymorphic_discriminator'):
prop._is_polymorphic_discriminator = \
- (col is self.polymorphic_on or
- prop.columns[0] is self.polymorphic_on)
+ (col is self.polymorphic_on or
+ prop.columns[0] is self.polymorphic_on)
self.columns[key] = col
for col in prop.columns + prop._orig_columns:
@@ -1560,20 +1564,20 @@ class Mapper(_InspectionAttr):
getattr(self._props[key], '_mapped_by_synonym', False):
syn = self._props[key]._mapped_by_synonym
raise sa_exc.ArgumentError(
- "Can't call map_column=True for synonym %r=%r, "
- "a ColumnProperty already exists keyed to the name "
- "%r for column %r" % (syn, key, key, syn)
- )
+ "Can't call map_column=True for synonym %r=%r, "
+ "a ColumnProperty already exists keyed to the name "
+ "%r for column %r" % (syn, key, key, syn)
+ )
if key in self._props and \
not isinstance(prop, properties.ColumnProperty) and \
not isinstance(self._props[key], properties.ColumnProperty):
util.warn("Property %s on %s being replaced with new "
- "property %s; the old property will be discarded" % (
- self._props[key],
- self,
- prop,
- ))
+ "property %s; the old property will be discarded" % (
+ self._props[key],
+ self,
+ prop,
+ ))
self._props[key] = prop
@@ -1600,8 +1604,8 @@ class Mapper(_InspectionAttr):
column = columns[0]
if not expression._is_column(column):
raise sa_exc.ArgumentError(
- "%s=%r is not an instance of MapperProperty or Column"
- % (key, prop))
+ "%s=%r is not an instance of MapperProperty or Column"
+ % (key, prop))
prop = self._props.get(key, None)
@@ -1609,15 +1613,15 @@ class Mapper(_InspectionAttr):
if (
not self._inherits_equated_pairs or
(prop.columns[0], column) not in self._inherits_equated_pairs
- ) and \
+ ) and \
not prop.columns[0].shares_lineage(column) and \
prop.columns[0] is not self.version_id_col and \
column is not self.version_id_col:
warn_only = prop.parent is not self
msg = ("Implicitly combining column %s with column "
- "%s under attribute '%s'. Please configure one "
- "or more attributes for these same-named columns "
- "explicitly." % (prop.columns[-1], column, key))
+ "%s under attribute '%s'. Please configure one "
+ "or more attributes for these same-named columns "
+ "explicitly." % (prop.columns[-1], column, key))
if warn_only:
util.warn(msg)
else:
@@ -1628,10 +1632,10 @@ class Mapper(_InspectionAttr):
prop = prop.copy()
prop.columns.insert(0, column)
self._log("inserting column to existing list "
- "in properties.ColumnProperty %s" % (key))
+ "in properties.ColumnProperty %s" % (key))
return prop
elif prop is None or isinstance(prop,
- properties.ConcreteInheritedProperty):
+ properties.ConcreteInheritedProperty):
mapped_column = []
for c in columns:
mc = self.mapped_table.corresponding_column(c)
@@ -1646,11 +1650,11 @@ class Mapper(_InspectionAttr):
mc = self.mapped_table.corresponding_column(c)
if mc is None:
raise sa_exc.ArgumentError(
- "When configuring property '%s' on %s, "
- "column '%s' is not represented in the mapper's "
- "table. Use the `column_property()` function to "
- "force this column to be mapped as a read-only "
- "attribute." % (key, self, c))
+ "When configuring property '%s' on %s, "
+ "column '%s' is not represented in the mapper's "
+ "table. Use the `column_property()` function to "
+ "force this column to be mapped as a read-only "
+ "attribute." % (key, self, c))
mapped_column.append(mc)
return properties.ColumnProperty(*mapped_column)
else:
@@ -1721,7 +1725,7 @@ class Mapper(_InspectionAttr):
self.local_table.description or
str(self.local_table)) +\
(self.non_primary and
- "|non-primary" or "") + ")"
+ "|non-primary" or "") + ")"
def _log(self, msg, *args):
self.logger.info(
@@ -1752,7 +1756,7 @@ class Mapper(_InspectionAttr):
orphan_possible = True
has_parent = attributes.manager_of_class(cls).has_parent(
- state, key, optimistic=state.has_identity)
+ state, key, optimistic=state.has_identity)
if self.legacy_is_orphan and has_parent:
return False
@@ -1778,7 +1782,7 @@ class Mapper(_InspectionAttr):
return self._props[key]
except KeyError:
raise sa_exc.InvalidRequestError(
- "Mapper '%s' has no property '%s'" % (self, key))
+ "Mapper '%s' has no property '%s'" % (self, key))
def get_property_by_column(self, column):
"""Given a :class:`.Column` object, return the
@@ -1809,8 +1813,8 @@ class Mapper(_InspectionAttr):
m = _class_to_mapper(m)
if not m.isa(self):
raise sa_exc.InvalidRequestError(
- "%r does not inherit from %r" %
- (m, self))
+ "%r does not inherit from %r" %
+ (m, self))
if selectable is None:
mappers.update(m.iterate_to_root())
@@ -1822,7 +1826,7 @@ class Mapper(_InspectionAttr):
if selectable is not None:
tables = set(sql_util.find_tables(selectable,
- include_aliases=True))
+ include_aliases=True))
mappers = [m for m in mappers if m.local_table in tables]
return mappers
@@ -1838,23 +1842,23 @@ class Mapper(_InspectionAttr):
continue
if m.concrete:
raise sa_exc.InvalidRequestError(
- "'with_polymorphic()' requires 'selectable' argument "
- "when concrete-inheriting mappers are used.")
+ "'with_polymorphic()' requires 'selectable' argument "
+ "when concrete-inheriting mappers are used.")
elif not m.single:
if innerjoin:
from_obj = from_obj.join(m.local_table,
- m.inherit_condition)
+ m.inherit_condition)
else:
from_obj = from_obj.outerjoin(m.local_table,
- m.inherit_condition)
+ m.inherit_condition)
return from_obj
@_memoized_configured_property
def _single_table_criterion(self):
if self.single and \
- self.inherits and \
- self.polymorphic_on is not None:
+ self.inherits and \
+ self.polymorphic_on is not None:
return self.polymorphic_on.in_(
m.polymorphic_identity
for m in self.self_and_descendants)
@@ -1879,8 +1883,8 @@ class Mapper(_InspectionAttr):
return selectable
else:
return self._selectable_from_mappers(
- self._mappers_from_spec(spec, selectable),
- False)
+ self._mappers_from_spec(spec, selectable),
+ False)
with_polymorphic_mappers = _with_polymorphic_mappers
"""The list of :class:`.Mapper` objects included in the
@@ -1901,7 +1905,7 @@ class Mapper(_InspectionAttr):
return self._with_polymorphic_selectable
def _with_polymorphic_args(self, spec=None, selectable=False,
- innerjoin=False):
+ innerjoin=False):
if self.with_polymorphic:
if not spec:
spec = self.with_polymorphic[0]
@@ -1914,14 +1918,13 @@ class Mapper(_InspectionAttr):
return mappers, selectable
else:
return mappers, self._selectable_from_mappers(mappers,
- innerjoin)
+ innerjoin)
@_memoized_configured_property
def _polymorphic_properties(self):
return list(self._iterate_polymorphic_properties(
self._with_polymorphic_mappers))
-
def _iterate_polymorphic_properties(self, mappers=None):
"""Return an iterator of MapperProperty objects which will render into
a SELECT."""
@@ -1937,14 +1940,14 @@ class Mapper(_InspectionAttr):
# mapper's polymorphic selectable (which we don't want rendered)
for c in util.unique_list(
chain(*[
- list(mapper.iterate_properties) for mapper in
- [self] + mappers
- ])
+ list(mapper.iterate_properties) for mapper in
+ [self] + mappers
+ ])
):
if getattr(c, '_is_polymorphic_discriminator', False) and \
(self.polymorphic_on is None or
- c.columns[0] is not self.polymorphic_on):
- continue
+ c.columns[0] is not self.polymorphic_on):
+ continue
yield c
@util.memoized_property
@@ -1979,15 +1982,16 @@ class Mapper(_InspectionAttr):
"""A namespace of all :class:`._InspectionAttr` attributes associated
with the mapped class.
- These attributes are in all cases Python :term:`descriptors` associated
- with the mapped class or its superclasses.
+ These attributes are in all cases Python :term:`descriptors`
+ associated with the mapped class or its superclasses.
This namespace includes attributes that are mapped to the class
as well as attributes declared by extension modules.
It includes any Python descriptor type that inherits from
- :class:`._InspectionAttr`. This includes :class:`.QueryableAttribute`,
- as well as extension types such as :class:`.hybrid_property`,
- :class:`.hybrid_method` and :class:`.AssociationProxy`.
+ :class:`._InspectionAttr`. This includes
+ :class:`.QueryableAttribute`, as well as extension types such as
+ :class:`.hybrid_property`, :class:`.hybrid_method` and
+ :class:`.AssociationProxy`.
To distinguish between mapped attributes and extension attributes,
the attribute :attr:`._InspectionAttr.extension_type` will refer
@@ -1995,8 +1999,9 @@ class Mapper(_InspectionAttr):
When dealing with a :class:`.QueryableAttribute`, the
:attr:`.QueryableAttribute.property` attribute refers to the
- :class:`.MapperProperty` property, which is what you get when referring
- to the collection of mapped properties via :attr:`.Mapper.attrs`.
+ :class:`.MapperProperty` property, which is what you get when
+ referring to the collection of mapped properties via
+ :attr:`.Mapper.attrs`.
.. versionadded:: 0.8.0
@@ -2006,7 +2011,7 @@ class Mapper(_InspectionAttr):
"""
return util.ImmutableProperties(
- dict(self.class_manager._all_sqla_attributes()))
+ dict(self.class_manager._all_sqla_attributes()))
@_memoized_configured_property
def synonyms(self):
@@ -2078,7 +2083,7 @@ class Mapper(_InspectionAttr):
params = [(primary_key, sql.bindparam(None, type_=primary_key.type))
for primary_key in self.primary_key]
return sql.and_(*[k == v for (k, v) in params]), \
- util.column_dict(params)
+ util.column_dict(params)
@_memoized_configured_property
def _equivalent_columns(self):
@@ -2116,8 +2121,8 @@ class Mapper(_InspectionAttr):
for mapper in self.base_mapper.self_and_descendants:
if mapper.inherit_condition is not None:
visitors.traverse(
- mapper.inherit_condition, {},
- {'binary': visit_binary})
+ mapper.inherit_condition, {},
+ {'binary': visit_binary})
return result
@@ -2142,13 +2147,13 @@ class Mapper(_InspectionAttr):
# either local or from an inherited class
if local:
if self.class_.__dict__.get(assigned_name, None) is not None \
- and self._is_userland_descriptor(
- self.class_.__dict__[assigned_name]):
+ and self._is_userland_descriptor(
+ self.class_.__dict__[assigned_name]):
return True
else:
if getattr(self.class_, assigned_name, None) is not None \
- and self._is_userland_descriptor(
- getattr(self.class_, assigned_name)):
+ and self._is_userland_descriptor(
+ getattr(self.class_, assigned_name)):
return True
if self.include_properties is not None and \
@@ -2158,10 +2163,10 @@ class Mapper(_InspectionAttr):
return True
if self.exclude_properties is not None and \
- (
- name in self.exclude_properties or \
- (column is not None and column in self.exclude_properties)
- ):
+ (
+ name in self.exclude_properties or
+ (column is not None and column in self.exclude_properties)
+ ):
self._log("excluding property %s" % (name))
return True
@@ -2237,11 +2242,11 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
- :param row: A :class:`.RowProxy` instance. The columns which are mapped
- by this :class:`.Mapper` should be locatable in the row, preferably
- via the :class:`.Column` object directly (as is the case when a
- :func:`.select` construct is executed), or via string names of the form
- ``<tablename>_<colname>``.
+ :param row: A :class:`.RowProxy` instance. The columns which are
+ mapped by this :class:`.Mapper` should be locatable in the row,
+ preferably via the :class:`.Column` object directly (as is the case
+ when a :func:`.select` construct is executed), or via string names of
+ the form ``<tablename>_<colname>``.
"""
pk_cols = self.primary_key
@@ -2249,7 +2254,7 @@ class Mapper(_InspectionAttr):
pk_cols = [adapter.columns[c] for c in pk_cols]
return self._identity_class, \
- tuple(row[column] for column in pk_cols)
+ tuple(row[column] for column in pk_cols)
def identity_key_from_primary_key(self, primary_key):
"""Return an identity-map key for use in storing/retrieving an
@@ -2274,14 +2279,14 @@ class Mapper(_InspectionAttr):
"""
return self.identity_key_from_primary_key(
- self.primary_key_from_instance(instance))
+ self.primary_key_from_instance(instance))
def _identity_key_from_state(self, state):
dict_ = state.dict
manager = state.manager
return self._identity_class, tuple([
- manager[self._columntoproperty[col].key].\
- impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET)
+ manager[self._columntoproperty[col].key].
+ impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET)
for col in self.primary_key
])
@@ -2302,14 +2307,15 @@ class Mapper(_InspectionAttr):
dict_ = state.dict
manager = state.manager
return [
- manager[self._columntoproperty[col].key].\
- impl.get(state, dict_,
- attributes.PASSIVE_RETURN_NEVER_SET)
+ manager[self._columntoproperty[col].key].
+ impl.get(state, dict_,
+ attributes.PASSIVE_RETURN_NEVER_SET)
for col in self.primary_key
]
- def _get_state_attr_by_column(self, state, dict_, column,
- passive=attributes.PASSIVE_RETURN_NEVER_SET):
+ def _get_state_attr_by_column(
+ self, state, dict_, column,
+ passive=attributes.PASSIVE_RETURN_NEVER_SET):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.get(state, dict_, passive=passive)
@@ -2322,13 +2328,13 @@ class Mapper(_InspectionAttr):
dict_ = attributes.instance_dict(obj)
return self._get_committed_state_attr_by_column(state, dict_, column)
- def _get_committed_state_attr_by_column(self, state, dict_,
- column,
- passive=attributes.PASSIVE_RETURN_NEVER_SET):
+ def _get_committed_state_attr_by_column(
+ self, state, dict_, column,
+ passive=attributes.PASSIVE_RETURN_NEVER_SET):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.\
- get_committed_value(state, dict_, passive=passive)
+ get_committed_value(state, dict_, passive=passive)
def _optimized_get_statement(self, state, attribute_names):
"""assemble a WHERE clause which retrieves a given state by primary
@@ -2343,10 +2349,10 @@ class Mapper(_InspectionAttr):
props = self._props
tables = set(chain(
- *[sql_util.find_tables(c, check_columns=True)
- for key in attribute_names
- for c in props[key].columns]
- ))
+ *[sql_util.find_tables(c, check_columns=True)
+ for key in attribute_names
+ for c in props[key].columns]
+ ))
if self.base_mapper.local_table in tables:
return None
@@ -2362,22 +2368,22 @@ class Mapper(_InspectionAttr):
if leftcol.table not in tables:
leftval = self._get_committed_state_attr_by_column(
- state, state.dict,
- leftcol,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ state, state.dict,
+ leftcol,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
if leftval in orm_util._none_set:
raise ColumnsNotAvailable()
binary.left = sql.bindparam(None, leftval,
type_=binary.right.type)
elif rightcol.table not in tables:
rightval = self._get_committed_state_attr_by_column(
- state, state.dict,
- rightcol,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ state, state.dict,
+ rightcol,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
if rightval in orm_util._none_set:
raise ColumnsNotAvailable()
binary.right = sql.bindparam(None, rightval,
- type_=binary.right.type)
+ type_=binary.right.type)
allconds = []
@@ -2386,15 +2392,16 @@ class Mapper(_InspectionAttr):
for mapper in reversed(list(self.iterate_to_root())):
if mapper.local_table in tables:
start = True
- elif not isinstance(mapper.local_table, expression.TableClause):
+ elif not isinstance(mapper.local_table,
+ expression.TableClause):
return None
if start and not mapper.single:
allconds.append(visitors.cloned_traverse(
- mapper.inherit_condition,
- {},
- {'binary': visit_binary}
- )
- )
+ mapper.inherit_condition,
+ {},
+ {'binary': visit_binary}
+ )
+ )
except ColumnsNotAvailable:
return None
@@ -2425,7 +2432,7 @@ class Mapper(_InspectionAttr):
prp, mpp = object(), object()
visitables = deque([(deque(self._props.values()), prp,
- state, state.dict)])
+ state, state.dict)])
while visitables:
iterator, item_type, parent_state, parent_dict = visitables[-1]
@@ -2437,18 +2444,19 @@ class Mapper(_InspectionAttr):
prop = iterator.popleft()
if type_ not in prop.cascade:
continue
- queue = deque(prop.cascade_iterator(type_, parent_state,
- parent_dict, visited_states, halt_on))
+ queue = deque(prop.cascade_iterator(
+ type_, parent_state, parent_dict,
+ visited_states, halt_on))
if queue:
visitables.append((queue, mpp, None, None))
elif item_type is mpp:
instance, instance_mapper, corresponding_state, \
- corresponding_dict = iterator.popleft()
+ corresponding_dict = iterator.popleft()
yield instance, instance_mapper, \
- corresponding_state, corresponding_dict
+ corresponding_state, corresponding_dict
visitables.append((deque(instance_mapper._props.values()),
- prp, corresponding_state,
- corresponding_dict))
+ prp, corresponding_state,
+ corresponding_dict))
@_memoized_configured_property
def _compiled_cache(self):
@@ -2469,7 +2477,7 @@ class Mapper(_InspectionAttr):
extra_dependencies.extend([
(super_table, table)
for super_table in super_.tables
- ])
+ ])
def skip(fk):
# attempt to skip dependencies that are not
@@ -2486,15 +2494,15 @@ class Mapper(_InspectionAttr):
cols = set(sql_util._find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
cols = cols.union(sql_util._find_columns(
- parent.inherit_condition))
+ parent.inherit_condition))
return fk.parent not in cols and fk.column not in cols
else:
return fk.parent not in cols
return False
sorted_ = sql_util.sort_tables(table_to_mapper,
- skip_fn=skip,
- extra_dependencies=extra_dependencies)
+ skip_fn=skip,
+ extra_dependencies=extra_dependencies)
ret = util.OrderedDict()
for t in sorted_:
@@ -2519,10 +2527,11 @@ class Mapper(_InspectionAttr):
cols = set(table.c)
for m in self.iterate_to_root():
if m._inherits_equated_pairs and \
- cols.intersection(
- util.reduce(set.union,
- [l.proxy_set for l, r in m._inherits_equated_pairs])
- ):
+ cols.intersection(
+ util.reduce(set.union,
+ [l.proxy_set for l, r in
+ m._inherits_equated_pairs])
+ ):
result[table].append((m, m._inherits_equated_pairs))
return result
@@ -2561,10 +2570,10 @@ def configure_mappers():
for mapper in list(_mapper_registry):
if getattr(mapper, '_configure_failed', False):
e = sa_exc.InvalidRequestError(
- "One or more mappers failed to initialize - "
- "can't proceed with initialization of other "
- "mappers. Original exception was: %s"
- % mapper._configure_failed)
+ "One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: %s"
+ % mapper._configure_failed)
e._configure_failed = mapper._configure_failed
raise e
if not mapper.configured:
@@ -2572,7 +2581,7 @@ def configure_mappers():
mapper._post_configure_properties()
mapper._expire_memoizations()
mapper.dispatch.mapper_configured(
- mapper, mapper.class_)
+ mapper, mapper.class_)
except:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
@@ -2649,9 +2658,9 @@ def validates(*names, **kw):
def wrap(fn):
fn.__sa_validators__ = names
fn.__sa_validation_opts__ = {
- "include_removes": include_removes,
- "include_backrefs": include_backrefs
- }
+ "include_removes": include_removes,
+ "include_backrefs": include_backrefs
+ }
return fn
return wrap
@@ -2700,7 +2709,7 @@ def _event_on_resurrect(state):
if instrumenting_mapper:
for col, val in zip(instrumenting_mapper.primary_key, state.key[1]):
instrumenting_mapper._set_state_attr_by_column(
- state, state.dict, col, val)
+ state, state.dict, col, val)
class _ColumnMapping(dict):
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
index fb4f4b986..f10a125a8 100644
--- a/lib/sqlalchemy/orm/path_registry.py
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -14,6 +14,7 @@ from .. import exc
from itertools import chain
from .base import class_mapper
+
def _unreduce_path(path):
return PathRegistry.deserialize(path)
@@ -21,6 +22,7 @@ def _unreduce_path(path):
_WILDCARD_TOKEN = "*"
_DEFAULT_TOKEN = "_sa_default"
+
class PathRegistry(object):
"""Represent query load paths and registry functions.
@@ -81,7 +83,7 @@ class PathRegistry(object):
self.path[i] for i in range(0, len(self.path), 2)
]:
if path_mapper.is_mapper and \
- path_mapper.isa(mapper):
+ path_mapper.isa(mapper):
return True
else:
return False
@@ -105,9 +107,9 @@ class PathRegistry(object):
return None
p = tuple(chain(*[(class_mapper(mcls),
- class_mapper(mcls).attrs[key]
- if key is not None else None)
- for mcls, key in path]))
+ class_mapper(mcls).attrs[key]
+ if key is not None else None)
+ for mcls, key in path]))
if p and p[-1] is None:
p = p[0:-1]
return cls.coerce(p)
@@ -115,8 +117,8 @@ class PathRegistry(object):
@classmethod
def per_mapper(cls, mapper):
return EntityRegistry(
- cls.root, mapper
- )
+ cls.root, mapper
+ )
@classmethod
def coerce(cls, raw):
@@ -132,8 +134,8 @@ class PathRegistry(object):
def __add__(self, other):
return util.reduce(
- lambda prev, next: prev[next],
- other.path, self)
+ lambda prev, next: prev[next],
+ other.path, self)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.path, )
@@ -146,11 +148,13 @@ class RootRegistry(PathRegistry):
"""
path = ()
has_entity = False
+
def __getitem__(self, entity):
return entity._path_registry
PathRegistry.root = RootRegistry()
+
class TokenRegistry(PathRegistry):
def __init__(self, parent, token):
self.token = token
@@ -162,6 +166,7 @@ class TokenRegistry(PathRegistry):
def __getitem__(self, entity):
raise NotImplementedError()
+
class PropRegistry(PathRegistry):
def __init__(self, parent, prop):
# restate this path in terms of the
@@ -171,7 +176,7 @@ class PropRegistry(PathRegistry):
parent = parent.parent[prop.parent]
elif insp.is_aliased_class and insp.with_polymorphic_mappers:
if prop.parent is not insp.mapper and \
- prop.parent in insp.with_polymorphic_mappers:
+ prop.parent in insp.with_polymorphic_mappers:
subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
parent = parent.parent[subclass_entity]
@@ -196,16 +201,18 @@ class PropRegistry(PathRegistry):
"""
return ("loader",
self.parent.token(
- "%s:%s" % (self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
- ).path
+ "%s:%s" % (
+ self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
+ ).path
)
@util.memoized_property
def _default_path_loader_key(self):
return ("loader",
self.parent.token(
- "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN)
- ).path
+ "%s:%s" % (self.prop.strategy_wildcard_key,
+ _DEFAULT_TOKEN)
+ ).path
)
@util.memoized_property
@@ -228,6 +235,7 @@ class PropRegistry(PathRegistry):
self, entity
)
+
class EntityRegistry(PathRegistry, dict):
is_aliased_class = False
has_entity = True
@@ -257,6 +265,3 @@ class EntityRegistry(PathRegistry, dict):
def __missing__(self, key):
self[key] = item = PropRegistry(self, key)
return item
-
-
-
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 6669efc56..295d4a3d0 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -41,18 +41,18 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
return
states_to_insert, states_to_update = _organize_states_for_save(
- base_mapper,
- states,
- uowtransaction)
+ base_mapper,
+ states,
+ uowtransaction)
cached_connections = _cached_connection_dict(base_mapper)
for table, mapper in base_mapper._sorted_tables.items():
insert = _collect_insert_commands(base_mapper, uowtransaction,
- table, states_to_insert)
+ table, states_to_insert)
update = _collect_update_commands(base_mapper, uowtransaction,
- table, states_to_update)
+ table, states_to_update)
if update:
_emit_update_statements(base_mapper, uowtransaction,
@@ -65,7 +65,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
mapper, table, insert)
_finalize_insert_update_commands(base_mapper, uowtransaction,
- states_to_insert, states_to_update)
+ states_to_insert, states_to_update)
def post_update(base_mapper, states, uowtransaction, post_update_cols):
@@ -76,18 +76,18 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
cached_connections = _cached_connection_dict(base_mapper)
states_to_update = _organize_states_for_post_update(
- base_mapper,
- states, uowtransaction)
+ base_mapper,
+ states, uowtransaction)
for table, mapper in base_mapper._sorted_tables.items():
update = _collect_post_update_commands(base_mapper, uowtransaction,
- table, states_to_update,
- post_update_cols)
+ table, states_to_update,
+ post_update_cols)
if update:
_emit_post_update_statements(base_mapper, uowtransaction,
- cached_connections,
- mapper, table, update)
+ cached_connections,
+ mapper, table, update)
def delete_obj(base_mapper, states, uowtransaction):
@@ -101,23 +101,23 @@ def delete_obj(base_mapper, states, uowtransaction):
cached_connections = _cached_connection_dict(base_mapper)
states_to_delete = _organize_states_for_delete(
- base_mapper,
- states,
- uowtransaction)
+ base_mapper,
+ states,
+ uowtransaction)
table_to_mapper = base_mapper._sorted_tables
for table in reversed(list(table_to_mapper.keys())):
delete = _collect_delete_commands(base_mapper, uowtransaction,
- table, states_to_delete)
+ table, states_to_delete)
mapper = table_to_mapper[table]
_emit_delete_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, delete)
+ cached_connections, mapper, table, delete)
for state, state_dict, mapper, has_identity, connection \
- in states_to_delete:
+ in states_to_delete:
mapper.dispatch.after_delete(mapper, connection, state)
@@ -137,8 +137,8 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
states_to_update = []
for state, dict_, mapper, connection in _connections_for_states(
- base_mapper, uowtransaction,
- states):
+ base_mapper, uowtransaction,
+ states):
has_identity = bool(state.key)
instance_key = state.key or mapper._identity_key_from_state(state)
@@ -183,19 +183,19 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
if not has_identity and not row_switch:
states_to_insert.append(
(state, dict_, mapper, connection,
- has_identity, instance_key, row_switch)
+ has_identity, instance_key, row_switch)
)
else:
states_to_update.append(
(state, dict_, mapper, connection,
- has_identity, instance_key, row_switch)
+ has_identity, instance_key, row_switch)
)
return states_to_insert, states_to_update
def _organize_states_for_post_update(base_mapper, states,
- uowtransaction):
+ uowtransaction):
"""Make an initial pass across a set of states for UPDATE
corresponding to post_update.
@@ -205,7 +205,7 @@ def _organize_states_for_post_update(base_mapper, states,
"""
return list(_connections_for_states(base_mapper, uowtransaction,
- states))
+ states))
def _organize_states_for_delete(base_mapper, states, uowtransaction):
@@ -219,25 +219,25 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction):
states_to_delete = []
for state, dict_, mapper, connection in _connections_for_states(
- base_mapper, uowtransaction,
- states):
+ base_mapper, uowtransaction,
+ states):
mapper.dispatch.before_delete(mapper, connection, state)
states_to_delete.append((state, dict_, mapper,
- bool(state.key), connection))
+ bool(state.key), connection))
return states_to_delete
def _collect_insert_commands(base_mapper, uowtransaction, table,
- states_to_insert):
+ states_to_insert):
"""Identify sets of values to use in INSERT statements for a
list of states.
"""
insert = []
for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in states_to_insert:
+ instance_key, row_switch in states_to_insert:
if table not in mapper._pks_by_table:
continue
@@ -250,7 +250,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
has_all_defaults = True
for col in mapper._cols_by_table[table]:
if col is mapper.version_id_col and \
- mapper.version_id_generator is not False:
+ mapper.version_id_generator is not False:
val = mapper.version_id_generator(None)
params[col.key] = val
else:
@@ -263,10 +263,10 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
if col in pks:
has_all_pks = False
elif col.default is None and \
- col.server_default is None:
+ col.server_default is None:
params[col.key] = value
elif col.server_default is not None and \
- mapper.base_mapper.eager_defaults:
+ mapper.base_mapper.eager_defaults:
has_all_defaults = False
elif isinstance(value, sql.ClauseElement):
@@ -275,13 +275,13 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
params[col.key] = value
insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks,
- has_all_defaults))
+ connection, value_params, has_all_pks,
+ has_all_defaults))
return insert
def _collect_update_commands(base_mapper, uowtransaction,
- table, states_to_update):
+ table, states_to_update):
"""Identify sets of values to use in UPDATE statements for a
list of states.
@@ -295,7 +295,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
update = []
for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in states_to_update:
+ instance_key, row_switch in states_to_update:
if table not in mapper._pks_by_table:
continue
@@ -309,10 +309,10 @@ def _collect_update_commands(base_mapper, uowtransaction,
if col is mapper.version_id_col:
params[col._label] = \
mapper._get_committed_state_attr_by_column(
- row_switch or state,
- row_switch and row_switch.dict
- or state_dict,
- col)
+ row_switch or state,
+ row_switch and row_switch.dict
+ or state_dict,
+ col)
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
@@ -331,19 +331,20 @@ def _collect_update_commands(base_mapper, uowtransaction,
# in a different table than the one
# where the version_id_col is.
for prop in mapper._columntoproperty.values():
- history = state.manager[prop.key].impl.get_history(
+ history = (
+ state.manager[prop.key].impl.get_history(
state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE)
+ attributes.PASSIVE_NO_INITIALIZE))
if history.added:
hasdata = True
else:
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
- state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE)
+ state, state_dict,
+ attributes.PASSIVE_NO_INITIALIZE)
if history.added:
if isinstance(history.added[0],
- sql.ClauseElement):
+ sql.ClauseElement):
value_params[col] = history.added[0]
else:
value = history.added[0]
@@ -351,13 +352,13 @@ def _collect_update_commands(base_mapper, uowtransaction,
if col in pks:
if history.deleted and \
- not row_switch:
+ not row_switch:
# if passive_updates and sync detected
# this was a pk->pk sync, use the new
# value to locate the row, since the
# DB would already have set this
if ("pk_cascaded", state, col) in \
- uowtransaction.attributes:
+ uowtransaction.attributes:
value = history.added[0]
params[col._label] = value
else:
@@ -381,7 +382,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
hasdata = True
elif col in pks:
value = state.manager[prop.key].impl.get(
- state, state_dict)
+ state, state_dict)
if value is None:
hasnull = True
params[col._label] = value
@@ -389,16 +390,16 @@ def _collect_update_commands(base_mapper, uowtransaction,
if hasdata:
if hasnull:
raise orm_exc.FlushError(
- "Can't update table "
- "using NULL for primary "
- "key value")
+ "Can't update table "
+ "using NULL for primary "
+ "key value")
update.append((state, state_dict, params, mapper,
- connection, value_params))
+ connection, value_params))
return update
def _collect_post_update_commands(base_mapper, uowtransaction, table,
- states_to_update, post_update_cols):
+ states_to_update, post_update_cols):
"""Identify sets of values to use in UPDATE statements for a
list of states within a post_update operation.
@@ -415,34 +416,34 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
for col in mapper._cols_by_table[table]:
if col in pks:
params[col._label] = \
- mapper._get_state_attr_by_column(
- state,
- state_dict, col)
+ mapper._get_state_attr_by_column(
+ state,
+ state_dict, col)
elif col in post_update_cols:
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
- state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE)
+ state, state_dict,
+ attributes.PASSIVE_NO_INITIALIZE)
if history.added:
value = history.added[0]
params[col.key] = value
hasdata = True
if hasdata:
update.append((state, state_dict, params, mapper,
- connection))
+ connection))
return update
def _collect_delete_commands(base_mapper, uowtransaction, table,
- states_to_delete):
+ states_to_delete):
"""Identify values to use in DELETE statements for a list of
states to be deleted."""
delete = util.defaultdict(list)
for state, state_dict, mapper, has_identity, connection \
- in states_to_delete:
+ in states_to_delete:
if not has_identity or table not in mapper._pks_by_table:
continue
@@ -450,43 +451,44 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
delete[connection].append(params)
for col in mapper._pks_by_table[table]:
params[col.key] = \
- value = \
- mapper._get_committed_state_attr_by_column(
- state, state_dict, col)
+ value = \
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict, col)
if value is None:
raise orm_exc.FlushError(
- "Can't delete from table "
- "using NULL for primary "
- "key value")
+ "Can't delete from table "
+ "using NULL for primary "
+ "key value")
if mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col):
+ table.c.contains_column(mapper.version_id_col):
params[mapper.version_id_col.key] = \
- mapper._get_committed_state_attr_by_column(
- state, state_dict,
- mapper.version_id_col)
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict,
+ mapper.version_id_col)
return delete
def _emit_update_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, update):
+ cached_connections, mapper, table, update):
"""Emit UPDATE statements corresponding to value lists collected
by _collect_update_commands()."""
needs_version_id = mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col)
+ table.c.contains_column(mapper.version_id_col)
def update_stmt():
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
- type_=col.type))
+ type_=col.type))
if needs_version_id:
- clause.clauses.append(mapper.version_id_col ==\
- sql.bindparam(mapper.version_id_col._label,
- type_=mapper.version_id_col.type))
+ clause.clauses.append(
+ mapper.version_id_col == sql.bindparam(
+ mapper.version_id_col._label,
+ type_=mapper.version_id_col.type))
stmt = table.update(clause)
if mapper.base_mapper.eager_defaults:
@@ -500,43 +502,43 @@ def _emit_update_statements(base_mapper, uowtransaction,
rows = 0
for state, state_dict, params, mapper, \
- connection, value_params in update:
+ connection, value_params in update:
if value_params:
c = connection.execute(
- statement.values(value_params),
- params)
+ statement.values(value_params),
+ params)
else:
c = cached_connections[connection].\
- execute(statement, params)
+ execute(statement, params)
_postfetch(
- mapper,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- c.context.compiled_parameters[0],
- value_params)
+ mapper,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ c.context.compiled_parameters[0],
+ value_params)
rows += c.rowcount
if connection.dialect.supports_sane_rowcount:
if rows != len(update):
raise orm_exc.StaleDataError(
- "UPDATE statement on table '%s' expected to "
- "update %d row(s); %d were matched." %
- (table.description, len(update), rows))
+ "UPDATE statement on table '%s' expected to "
+ "update %d row(s); %d were matched." %
+ (table.description, len(update), rows))
elif needs_version_id:
util.warn("Dialect %s does not support updated rowcount "
- "- versioning cannot be verified." %
- c.dialect.dialect_description,
- stacklevel=12)
+ "- versioning cannot be verified." %
+ c.dialect.dialect_description,
+ stacklevel=12)
def _emit_insert_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, insert):
+ cached_connections, mapper, table, insert):
"""Emit INSERT statements corresponding to value lists collected
by _collect_insert_commands()."""
@@ -544,37 +546,37 @@ def _emit_insert_statements(base_mapper, uowtransaction,
for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
records in groupby(insert,
- lambda rec: (rec[4],
- list(rec[2].keys()),
- bool(rec[5]),
- rec[6], rec[7])
- ):
+ lambda rec: (rec[4],
+ list(rec[2].keys()),
+ bool(rec[5]),
+ rec[6], rec[7])
+ ):
if \
- (
- has_all_defaults
- or not base_mapper.eager_defaults
- or not connection.dialect.implicit_returning
- ) and has_all_pks and not hasvalue:
+ (
+ has_all_defaults
+ or not base_mapper.eager_defaults
+ or not connection.dialect.implicit_returning
+ ) and has_all_pks and not hasvalue:
records = list(records)
multiparams = [rec[2] for rec in records]
c = cached_connections[connection].\
- execute(statement, multiparams)
+ execute(statement, multiparams)
for (state, state_dict, params, mapper_rec,
conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
_postfetch(
- mapper_rec,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- last_inserted_params,
- value_params)
+ mapper_rec,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ last_inserted_params,
+ value_params)
else:
if not has_all_defaults and base_mapper.eager_defaults:
@@ -583,45 +585,45 @@ def _emit_insert_statements(base_mapper, uowtransaction,
statement = statement.return_defaults(mapper.version_id_col)
for state, state_dict, params, mapper_rec, \
- connection, value_params, \
- has_all_pks, has_all_defaults in records:
+ connection, value_params, \
+ has_all_pks, has_all_defaults in records:
if value_params:
result = connection.execute(
- statement.values(value_params),
- params)
+ statement.values(value_params),
+ params)
else:
result = cached_connections[connection].\
- execute(statement, params)
+ execute(statement, params)
primary_key = result.context.inserted_primary_key
if primary_key is not None:
# set primary key attributes
for pk, col in zip(primary_key,
- mapper._pks_by_table[table]):
+ mapper._pks_by_table[table]):
prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
# TODO: would rather say:
- #state_dict[prop.key] = pk
+ # state_dict[prop.key] = pk
mapper_rec._set_state_attr_by_column(
- state,
- state_dict,
- col, pk)
+ state,
+ state_dict,
+ col, pk)
_postfetch(
- mapper_rec,
- uowtransaction,
- table,
- state,
- state_dict,
- result,
- result.context.compiled_parameters[0],
- value_params)
+ mapper_rec,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ result,
+ result.context.compiled_parameters[0],
+ value_params)
def _emit_post_update_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, update):
+ cached_connections, mapper, table, update):
"""Emit UPDATE statements corresponding to value lists collected
by _collect_post_update_commands()."""
@@ -630,7 +632,7 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
- type_=col.type))
+ type_=col.type))
return table.update(clause)
@@ -645,13 +647,13 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
):
connection = key[0]
multiparams = [params for state, state_dict,
- params, mapper, conn in grouper]
+ params, mapper, conn in grouper]
cached_connections[connection].\
- execute(statement, multiparams)
+ execute(statement, multiparams)
def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
- mapper, table, delete):
+ mapper, table, delete):
"""Emit DELETE statements corresponding to value lists collected
by _collect_delete_commands()."""
@@ -662,14 +664,14 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(
- col == sql.bindparam(col.key, type_=col.type))
+ col == sql.bindparam(col.key, type_=col.type))
if need_version_id:
clause.clauses.append(
mapper.version_id_col ==
sql.bindparam(
- mapper.version_id_col.key,
- type_=mapper.version_id_col.type
+ mapper.version_id_col.key,
+ type_=mapper.version_id_col.type
)
)
@@ -710,7 +712,7 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
connection.execute(statement, del_objects)
if base_mapper.confirm_deleted_rows and \
- rows_matched > -1 and expected != rows_matched:
+ rows_matched > -1 and expected != rows_matched:
if only_warn:
util.warn(
"DELETE statement on table '%s' expected to "
@@ -728,15 +730,16 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
(table.description, expected, rows_matched)
)
+
def _finalize_insert_update_commands(base_mapper, uowtransaction,
- states_to_insert, states_to_update):
+ states_to_insert, states_to_update):
"""finalize state on states that have been inserted or updated,
including calling after_insert/after_update events.
"""
for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in states_to_insert + \
- states_to_update:
+ instance_key, row_switch in states_to_insert + \
+ states_to_update:
if mapper._readonly_props:
readonly = state.unmodified_intersection(
@@ -754,7 +757,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
if base_mapper.eager_defaults:
toload_now.extend(state._unloaded_non_object)
elif mapper.version_id_col is not None and \
- mapper.version_id_generator is False:
+ mapper.version_id_generator is False:
prop = mapper._columntoproperty[mapper.version_id_col]
if prop.key in state.unloaded:
toload_now.extend([prop.key])
@@ -774,7 +777,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
def _postfetch(mapper, uowtransaction, table,
- state, dict_, result, params, value_params):
+ state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
@@ -800,19 +803,19 @@ def _postfetch(mapper, uowtransaction, table,
if postfetch_cols:
state._expire_attributes(state.dict,
- [mapper._columntoproperty[c].key
- for c in postfetch_cols if c in
- mapper._columntoproperty]
- )
+ [mapper._columntoproperty[c].key
+ for c in postfetch_cols if c in
+ mapper._columntoproperty]
+ )
# synchronize newly inserted ids from one table to the next
# TODO: this still goes a little too often. would be nice to
# have definitive list of "columns that changed" here
for m, equated_pairs in mapper._table_to_equated[table]:
sync.populate(state, m, state, m,
- equated_pairs,
- uowtransaction,
- mapper.passive_updates)
+ equated_pairs,
+ uowtransaction,
+ mapper.passive_updates)
def _connections_for_states(base_mapper, uowtransaction, states):
@@ -828,7 +831,7 @@ def _connections_for_states(base_mapper, uowtransaction, states):
# to use for update
if uowtransaction.session.connection_callable:
connection_callable = \
- uowtransaction.session.connection_callable
+ uowtransaction.session.connection_callable
else:
connection = None
connection_callable = None
@@ -838,7 +841,7 @@ def _connections_for_states(base_mapper, uowtransaction, states):
connection = connection_callable(base_mapper, state.obj())
elif not connection:
connection = uowtransaction.transaction.connection(
- base_mapper)
+ base_mapper)
mapper = _state_mapper(state)
@@ -849,8 +852,8 @@ def _cached_connection_dict(base_mapper):
# dictionary of connection->connection_with_cache_options.
return util.PopulateDict(
lambda conn: conn.execution_options(
- compiled_cache=base_mapper._compiled_cache
- ))
+ compiled_cache=base_mapper._compiled_cache
+ ))
def _sort_states(states):
@@ -858,7 +861,7 @@ def _sort_states(states):
persistent = set(s for s in pending if s.key is not None)
pending.difference_update(persistent)
return sorted(pending, key=operator.attrgetter("insert_order")) + \
- sorted(persistent, key=lambda q: q.key[1])
+ sorted(persistent, key=lambda q: q.key[1])
class BulkUD(object):
@@ -877,9 +880,9 @@ class BulkUD(object):
klass = lookup[synchronize_session]
except KeyError:
raise sa_exc.ArgumentError(
- "Valid strategies for session synchronization "
- "are %s" % (", ".join(sorted(repr(x)
- for x in lookup))))
+ "Valid strategies for session synchronization "
+ "are %s" % (", ".join(sorted(repr(x)
+ for x in lookup))))
else:
return klass(*arg)
@@ -894,12 +897,12 @@ class BulkUD(object):
query = self.query
self.context = context = query._compile_context()
if len(context.statement.froms) != 1 or \
- not isinstance(context.statement.froms[0], schema.Table):
+ not isinstance(context.statement.froms[0], schema.Table):
self.primary_table = query._only_entity_zero(
- "This operation requires only one Table or "
- "entity be specified as the target."
- ).mapper.local_table
+ "This operation requires only one Table or "
+ "entity be specified as the target."
+ ).mapper.local_table
else:
self.primary_table = context.statement.froms[0]
@@ -929,7 +932,7 @@ class BulkEvaluate(BulkUD):
evaluator_compiler = evaluator.EvaluatorCompiler(target_cls)
if query.whereclause is not None:
eval_condition = evaluator_compiler.process(
- query.whereclause)
+ query.whereclause)
else:
def eval_condition(obj):
return True
@@ -938,16 +941,16 @@ class BulkEvaluate(BulkUD):
except evaluator.UnevaluatableError:
raise sa_exc.InvalidRequestError(
- "Could not evaluate current criteria in Python. "
- "Specify 'fetch' or False for the "
- "synchronize_session parameter.")
+ "Could not evaluate current criteria in Python. "
+ "Specify 'fetch' or False for the "
+ "synchronize_session parameter.")
- #TODO: detect when the where clause is a trivial primary key match
+ # TODO: detect when the where clause is a trivial primary key match
self.matched_objects = [
- obj for (cls, pk), obj in
- query.session.identity_map.items()
- if issubclass(cls, target_cls) and
- eval_condition(obj)]
+ obj for (cls, pk), obj in
+ query.session.identity_map.items()
+ if issubclass(cls, target_cls) and
+ eval_condition(obj)]
class BulkFetch(BulkUD):
@@ -957,10 +960,10 @@ class BulkFetch(BulkUD):
query = self.query
session = query.session
select_stmt = self.context.statement.with_only_columns(
- self.primary_table.primary_key)
+ self.primary_table.primary_key)
self.matched_rows = session.execute(
- select_stmt,
- params=query._params).fetchall()
+ select_stmt,
+ params=query._params).fetchall()
class BulkUpdate(BulkUD):
@@ -981,10 +984,10 @@ class BulkUpdate(BulkUD):
def _do_exec(self):
update_stmt = sql.update(self.primary_table,
- self.context.whereclause, self.values)
+ self.context.whereclause, self.values)
self.result = self.query.session.execute(
- update_stmt, params=self.query._params)
+ update_stmt, params=self.query._params)
self.rowcount = self.result.rowcount
def _do_post(self):
@@ -1009,10 +1012,10 @@ class BulkDelete(BulkUD):
def _do_exec(self):
delete_stmt = sql.delete(self.primary_table,
- self.context.whereclause)
+ self.context.whereclause)
self.result = self.query.session.execute(delete_stmt,
- params=self.query._params)
+ params=self.query._params)
self.rowcount = self.result.rowcount
def _do_post(self):
@@ -1029,7 +1032,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
for key, value in self.values.items():
key = _attr_as_key(key)
self.value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
+ expression._literal_as_binds(value))
def _do_post_synchronize(self):
session = self.query.session
@@ -1037,11 +1040,11 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
evaluated_keys = list(self.value_evaluators.keys())
for obj in self.matched_objects:
state, dict_ = attributes.instance_state(obj),\
- attributes.instance_dict(obj)
+ attributes.instance_dict(obj)
# only evaluate unmodified attributes
to_evaluate = state.unmodified.intersection(
- evaluated_keys)
+ evaluated_keys)
for key in to_evaluate:
dict_[key] = self.value_evaluators[key](obj)
@@ -1050,8 +1053,8 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
# expire attributes with pending changes
# (there was no autoflush, so they are overwritten)
state._expire_attributes(dict_,
- set(evaluated_keys).
- difference(to_evaluate))
+ set(evaluated_keys).
+ difference(to_evaluate))
states.add(state)
session._register_altered(states)
@@ -1062,8 +1065,8 @@ class BulkDeleteEvaluate(BulkEvaluate, BulkDelete):
def _do_post_synchronize(self):
self.query.session._remove_newly_deleted(
- [attributes.instance_state(obj)
- for obj in self.matched_objects])
+ [attributes.instance_state(obj)
+ for obj in self.matched_objects])
class BulkUpdateFetch(BulkFetch, BulkUpdate):
@@ -1078,7 +1081,7 @@ class BulkUpdateFetch(BulkFetch, BulkUpdate):
attributes.instance_state(session.identity_map[identity_key])
for identity_key in [
target_mapper.identity_key_from_primary_key(
- list(primary_key))
+ list(primary_key))
for primary_key in self.matched_rows
]
if identity_key in session.identity_map
@@ -1100,7 +1103,7 @@ class BulkDeleteFetch(BulkFetch, BulkDelete):
# TODO: inline this and call remove_newly_deleted
# once
identity_key = target_mapper.identity_key_from_primary_key(
- list(primary_key))
+ list(primary_key))
if identity_key in session.identity_map:
session._remove_newly_deleted(
[attributes.instance_state(
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index fabacba05..62ea93fb3 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -39,12 +39,12 @@ class ColumnProperty(StrategizedProperty):
Column-based properties can normally be applied to the mapper's
``properties`` dictionary using the :class:`.Column` element directly.
- Use this function when the given column is not directly present within the
- mapper's selectable; examples include SQL expressions, functions, and
- scalar SELECT queries.
+ Use this function when the given column is not directly present within
+ the mapper's selectable; examples include SQL expressions, functions,
+ and scalar SELECT queries.
- Columns that aren't present in the mapper's selectable won't be persisted
- by the mapper and are effectively "read-only" attributes.
+ Columns that aren't present in the mapper's selectable won't be
+ persisted by the mapper and are effectively "read-only" attributes.
:param \*cols:
list of Column objects to be mapped.
@@ -63,8 +63,8 @@ class ColumnProperty(StrategizedProperty):
.. versionadded:: 0.6.6
:param comparator_factory: a class which extends
- :class:`.ColumnProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
+ :class:`.ColumnProperty.Comparator` which provides custom SQL
+ clause generation for comparison operations.
:param group:
a group name for this property when marked as deferred.
@@ -111,12 +111,12 @@ class ColumnProperty(StrategizedProperty):
"""
self._orig_columns = [expression._labeled(c) for c in columns]
self.columns = [expression._labeled(_orm_full_deannotate(c))
- for c in columns]
+ for c in columns]
self.group = kwargs.pop('group', None)
self.deferred = kwargs.pop('deferred', False)
self.instrument = kwargs.pop('_instrument', True)
self.comparator_factory = kwargs.pop('comparator_factory',
- self.__class__.Comparator)
+ self.__class__.Comparator)
self.descriptor = kwargs.pop('descriptor', None)
self.extension = kwargs.pop('extension', None)
self.active_history = kwargs.pop('active_history', False)
@@ -145,9 +145,9 @@ class ColumnProperty(StrategizedProperty):
util.set_creation_order(self)
self.strategy_class = self._strategy_lookup(
- ("deferred", self.deferred),
- ("instrument", self.instrument)
- )
+ ("deferred", self.deferred),
+ ("instrument", self.instrument)
+ )
@property
def expression(self):
@@ -166,7 +166,7 @@ class ColumnProperty(StrategizedProperty):
comparator=self.comparator_factory(self, mapper),
parententity=mapper,
doc=self.doc
- )
+ )
def do_init(self):
super(ColumnProperty, self).do_init()
@@ -181,18 +181,18 @@ class ColumnProperty(StrategizedProperty):
def copy(self):
return ColumnProperty(
- deferred=self.deferred,
- group=self.group,
- active_history=self.active_history,
- *self.columns)
+ deferred=self.deferred,
+ group=self.group,
+ active_history=self.active_history,
+ *self.columns)
def _getcommitted(self, state, dict_, column,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
return state.get_impl(self.key).\
- get_committed_value(state, dict_, passive=passive)
+ get_committed_value(state, dict_, passive=passive)
def merge(self, session, source_state, source_dict, dest_state,
- dest_dict, load, _recursive):
+ dest_dict, load, _recursive):
if not self.instrument:
return
elif self.key in source_dict:
@@ -257,4 +257,3 @@ class ColumnProperty(StrategizedProperty):
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
-
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index c340e9856..12e11b26c 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -24,19 +24,19 @@ from itertools import chain
from . import (
attributes, interfaces, object_mapper, persistence,
exc as orm_exc, loading
- )
+)
from .base import _entity_descriptor, _is_aliased_class, \
- _is_mapped_class, _orm_columns, _generative
+ _is_mapped_class, _orm_columns, _generative
from .path_registry import PathRegistry
from .util import (
AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
- )
+)
from .. import sql, util, log, exc as sa_exc, inspect, inspection
from ..sql.expression import _interpret_as_from
from ..sql import (
- util as sql_util,
- expression, visitors
- )
+ util as sql_util,
+ expression, visitors
+)
from ..sql.base import ColumnCollection
from . import properties
@@ -45,6 +45,7 @@ __all__ = ['Query', 'QueryContext', 'aliased']
_path_registry = PathRegistry.root
+
@inspection._self_inspects
@log.class_logger
class Query(object):
@@ -124,22 +125,22 @@ class Query(object):
if entity not in d:
ext_info = inspect(entity)
if not ext_info.is_aliased_class and \
- ext_info.mapper.with_polymorphic:
+ ext_info.mapper.with_polymorphic:
if ext_info.mapper.mapped_table not in \
- self._polymorphic_adapters:
+ self._polymorphic_adapters:
self._mapper_loads_polymorphically_with(
ext_info.mapper,
sql_util.ColumnAdapter(
- ext_info.selectable,
- ext_info.mapper._equivalent_columns
+ ext_info.selectable,
+ ext_info.mapper._equivalent_columns
)
)
aliased_adapter = None
elif ext_info.is_aliased_class:
aliased_adapter = sql_util.ColumnAdapter(
- ext_info.selectable,
- ext_info.mapper._equivalent_columns
- )
+ ext_info.selectable,
+ ext_info.mapper._equivalent_columns
+ )
else:
aliased_adapter = None
@@ -163,17 +164,17 @@ class Query(object):
info = inspect(from_obj)
if hasattr(info, 'mapper') and \
- (info.is_mapper or info.is_aliased_class):
+ (info.is_mapper or info.is_aliased_class):
self._select_from_entity = from_obj
if set_base_alias:
raise sa_exc.ArgumentError(
- "A selectable (FromClause) instance is "
- "expected when the base alias is being set.")
+ "A selectable (FromClause) instance is "
+ "expected when the base alias is being set.")
fa.append(info.selectable)
elif not info.is_selectable:
raise sa_exc.ArgumentError(
- "argument is not a mapped class, mapper, "
- "aliased(), or FromClause instance.")
+ "argument is not a mapped class, mapper, "
+ "aliased(), or FromClause instance.")
else:
if isinstance(from_obj, expression.SelectBase):
from_obj = from_obj.alias()
@@ -184,11 +185,11 @@ class Query(object):
self._from_obj = tuple(fa)
if set_base_alias and \
- len(self._from_obj) == 1 and \
- isinstance(select_from_alias, expression.Alias):
+ len(self._from_obj) == 1 and \
+ isinstance(select_from_alias, expression.Alias):
equivs = self.__all_equivs()
self._from_obj_alias = sql_util.ColumnAdapter(
- self._from_obj[0], equivs)
+ self._from_obj[0], equivs)
def _reset_polymorphic_adapter(self, mapper):
for m2 in mapper._with_polymorphic_mappers:
@@ -216,11 +217,11 @@ class Query(object):
def _adapt_col_list(self, cols):
return [
- self._adapt_clause(
- expression._literal_as_text(o),
- True, True)
- for o in cols
- ]
+ self._adapt_clause(
+ expression._literal_as_text(o),
+ True, True)
+ for o in cols
+ ]
@_generative()
def _adapt_all_clauses(self):
@@ -270,18 +271,18 @@ class Query(object):
# if 'orm only', look for ORM annotations
# in the element before adapting.
if not _orm_only or \
- '_orm_adapt' in elem._annotations or \
- "parententity" in elem._annotations:
+ '_orm_adapt' in elem._annotations or \
+ "parententity" in elem._annotations:
e = adapter(elem)
if e is not None:
return e
return visitors.replacement_traverse(
- clause,
- {},
- replace
- )
+ clause,
+ {},
+ replace
+ )
def _entity_zero(self):
return self._entities[0]
@@ -311,26 +312,26 @@ class Query(object):
def _only_mapper_zero(self, rationale=None):
if len(self._entities) > 1:
raise sa_exc.InvalidRequestError(
- rationale or
- "This operation requires a Query "
- "against a single mapper."
- )
+ rationale or
+ "This operation requires a Query "
+ "against a single mapper."
+ )
return self._mapper_zero()
def _only_full_mapper_zero(self, methname):
if self._entities != [self._primary_entity]:
raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
+ "%s() can only be used against "
+ "a single mapped class." % methname)
return self._primary_entity.entity_zero
def _only_entity_zero(self, rationale=None):
if len(self._entities) > 1:
raise sa_exc.InvalidRequestError(
- rationale or
- "This operation requires a Query "
- "against a single mapper."
- )
+ rationale or
+ "This operation requires a Query "
+ "against a single mapper."
+ )
return self._entity_zero()
def __all_equivs(self):
@@ -340,7 +341,8 @@ class Query(object):
return equivs
def _get_condition(self):
- return self._no_criterion_condition("get", order_by=False, distinct=False)
+ return self._no_criterion_condition(
+ "get", order_by=False, distinct=False)
def _get_existing_condition(self):
self._no_criterion_assertion("get", order_by=False, distinct=False)
@@ -354,8 +356,8 @@ class Query(object):
self._group_by or (order_by and self._order_by) or \
(distinct and self._distinct):
raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a "
- "Query with existing criterion. " % meth)
+ "Query.%s() being called on a "
+ "Query with existing criterion. " % meth)
def _no_criterion_condition(self, meth, order_by=True, distinct=True):
self._no_criterion_assertion(meth, order_by, distinct)
@@ -369,8 +371,8 @@ class Query(object):
return
if self._order_by:
raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a "
- "Query with existing criterion. " % meth)
+ "Query.%s() being called on a "
+ "Query with existing criterion. " % meth)
self._no_criterion_condition(meth)
def _no_statement_condition(self, meth):
@@ -411,9 +413,9 @@ class Query(object):
)
def _get_options(self, populate_existing=None,
- version_check=None,
- only_load_props=None,
- refresh_state=None):
+ version_check=None,
+ only_load_props=None,
+ refresh_state=None):
if populate_existing:
self._populate_existing = populate_existing
if version_check:
@@ -441,11 +443,10 @@ class Query(object):
"""
stmt = self._compile_context(labels=self._with_labels).\
- statement
+ statement
if self._params:
stmt = stmt.params(self._params)
-
# TODO: there's no tests covering effects of
# the annotation not being there
return stmt._annotate({'no_replacement_traverse': True})
@@ -662,9 +663,9 @@ class Query(object):
@_generative(_no_clauseelement_condition)
def with_polymorphic(self,
- cls_or_mappers,
- selectable=None,
- polymorphic_on=None):
+ cls_or_mappers,
+ selectable=None,
+ polymorphic_on=None):
"""Load columns for inheriting classes.
:meth:`.Query.with_polymorphic` applies transformations
@@ -692,13 +693,13 @@ class Query(object):
if not self._primary_entity:
raise sa_exc.InvalidRequestError(
- "No primary mapper set up for this Query.")
+ "No primary mapper set up for this Query.")
entity = self._entities[0]._clone()
self._entities = [entity] + self._entities[1:]
entity.set_with_polymorphic(self,
- cls_or_mappers,
- selectable=selectable,
- polymorphic_on=polymorphic_on)
+ cls_or_mappers,
+ selectable=selectable,
+ polymorphic_on=polymorphic_on)
@_generative()
def yield_per(self, count):
@@ -717,15 +718,16 @@ class Query(object):
Also note that while :meth:`~sqlalchemy.orm.query.Query.yield_per`
will set the ``stream_results`` execution option to True, currently
- this is only understood by :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect
- which will stream results using server side cursors instead of pre-buffer
- all rows for this query. Other DBAPIs pre-buffer all rows before
- making them available.
+ this is only understood by
+ :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect which will
+ stream results using server side cursors instead of pre-buffer all
+ rows for this query. Other DBAPIs pre-buffer all rows before making
+ them available.
"""
self._yield_per = count
self._execution_options = self._execution_options.union(
- {"stream_results": True})
+ {"stream_results": True})
def get(self, ident):
"""Return an instance based on the given primary key identifier,
@@ -795,9 +797,9 @@ class Query(object):
if len(ident) != len(mapper.primary_key):
raise sa_exc.InvalidRequestError(
- "Incorrect number of values in identifier to formulate "
- "primary key for query.get(); primary key columns are %s" %
- ','.join("'%s'" % c for c in mapper.primary_key))
+ "Incorrect number of values in identifier to formulate "
+ "primary key for query.get(); primary key columns are %s" %
+ ','.join("'%s'" % c for c in mapper.primary_key))
key = mapper.identity_key_from_primary_key(ident)
@@ -839,9 +841,9 @@ class Query(object):
"""
self._correlate = self._correlate.union(
- _interpret_as_from(s)
- if s is not None else None
- for s in args)
+ _interpret_as_from(s)
+ if s is not None else None
+ for s in args)
@_generative()
def autoflush(self, setting):
@@ -900,17 +902,17 @@ class Query(object):
for prop in mapper.iterate_properties:
if isinstance(prop, properties.RelationshipProperty) and \
- prop.mapper is self._mapper_zero():
+ prop.mapper is self._mapper_zero():
property = prop
break
else:
raise sa_exc.InvalidRequestError(
- "Could not locate a property which relates instances "
- "of class '%s' to instances of class '%s'" %
- (
- self._mapper_zero().class_.__name__,
- instance.__class__.__name__)
- )
+ "Could not locate a property which relates instances "
+ "of class '%s' to instances of class '%s'" %
+ (
+ self._mapper_zero().class_.__name__,
+ instance.__class__.__name__)
+ )
return self.filter(with_parent(instance, property))
@@ -943,8 +945,8 @@ class Query(object):
"""
fromclause = self.with_labels().enable_eagerloads(False).\
- _set_enable_single_crit(False).\
- statement.correlate(None)
+ _set_enable_single_crit(False).\
+ statement.correlate(None)
q = self._from_selectable(fromclause)
if entities:
q._set_entities(entities)
@@ -1039,8 +1041,9 @@ class Query(object):
self._set_entity_selectables(self._entities[l:])
@util.pending_deprecation("0.7",
- ":meth:`.add_column` is superseded by :meth:`.add_columns`",
- False)
+ ":meth:`.add_column` is superseded "
+ "by :meth:`.add_columns`",
+ False)
def add_column(self, column):
"""Add a column expression to the list of result columns to be
returned.
@@ -1205,8 +1208,8 @@ class Query(object):
kwargs.update(args[0])
elif len(args) > 0:
raise sa_exc.ArgumentError(
- "params() takes zero or one positional argument, "
- "which is a dictionary.")
+ "params() takes zero or one positional argument, "
+ "which is a dictionary.")
self._params = self._params.copy()
self._params.update(kwargs)
@@ -1246,7 +1249,6 @@ class Query(object):
else:
self._criterion = criterion
-
def filter_by(self, **kwargs):
"""apply the given filtering criterion to a copy
of this :class:`.Query`, using keyword expressions.
@@ -1271,7 +1273,7 @@ class Query(object):
"""
clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value
- for key, value in kwargs.items()]
+ for key, value in kwargs.items()]
return self.filter(sql.and_(*clauses))
@_generative(_no_statement_condition, _no_limit_offset)
@@ -1324,7 +1326,8 @@ class Query(object):
"""apply a HAVING criterion to the query and return the
newly resulting :class:`.Query`.
- :meth:`~.Query.having` is used in conjunction with :meth:`~.Query.group_by`.
+ :meth:`~.Query.having` is used in conjunction with
+ :meth:`~.Query.group_by`.
HAVING criterion makes it possible to use filters on aggregate
functions like COUNT, SUM, AVG, MAX, and MIN, eg.::
@@ -1342,8 +1345,8 @@ class Query(object):
if criterion is not None and \
not isinstance(criterion, sql.ClauseElement):
raise sa_exc.ArgumentError(
- "having() argument must be of type "
- "sqlalchemy.sql.ClauseElement or string")
+ "having() argument must be of type "
+ "sqlalchemy.sql.ClauseElement or string")
criterion = self._adapt_clause(criterion, True, True)
@@ -1391,7 +1394,7 @@ class Query(object):
"""
return self._from_selectable(
- expression.union(*([self] + list(q))))
+ expression.union(*([self] + list(q))))
def union_all(self, *q):
"""Produce a UNION ALL of this Query against one or more queries.
@@ -1401,8 +1404,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.union_all(*([self] + list(q)))
- )
+ expression.union_all(*([self] + list(q)))
+ )
def intersect(self, *q):
"""Produce an INTERSECT of this Query against one or more queries.
@@ -1412,8 +1415,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.intersect(*([self] + list(q)))
- )
+ expression.intersect(*([self] + list(q)))
+ )
def intersect_all(self, *q):
"""Produce an INTERSECT ALL of this Query against one or more queries.
@@ -1423,8 +1426,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.intersect_all(*([self] + list(q)))
- )
+ expression.intersect_all(*([self] + list(q)))
+ )
def except_(self, *q):
"""Produce an EXCEPT of this Query against one or more queries.
@@ -1434,8 +1437,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.except_(*([self] + list(q)))
- )
+ expression.except_(*([self] + list(q)))
+ )
def except_all(self, *q):
"""Produce an EXCEPT ALL of this Query against one or more queries.
@@ -1445,8 +1448,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.except_all(*([self] + list(q)))
- )
+ expression.except_all(*([self] + list(q)))
+ )
def join(self, *props, **kwargs):
"""Create a SQL JOIN against this :class:`.Query` object's criterion
@@ -1456,8 +1459,8 @@ class Query(object):
Consider a mapping between two classes ``User`` and ``Address``,
with a relationship ``User.addresses`` representing a collection
- of ``Address`` objects associated with each ``User``. The most common
- usage of :meth:`~.Query.join` is to create a JOIN along this
+ of ``Address`` objects associated with each ``User``. The most
+ common usage of :meth:`~.Query.join` is to create a JOIN along this
relationship, using the ``User.addresses`` attribute as an indicator
for how this should occur::
@@ -1683,8 +1686,8 @@ class Query(object):
:ref:`ormtutorial_joins` in the ORM tutorial.
- :ref:`inheritance_toplevel` for details on how :meth:`~.Query.join`
- is used for inheritance relationships.
+ :ref:`inheritance_toplevel` for details on how
+ :meth:`~.Query.join` is used for inheritance relationships.
:func:`.orm.join` - a standalone ORM-level join function,
used internally by :meth:`.Query.join`, which in previous
@@ -1692,13 +1695,13 @@ class Query(object):
"""
aliased, from_joinpoint = kwargs.pop('aliased', False),\
- kwargs.pop('from_joinpoint', False)
+ kwargs.pop('from_joinpoint', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
- ','.join(kwargs.keys))
+ ','.join(kwargs.keys))
return self._join(props,
- outerjoin=False, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
+ outerjoin=False, create_aliases=aliased,
+ from_joinpoint=from_joinpoint)
def outerjoin(self, *props, **kwargs):
"""Create a left outer join against this ``Query`` object's criterion
@@ -1708,13 +1711,13 @@ class Query(object):
"""
aliased, from_joinpoint = kwargs.pop('aliased', False), \
- kwargs.pop('from_joinpoint', False)
+ kwargs.pop('from_joinpoint', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
- ','.join(kwargs))
+ ','.join(kwargs))
return self._join(props,
- outerjoin=True, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
+ outerjoin=True, create_aliases=aliased,
+ from_joinpoint=from_joinpoint)
def _update_joinpoint(self, jp):
self._joinpoint = jp
@@ -1740,9 +1743,9 @@ class Query(object):
if len(keys) == 2 and \
isinstance(keys[0], (expression.FromClause,
- type, AliasedClass)) and \
+ type, AliasedClass)) and \
isinstance(keys[1], (str, expression.ClauseElement,
- interfaces.PropComparator)):
+ interfaces.PropComparator)):
# detect 2-arg form of join and
# convert to a tuple.
keys = (keys,)
@@ -1761,7 +1764,8 @@ class Query(object):
# is a little bit of legacy behavior still at work here
# which means they might be in either order. may possibly
# lock this down to (right_entity, onclause) in 0.6.
- if isinstance(arg1, (interfaces.PropComparator, util.string_types)):
+ if isinstance(
+ arg1, (interfaces.PropComparator, util.string_types)):
right_entity, onclause = arg2, arg1
else:
right_entity, onclause = arg1, arg2
@@ -1777,7 +1781,7 @@ class Query(object):
# check for q.join(Class.propname, from_joinpoint=True)
# and Class is that of the current joinpoint
elif from_joinpoint and \
- isinstance(onclause, interfaces.PropComparator):
+ isinstance(onclause, interfaces.PropComparator):
left_entity = onclause._parententity
info = inspect(self._joinpoint_zero())
@@ -1789,7 +1793,7 @@ class Query(object):
if left_mapper is left_entity:
left_entity = self._joinpoint_zero()
descriptor = _entity_descriptor(left_entity,
- onclause.key)
+ onclause.key)
onclause = descriptor
if isinstance(onclause, interfaces.PropComparator):
@@ -1804,7 +1808,7 @@ class Query(object):
left_entity = onclause._parententity
prop = onclause.property
- if not isinstance(onclause, attributes.QueryableAttribute):
+ if not isinstance(onclause, attributes.QueryableAttribute):
onclause = prop
if not create_aliases:
@@ -1829,10 +1833,9 @@ class Query(object):
raise NotImplementedError("query.join(a==b) not supported.")
self._join_left_to_right(
- left_entity,
- right_entity, onclause,
- outerjoin, create_aliases, prop)
-
+ left_entity,
+ right_entity, onclause,
+ outerjoin, create_aliases, prop)
def _join_left_to_right(self, left, right,
onclause, outerjoin, create_aliases, prop):
@@ -1848,48 +1851,49 @@ class Query(object):
if left is None:
raise sa_exc.InvalidRequestError(
- "Don't know how to join from %s; please use "
- "select_from() to establish the left "
- "entity/selectable of this join" % self._entities[0])
+ "Don't know how to join from %s; please use "
+ "select_from() to establish the left "
+ "entity/selectable of this join" % self._entities[0])
if left is right and \
not create_aliases:
raise sa_exc.InvalidRequestError(
- "Can't construct a join from %s to %s, they "
- "are the same entity" %
- (left, right))
+ "Can't construct a join from %s to %s, they "
+ "are the same entity" %
+ (left, right))
l_info = inspect(left)
r_info = inspect(right)
-
overlap = False
if not create_aliases:
right_mapper = getattr(r_info, "mapper", None)
# if the target is a joined inheritance mapping,
# be more liberal about auto-aliasing.
if right_mapper and (
- right_mapper.with_polymorphic or
- isinstance(right_mapper.mapped_table, expression.Join)
- ):
+ right_mapper.with_polymorphic or
+ isinstance(right_mapper.mapped_table, expression.Join)
+ ):
for from_obj in self._from_obj or [l_info.selectable]:
- if sql_util.selectables_overlap(l_info.selectable, from_obj) and \
- sql_util.selectables_overlap(from_obj, r_info.selectable):
+ if sql_util.selectables_overlap(
+ l_info.selectable, from_obj) and \
+ sql_util.selectables_overlap(
+ from_obj, r_info.selectable):
overlap = True
break
- elif sql_util.selectables_overlap(l_info.selectable, r_info.selectable):
+ elif sql_util.selectables_overlap(l_info.selectable,
+ r_info.selectable):
overlap = True
-
if overlap and l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
- "Can't join table/selectable '%s' to itself" %
- l_info.selectable)
+ "Can't join table/selectable '%s' to itself" %
+ l_info.selectable)
right, onclause = self._prepare_right_side(
- r_info, right, onclause,
- create_aliases,
- prop, overlap)
+ r_info, right, onclause,
+ create_aliases,
+ prop, overlap)
# if joining on a MapperProperty path,
# track the path to prevent redundant joins
@@ -1904,7 +1908,7 @@ class Query(object):
self._join_to_left(l_info, left, right, onclause, outerjoin)
def _prepare_right_side(self, r_info, right, onclause, create_aliases,
- prop, overlap):
+ prop, overlap):
info = r_info
right_mapper, right_selectable, right_is_aliased = \
@@ -1918,8 +1922,8 @@ class Query(object):
if right_mapper and prop and \
not right_mapper.common_parent(prop.mapper):
raise sa_exc.InvalidRequestError(
- "Join target %s does not correspond to "
- "the right side of join condition %s" % (right, onclause)
+ "Join target %s does not correspond to "
+ "the right side of join condition %s" % (right, onclause)
)
if not right_mapper and prop:
@@ -1929,11 +1933,11 @@ class Query(object):
if right_mapper and right is right_selectable:
if not right_selectable.is_derived_from(
- right_mapper.mapped_table):
+ right_mapper.mapped_table):
raise sa_exc.InvalidRequestError(
"Selectable '%s' is not derived from '%s'" %
(right_selectable.description,
- right_mapper.mapped_table.description))
+ right_mapper.mapped_table.description))
if isinstance(right_selectable, expression.SelectBase):
# TODO: this isn't even covered now!
@@ -1943,16 +1947,16 @@ class Query(object):
right = aliased(right_mapper, right_selectable)
aliased_entity = right_mapper and \
- not right_is_aliased and \
- (
- right_mapper.with_polymorphic and isinstance(
- right_mapper._with_polymorphic_selectable,
- expression.Alias)
- or
- overlap # test for overlap:
- # orm/inheritance/relationships.py
- # SelfReferentialM2MTest
- )
+ not right_is_aliased and \
+ (
+ right_mapper.with_polymorphic and isinstance(
+ right_mapper._with_polymorphic_selectable,
+ expression.Alias)
+ or
+ overlap # test for overlap:
+ # orm/inheritance/relationships.py
+ # SelfReferentialM2MTest
+ )
if not need_adapter and (create_aliases or aliased_entity):
right = aliased(right, flat=True)
@@ -1962,10 +1966,11 @@ class Query(object):
# apply an adapter to all subsequent filter() calls
# until reset_joinpoint() is called.
if need_adapter:
- self._filter_aliases = ORMAdapter(right,
- equivalents=right_mapper and
- right_mapper._equivalent_columns or {},
- chain_to=self._filter_aliases)
+ self._filter_aliases = ORMAdapter(
+ right,
+ equivalents=right_mapper and
+ right_mapper._equivalent_columns or {},
+ chain_to=self._filter_aliases)
# if the onclause is a ClauseElement, adapt it with any
# adapters that are in place right now
@@ -1978,12 +1983,12 @@ class Query(object):
# set are also adapted.
if aliased_entity and not create_aliases:
self._mapper_loads_polymorphically_with(
- right_mapper,
- ORMAdapter(
- right,
- equivalents=right_mapper._equivalent_columns
- )
- )
+ right_mapper,
+ ORMAdapter(
+ right,
+ equivalents=right_mapper._equivalent_columns
+ )
+ )
return right, onclause
@@ -1994,22 +1999,22 @@ class Query(object):
if self._from_obj:
replace_clause_index, clause = sql_util.find_join_source(
- self._from_obj,
- left_selectable)
+ self._from_obj,
+ left_selectable)
if clause is not None:
try:
clause = orm_join(clause,
- right,
- onclause, isouter=outerjoin)
+ right,
+ onclause, isouter=outerjoin)
except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from. "
- "Tried joining to %s, but got: %s" % (right, ae))
+ "Could not find a FROM clause to join from. "
+ "Tried joining to %s, but got: %s" % (right, ae))
self._from_obj = \
- self._from_obj[:replace_clause_index] + \
- (clause, ) + \
- self._from_obj[replace_clause_index + 1:]
+ self._from_obj[:replace_clause_index] + \
+ (clause, ) + \
+ self._from_obj[replace_clause_index + 1:]
return
if left_mapper:
@@ -2027,8 +2032,8 @@ class Query(object):
clause = orm_join(clause, right, onclause, isouter=outerjoin)
except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from. "
- "Tried joining to %s, but got: %s" % (right, ae))
+ "Could not find a FROM clause to join from. "
+ "Tried joining to %s, but got: %s" % (right, ae))
self._from_obj = self._from_obj + (clause,)
def _reset_joinpoint(self):
@@ -2186,14 +2191,14 @@ class Query(object):
start, stop, step = util.decode_slice(item)
if isinstance(stop, int) and \
- isinstance(start, int) and \
- stop - start <= 0:
+ isinstance(start, int) and \
+ stop - start <= 0:
return []
# perhaps we should execute a count() here so that we
# can still use LIMIT/OFFSET ?
elif (isinstance(start, int) and start < 0) \
- or (isinstance(stop, int) and stop < 0):
+ or (isinstance(stop, int) and stop < 0):
return list(self)[item]
res = self.slice(start, stop)
@@ -2310,11 +2315,11 @@ class Query(object):
statement = sql.text(statement)
if not isinstance(statement,
- (expression.TextClause,
- expression.SelectBase)):
+ (expression.TextClause,
+ expression.SelectBase)):
raise sa_exc.ArgumentError(
- "from_statement accepts text(), select(), "
- "and union() objects only.")
+ "from_statement accepts text(), select(), "
+ "and union() objects only.")
self._statement = statement
@@ -2408,16 +2413,16 @@ class Query(object):
def _connection_from_session(self, **kw):
conn = self.session.connection(
- **kw)
+ **kw)
if self._execution_options:
conn = conn.execution_options(**self._execution_options)
return conn
def _execute_and_instances(self, querycontext):
conn = self._connection_from_session(
- mapper=self._mapper_zero_or_none(),
- clause=querycontext.statement,
- close_with_result=True)
+ mapper=self._mapper_zero_or_none(),
+ clause=querycontext.statement,
+ close_with_result=True)
result = conn.execute(querycontext.statement, self._params)
return loading.instances(self, result, querycontext)
@@ -2553,7 +2558,7 @@ class Query(object):
# .with_only_columns() after we have a core select() so that
# we get just "SELECT 1" without any entities.
return sql.exists(self.add_columns('1').with_labels().
- statement.with_only_columns(['1']))
+ statement.with_only_columns(['1']))
def count(self):
"""Return a count of rows this Query would return.
@@ -2629,10 +2634,11 @@ class Query(object):
This method has several key caveats:
- * The method does **not** offer in-Python cascading of relationships - it
- is assumed that ON DELETE CASCADE/SET NULL/etc. is configured for any foreign key
- references which require it, otherwise the database may emit an
- integrity violation if foreign key references are being enforced.
+ * The method does **not** offer in-Python cascading of relationships
+ - it is assumed that ON DELETE CASCADE/SET NULL/etc. is configured
+ for any foreign key references which require it, otherwise the
+ database may emit an integrity violation if foreign key references
+ are being enforced.
After the DELETE, dependent objects in the :class:`.Session` which
were impacted by an ON DELETE may not contain the current
@@ -2641,8 +2647,8 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`. Accessing an expired object
whose row has been deleted will invoke a SELECT to locate the
- row; when the row is not found, an :class:`~sqlalchemy.orm.exc.ObjectDeletedError`
- is raised.
+ row; when the row is not found, an
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
* The :meth:`.MapperEvents.before_delete` and
:meth:`.MapperEvents.after_delete`
@@ -2657,10 +2663,10 @@ class Query(object):
:ref:`inserts_and_updates` - Core SQL tutorial
"""
- #TODO: cascades need handling.
+ # TODO: cascades need handling.
delete_op = persistence.BulkDelete.factory(
- self, synchronize_session)
+ self, synchronize_session)
delete_op.exec_()
return delete_op.rowcount
@@ -2698,9 +2704,9 @@ class Query(object):
This method has several key caveats:
- * The method does **not** offer in-Python cascading of relationships - it
- is assumed that ON UPDATE CASCADE is configured for any foreign key
- references which require it, otherwise the database may emit an
+ * The method does **not** offer in-Python cascading of relationships
+ - it is assumed that ON UPDATE CASCADE is configured for any foreign
+ key references which require it, otherwise the database may emit an
integrity violation if foreign key references are being enforced.
After the UPDATE, dependent objects in the :class:`.Session` which
@@ -2709,16 +2715,16 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`.
- * As of 0.8, this method will support multiple table updates, as detailed
- in :ref:`multi_table_updates`, and this behavior does extend to support
- updates of joined-inheritance and other multiple table mappings. However,
- the **join condition of an inheritance mapper is currently not
- automatically rendered**.
- Care must be taken in any multiple-table update to explicitly include
- the joining condition between those tables, even in mappings where
- this is normally automatic.
- E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of the
- ``Engineer`` local table using criteria against the ``Employee``
+ * As of 0.8, this method will support multiple table updates, as
+ detailed in :ref:`multi_table_updates`, and this behavior does
+ extend to support updates of joined-inheritance and other multiple
+ table mappings. However, the **join condition of an inheritance
+ mapper is currently not automatically rendered**.
+ Care must be taken in any multiple-table update to explicitly
+ include the joining condition between those tables, even in mappings
+ where this is normally automatic.
+ E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of
+ the ``Engineer`` local table using criteria against the ``Employee``
local table might look like::
session.query(Engineer).\\
@@ -2740,18 +2746,17 @@ class Query(object):
"""
- #TODO: value keys need to be mapped to corresponding sql cols and
+ # TODO: value keys need to be mapped to corresponding sql cols and
# instr.attr.s to string keys
- #TODO: updates of manytoone relationships need to be converted to
+ # TODO: updates of manytoone relationships need to be converted to
# fk assignments
- #TODO: cascades need handling.
+ # TODO: cascades need handling.
update_op = persistence.BulkUpdate.factory(
- self, synchronize_session, values)
+ self, synchronize_session, values)
update_op.exec_()
return update_op.rowcount
-
def _compile_context(self, labels=True):
context = QueryContext(self)
@@ -2784,13 +2789,13 @@ class Query(object):
if not context.primary_columns:
if self._only_load_props:
raise sa_exc.InvalidRequestError(
- "No column-based properties specified for "
- "refresh operation. Use session.expire() "
- "to reload collections and related items.")
+ "No column-based properties specified for "
+ "refresh operation. Use session.expire() "
+ "to reload collections and related items.")
else:
raise sa_exc.InvalidRequestError(
- "Query contains no columns with which to "
- "SELECT from.")
+ "Query contains no columns with which to "
+ "SELECT from.")
if context.multi_row_eager_loaders and self._should_nest_selectable:
context.statement = self._compound_eager_statement(context)
@@ -2805,26 +2810,26 @@ class Query(object):
if context.order_by:
order_by_col_expr = list(
- chain(*[
- sql_util.unwrap_order_by(o)
- for o in context.order_by
- ])
- )
+ chain(*[
+ sql_util.unwrap_order_by(o)
+ for o in context.order_by
+ ])
+ )
else:
context.order_by = None
order_by_col_expr = []
inner = sql.select(
- context.primary_columns + order_by_col_expr,
- context.whereclause,
- from_obj=context.froms,
- use_labels=context.labels,
- # TODO: this order_by is only needed if
- # LIMIT/OFFSET is present in self._select_args,
- # else the application on the outside is enough
- order_by=context.order_by,
- **self._select_args
- )
+ context.primary_columns + order_by_col_expr,
+ context.whereclause,
+ from_obj=context.froms,
+ use_labels=context.labels,
+ # TODO: this order_by is only needed if
+ # LIMIT/OFFSET is present in self._select_args,
+ # else the application on the outside is enough
+ order_by=context.order_by,
+ **self._select_args
+ )
for hint in self._with_hints:
inner = inner.with_hint(*hint)
@@ -2839,8 +2844,8 @@ class Query(object):
context.adapter = sql_util.ColumnAdapter(inner, equivs)
statement = sql.select(
- [inner] + context.secondary_columns,
- use_labels=context.labels)
+ [inner] + context.secondary_columns,
+ use_labels=context.labels)
statement._for_update_arg = context._for_update_arg
@@ -2850,8 +2855,8 @@ class Query(object):
# giving us a marker as to where the "splice point" of
# the join should be
from_clause = sql_util.splice_joins(
- from_clause,
- eager_join, eager_join.stop_on)
+ from_clause,
+ eager_join, eager_join.stop_on)
statement.append_from(from_clause)
@@ -2871,24 +2876,24 @@ class Query(object):
if self._distinct and context.order_by:
order_by_col_expr = list(
- chain(*[
- sql_util.unwrap_order_by(o)
- for o in context.order_by
- ])
- )
+ chain(*[
+ sql_util.unwrap_order_by(o)
+ for o in context.order_by
+ ])
+ )
context.primary_columns += order_by_col_expr
context.froms += tuple(context.eager_joins.values())
statement = sql.select(
- context.primary_columns +
- context.secondary_columns,
- context.whereclause,
- from_obj=context.froms,
- use_labels=context.labels,
- order_by=context.order_by,
- **self._select_args
- )
+ context.primary_columns +
+ context.secondary_columns,
+ context.whereclause,
+ from_obj=context.froms,
+ use_labels=context.labels,
+ order_by=context.order_by,
+ **self._select_args
+ )
statement._for_update_arg = context._for_update_arg
for hint in self._with_hints:
@@ -2920,14 +2925,15 @@ class Query(object):
single_crit = adapter.traverse(single_crit)
single_crit = self._adapt_clause(single_crit, False, False)
context.whereclause = sql.and_(
- sql.True_._ifnone(context.whereclause),
- single_crit)
+ sql.True_._ifnone(context.whereclause),
+ single_crit)
def __str__(self):
return str(self._compile_context().statement)
from ..sql.selectable import ForUpdateArg
+
class LockmodeArg(ForUpdateArg):
@classmethod
def parse_legacy_query(self, mode):
@@ -2944,10 +2950,11 @@ class LockmodeArg(ForUpdateArg):
read = False
else:
raise sa_exc.ArgumentError(
- "Unknown with_lockmode argument: %r" % mode)
+ "Unknown with_lockmode argument: %r" % mode)
return LockmodeArg(read=read, nowait=nowait)
+
class _QueryEntity(object):
"""represent an entity column returned within a Query result."""
@@ -2955,7 +2962,7 @@ class _QueryEntity(object):
if cls is _QueryEntity:
entity = args[1]
if not isinstance(entity, util.string_types) and \
- _is_mapped_class(entity):
+ _is_mapped_class(entity):
cls = _MapperEntity
elif isinstance(entity, Bundle):
cls = _BundleEntity
@@ -2989,7 +2996,7 @@ class _MapperEntity(_QueryEntity):
self.is_aliased_class = ext_info.is_aliased_class
self._with_polymorphic = ext_info.with_polymorphic_mappers
self._polymorphic_discriminator = \
- ext_info.polymorphic_on
+ ext_info.polymorphic_on
self.entity_zero = ext_info
if ext_info.is_aliased_class:
self._label_name = self.entity_zero.name
@@ -2999,7 +3006,7 @@ class _MapperEntity(_QueryEntity):
self.custom_rows = bool(self.mapper.dispatch.append_result)
def set_with_polymorphic(self, query, cls_or_mappers,
- selectable, polymorphic_on):
+ selectable, polymorphic_on):
"""Receive an update from a call to query.with_polymorphic().
Note the newer style of using a free standing with_polymporphic()
@@ -3010,23 +3017,23 @@ class _MapperEntity(_QueryEntity):
if self.is_aliased_class:
# TODO: invalidrequest ?
raise NotImplementedError(
- "Can't use with_polymorphic() against "
- "an Aliased object"
- )
+ "Can't use with_polymorphic() against "
+ "an Aliased object"
+ )
if cls_or_mappers is None:
query._reset_polymorphic_adapter(self.mapper)
return
mappers, from_obj = self.mapper._with_polymorphic_args(
- cls_or_mappers, selectable)
+ cls_or_mappers, selectable)
self._with_polymorphic = mappers
self._polymorphic_discriminator = polymorphic_on
self.selectable = from_obj
- query._mapper_loads_polymorphically_with(self.mapper,
- sql_util.ColumnAdapter(from_obj,
- self.mapper._equivalent_columns))
+ query._mapper_loads_polymorphically_with(
+ self.mapper, sql_util.ColumnAdapter(
+ from_obj, self.mapper._equivalent_columns))
filter_fn = id
@@ -3115,7 +3122,7 @@ class _MapperEntity(_QueryEntity):
def setup_context(self, query, context):
adapter = self._get_entity_clauses(query, context)
- #if self._adapted_selectable is None:
+ # if self._adapted_selectable is None:
context.froms += (self.selectable,)
if context.order_by is False and self.mapper.order_by:
@@ -3124,10 +3131,10 @@ class _MapperEntity(_QueryEntity):
# apply adaptation to the mapper's order_by if needed.
if adapter:
context.order_by = adapter.adapt_list(
- util.to_list(
- context.order_by
- )
- )
+ util.to_list(
+ context.order_by
+ )
+ )
if self._with_polymorphic:
poly_properties = self.mapper._iterate_polymorphic_properties(
@@ -3161,6 +3168,7 @@ class _MapperEntity(_QueryEntity):
def __str__(self):
return str(self.mapper)
+
@inspection._self_inspects
class Bundle(object):
"""A grouping of SQL expressions that are returned by a :class:`.Query`
@@ -3192,7 +3200,8 @@ class Bundle(object):
bn = Bundle("mybundle", MyClass.x, MyClass.y)
- for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
+ for row in session.query(bn).filter(
+ bn.c.x == 5).filter(bn.c.y == 4):
print(row.mybundle.x, row.mybundle.y)
:param name: name of the bundle.
@@ -3206,7 +3215,7 @@ class Bundle(object):
self.exprs = exprs
self.c = self.columns = ColumnCollection()
self.columns.update((getattr(col, "key", col._label), col)
- for col in exprs)
+ for col in exprs)
self.single_entity = kw.pop('single_entity', self.single_entity)
columns = None
@@ -3225,7 +3234,8 @@ class Bundle(object):
Bundle('b3', MyClass.x, MyClass.y)
)
- q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
+ q = sess.query(b1).filter(
+ b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
.. seealso::
@@ -3266,7 +3276,8 @@ class Bundle(object):
"""
def proc(row, result):
- return util.KeyedTuple([proc(row, None) for proc in procs], labels)
+ return util.KeyedTuple(
+ [proc(row, None) for proc in procs], labels)
return proc
@@ -3318,9 +3329,9 @@ class _BundleEntity(_QueryEntity):
def adapt_to_selectable(self, query, sel):
c = _BundleEntity(query, self.bundle, setup_entities=False)
- #c._label_name = self._label_name
- #c.entity_zero = self.entity_zero
- #c.entities = self.entities
+ # c._label_name = self._label_name
+ # c.entity_zero = self.entity_zero
+ # c.entities = self.entities
for ent in self._entities:
ent.adapt_to_selectable(c, sel)
@@ -3335,14 +3346,15 @@ class _BundleEntity(_QueryEntity):
def row_processor(self, query, context, custom_rows):
procs, labels = zip(
- *[ent.row_processor(query, context, custom_rows)
- for ent in self._entities]
- )
+ *[ent.row_processor(query, context, custom_rows)
+ for ent in self._entities]
+ )
proc = self.bundle.create_row_processor(query, procs, labels)
return proc, self._label_name
+
class _ColumnEntity(_QueryEntity):
"""Column/expression based entity."""
@@ -3354,16 +3366,16 @@ class _ColumnEntity(_QueryEntity):
column = sql.literal_column(column)
self._label_name = column.name
elif isinstance(column, (
- attributes.QueryableAttribute,
- interfaces.PropComparator
- )):
+ attributes.QueryableAttribute,
+ interfaces.PropComparator
+ )):
self._label_name = column.key
column = column._query_clause_element()
else:
self._label_name = getattr(column, 'key', None)
if not isinstance(column, expression.ColumnElement) and \
- hasattr(column, '_select_iterable'):
+ hasattr(column, '_select_iterable'):
for c in column._select_iterable:
if c is column:
break
@@ -3414,7 +3426,7 @@ class _ColumnEntity(_QueryEntity):
for elem in visitors.iterate(column, {})
if 'parententity' in elem._annotations
and actual_froms.intersection(elem._from_objects)
- )
+ )
if self.entities:
self.entity_zero = list(self.entities)[0]
@@ -3456,14 +3468,14 @@ class _ColumnEntity(_QueryEntity):
return entity is self.entity_zero
else:
return not _is_aliased_class(self.entity_zero) and \
- entity.common_parent(self.entity_zero)
+ entity.common_parent(self.entity_zero)
def _resolve_expr_against_query_aliases(self, query, expr, context):
return query._adapt_clause(expr, False, True)
def row_processor(self, query, context, custom_rows):
column = self._resolve_expr_against_query_aliases(
- query, self.column, context)
+ query, self.column, context)
if context.adapter:
column = context.adapter.columns[column]
@@ -3475,7 +3487,7 @@ class _ColumnEntity(_QueryEntity):
def setup_context(self, query, context):
column = self._resolve_expr_against_query_aliases(
- query, self.column, context)
+ query, self.column, context)
context.froms += tuple(self.froms)
context.primary_columns.append(column)
@@ -3493,8 +3505,8 @@ class QueryContext(object):
if query._statement is not None:
if isinstance(query._statement, expression.SelectBase) and \
- not query._statement._textual and \
- not query._statement.use_labels:
+ not query._statement._textual and \
+ not query._statement.use_labels:
self.statement = query._statement.apply_labels()
else:
self.statement = query._statement
@@ -3516,7 +3528,7 @@ class QueryContext(object):
self.eager_joins = {}
self.create_eager_joins = []
self.propagate_options = set(o for o in query._with_options if
- o.propagate_to_loaders)
+ o.propagate_to_loaders)
self.attributes = query._attributes.copy()
@@ -3563,5 +3575,3 @@ class AliasOption(interfaces.MapperOption):
else:
alias = self.alias
query._from_obj_alias = sql_util.ColumnAdapter(alias)
-
-
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index a5327e52e..c2debda03 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -23,13 +23,15 @@ from ..sql.util import (
ClauseAdapter,
join_condition, _shallow_annotate, visit_binary_product,
_deep_deannotate, selectables_overlap
- )
+)
from ..sql import operators, expression, visitors
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY, StrategizedProperty, PropComparator
+from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY,
+ StrategizedProperty, PropComparator)
from ..inspection import inspect
from . import mapper as mapperlib
import collections
+
def remote(expr):
"""Annotate a portion of a primaryjoin expression
with a 'remote' annotation.
@@ -47,7 +49,7 @@ def remote(expr):
"""
return _annotate_columns(expression._clause_element_as_expr(expr),
- {"remote": True})
+ {"remote": True})
def foreign(expr):
@@ -68,7 +70,7 @@ def foreign(expr):
"""
return _annotate_columns(expression._clause_element_as_expr(expr),
- {"foreign": True})
+ {"foreign": True})
@log.class_logger
@@ -90,33 +92,34 @@ class RelationshipProperty(StrategizedProperty):
_dependency_processor = None
def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- back_populates=None,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- single_parent=False, innerjoin=False,
- distinct_target_key=None,
- doc=None,
- active_history=False,
- cascade_backrefs=True,
- load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None,
- query_class=None,
- info=None):
+ secondary=None, primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ uselist=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ post_update=False,
+ cascade=False, extension=None,
+ viewonly=False, lazy=True,
+ collection_class=None, passive_deletes=False,
+ passive_updates=True, remote_side=None,
+ enable_typechecks=True, join_depth=None,
+ comparator_factory=None,
+ single_parent=False, innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=False,
+ cascade_backrefs=True,
+ load_on_pending=False,
+ strategy_class=None, _local_remote_pairs=None,
+ query_class=None,
+ info=None):
"""Provide a relationship between two mapped classes.
- This corresponds to a parent-child or associative table relationship. The
- constructed class is an instance of :class:`.RelationshipProperty`.
+ This corresponds to a parent-child or associative table relationship.
+ The constructed class is an instance of
+ :class:`.RelationshipProperty`.
A typical :func:`.relationship`, used in a classical mapping::
@@ -127,10 +130,11 @@ class RelationshipProperty(StrategizedProperty):
Some arguments accepted by :func:`.relationship` optionally accept a
callable function, which when called produces the desired value.
The callable is invoked by the parent :class:`.Mapper` at "mapper
- initialization" time, which happens only when mappers are first used, and
- is assumed to be after all mappings have been constructed. This can be
- used to resolve order-of-declaration and other dependency issues, such as
- if ``Child`` is declared below ``Parent`` in the same file::
+ initialization" time, which happens only when mappers are first used,
+ and is assumed to be after all mappings have been constructed. This
+ can be used to resolve order-of-declaration and other dependency
+ issues, such as if ``Child`` is declared below ``Parent`` in the same
+ file::
mapper(Parent, properties={
"children":relationship(lambda: Child,
@@ -138,12 +142,12 @@ class RelationshipProperty(StrategizedProperty):
})
When using the :ref:`declarative_toplevel` extension, the Declarative
- initializer allows string arguments to be passed to :func:`.relationship`.
- These string arguments are converted into callables that evaluate
- the string as Python code, using the Declarative
- class-registry as a namespace. This allows the lookup of related
- classes to be automatic via their string name, and removes the need to
- import related classes at all into the local module space::
+ initializer allows string arguments to be passed to
+ :func:`.relationship`. These string arguments are converted into
+ callables that evaluate the string as Python code, using the
+ Declarative class-registry as a namespace. This allows the lookup of
+ related classes to be automatic via their string name, and removes the
+ need to import related classes at all into the local module space::
from sqlalchemy.ext.declarative import declarative_base
@@ -156,18 +160,18 @@ class RelationshipProperty(StrategizedProperty):
.. seealso::
- :ref:`relationship_config_toplevel` - Full introductory and reference
- documentation for :func:`.relationship`.
+ :ref:`relationship_config_toplevel` - Full introductory and
+ reference documentation for :func:`.relationship`.
:ref:`orm_tutorial_relationship` - ORM tutorial introduction.
:param argument:
- a mapped class, or actual :class:`.Mapper` instance, representing the
- target of the relationship.
+ a mapped class, or actual :class:`.Mapper` instance, representing
+ the target of the relationship.
- :paramref:`~.relationship.argument` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.argument` may also be passed as a callable
+ function which is evaluated at mapper initialization time, and may
+ be passed as a Python-evaluable string when using Declarative.
.. seealso::
@@ -187,35 +191,37 @@ class RelationshipProperty(StrategizedProperty):
present in the :class:`.MetaData` collection associated with the
parent-mapped :class:`.Table`.
- The :paramref:`~.relationship.secondary` keyword argument is typically
- applied in the case where the intermediary :class:`.Table` is not
- otherwise exprssed in any direct class mapping. If the "secondary" table
- is also explicitly mapped elsewhere
- (e.g. as in :ref:`association_pattern`), one should consider applying
- the :paramref:`~.relationship.viewonly` flag so that this :func:`.relationship`
- is not used for persistence operations which may conflict with those
- of the association object pattern.
+ The :paramref:`~.relationship.secondary` keyword argument is
+ typically applied in the case where the intermediary :class:`.Table`
+ is not otherwise expressed in any direct class mapping. If the
+ "secondary" table is also explicitly mapped elsewhere (e.g. as in
+ :ref:`association_pattern`), one should consider applying the
+ :paramref:`~.relationship.viewonly` flag so that this
+ :func:`.relationship` is not used for persistence operations which
+ may conflict with those of the association object pattern.
.. seealso::
- :ref:`relationships_many_to_many` - Reference example of "many to many".
+ :ref:`relationships_many_to_many` - Reference example of "many
+ to many".
:ref:`orm_tutorial_many_to_many` - ORM tutorial introduction to
many-to-many relationships.
- :ref:`self_referential_many_to_many` - Specifics on using many-to-many
- in a self-referential case.
+ :ref:`self_referential_many_to_many` - Specifics on using
+ many-to-many in a self-referential case.
:ref:`declarative_many_to_many` - Additional options when using
Declarative.
- :ref:`association_pattern` - an alternative to :paramref:`~.relationship.secondary`
- when composing association table relationships, allowing additional
- attributes to be specified on the association table.
+ :ref:`association_pattern` - an alternative to
+ :paramref:`~.relationship.secondary` when composing association
+ table relationships, allowing additional attributes to be
+ specified on the association table.
- :ref:`composite_secondary_join` - a lesser-used pattern which in some
- cases can enable complex :func:`.relationship` SQL conditions
- to be used.
+ :ref:`composite_secondary_join` - a lesser-used pattern which
+ in some cases can enable complex :func:`.relationship` SQL
+ conditions to be used.
.. versionadded:: 0.9.2 :paramref:`~.relationship.secondary` works
more effectively when referring to a :class:`.Join` instance.
@@ -251,11 +257,13 @@ class RelationshipProperty(StrategizedProperty):
:param back_populates:
- Takes a string name and has the same meaning as :paramref:`~.relationship.backref`,
- except the complementing property is **not** created automatically,
- and instead must be configured explicitly on the other mapper. The
- complementing property should also indicate :paramref:`~.relationship.back_populates`
- to this relationship to ensure proper functioning.
+ Takes a string name and has the same meaning as
+ :paramref:`~.relationship.backref`, except the complementing
+ property is **not** created automatically, and instead must be
+ configured explicitly on the other mapper. The complementing
+ property should also indicate
+ :paramref:`~.relationship.back_populates` to this relationship to
+ ensure proper functioning.
.. seealso::
@@ -309,8 +317,9 @@ class RelationshipProperty(StrategizedProperty):
examples.
:param comparator_factory:
- a class which extends :class:`.RelationshipProperty.Comparator` which
- provides custom SQL clause generation for comparison operations.
+ a class which extends :class:`.RelationshipProperty.Comparator`
+ which provides custom SQL clause generation for comparison
+ operations.
.. seealso::
@@ -325,20 +334,21 @@ class RelationshipProperty(StrategizedProperty):
keyword to the innermost SELECT statement. When left as ``None``,
the DISTINCT keyword will be applied in those cases when the target
columns do not comprise the full primary key of the target table.
- When set to ``True``, the DISTINCT keyword is applied to the innermost
- SELECT unconditionally.
+ When set to ``True``, the DISTINCT keyword is applied to the
+ innermost SELECT unconditionally.
It may be desirable to set this flag to False when the DISTINCT is
reducing performance of the innermost subquery beyond that of what
duplicate innermost rows may be causing.
- .. versionadded:: 0.8.3 - :paramref:`~.relationship.distinct_target_key`
- allows the
+ .. versionadded:: 0.8.3 -
+ :paramref:`~.relationship.distinct_target_key` allows the
subquery eager loader to apply a DISTINCT modifier to the
innermost SELECT.
- .. versionchanged:: 0.9.0 - :paramref:`~.relationship.distinct_target_key`
- now defaults to ``None``, so that the feature enables itself automatically for
+ .. versionchanged:: 0.9.0 -
+ :paramref:`~.relationship.distinct_target_key` now defaults to
+ ``None``, so that the feature enables itself automatically for
those cases where the innermost query targets a non-unique
key.
@@ -387,8 +397,9 @@ class RelationshipProperty(StrategizedProperty):
.. versionchanged:: 0.8
A multiple-foreign key join ambiguity can be resolved by
- setting the :paramref:`~.relationship.foreign_keys` parameter alone, without the
- need to explicitly set :paramref:`~.relationship.primaryjoin` as well.
+ setting the :paramref:`~.relationship.foreign_keys`
+ parameter alone, without the need to explicitly set
+ :paramref:`~.relationship.primaryjoin` as well.
2. The :class:`.Table` being mapped does not actually have
:class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
@@ -396,10 +407,11 @@ class RelationshipProperty(StrategizedProperty):
was reflected from a database that does not support foreign key
reflection (MySQL MyISAM).
- 3. The :paramref:`~.relationship.primaryjoin` argument is used to construct a non-standard
- join condition, which makes use of columns or expressions that do
- not normally refer to their "parent" column, such as a join condition
- expressed by a complex comparison using a SQL function.
+ 3. The :paramref:`~.relationship.primaryjoin` argument is used to
+ construct a non-standard join condition, which makes use of
+ columns or expressions that do not normally refer to their
+ "parent" column, such as a join condition expressed by a
+ complex comparison using a SQL function.
The :func:`.relationship` construct will raise informative
error messages that suggest the use of the
@@ -409,9 +421,10 @@ class RelationshipProperty(StrategizedProperty):
:paramref:`~.relationship.foreign_keys` parameter is usually
not needed.
- :paramref:`~.relationship.foreign_keys` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.foreign_keys` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. seealso::
@@ -419,14 +432,16 @@ class RelationshipProperty(StrategizedProperty):
:ref:`relationship_custom_foreign`
- :func:`.foreign` - allows direct annotation of the "foreign" columns
- within a :paramref:`~.relationship.primaryjoin` condition.
+ :func:`.foreign` - allows direct annotation of the "foreign"
+ columns within a :paramref:`~.relationship.primaryjoin` condition.
.. versionadded:: 0.8
The :func:`.foreign` annotation can also be applied
- directly to the :paramref:`~.relationship.primaryjoin` expression, which is an alternate,
- more specific system of describing which columns in a particular
- :paramref:`~.relationship.primaryjoin` should be considered "foreign".
+ directly to the :paramref:`~.relationship.primaryjoin`
+ expression, which is an alternate, more specific system of
+ describing which columns in a particular
+ :paramref:`~.relationship.primaryjoin` should be considered
+ "foreign".
:param info: Optional data dictionary which will be populated into the
:attr:`.MapperProperty.info` attribute of this object.
@@ -440,18 +455,19 @@ class RelationshipProperty(StrategizedProperty):
generally perform better than outer joins.
This flag can be set to ``True`` when the relationship references an
- object via many-to-one using local foreign keys that are not nullable,
- or when the reference is one-to-one or a collection that is guaranteed
- to have one or at least one entry.
+ object via many-to-one using local foreign keys that are not
+ nullable, or when the reference is one-to-one or a collection that
+ is guaranteed to have one or at least one entry.
- If the joined-eager load is chained onto an existing LEFT OUTER JOIN,
- ``innerjoin=True`` will be bypassed and the join will continue to
- chain as LEFT OUTER JOIN so that the results don't change. As an alternative,
- specify the value ``"nested"``. This will instead nest the join
- on the right side, e.g. using the form "a LEFT OUTER JOIN (b JOIN c)".
+ If the joined-eager load is chained onto an existing LEFT OUTER
+ JOIN, ``innerjoin=True`` will be bypassed and the join will continue
+ to chain as LEFT OUTER JOIN so that the results don't change. As an
+ alternative, specify the value ``"nested"``. This will instead nest
+ the join on the right side, e.g. using the form "a LEFT OUTER JOIN
+ (b JOIN c)".
- .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support
- nesting of eager "inner" joins.
+ .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to
+ support nesting of eager "inner" joins.
.. seealso::
@@ -479,8 +495,8 @@ class RelationshipProperty(StrategizedProperty):
how the related items should be loaded. Default value is
``select``. Values include:
- * ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement, or identity map
+ * ``select`` - items should be loaded lazily when the property is
+ first accessed, using a separate SELECT statement, or identity map
fetch for simple many-to-one references.
* ``immediate`` - items should be loaded as the parents are loaded,
@@ -493,8 +509,9 @@ class RelationshipProperty(StrategizedProperty):
:paramref:`~.relationship.innerjoin` parameter.
* ``subquery`` - items should be loaded "eagerly" as the parents are
- loaded, using one additional SQL statement, which issues a JOIN to a
- subquery of the original statement, for each collection requested.
+ loaded, using one additional SQL statement, which issues a JOIN to
+ a subquery of the original statement, for each collection
+ requested.
* ``noload`` - no loading should occur at any time. This is to
support "write-only" attributes, or attributes which are
@@ -523,35 +540,35 @@ class RelationshipProperty(StrategizedProperty):
Indicates loading behavior for transient or pending parent objects.
When set to ``True``, causes the lazy-loader to
- issue a query for a parent object that is not persistent, meaning it has
- never been flushed. This may take effect for a pending object when
- autoflush is disabled, or for a transient object that has been
+ issue a query for a parent object that is not persistent, meaning it
+ has never been flushed. This may take effect for a pending object
+ when autoflush is disabled, or for a transient object that has been
"attached" to a :class:`.Session` but is not part of its pending
collection.
- The :paramref:`~.relationship.load_on_pending` flag does not improve behavior
- when the ORM is used normally - object references should be constructed
- at the object level, not at the foreign key level, so that they
- are present in an ordinary way before a flush proceeds. This flag
- is not not intended for general use.
+ The :paramref:`~.relationship.load_on_pending` flag does not improve
+ behavior when the ORM is used normally - object references should be
+ constructed at the object level, not at the foreign key level, so
+ that they are present in an ordinary way before a flush proceeds.
+ This flag is not intended for general use.
.. seealso::
- :meth:`.Session.enable_relationship_loading` - this method establishes
- "load on pending" behavior for the whole object, and also allows
- loading on objects that remain transient or detached.
+ :meth:`.Session.enable_relationship_loading` - this method
+ establishes "load on pending" behavior for the whole object, and
+ also allows loading on objects that remain transient or
+ detached.
:param order_by:
indicates the ordering that should be applied when loading these
- items. :paramref:`~.relationship.order_by` is expected to refer to one
- of the :class:`.Column`
- objects to which the target class is mapped, or
- the attribute itself bound to the target class which refers
- to the column.
+ items. :paramref:`~.relationship.order_by` is expected to refer to
+ one of the :class:`.Column` objects to which the target class is
+ mapped, or the attribute itself bound to the target class which
+ refers to the column.
- :paramref:`~.relationship.order_by` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.order_by` may also be passed as a callable
+ function which is evaluated at mapper initialization time, and may
+ be passed as a Python-evaluable string when using Declarative.
:param passive_deletes=False:
Indicates loading behavior during delete operations.
@@ -640,12 +657,13 @@ class RelationshipProperty(StrategizedProperty):
join of this child object against the parent object, or in a
many-to-many relationship the join of the primary object to the
association table. By default, this value is computed based on the
- foreign key relationships of the parent and child tables (or association
- table).
+ foreign key relationships of the parent and child tables (or
+ association table).
- :paramref:`~.relationship.primaryjoin` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.primaryjoin` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. seealso::
@@ -655,15 +673,16 @@ class RelationshipProperty(StrategizedProperty):
used for self-referential relationships, indicates the column or
list of columns that form the "remote side" of the relationship.
- :paramref:`.relationship.remote_side` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`.relationship.remote_side` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. versionchanged:: 0.8
The :func:`.remote` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "remote".
+ directly to the ``primaryjoin`` expression, which is an
+ alternate, more specific system of describing which columns in a
+ particular ``primaryjoin`` should be considered "remote".
.. seealso::
@@ -671,8 +690,8 @@ class RelationshipProperty(StrategizedProperty):
:paramref:`~.relationship.remote_side`
is used to configure self-referential relationships.
- :func:`.remote` - an annotation function that accomplishes the same
- purpose as :paramref:`~.relationship.remote_side`, typically
+ :func:`.remote` - an annotation function that accomplishes the
+ same purpose as :paramref:`~.relationship.remote_side`, typically
when a custom :paramref:`~.relationship.primaryjoin` condition
is used.
@@ -685,18 +704,19 @@ class RelationshipProperty(StrategizedProperty):
.. seealso::
- :ref:`dynamic_relationship` - Introduction to "dynamic" relationship
- loaders.
+ :ref:`dynamic_relationship` - Introduction to "dynamic"
+ relationship loaders.
:param secondaryjoin:
a SQL expression that will be used as the join of
an association table to the child object. By default, this value is
- computed based on the foreign key relationships of the association and
- child tables.
+ computed based on the foreign key relationships of the association
+ and child tables.
- :paramref:`~.relationship.secondaryjoin` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.secondaryjoin` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. seealso::
@@ -709,9 +729,9 @@ class RelationshipProperty(StrategizedProperty):
should be treated either as one-to-one or one-to-many. Its usage
is optional, except for :func:`.relationship` constructs which
are many-to-one or many-to-many and also
- specify the ``delete-orphan`` cascade option. The :func:`.relationship`
- construct itself will raise an error instructing when this option
- is required.
+ specify the ``delete-orphan`` cascade option. The
+ :func:`.relationship` construct itself will raise an error
+ instructing when this option is required.
.. seealso::
@@ -726,33 +746,35 @@ class RelationshipProperty(StrategizedProperty):
of the relationship - one to many forms a list, many to one
forms a scalar, many to many is a list. If a scalar is desired
where normally a list would be present, such as a bi-directional
- one-to-one relationship, set :paramref:`~.relationship.uselist` to False.
+ one-to-one relationship, set :paramref:`~.relationship.uselist` to
+ False.
The :paramref:`~.relationship.uselist` flag is also available on an
- existing :func:`.relationship` construct as a read-only attribute, which
- can be used to determine if this :func:`.relationship` deals with
- collections or scalar attributes::
+ existing :func:`.relationship` construct as a read-only attribute,
+ which can be used to determine if this :func:`.relationship` deals
+ with collections or scalar attributes::
>>> User.addresses.property.uselist
True
.. seealso::
- :ref:`relationships_one_to_one` - Introduction to the "one to one"
- relationship pattern, which is typically when the
+ :ref:`relationships_one_to_one` - Introduction to the "one to
+ one" relationship pattern, which is typically when the
:paramref:`~.relationship.uselist` flag is needed.
:param viewonly=False:
when set to True, the relationship is used only for loading objects,
and not for any persistence operation. A :func:`.relationship`
which specifies :paramref:`~.relationship.viewonly` can work
- with a wider range of SQL operations within the :paramref:`~.relationship.primaryjoin`
- condition, including operations that feature the use of
- a variety of comparison operators as well as SQL functions such
- as :func:`~.sql.expression.cast`. The :paramref:`~.relationship.viewonly`
- flag is also of general use when defining any kind of :func:`~.relationship`
- that doesn't represent the full set of related objects, to prevent
- modifications of the collection from resulting in persistence operations.
+ with a wider range of SQL operations within the
+ :paramref:`~.relationship.primaryjoin` condition, including
+ operations that feature the use of a variety of comparison operators
+ as well as SQL functions such as :func:`~.sql.expression.cast`. The
+ :paramref:`~.relationship.viewonly` flag is also of general use when
+ defining any kind of :func:`~.relationship` that doesn't represent
+ the full set of related objects, to prevent modifications of the
+ collection from resulting in persistence operations.
"""
@@ -784,7 +806,7 @@ class RelationshipProperty(StrategizedProperty):
self.extension = extension
self.load_on_pending = load_on_pending
self.comparator_factory = comparator_factory or \
- RelationshipProperty.Comparator
+ RelationshipProperty.Comparator
self.comparator = self.comparator_factory(self, None)
util.set_creation_order(self)
@@ -799,7 +821,7 @@ class RelationshipProperty(StrategizedProperty):
self._reverse_property = set()
self.cascade = cascade if cascade is not False \
- else "save-update, merge"
+ else "save-update, merge"
self.order_by = order_by
@@ -808,8 +830,8 @@ class RelationshipProperty(StrategizedProperty):
if self.back_populates:
if backref:
raise sa_exc.ArgumentError(
- "backref and back_populates keyword arguments "
- "are mutually exclusive")
+ "backref and back_populates keyword arguments "
+ "are mutually exclusive")
self.backref = None
else:
self.backref = backref
@@ -821,14 +843,14 @@ class RelationshipProperty(StrategizedProperty):
comparator=self.comparator_factory(self, mapper),
parententity=mapper,
doc=self.doc,
- )
+ )
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.RelationshipProperty` attributes.
- See the documentation for :class:`.PropComparator` for a brief overview
- of ORM level operator definition.
+ See the documentation for :class:`.PropComparator` for a brief
+ overview of ORM level operator definition.
See also:
@@ -846,7 +868,8 @@ class RelationshipProperty(StrategizedProperty):
_of_type = None
- def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
+ def __init__(
+ self, prop, parentmapper, adapt_to_entity=None, of_type=None):
"""Construction of :class:`.RelationshipProperty.Comparator`
is internal to the ORM's attribute mechanics.
@@ -860,7 +883,7 @@ class RelationshipProperty(StrategizedProperty):
def adapt_to_entity(self, adapt_to_entity):
return self.__class__(self.property, self._parentmapper,
adapt_to_entity=adapt_to_entity,
- of_type=self._of_type)
+ of_type=self._of_type)
@util.memoized_property
def mapper(self):
@@ -891,10 +914,10 @@ class RelationshipProperty(StrategizedProperty):
of_type = None
pj, sj, source, dest, \
- secondary, target_adapter = self.property._create_joins(
- source_selectable=adapt_from,
- source_polymorphic=True,
- of_type=of_type)
+ secondary, target_adapter = self.property._create_joins(
+ source_selectable=adapt_from,
+ source_polymorphic=True,
+ of_type=of_type)
if sj is not None:
return pj & sj
else:
@@ -909,10 +932,10 @@ class RelationshipProperty(StrategizedProperty):
"""
return RelationshipProperty.Comparator(
- self.property,
- self._parentmapper,
- adapt_to_entity=self._adapt_to_entity,
- of_type=cls)
+ self.property,
+ self._parentmapper,
+ adapt_to_entity=self._adapt_to_entity,
+ of_type=cls)
def in_(self, other):
"""Produce an IN clause - this is not implemented
@@ -920,8 +943,9 @@ class RelationshipProperty(StrategizedProperty):
"""
raise NotImplementedError('in_() not yet supported for '
- 'relationships. For a simple many-to-one, use '
- 'in_() against the set of foreign key values.')
+ 'relationships. For a simple '
+ 'many-to-one, use in_() against '
+ 'the set of foreign key values.')
__hash__ = None
@@ -967,21 +991,23 @@ class RelationshipProperty(StrategizedProperty):
return ~self._criterion_exists()
else:
return _orm_annotate(self.property._optimized_compare(
- None, adapt_source=self.adapter))
+ None, adapt_source=self.adapter))
elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a colle"
- "ction to an object or collection; use "
- "contains() to test for membership.")
+ raise sa_exc.InvalidRequestError(
+ "Can't compare a collection to an object or collection; "
+ "use contains() to test for membership.")
else:
- return _orm_annotate(self.property._optimized_compare(other,
- adapt_source=self.adapter))
+ return _orm_annotate(
+ self.property._optimized_compare(
+ other, adapt_source=self.adapter))
def _criterion_exists(self, criterion=None, **kwargs):
if getattr(self, '_of_type', None):
info = inspect(self._of_type)
target_mapper, to_selectable, is_aliased_class = \
info.mapper, info.selectable, info.is_aliased_class
- if self.property._is_self_referential and not is_aliased_class:
+ if self.property._is_self_referential and not \
+ is_aliased_class:
to_selectable = to_selectable.alias()
single_crit = target_mapper._single_table_criterion
@@ -1000,9 +1026,10 @@ class RelationshipProperty(StrategizedProperty):
source_selectable = None
pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
+ self.property._create_joins(
+ dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
for k in kwargs:
crit = getattr(self.property.mapper.class_, k) == kwargs[k]
@@ -1019,7 +1046,8 @@ class RelationshipProperty(StrategizedProperty):
else:
j = _orm_annotate(pj, exclude=self.property.remote_side)
- if criterion is not None and target_adapter and not is_aliased_class:
+ if criterion is not None and target_adapter and not \
+ is_aliased_class:
# limit this adapter to annotated only?
criterion = target_adapter.traverse(criterion)
@@ -1082,9 +1110,9 @@ class RelationshipProperty(StrategizedProperty):
"""
if not self.property.uselist:
raise sa_exc.InvalidRequestError(
- "'any()' not implemented for scalar "
- "attributes. Use has()."
- )
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
return self._criterion_exists(criterion, **kwargs)
@@ -1118,8 +1146,8 @@ class RelationshipProperty(StrategizedProperty):
"""
if self.property.uselist:
raise sa_exc.InvalidRequestError(
- "'has()' not implemented for collections. "
- "Use any().")
+ "'has()' not implemented for collections. "
+ "Use any().")
return self._criterion_exists(criterion, **kwargs)
def contains(self, other, **kwargs):
@@ -1180,10 +1208,10 @@ class RelationshipProperty(StrategizedProperty):
"""
if not self.property.uselist:
raise sa_exc.InvalidRequestError(
- "'contains' not implemented for scalar "
- "attributes. Use ==")
- clause = self.property._optimized_compare(other,
- adapt_source=self.adapter)
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
+ clause = self.property._optimized_compare(
+ other, adapt_source=self.adapter)
if self.property.secondaryjoin is not None:
clause.negation_clause = \
@@ -1197,8 +1225,10 @@ class RelationshipProperty(StrategizedProperty):
def state_bindparam(x, state, col):
o = state.obj() # strong ref
- return sql.bindparam(x, unique=True, callable_=lambda: \
- self.property.mapper._get_committed_attr_by_column(o, col))
+ return sql.bindparam(
+ x, unique=True, callable_=lambda:
+ self.property.mapper.
+ _get_committed_attr_by_column(o, col))
def adapt(col):
if self.adapter:
@@ -1214,12 +1244,12 @@ class RelationshipProperty(StrategizedProperty):
for (x, y) in self.property.local_remote_pairs])
criterion = sql.and_(*[x == y for (x, y) in
- zip(
- self.property.mapper.primary_key,
- self.property.\
- mapper.\
- primary_key_from_instance(other))
- ])
+ zip(
+ self.property.mapper.primary_key,
+ self.property.
+ mapper.
+ primary_key_from_instance(other))
+ ])
return ~self._criterion_exists(criterion)
def __ne__(self, other):
@@ -1264,13 +1294,14 @@ class RelationshipProperty(StrategizedProperty):
if isinstance(other, (util.NoneType, expression.Null)):
if self.property.direction == MANYTOONE:
return sql.or_(*[x != None for x in
- self.property._calculated_foreign_keys])
+ self.property._calculated_foreign_keys])
else:
return self._criterion_exists()
elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection"
- " to an object or collection; use "
- "contains() to test for membership.")
+ raise sa_exc.InvalidRequestError(
+ "Can't compare a collection"
+ " to an object or collection; use "
+ "contains() to test for membership.")
else:
return self.__negated_contains_or_equals(other)
@@ -1281,50 +1312,53 @@ class RelationshipProperty(StrategizedProperty):
return self.prop
def compare(self, op, value,
- value_is_parent=False,
- alias_secondary=True):
+ value_is_parent=False,
+ alias_secondary=True):
if op == operators.eq:
if value is None:
if self.uselist:
return ~sql.exists([1], self.primaryjoin)
else:
- return self._optimized_compare(None,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
+ return self._optimized_compare(
+ None,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
else:
- return self._optimized_compare(value,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
+ return self._optimized_compare(
+ value,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
else:
return op(self.comparator, value)
def _optimized_compare(self, value, value_is_parent=False,
- adapt_source=None,
- alias_secondary=True):
+ adapt_source=None,
+ alias_secondary=True):
if value is not None:
value = attributes.instance_state(value)
- return self._lazy_strategy.lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary,
- adapt_source=adapt_source)
+ return self._lazy_strategy.lazy_clause(
+ value,
+ reverse_direction=not value_is_parent,
+ alias_secondary=alias_secondary,
+ adapt_source=adapt_source)
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
def merge(self,
- session,
- source_state,
- source_dict,
- dest_state,
- dest_dict,
- load, _recursive):
+ session,
+ source_state,
+ source_dict,
+ dest_state,
+ dest_dict,
+ load, _recursive):
if load:
for r in self._reverse_property:
if (source_state, r) in _recursive:
return
- if not "merge" in self._cascade:
+ if "merge" not in self._cascade:
return
if self.key not in source_dict:
@@ -1332,7 +1366,7 @@ class RelationshipProperty(StrategizedProperty):
if self.uselist:
instances = source_state.get_impl(self.key).\
- get(source_state, source_dict)
+ get(source_state, source_dict)
if hasattr(instances, '_sa_adapter'):
# convert collections to adapters to get a true iterator
instances = instances._sa_adapter
@@ -1351,18 +1385,18 @@ class RelationshipProperty(StrategizedProperty):
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
+ load=load, _recursive=_recursive)
if obj is not None:
dest_list.append(obj)
if not load:
coll = attributes.init_state_collection(dest_state,
- dest_dict, self.key)
+ dest_dict, self.key)
for c in dest_list:
coll.append_without_event(c)
else:
- dest_state.get_impl(self.key)._set_iterable(dest_state,
- dest_dict, dest_list)
+ dest_state.get_impl(self.key)._set_iterable(
+ dest_state, dest_dict, dest_list)
else:
current = source_dict[self.key]
if current is not None:
@@ -1370,7 +1404,7 @@ class RelationshipProperty(StrategizedProperty):
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
+ load=load, _recursive=_recursive)
else:
obj = None
@@ -1378,10 +1412,10 @@ class RelationshipProperty(StrategizedProperty):
dest_dict[self.key] = obj
else:
dest_state.get_impl(self.key).set(dest_state,
- dest_dict, obj, None)
+ dest_dict, obj, None)
def _value_as_iterable(self, state, dict_, key,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
"""Return a list of tuples (state, obj) for the given
key.
@@ -1402,7 +1436,7 @@ class RelationshipProperty(StrategizedProperty):
def cascade_iterator(self, type_, state, dict_,
visited_states, halt_on=None):
- #assert type_ in self._cascade
+ # assert type_ in self._cascade
# only actively lazy load on the 'delete' cascade
if type_ != 'delete' or self.passive_deletes:
@@ -1412,11 +1446,11 @@ class RelationshipProperty(StrategizedProperty):
if type_ == 'save-update':
tuples = state.manager[self.key].impl.\
- get_all_pending(state, dict_)
+ get_all_pending(state, dict_)
else:
tuples = self._value_as_iterable(state, dict_, self.key,
- passive=passive)
+ passive=passive)
skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
not in self._cascade
@@ -1444,12 +1478,12 @@ class RelationshipProperty(StrategizedProperty):
if not instance_mapper.isa(self.mapper.class_manager.mapper):
raise AssertionError("Attribute '%s' on class '%s' "
- "doesn't handle objects "
- "of type '%s'" % (
- self.key,
- self.parent.class_,
- c.__class__
- ))
+ "doesn't handle objects "
+ "of type '%s'" % (
+ self.key,
+ self.parent.class_,
+ c.__class__
+ ))
visited_states.add(instance_state)
@@ -1461,16 +1495,19 @@ class RelationshipProperty(StrategizedProperty):
other._reverse_property.add(self)
if not other.mapper.common_parent(self.parent):
- raise sa_exc.ArgumentError('reverse_property %r on '
- 'relationship %s references relationship %s, which '
- 'does not reference mapper %s' % (key, self, other,
- self.parent))
+ raise sa_exc.ArgumentError(
+ 'reverse_property %r on '
+ 'relationship %s references relationship %s, which '
+ 'does not reference mapper %s' %
+ (key, self, other, self.parent))
+
if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
- raise sa_exc.ArgumentError('%s and back-reference %s are '
- 'both of the same direction %r. Did you mean to '
- 'set remote_side on the many-to-one side ?'
- % (other, self, self.direction))
+ == other.direction:
+ raise sa_exc.ArgumentError(
+ '%s and back-reference %s are '
+ 'both of the same direction %r. Did you mean to '
+ 'set remote_side on the many-to-one side ?' %
+ (other, self, self.direction))
@util.memoized_property
def mapper(self):
@@ -1481,20 +1518,21 @@ class RelationshipProperty(StrategizedProperty):
"""
if util.callable(self.argument) and \
- not isinstance(self.argument, (type, mapperlib.Mapper)):
+ not isinstance(self.argument, (type, mapperlib.Mapper)):
argument = self.argument()
else:
argument = self.argument
if isinstance(argument, type):
mapper_ = mapperlib.class_mapper(argument,
- configure=False)
+ configure=False)
elif isinstance(self.argument, mapperlib.Mapper):
mapper_ = argument
else:
- raise sa_exc.ArgumentError("relationship '%s' expects "
- "a class or a mapper argument (received: %s)"
- % (self.key, type(argument)))
+ raise sa_exc.ArgumentError(
+ "relationship '%s' expects "
+ "a class or a mapper argument (received: %s)"
+ % (self.key, type(argument)))
return mapper_
@util.memoized_property
@@ -1516,7 +1554,6 @@ class RelationshipProperty(StrategizedProperty):
super(RelationshipProperty, self).do_init()
self._lazy_strategy = self._get_strategy((("lazy", "select"),))
-
def _process_dependent_arguments(self):
"""Convert incoming configuration arguments to their
proper form.
@@ -1530,7 +1567,7 @@ class RelationshipProperty(StrategizedProperty):
for attr in (
'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side',
- ):
+ ):
attr_value = getattr(self, attr)
if util.callable(attr_value):
setattr(self, attr, attr_value())
@@ -1548,44 +1585,43 @@ class RelationshipProperty(StrategizedProperty):
# remote_side are all columns, not strings.
if self.order_by is not False and self.order_by is not None:
self.order_by = [
- expression._only_column_elements(x, "order_by")
- for x in
- util.to_list(self.order_by)]
+ expression._only_column_elements(x, "order_by")
+ for x in
+ util.to_list(self.order_by)]
self._user_defined_foreign_keys = \
util.column_set(
- expression._only_column_elements(x, "foreign_keys")
- for x in util.to_column_set(
- self._user_defined_foreign_keys
- ))
+ expression._only_column_elements(x, "foreign_keys")
+ for x in util.to_column_set(
+ self._user_defined_foreign_keys
+ ))
self.remote_side = \
util.column_set(
- expression._only_column_elements(x, "remote_side")
- for x in
- util.to_column_set(self.remote_side))
+ expression._only_column_elements(x, "remote_side")
+ for x in
+ util.to_column_set(self.remote_side))
self.target = self.mapper.mapped_table
-
def _setup_join_conditions(self):
self._join_condition = jc = JoinCondition(
- parent_selectable=self.parent.mapped_table,
- child_selectable=self.mapper.mapped_table,
- parent_local_selectable=self.parent.local_table,
- child_local_selectable=self.mapper.local_table,
- primaryjoin=self.primaryjoin,
- secondary=self.secondary,
- secondaryjoin=self.secondaryjoin,
- parent_equivalents=self.parent._equivalent_columns,
- child_equivalents=self.mapper._equivalent_columns,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- local_remote_pairs=self.local_remote_pairs,
- remote_side=self.remote_side,
- self_referential=self._is_self_referential,
- prop=self,
- support_sync=not self.viewonly,
- can_be_synced_fn=self._columns_are_mapped
+ parent_selectable=self.parent.mapped_table,
+ child_selectable=self.mapper.mapped_table,
+ parent_local_selectable=self.parent.local_table,
+ child_local_selectable=self.mapper.local_table,
+ primaryjoin=self.primaryjoin,
+ secondary=self.secondary,
+ secondaryjoin=self.secondaryjoin,
+ parent_equivalents=self.parent._equivalent_columns,
+ child_equivalents=self.mapper._equivalent_columns,
+ consider_as_foreign_keys=self._user_defined_foreign_keys,
+ local_remote_pairs=self.local_remote_pairs,
+ remote_side=self.remote_side,
+ self_referential=self._is_self_referential,
+ prop=self,
+ support_sync=not self.viewonly,
+ can_be_synced_fn=self._columns_are_mapped
)
self.primaryjoin = jc.deannotated_primaryjoin
self.secondaryjoin = jc.deannotated_secondaryjoin
@@ -1601,17 +1637,17 @@ class RelationshipProperty(StrategizedProperty):
"""Test that this relationship is legal, warn about
inheritance conflicts."""
- if not self.is_primary() \
- and not mapperlib.class_mapper(
- self.parent.class_,
- configure=False).has_property(self.key):
- raise sa_exc.ArgumentError("Attempting to assign a new "
- "relationship '%s' to a non-primary mapper on "
- "class '%s'. New relationships can only be added "
- "to the primary mapper, i.e. the very first mapper "
- "created for class '%s' " % (self.key,
- self.parent.class_.__name__,
- self.parent.class_.__name__))
+ if not self.is_primary() and not mapperlib.class_mapper(
+ self.parent.class_,
+ configure=False).has_property(self.key):
+ raise sa_exc.ArgumentError(
+ "Attempting to assign a new "
+ "relationship '%s' to a non-primary mapper on "
+ "class '%s'. New relationships can only be added "
+ "to the primary mapper, i.e. the very first mapper "
+ "created for class '%s' " %
+ (self.key, self.parent.class_.__name__,
+ self.parent.class_.__name__))
# check for conflicting relationship() on superclass
if not self.parent.concrete:
@@ -1646,28 +1682,28 @@ class RelationshipProperty(StrategizedProperty):
and (self.direction is MANYTOMANY or self.direction
is MANYTOONE):
raise sa_exc.ArgumentError(
- 'On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
+ 'On %s, delete-orphan cascade is not supported '
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
if self.direction is MANYTOONE and self.passive_deletes:
util.warn("On %s, 'passive_deletes' is normally configured "
"on one-to-many, one-to-one, many-to-many "
"relationships only."
- % self)
+ % self)
if self.passive_deletes == 'all' and \
- ("delete" in cascade or
- "delete-orphan" in cascade):
+ ("delete" in cascade or
+ "delete-orphan" in cascade):
raise sa_exc.ArgumentError(
- "On %s, can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade" % self)
+ "On %s, can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade" % self)
if cascade.delete_orphan:
self.mapper.primary_mapper()._delete_orphans.append(
- (self.key, self.parent.class_)
- )
+ (self.key, self.parent.class_)
+ )
def _columns_are_mapped(self, *cols):
"""Return True if all columns in the given collection are
@@ -1697,13 +1733,14 @@ class RelationshipProperty(StrategizedProperty):
mapper = self.mapper.primary_mapper()
check = set(mapper.iterate_to_root()).\
- union(mapper.self_and_descendants)
+ union(mapper.self_and_descendants)
for m in check:
if m.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, m))
+ raise sa_exc.ArgumentError(
+ "Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" %
+ (backref_key, self, m))
# determine primaryjoin/secondaryjoin for the
# backref. Use the one we had, so that
@@ -1713,13 +1750,16 @@ class RelationshipProperty(StrategizedProperty):
# for many to many, just switch primaryjoin/
# secondaryjoin. use the annotated
# pj/sj on the _join_condition.
- pj = kwargs.pop('primaryjoin',
- self._join_condition.secondaryjoin_minus_local)
- sj = kwargs.pop('secondaryjoin',
- self._join_condition.primaryjoin_minus_local)
+ pj = kwargs.pop(
+ 'primaryjoin',
+ self._join_condition.secondaryjoin_minus_local)
+ sj = kwargs.pop(
+ 'secondaryjoin',
+ self._join_condition.primaryjoin_minus_local)
else:
- pj = kwargs.pop('primaryjoin',
- self._join_condition.primaryjoin_reverse_remote)
+ pj = kwargs.pop(
+ 'primaryjoin',
+ self._join_condition.primaryjoin_reverse_remote)
sj = kwargs.pop('secondaryjoin', None)
if sj:
raise sa_exc.InvalidRequestError(
@@ -1728,7 +1768,7 @@ class RelationshipProperty(StrategizedProperty):
)
foreign_keys = kwargs.pop('foreign_keys',
- self._user_defined_foreign_keys)
+ self._user_defined_foreign_keys)
parent = self.parent.primary_mapper()
kwargs.setdefault('viewonly', self.viewonly)
kwargs.setdefault('post_update', self.post_update)
@@ -1765,8 +1805,8 @@ class RelationshipProperty(StrategizedProperty):
return self.mapper.common_parent(self.parent)
def _create_joins(self, source_polymorphic=False,
- source_selectable=None, dest_polymorphic=False,
- dest_selectable=None, of_type=None):
+ source_selectable=None, dest_polymorphic=False,
+ dest_selectable=None, of_type=None):
if source_selectable is None:
if source_polymorphic and self.parent.with_polymorphic:
source_selectable = self.parent._with_polymorphic_selectable
@@ -1799,7 +1839,8 @@ class RelationshipProperty(StrategizedProperty):
if dest_selectable is None:
dest_selectable = self.mapper.local_table
return (primaryjoin, secondaryjoin, source_selectable,
- dest_selectable, secondary, target_adapter)
+ dest_selectable, secondary, target_adapter)
+
def _annotate_columns(element, annotations):
def clone(elem):
@@ -1815,23 +1856,23 @@ def _annotate_columns(element, annotations):
class JoinCondition(object):
def __init__(self,
- parent_selectable,
- child_selectable,
- parent_local_selectable,
- child_local_selectable,
- primaryjoin=None,
- secondary=None,
- secondaryjoin=None,
- parent_equivalents=None,
- child_equivalents=None,
- consider_as_foreign_keys=None,
- local_remote_pairs=None,
- remote_side=None,
- self_referential=False,
- prop=None,
- support_sync=True,
- can_be_synced_fn=lambda *c: True
- ):
+ parent_selectable,
+ child_selectable,
+ parent_local_selectable,
+ child_local_selectable,
+ primaryjoin=None,
+ secondary=None,
+ secondaryjoin=None,
+ parent_equivalents=None,
+ child_equivalents=None,
+ consider_as_foreign_keys=None,
+ local_remote_pairs=None,
+ remote_side=None,
+ self_referential=False,
+ prop=None,
+ support_sync=True,
+ can_be_synced_fn=lambda *c: True
+ ):
self.parent_selectable = parent_selectable
self.parent_local_selectable = parent_local_selectable
self.child_selectable = child_selectable
@@ -1865,26 +1906,26 @@ class JoinCondition(object):
return
log = self.prop.logger
log.info('%s setup primary join %s', self.prop,
- self.primaryjoin)
+ self.primaryjoin)
log.info('%s setup secondary join %s', self.prop,
- self.secondaryjoin)
+ self.secondaryjoin)
log.info('%s synchronize pairs [%s]', self.prop,
- ','.join('(%s => %s)' % (l, r) for (l, r) in
- self.synchronize_pairs))
+ ','.join('(%s => %s)' % (l, r) for (l, r) in
+ self.synchronize_pairs))
log.info('%s secondary synchronize pairs [%s]', self.prop,
- ','.join('(%s => %s)' % (l, r) for (l, r) in
- self.secondary_synchronize_pairs or []))
+ ','.join('(%s => %s)' % (l, r) for (l, r) in
+ self.secondary_synchronize_pairs or []))
log.info('%s local/remote pairs [%s]', self.prop,
- ','.join('(%s / %s)' % (l, r) for (l, r) in
- self.local_remote_pairs))
+ ','.join('(%s / %s)' % (l, r) for (l, r) in
+ self.local_remote_pairs))
log.info('%s remote columns [%s]', self.prop,
- ','.join('%s' % col for col in self.remote_columns)
- )
+ ','.join('%s' % col for col in self.remote_columns)
+ )
log.info('%s local columns [%s]', self.prop,
- ','.join('%s' % col for col in self.local_columns)
- )
+ ','.join('%s' % col for col in self.local_columns)
+ )
log.info('%s relationship direction %s', self.prop,
- self.direction)
+ self.direction)
def _determine_joins(self):
"""Determine the 'primaryjoin' and 'secondaryjoin' attributes,
@@ -1896,9 +1937,9 @@ class JoinCondition(object):
"""
if self.secondaryjoin is not None and self.secondary is None:
raise sa_exc.ArgumentError(
- "Property %s specified with secondary "
- "join condition but "
- "no secondary argument" % self.prop)
+ "Property %s specified with secondary "
+ "join condition but "
+ "no secondary argument" % self.prop)
# find a join between the given mapper's mapped table and
# the given table. will try the mapper's local table first
@@ -1935,47 +1976,47 @@ class JoinCondition(object):
)
except sa_exc.NoForeignKeysError:
if self.secondary is not None:
- raise sa_exc.NoForeignKeysError("Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are no foreign keys "
- "linking these tables via secondary table '%s'. "
- "Ensure that referencing columns are associated "
- "with a ForeignKey or ForeignKeyConstraint, or "
- "specify 'primaryjoin' and 'secondaryjoin' "
- "expressions."
- % (self.prop, self.secondary))
+ raise sa_exc.NoForeignKeysError(
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are no foreign keys "
+ "linking these tables via secondary table '%s'. "
+ "Ensure that referencing columns are associated "
+ "with a ForeignKey or ForeignKeyConstraint, or "
+ "specify 'primaryjoin' and 'secondaryjoin' "
+ "expressions." % (self.prop, self.secondary))
else:
- raise sa_exc.NoForeignKeysError("Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are no foreign keys "
- "linking these tables. "
- "Ensure that referencing columns are associated "
- "with a ForeignKey or ForeignKeyConstraint, or "
- "specify a 'primaryjoin' expression."
- % self.prop)
+ raise sa_exc.NoForeignKeysError(
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are no foreign keys "
+ "linking these tables. "
+ "Ensure that referencing columns are associated "
+ "with a ForeignKey or ForeignKeyConstraint, or "
+ "specify a 'primaryjoin' expression." % self.prop)
except sa_exc.AmbiguousForeignKeysError:
if self.secondary is not None:
raise sa_exc.AmbiguousForeignKeysError(
- "Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are multiple foreign key "
- "paths linking the tables via secondary table '%s'. "
- "Specify the 'foreign_keys' "
- "argument, providing a list of those columns which "
- "should be counted as containing a foreign key "
- "reference from the secondary table to each of the "
- "parent and child tables."
- % (self.prop, self.secondary))
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are multiple foreign key "
+ "paths linking the tables via secondary table '%s'. "
+ "Specify the 'foreign_keys' "
+ "argument, providing a list of those columns which "
+ "should be counted as containing a foreign key "
+ "reference from the secondary table to each of the "
+ "parent and child tables."
+ % (self.prop, self.secondary))
else:
raise sa_exc.AmbiguousForeignKeysError(
- "Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are multiple foreign key "
- "paths linking the tables. Specify the "
- "'foreign_keys' argument, providing a list of those "
- "columns which should be counted as containing a "
- "foreign key reference to the parent table."
- % self.prop)
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are multiple foreign key "
+ "paths linking the tables. Specify the "
+ "'foreign_keys' argument, providing a list of those "
+ "columns which should be counted as containing a "
+ "foreign key reference to the parent table."
+ % self.prop)
@property
def primaryjoin_minus_local(self):
@@ -1983,7 +2024,8 @@ class JoinCondition(object):
@property
def secondaryjoin_minus_local(self):
- return _deep_deannotate(self.secondaryjoin, values=("local", "remote"))
+ return _deep_deannotate(self.secondaryjoin,
+ values=("local", "remote"))
@util.memoized_property
def primaryjoin_reverse_remote(self):
@@ -2009,12 +2051,12 @@ class JoinCondition(object):
v['remote'] = True
return element._with_annotations(v)
return visitors.replacement_traverse(
- self.primaryjoin, {}, replace)
+ self.primaryjoin, {}, replace)
else:
if self._has_foreign_annotations:
# TODO: coverage
return _deep_deannotate(self.primaryjoin,
- values=("local", "remote"))
+ values=("local", "remote"))
else:
return _deep_deannotate(self.primaryjoin)
@@ -2071,7 +2113,7 @@ class JoinCondition(object):
def is_foreign(a, b):
if isinstance(a, schema.Column) and \
- isinstance(b, schema.Column):
+ isinstance(b, schema.Column):
if a.references(b):
return a
elif b.references(a):
@@ -2085,7 +2127,7 @@ class JoinCondition(object):
def visit_binary(binary):
if not isinstance(binary.left, sql.ColumnElement) or \
- not isinstance(binary.right, sql.ColumnElement):
+ not isinstance(binary.right, sql.ColumnElement):
return
if "foreign" not in binary.left._annotations and \
@@ -2094,10 +2136,10 @@ class JoinCondition(object):
if col is not None:
if col.compare(binary.left):
binary.left = binary.left._annotate(
- {"foreign": True})
+ {"foreign": True})
elif col.compare(binary.right):
binary.right = binary.right._annotate(
- {"foreign": True})
+ {"foreign": True})
self.primaryjoin = visitors.cloned_traverse(
self.primaryjoin,
@@ -2123,25 +2165,26 @@ class JoinCondition(object):
def visit_binary(binary):
c, f = binary.left, binary.right
if (
- isinstance(c, expression.ColumnClause) and \
- isinstance(f, expression.ColumnClause) and \
- pt.is_derived_from(c.table) and \
- pt.is_derived_from(f.table) and \
- mt.is_derived_from(c.table) and \
+ isinstance(c, expression.ColumnClause) and
+ isinstance(f, expression.ColumnClause) and
+ pt.is_derived_from(c.table) and
+ pt.is_derived_from(f.table) and
+ mt.is_derived_from(c.table) and
mt.is_derived_from(f.table)
):
result[0] = True
visitors.traverse(
- self.primaryjoin,
- {},
- {"binary": visit_binary}
- )
+ self.primaryjoin,
+ {},
+ {"binary": visit_binary}
+ )
return result[0]
def _tables_overlap(self):
"""Return True if parent/child tables have some overlap."""
- return selectables_overlap(self.parent_selectable, self.child_selectable)
+ return selectables_overlap(
+ self.parent_selectable, self.child_selectable)
def _annotate_remote(self):
"""Annotate the primaryjoin and secondaryjoin
@@ -2172,9 +2215,9 @@ class JoinCondition(object):
if self.secondary.c.contains_column(element):
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, repl)
+ self.primaryjoin, {}, repl)
self.secondaryjoin = visitors.replacement_traverse(
- self.secondaryjoin, {}, repl)
+ self.secondaryjoin, {}, repl)
def _annotate_selfref(self, fn):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2190,13 +2233,13 @@ class JoinCondition(object):
binary.left = binary.left._annotate({"remote": True})
if fn(binary.right) and not equated:
binary.right = binary.right._annotate(
- {"remote": True})
+ {"remote": True})
else:
self._warn_non_column_elements()
self.primaryjoin = visitors.cloned_traverse(
- self.primaryjoin, {},
- {"binary": visit_binary})
+ self.primaryjoin, {},
+ {"binary": visit_binary})
def _annotate_remote_from_args(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2207,9 +2250,9 @@ class JoinCondition(object):
if self._local_remote_pairs:
if self._remote_side:
raise sa_exc.ArgumentError(
- "remote_side argument is redundant "
- "against more detailed _local_remote_side "
- "argument.")
+ "remote_side argument is redundant "
+ "against more detailed _local_remote_side "
+ "argument.")
remote_side = [r for (l, r) in self._local_remote_pairs]
else:
@@ -2222,7 +2265,7 @@ class JoinCondition(object):
if element in remote_side:
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, repl)
+ self.primaryjoin, {}, repl)
def _annotate_remote_with_overlap(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2233,9 +2276,9 @@ class JoinCondition(object):
"""
def visit_binary(binary):
binary.left, binary.right = proc_left_right(binary.left,
- binary.right)
+ binary.right)
binary.right, binary.left = proc_left_right(binary.right,
- binary.left)
+ binary.left)
def proc_left_right(left, right):
if isinstance(left, expression.ColumnClause) and \
@@ -2249,8 +2292,8 @@ class JoinCondition(object):
return left, right
self.primaryjoin = visitors.cloned_traverse(
- self.primaryjoin, {},
- {"binary": visit_binary})
+ self.primaryjoin, {},
+ {"binary": visit_binary})
def _annotate_remote_distinct_selectables(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2260,14 +2303,13 @@ class JoinCondition(object):
"""
def repl(element):
if self.child_selectable.c.contains_column(element) and \
- (
- not self.parent_local_selectable.c.\
- contains_column(element)
- or self.child_local_selectable.c.\
- contains_column(element)):
+ (not self.parent_local_selectable.c.
+ contains_column(element) or
+ self.child_local_selectable.c.
+ contains_column(element)):
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, repl)
+ self.primaryjoin, {}, repl)
def _warn_non_column_elements(self):
util.warn(
@@ -2293,7 +2335,7 @@ class JoinCondition(object):
if self._local_remote_pairs:
local_side = util.column_set([l for (l, r)
- in self._local_remote_pairs])
+ in self._local_remote_pairs])
else:
local_side = util.column_set(self.parent_selectable.c)
@@ -2302,20 +2344,20 @@ class JoinCondition(object):
elem in local_side:
return elem._annotate({"local": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, locals_
- )
+ self.primaryjoin, {}, locals_
+ )
def _check_remote_side(self):
if not self.local_remote_pairs:
- raise sa_exc.ArgumentError('Relationship %s could '
- 'not determine any unambiguous local/remote column '
- 'pairs based on join condition and remote_side '
- 'arguments. '
- 'Consider using the remote() annotation to '
- 'accurately mark those elements of the join '
- 'condition that are on the remote side of '
- 'the relationship.'
- % (self.prop, ))
+ raise sa_exc.ArgumentError(
+ 'Relationship %s could '
+ 'not determine any unambiguous local/remote column '
+ 'pairs based on join condition and remote_side '
+ 'arguments. '
+ 'Consider using the remote() annotation to '
+ 'accurately mark those elements of the join '
+ 'condition that are on the remote side of '
+ 'the relationship.' % (self.prop, ))
def _check_foreign_cols(self, join_condition, primary):
"""Check the foreign key columns collected and emit error
@@ -2324,7 +2366,7 @@ class JoinCondition(object):
can_sync = False
foreign_cols = self._gather_columns_with_annotation(
- join_condition, "foreign")
+ join_condition, "foreign")
has_foreign = bool(foreign_cols)
@@ -2342,13 +2384,13 @@ class JoinCondition(object):
# (not just ==), perhaps they need to turn on "viewonly=True".
if self.support_sync and has_foreign and not can_sync:
err = "Could not locate any simple equality expressions "\
- "involving locally mapped foreign key columns for "\
- "%s join condition "\
- "'%s' on relationship %s." % (
- primary and 'primary' or 'secondary',
- join_condition,
- self.prop
- )
+ "involving locally mapped foreign key columns for "\
+ "%s join condition "\
+ "'%s' on relationship %s." % (
+ primary and 'primary' or 'secondary',
+ join_condition,
+ self.prop
+ )
err += \
" Ensure that referencing columns are associated "\
"with a ForeignKey or ForeignKeyConstraint, or are "\
@@ -2359,11 +2401,11 @@ class JoinCondition(object):
raise sa_exc.ArgumentError(err)
else:
err = "Could not locate any relevant foreign key columns "\
- "for %s join condition '%s' on relationship %s." % (
- primary and 'primary' or 'secondary',
- join_condition,
- self.prop
- )
+ "for %s join condition '%s' on relationship %s." % (
+ primary and 'primary' or 'secondary',
+ join_condition,
+ self.prop
+ )
err += \
' Ensure that referencing columns are associated '\
'with a ForeignKey or ForeignKeyConstraint, or are '\
@@ -2384,12 +2426,12 @@ class JoinCondition(object):
# fk collection which suggests ONETOMANY.
onetomany_fk = targetcols.intersection(
- self.foreign_key_columns)
+ self.foreign_key_columns)
# fk collection which suggests MANYTOONE.
manytoone_fk = parentcols.intersection(
- self.foreign_key_columns)
+ self.foreign_key_columns)
if onetomany_fk and manytoone_fk:
# fks on both sides. test for overlap of local/remote
@@ -2401,23 +2443,23 @@ class JoinCondition(object):
# 1. columns that are both remote and FK suggest
# onetomany.
onetomany_local = self._gather_columns_with_annotation(
- self.primaryjoin, "remote", "foreign")
+ self.primaryjoin, "remote", "foreign")
# 2. columns that are FK but are not remote (e.g. local)
# suggest manytoone.
manytoone_local = set([c for c in
- self._gather_columns_with_annotation(
- self.primaryjoin,
- "foreign")
- if "remote" not in c._annotations])
+ self._gather_columns_with_annotation(
+ self.primaryjoin,
+ "foreign")
+ if "remote" not in c._annotations])
# 3. if both collections are present, remove columns that
# refer to themselves. This is for the case of
# and_(Me.id == Me.remote_id, Me.version == Me.version)
if onetomany_local and manytoone_local:
self_equated = self.remote_columns.intersection(
- self.local_columns
- )
+ self.local_columns
+ )
onetomany_local = onetomany_local.difference(self_equated)
manytoone_local = manytoone_local.difference(self_equated)
@@ -2444,10 +2486,11 @@ class JoinCondition(object):
elif manytoone_fk:
self.direction = MANYTOONE
else:
- raise sa_exc.ArgumentError("Can't determine relationship "
- "direction for relationship '%s' - foreign "
- "key columns are present in neither the parent "
- "nor the child's mapped tables" % self.prop)
+ raise sa_exc.ArgumentError(
+ "Can't determine relationship "
+ "direction for relationship '%s' - foreign "
+ "key columns are present in neither the parent "
+ "nor the child's mapped tables" % self.prop)
def _deannotate_pairs(self, collection):
"""provide deannotation for the various lists of
@@ -2457,7 +2500,7 @@ class JoinCondition(object):
"""
return [(x._deannotate(), y._deannotate())
- for x, y in collection]
+ for x, y in collection]
def _setup_pairs(self):
sync_pairs = []
@@ -2521,12 +2564,12 @@ class JoinCondition(object):
def _gather_join_annotations(self, annotation):
s = set(
self._gather_columns_with_annotation(
- self.primaryjoin, annotation)
+ self.primaryjoin, annotation)
)
if self.secondaryjoin is not None:
s.update(
self._gather_columns_with_annotation(
- self.secondaryjoin, annotation)
+ self.secondaryjoin, annotation)
)
return set([x._deannotate() for x in s])
@@ -2538,9 +2581,9 @@ class JoinCondition(object):
])
def join_targets(self, source_selectable,
- dest_selectable,
- aliased,
- single_crit=None):
+ dest_selectable,
+ aliased,
+ single_crit=None):
"""Given a source and destination selectable, create a
join between them.
@@ -2556,8 +2599,8 @@ class JoinCondition(object):
# its internal structure remains fixed
# regardless of context.
dest_selectable = _shallow_annotate(
- dest_selectable,
- {'no_replacement_traverse': True})
+ dest_selectable,
+ {'no_replacement_traverse': True})
primaryjoin, secondaryjoin, secondary = self.primaryjoin, \
self.secondaryjoin, self.secondary
@@ -2579,24 +2622,26 @@ class JoinCondition(object):
primary_aliasizer = ClauseAdapter(secondary)
secondary_aliasizer = \
ClauseAdapter(dest_selectable,
- equivalents=self.child_equivalents).\
- chain(primary_aliasizer)
+ equivalents=self.child_equivalents).\
+ chain(primary_aliasizer)
if source_selectable is not None:
primary_aliasizer = \
ClauseAdapter(secondary).\
- chain(ClauseAdapter(source_selectable,
+ chain(ClauseAdapter(
+ source_selectable,
equivalents=self.parent_equivalents))
secondaryjoin = \
secondary_aliasizer.traverse(secondaryjoin)
else:
- primary_aliasizer = ClauseAdapter(dest_selectable,
- exclude_fn=_ColInAnnotations("local"),
- equivalents=self.child_equivalents)
+ primary_aliasizer = ClauseAdapter(
+ dest_selectable,
+ exclude_fn=_ColInAnnotations("local"),
+ equivalents=self.child_equivalents)
if source_selectable is not None:
primary_aliasizer.chain(
ClauseAdapter(source_selectable,
- exclude_fn=_ColInAnnotations("remote"),
- equivalents=self.parent_equivalents))
+ exclude_fn=_ColInAnnotations("remote"),
+ equivalents=self.parent_equivalents))
secondary_aliasizer = None
primaryjoin = primary_aliasizer.traverse(primaryjoin)
@@ -2605,7 +2650,7 @@ class JoinCondition(object):
else:
target_adapter = None
return primaryjoin, secondaryjoin, secondary, \
- target_adapter, dest_selectable
+ target_adapter, dest_selectable
def create_lazy_clause(self, reverse_direction=False):
binds = util.column_dict()
@@ -2625,7 +2670,7 @@ class JoinCondition(object):
def col_to_bind(col):
if (reverse_direction and col in lookup) or \
- (not reverse_direction and "local" in col._annotations):
+ (not reverse_direction and "local" in col._annotations):
if col in lookup:
for tobind, equated in lookup[col]:
if equated in binds:
@@ -2639,13 +2684,13 @@ class JoinCondition(object):
lazywhere = self.primaryjoin
if self.secondaryjoin is None or not reverse_direction:
lazywhere = visitors.replacement_traverse(
- lazywhere, {}, col_to_bind)
+ lazywhere, {}, col_to_bind)
if self.secondaryjoin is not None:
secondaryjoin = self.secondaryjoin
if reverse_direction:
secondaryjoin = visitors.replacement_traverse(
- secondaryjoin, {}, col_to_bind)
+ secondaryjoin, {}, col_to_bind)
lazywhere = sql.and_(lazywhere, secondaryjoin)
bind_to_col = dict((binds[col].key, col) for col in binds)
@@ -2655,11 +2700,13 @@ class JoinCondition(object):
return lazywhere, bind_to_col, equated_columns
+
class _ColInAnnotations(object):
"""Seralizable equivalent to:
lambda c: "name" in c._annotations
"""
+
def __init__(self, name):
self.name = name
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index e3be9ddae..71648d126 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -59,8 +59,8 @@ class scoped_session(object):
if scope is not None:
if self.registry.has():
raise sa_exc.InvalidRequestError(
- "Scoped session is already present; "
- "no new arguments may be specified.")
+ "Scoped session is already present; "
+ "no new arguments may be specified.")
else:
sess = self.session_factory(**kw)
self.registry.set(sess)
@@ -97,8 +97,8 @@ class scoped_session(object):
if self.registry.has():
warn('At least one scoped session is already present. '
- ' configure() can not affect sessions that have '
- 'already been created.')
+ ' configure() can not affect sessions that have '
+ 'already been created.')
self.session_factory.configure(**kwargs)
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 9ce988a12..036045dba 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -7,30 +7,31 @@
"""Provides the Session class and related utilities."""
-
import weakref
from .. import util, sql, engine, exc as sa_exc
from ..sql import util as sql_util, expression
from . import (
SessionExtension, attributes, exc, query,
loading, identity
- )
+)
from ..inspection import inspect
from .base import (
object_mapper, class_mapper,
_class_to_mapper, _state_mapper, object_state,
_none_set, state_str, instance_str
- )
+)
from .unitofwork import UOWTransaction
from . import state as statelib
import sys
-__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker']
+__all__ = ['Session', 'SessionTransaction',
+ 'SessionExtension', 'sessionmaker']
_sessions = weakref.WeakValueDictionary()
"""Weak-referencing dictionary of :class:`.Session` objects.
"""
+
def _state_session(state):
"""Given an :class:`.InstanceState`, return the :class:`.Session`
associated, if any.
@@ -43,7 +44,6 @@ def _state_session(state):
return None
-
class _SessionClassMethods(object):
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@@ -81,6 +81,7 @@ COMMITTED = util.symbol('COMMITTED')
DEACTIVE = util.symbol('DEACTIVE')
CLOSED = util.symbol('CLOSED')
+
class SessionTransaction(object):
"""A :class:`.Session`-level transaction.
@@ -185,20 +186,20 @@ class SessionTransaction(object):
return self.session is not None and self._state is ACTIVE
def _assert_active(self, prepared_ok=False,
- rollback_ok=False,
- deactive_ok=False,
- closed_msg="This transaction is closed"):
+ rollback_ok=False,
+ deactive_ok=False,
+ closed_msg="This transaction is closed"):
if self._state is COMMITTED:
raise sa_exc.InvalidRequestError(
- "This session is in 'committed' state; no further "
- "SQL can be emitted within this transaction."
- )
+ "This session is in 'committed' state; no further "
+ "SQL can be emitted within this transaction."
+ )
elif self._state is PREPARED:
if not prepared_ok:
raise sa_exc.InvalidRequestError(
- "This session is in 'prepared' state; no further "
- "SQL can be emitted within this transaction."
- )
+ "This session is in 'prepared' state; no further "
+ "SQL can be emitted within this transaction."
+ )
elif self._state is DEACTIVE:
if not deactive_ok and not rollback_ok:
if self._rollback_exception:
@@ -215,7 +216,7 @@ class SessionTransaction(object):
"This Session's transaction has been rolled back "
"by a nested rollback() call. To begin a new "
"transaction, issue Session.rollback() first."
- )
+ )
elif self._state is CLOSED:
raise sa_exc.ResourceClosedError(closed_msg)
@@ -240,7 +241,7 @@ class SessionTransaction(object):
if self._parent is None:
raise sa_exc.InvalidRequestError(
"Transaction %s is not on the active transaction list" % (
- upto))
+ upto))
return (self,) + self._parent._iterate_parents(upto)
def _take_snapshot(self):
@@ -274,7 +275,7 @@ class SessionTransaction(object):
for s in set(self._deleted).union(self.session._deleted):
if s.deleted:
- #assert s in self._deleted
+ # assert s in self._deleted
del s.deleted
self.session._update_impl(s, discard_existing=True)
@@ -326,7 +327,7 @@ class SessionTransaction(object):
transaction = conn.begin()
self._connections[conn] = self._connections[conn.engine] = \
- (conn, transaction, conn is not bind)
+ (conn, transaction, conn is not bind)
self.session.dispatch.after_begin(self.session, self, conn)
return conn
@@ -354,9 +355,9 @@ class SessionTransaction(object):
self.session.flush()
else:
raise exc.FlushError(
- "Over 100 subsequent flushes have occurred within "
- "session.commit() - is an after_flush() hook "
- "creating new objects?")
+ "Over 100 subsequent flushes have occurred within "
+ "session.commit() - is an after_flush() hook "
+ "creating new objects?")
if self._parent is None and self.session.twophase:
try:
@@ -410,9 +411,9 @@ class SessionTransaction(object):
# if items were added, deleted, or mutated
# here, we need to re-restore the snapshot
util.warn(
- "Session's state has been changed on "
- "a non-active transaction - this state "
- "will be discarded.")
+ "Session's state has been changed on "
+ "a non-active transaction - this state "
+ "will be discarded.")
self._restore_snapshot(dirty_only=self.nested)
self.close()
@@ -486,7 +487,7 @@ class Session(_SessionClassMethods):
'scalar')
def __init__(self, bind=None, autoflush=True, expire_on_commit=True,
- _enable_transaction_accounting=True,
+ _enable_transaction_accounting=True,
autocommit=False, twophase=False,
weak_identity_map=True, binds=None, extension=None,
info=None,
@@ -501,16 +502,16 @@ class Session(_SessionClassMethods):
.. warning::
- The autocommit flag is **not for general use**, and if it is used,
- queries should only be invoked within the span of a
- :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing
+ The autocommit flag is **not for general use**, and if it is
+ used, queries should only be invoked within the span of a
+ :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing
queries outside of a demarcated transaction is a legacy mode
of usage, and can in some cases lead to concurrent connection
checkouts.
Defaults to ``False``. When ``True``, the
- :class:`.Session` does not keep a persistent transaction running, and
- will acquire connections from the engine on an as-needed basis,
+ :class:`.Session` does not keep a persistent transaction running,
+ and will acquire connections from the engine on an as-needed basis,
returning them immediately after their use. Flushes will begin and
commit (or possibly rollback) their own transaction if no
transaction is present. When using this mode, the
@@ -525,8 +526,8 @@ class Session(_SessionClassMethods):
:meth:`~.Session.flush` call to this ``Session`` before proceeding.
This is a convenience feature so that :meth:`~.Session.flush` need
not be called repeatedly in order for database queries to retrieve
- results. It's typical that ``autoflush`` is used in conjunction with
- ``autocommit=False``. In this scenario, explicit calls to
+ results. It's typical that ``autoflush`` is used in conjunction
+ with ``autocommit=False``. In this scenario, explicit calls to
:meth:`~.Session.flush` are rarely needed; you usually only need to
call :meth:`~.Session.commit` (which flushes) to finalize changes.
@@ -542,8 +543,8 @@ class Session(_SessionClassMethods):
:class:`.Engine` or :class:`.Connection` objects. Operations which
proceed relative to a particular :class:`.Mapper` will consult this
dictionary for the direct :class:`.Mapper` instance as
- well as the mapper's ``mapped_table`` attribute in order to locate a
- connectable to use. The full resolution is described in the
+ well as the mapper's ``mapped_table`` attribute in order to locate
+ a connectable to use. The full resolution is described in the
:meth:`.Session.get_bind`.
Usage looks like::
@@ -566,8 +567,8 @@ class Session(_SessionClassMethods):
legacy-only flag which when ``False`` disables *all* 0.5-style
object accounting on transaction boundaries, including auto-expiry
of instances on rollback and commit, maintenance of the "new" and
- "deleted" lists upon rollback, and autoflush of pending changes upon
- :meth:`~.Session.begin`, all of which are interdependent.
+ "deleted" lists upon rollback, and autoflush of pending changes
+ upon :meth:`~.Session.begin`, all of which are interdependent.
:param expire_on_commit: Defaults to ``True``. When ``True``, all
instances will be fully expired after each :meth:`~.commit`,
@@ -581,25 +582,26 @@ class Session(_SessionClassMethods):
Please see :class:`.SessionEvents`.
:param info: optional dictionary of arbitrary data to be associated
- with this :class:`.Session`. Is available via the :attr:`.Session.info`
- attribute. Note the dictionary is copied at construction time so
- that modifications to the per-:class:`.Session` dictionary will be local
- to that :class:`.Session`.
+ with this :class:`.Session`. Is available via the
+ :attr:`.Session.info` attribute. Note the dictionary is copied at
+ construction time so that modifications to the per-
+ :class:`.Session` dictionary will be local to that
+ :class:`.Session`.
.. versionadded:: 0.9.0
:param query_cls: Class which should be used to create new Query
- objects, as returned by the :meth:`~.Session.query` method. Defaults
- to :class:`.Query`.
+ objects, as returned by the :meth:`~.Session.query` method.
+ Defaults to :class:`.Query`.
:param twophase: When ``True``, all transactions will be started as
a "two phase" transaction, i.e. using the "two phase" semantics
of the database in use along with an XID. During a
:meth:`~.commit`, after :meth:`~.flush` has been issued for all
- attached databases, the :meth:`~.TwoPhaseTransaction.prepare` method
- on each database's :class:`.TwoPhaseTransaction` will be called.
- This allows each database to roll back the entire transaction,
- before each transaction is committed.
+ attached databases, the :meth:`~.TwoPhaseTransaction.prepare`
+ method on each database's :class:`.TwoPhaseTransaction` will be
+ called. This allows each database to roll back the entire
+ transaction, before each transaction is committed.
:param weak_identity_map: Defaults to ``True`` - when set to
``False``, objects placed in the :class:`.Session` will be
@@ -613,7 +615,7 @@ class Session(_SessionClassMethods):
self._identity_cls = identity.WeakInstanceDict
else:
util.warn_deprecated("weak_identity_map=False is deprecated. "
- "This feature is not needed.")
+ "This feature is not needed.")
self._identity_cls = identity.StrongInstanceDict
self.identity_map = self._identity_cls()
@@ -648,7 +650,6 @@ class Session(_SessionClassMethods):
else:
assert False
-
if not self.autocommit:
self.begin()
_sessions[self.hash_key] = self
@@ -693,7 +694,7 @@ class Session(_SessionClassMethods):
if self.transaction is not None:
if subtransactions or nested:
self.transaction = self.transaction._begin(
- nested=nested)
+ nested=nested)
else:
raise sa_exc.InvalidRequestError(
"A transaction is already begun. Use "
@@ -791,9 +792,9 @@ class Session(_SessionClassMethods):
self.transaction.prepare()
def connection(self, mapper=None, clause=None,
- bind=None,
- close_with_result=False,
- **kw):
+ bind=None,
+ close_with_result=False,
+ **kw):
"""Return a :class:`.Connection` object corresponding to this
:class:`.Session` object's transactional state.
@@ -830,12 +831,12 @@ class Session(_SessionClassMethods):
etc.) which will be used to locate a bind, if a bind
cannot otherwise be identified.
- :param close_with_result: Passed to :meth:`.Engine.connect`, indicating
- the :class:`.Connection` should be considered "single use",
- automatically closing when the first result set is closed. This
- flag only has an effect if this :class:`.Session` is configured with
- ``autocommit=True`` and does not already have a transaction
- in progress.
+ :param close_with_result: Passed to :meth:`.Engine.connect`,
+ indicating the :class:`.Connection` should be considered
+ "single use", automatically closing when the first result set is
+ closed. This flag only has an effect if this :class:`.Session` is
+ configured with ``autocommit=True`` and does not already have a
+ transaction in progress.
:param \**kw:
Additional keyword arguments are sent to :meth:`get_bind()`,
@@ -847,7 +848,7 @@ class Session(_SessionClassMethods):
bind = self.get_bind(mapper, clause=clause, **kw)
return self._connection_for_bind(bind,
- close_with_result=close_with_result)
+ close_with_result=close_with_result)
def _connection_for_bind(self, engine, **kwargs):
if self.transaction is not None:
@@ -870,8 +871,8 @@ class Session(_SessionClassMethods):
user_table.select().where(user_table.c.id == 5)
)
- :meth:`~.Session.execute` accepts any executable clause construct, such
- as :func:`~.sql.expression.select`,
+ :meth:`~.Session.execute` accepts any executable clause construct,
+ such as :func:`~.sql.expression.select`,
:func:`~.sql.expression.insert`,
:func:`~.sql.expression.update`,
:func:`~.sql.expression.delete`, and
@@ -900,7 +901,8 @@ class Session(_SessionClassMethods):
cursor's ``execute()`` or ``executemany()`` is used to execute the
statement. An INSERT construct may be invoked for a single row::
- result = session.execute(users.insert(), {"id": 7, "name": "somename"})
+ result = session.execute(
+ users.insert(), {"id": 7, "name": "somename"})
or for multiple rows::
@@ -918,8 +920,9 @@ class Session(_SessionClassMethods):
:class:`.Connection`, which in the average case is derived directly
from the "bind" of the :class:`.Session` itself, and in other cases
can be based on the :func:`.mapper`
- and :class:`.Table` objects passed to the method; see the documentation
- for :meth:`.Session.get_bind` for a full description of this scheme.
+ and :class:`.Table` objects passed to the method; see the
+ documentation for :meth:`.Session.get_bind` for a full description of
+ this scheme.
The :meth:`.Session.execute` method does *not* invoke autoflush.
@@ -981,8 +984,8 @@ class Session(_SessionClassMethods):
if bind is None:
bind = self.get_bind(mapper, clause=clause, **kw)
- return self._connection_for_bind(bind, close_with_result=True).execute(
- clause, params or {})
+ return self._connection_for_bind(
+ bind, close_with_result=True).execute(clause, params or {})
def scalar(self, clause, params=None, mapper=None, bind=None, **kw):
"""Like :meth:`~.Session.execute` but return a scalar result."""
@@ -1150,7 +1153,7 @@ class Session(_SessionClassMethods):
raise sa_exc.UnboundExecutionError(
"Could not locate a bind configured on %s or this Session" % (
- ', '.join(context)))
+ ', '.join(context)))
def query(self, *entities, **kwargs):
"""Return a new :class:`.Query` object corresponding to this
@@ -1196,9 +1199,9 @@ class Session(_SessionClassMethods):
# with code that catches StatementError, IntegrityError,
# etc.
e.add_detail(
- "raised as a result of Query-invoked autoflush; "
- "consider using a session.no_autoflush block if this "
- "flush is occurring prematurely")
+ "raised as a result of Query-invoked autoflush; "
+ "consider using a session.no_autoflush block if this "
+ "flush is occurring prematurely")
util.raise_from_cause(e)
def refresh(self, instance, attribute_names=None, lockmode=None):
@@ -1335,7 +1338,7 @@ class Session(_SessionClassMethods):
# pre-fetch the full cascade since the expire is going to
# remove associations
cascaded = list(state.manager.mapper.cascade_iterator(
- 'refresh-expire', state))
+ 'refresh-expire', state))
self._conditional_expire(state)
for o, m, st_, dct_ in cascaded:
self._conditional_expire(st_)
@@ -1350,7 +1353,7 @@ class Session(_SessionClassMethods):
state._detach()
@util.deprecated("0.7", "The non-weak-referencing identity map "
- "feature is no longer needed.")
+ "feature is no longer needed.")
def prune(self):
"""Remove unreferenced instances cached in the identity map.
@@ -1381,7 +1384,7 @@ class Session(_SessionClassMethods):
state_str(state))
cascaded = list(state.manager.mapper.cascade_iterator(
- 'expunge', state))
+ 'expunge', state))
self._expunge_state(state)
for o, m, st_, dct_ in cascaded:
self._expunge_state(st_)
@@ -1408,8 +1411,8 @@ class Session(_SessionClassMethods):
instance_key = mapper._identity_key_from_state(state)
if _none_set.intersection(instance_key[1]) and \
- not mapper.allow_partial_pks or \
- _none_set.issuperset(instance_key[1]):
+ not mapper.allow_partial_pks or \
+ _none_set.issuperset(instance_key[1]):
raise exc.FlushError(
"Instance %s has a NULL identity key. If this is an "
"auto-generated value, check that the database table "
@@ -1499,9 +1502,9 @@ class Session(_SessionClassMethods):
mapper = _state_mapper(state)
for o, m, st_, dct_ in mapper.cascade_iterator(
- 'save-update',
- state,
- halt_on=self._contains_state):
+ 'save-update',
+ state,
+ halt_on=self._contains_state):
self._save_or_update_impl(st_)
def delete(self, instance):
@@ -1535,7 +1538,7 @@ class Session(_SessionClassMethods):
# so that autoflush does not delete the item
# the strong reference to the instance itself is significant here
cascade_states = list(state.manager.mapper.cascade_iterator(
- 'delete', state))
+ 'delete', state))
self._deleted[state] = state.obj()
self.identity_map.add(state)
@@ -1552,10 +1555,10 @@ class Session(_SessionClassMethods):
same primary key in the session. If not found locally, it attempts
to load the object from the database based on primary key, and if
none can be located, creates a new instance. The state of each
- attribute on the source instance is then copied to the target instance.
- The resulting target instance is then returned by the method; the
- original source instance is left unmodified, and un-associated with the
- :class:`.Session` if not already.
+ attribute on the source instance is then copied to the target
+ instance. The resulting target instance is then returned by the
+ method; the original source instance is left unmodified, and
+ un-associated with the :class:`.Session` if not already.
This operation cascades to associated instances if the association is
mapped with ``cascade="merge"``.
@@ -1583,7 +1586,8 @@ class Session(_SessionClassMethods):
any existing related objects or collections that might not
be loaded. The resulting objects from ``load=False`` are always
produced as "clean", so it is only appropriate that the given objects
- should be "clean" as well, else this suggests a mis-use of the method.
+ should be "clean" as well, else this suggests a mis-use of the
+ method.
"""
@@ -1601,9 +1605,9 @@ class Session(_SessionClassMethods):
try:
self.autoflush = False
return self._merge(
- attributes.instance_state(instance),
- attributes.instance_dict(instance),
- load=load, _recursive=_recursive)
+ attributes.instance_state(instance),
+ attributes.instance_dict(instance),
+ load=load, _recursive=_recursive)
finally:
self.autoflush = autoflush
@@ -1640,8 +1644,8 @@ class Session(_SessionClassMethods):
new_instance = True
elif not _none_set.intersection(key[1]) or \
- (mapper.allow_partial_pks and
- not _none_set.issuperset(key[1])):
+ (mapper.allow_partial_pks and
+ not _none_set.issuperset(key[1])):
merged = self.query(mapper.class_).get(key[1])
else:
merged = None
@@ -1664,38 +1668,38 @@ class Session(_SessionClassMethods):
# version check if applicable
if mapper.version_id_col is not None:
existing_version = mapper._get_state_attr_by_column(
- state,
- state_dict,
- mapper.version_id_col,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ state_dict,
+ mapper.version_id_col,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
merged_version = mapper._get_state_attr_by_column(
- merged_state,
- merged_dict,
- mapper.version_id_col,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ merged_state,
+ merged_dict,
+ mapper.version_id_col,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
if existing_version is not attributes.PASSIVE_NO_RESULT and \
- merged_version is not attributes.PASSIVE_NO_RESULT and \
- existing_version != merged_version:
+ merged_version is not attributes.PASSIVE_NO_RESULT and \
+ existing_version != merged_version:
raise exc.StaleDataError(
- "Version id '%s' on merged state %s "
- "does not match existing version '%s'. "
- "Leave the version attribute unset when "
- "merging to update the most recent version."
- % (
- existing_version,
- state_str(merged_state),
- merged_version
- ))
+ "Version id '%s' on merged state %s "
+ "does not match existing version '%s'. "
+ "Leave the version attribute unset when "
+ "merging to update the most recent version."
+ % (
+ existing_version,
+ state_str(merged_state),
+ merged_version
+ ))
merged_state.load_path = state.load_path
merged_state.load_options = state.load_options
for prop in mapper.iterate_properties:
prop.merge(self, state, state_dict,
- merged_state, merged_dict,
- load, _recursive)
+ merged_state, merged_dict,
+ load, _recursive)
if not load:
# remove any history
@@ -1714,8 +1718,8 @@ class Session(_SessionClassMethods):
def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
- "Object '%s' already has an identity - it can't be registered "
- "as pending" % state_str(state))
+ "Object '%s' already has an identity - "
+ "it can't be registered as pending" % state_str(state))
self._before_attach(state)
if state not in self._new:
@@ -1725,7 +1729,7 @@ class Session(_SessionClassMethods):
def _update_impl(self, state, discard_existing=False):
if (self.identity_map.contains_state(state) and
- state not in self._deleted):
+ state not in self._deleted):
return
if state.key is None:
@@ -1791,8 +1795,8 @@ class Session(_SessionClassMethods):
is what was already loaded from a foreign-key-holding value.
The :meth:`.Session.enable_relationship_loading` method is
- similar to the ``load_on_pending`` flag on :func:`.relationship`. Unlike
- that flag, :meth:`.Session.enable_relationship_loading` allows
+ similar to the ``load_on_pending`` flag on :func:`.relationship`.
+ Unlike that flag, :meth:`.Session.enable_relationship_loading` allows
an object to remain transient while still being able to load
related items.
@@ -1828,10 +1832,10 @@ class Session(_SessionClassMethods):
if state.key and \
state.key in self.identity_map and \
not self.identity_map.contains_state(state):
- raise sa_exc.InvalidRequestError("Can't attach instance "
- "%s; another instance with key %s is already "
- "present in this session."
- % (state_str(state), state.key))
+ raise sa_exc.InvalidRequestError(
+ "Can't attach instance "
+ "%s; another instance with key %s is already "
+ "present in this session." % (state_str(state), state.key))
if state.session_id and \
state.session_id is not self.hash_key and \
@@ -1869,7 +1873,8 @@ class Session(_SessionClassMethods):
Session.
"""
- return iter(list(self._new.values()) + list(self.identity_map.values()))
+ return iter(
+ list(self._new.values()) + list(self.identity_map.values()))
def _contains_state(self, state):
return state in self._new or self.identity_map.contains_state(state)
@@ -1922,8 +1927,8 @@ class Session(_SessionClassMethods):
def _is_clean(self):
return not self.identity_map.check_modified() and \
- not self._deleted and \
- not self._new
+ not self._deleted and \
+ not self._new
def _flush(self, objects=None):
@@ -2002,21 +2007,21 @@ class Session(_SessionClassMethods):
len_ = len(self.identity_map._modified)
statelib.InstanceState._commit_all_states(
- [(state, state.dict) for state in
- self.identity_map._modified],
- instance_dict=self.identity_map)
+ [(state, state.dict) for state in
+ self.identity_map._modified],
+ instance_dict=self.identity_map)
util.warn("Attribute history events accumulated on %d "
- "previously clean instances "
- "within inner-flush event handlers have been reset, "
- "and will not result in database updates. "
- "Consider using set_committed_value() within "
- "inner-flush event handlers to avoid this warning."
- % len_)
+ "previously clean instances "
+ "within inner-flush event handlers have been "
+ "reset, and will not result in database updates. "
+ "Consider using set_committed_value() within "
+ "inner-flush event handlers to avoid this warning."
+ % len_)
# useful assertions:
- #if not objects:
+ # if not objects:
# assert not self.identity_map._modified
- #else:
+ # else:
# assert self.identity_map._modified == \
# self.identity_map._modified.difference(objects)
@@ -2029,7 +2034,7 @@ class Session(_SessionClassMethods):
transaction.rollback(_capture_exception=True)
def is_modified(self, instance, include_collections=True,
- passive=True):
+ passive=True):
"""Return ``True`` if the given instance has locally
modified attributes.
@@ -2057,12 +2062,12 @@ class Session(_SessionClassMethods):
A few caveats to this method apply:
- * Instances present in the :attr:`.Session.dirty` collection may report
- ``False`` when tested with this method. This is because
- the object may have received change events via attribute
- mutation, thus placing it in :attr:`.Session.dirty`,
- but ultimately the state is the same as that loaded from
- the database, resulting in no net change here.
+ * Instances present in the :attr:`.Session.dirty` collection may
+ report ``False`` when tested with this method. This is because
+ the object may have received change events via attribute mutation,
+ thus placing it in :attr:`.Session.dirty`, but ultimately the state
+ is the same as that loaded from the database, resulting in no net
+ change here.
* Scalar attributes may not have recorded the previously set
value when a new value was applied, if the attribute was not loaded,
or was expired, at the time the new value was received - in these
@@ -2103,15 +2108,15 @@ class Session(_SessionClassMethods):
for attr in state.manager.attributes:
if \
- (
- not include_collections and
- hasattr(attr.impl, 'get_collection')
- ) or not hasattr(attr.impl, 'get_history'):
+ (
+ not include_collections and
+ hasattr(attr.impl, 'get_collection')
+ ) or not hasattr(attr.impl, 'get_history'):
continue
(added, unchanged, deleted) = \
- attr.impl.get_history(state, dict_,
- passive=attributes.NO_CHANGE)
+ attr.impl.get_history(state, dict_,
+ passive=attributes.NO_CHANGE)
if added or deleted:
return True
@@ -2148,8 +2153,8 @@ class Session(_SessionClassMethods):
call :meth:`.Session.rollback`, in order to close out the
transaction stack. It is in this "partial rollback" period that the
:attr:`.is_active` flag returns False. After the call to
- :meth:`.Session.rollback`, the :class:`.SessionTransaction` is replaced
- with a new one and :attr:`.is_active` returns ``True`` again.
+ :meth:`.Session.rollback`, the :class:`.SessionTransaction` is
+ replaced with a new one and :attr:`.is_active` returns ``True`` again.
When a :class:`.Session` is used in ``autocommit=True`` mode, the
:class:`.SessionTransaction` is only instantiated within the scope
@@ -2289,9 +2294,9 @@ class sessionmaker(_SessionClassMethods):
"""
def __init__(self, bind=None, class_=Session, autoflush=True,
- autocommit=False,
- expire_on_commit=True,
- info=None, **kw):
+ autocommit=False,
+ expire_on_commit=True,
+ info=None, **kw):
"""Construct a new :class:`.sessionmaker`.
All arguments here except for ``class_`` correspond to arguments
@@ -2315,8 +2320,8 @@ class sessionmaker(_SessionClassMethods):
.. versionadded:: 0.9.0
- :param \**kw: all other keyword arguments are passed to the constructor
- of newly created :class:`.Session` objects.
+ :param \**kw: all other keyword arguments are passed to the
+ constructor of newly created :class:`.Session` objects.
"""
kw['bind'] = bind
@@ -2363,10 +2368,10 @@ class sessionmaker(_SessionClassMethods):
def __repr__(self):
return "%s(class_=%r,%s)" % (
- self.__class__.__name__,
- self.class_.__name__,
- ", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
- )
+ self.__class__.__name__,
+ self.class_.__name__,
+ ", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
+ )
def make_transient(instance):
@@ -2397,6 +2402,7 @@ def make_transient(instance):
if state.deleted:
del state.deleted
+
def make_transient_to_detached(instance):
"""Make the given transient instance 'detached'.
@@ -2424,7 +2430,7 @@ def make_transient_to_detached(instance):
state = attributes.instance_state(instance)
if state.session_id or state.key:
raise sa_exc.InvalidRequestError(
- "Given object must be transient")
+ "Given object must be transient")
state.key = state.mapper._identity_key_from_state(state)
if state.deleted:
del state.deleted
@@ -2432,7 +2438,6 @@ def make_transient_to_detached(instance):
state._expire_attributes(state.dict, state.unloaded)
-
def object_session(instance):
"""Return the ``Session`` to which instance belongs.
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 573e6515d..a9024b468 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -17,9 +17,10 @@ from .. import util
from . import exc as orm_exc, interfaces
from .path_registry import PathRegistry
from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
- NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
+ NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
from . import base
+
class InstanceState(interfaces._InspectionAttr):
"""tracks state information at the instance level.
@@ -276,8 +277,8 @@ class InstanceState(interfaces._InspectionAttr):
state_dict = {'instance': self.obj()}
state_dict.update(
(k, self.__dict__[k]) for k in (
- 'committed_state', '_pending_mutations', 'modified', 'expired',
- 'callables', 'key', 'parents', 'load_options',
+ 'committed_state', '_pending_mutations', 'modified',
+ 'expired', 'callables', 'key', 'parents', 'load_options',
'class_',
) if k in self.__dict__
)
@@ -315,7 +316,7 @@ class InstanceState(interfaces._InspectionAttr):
if 'load_path' in state_dict:
self.load_path = PathRegistry.\
- deserialize(state_dict['load_path'])
+ deserialize(state_dict['load_path'])
state_dict['manager'](self, inst, state_dict)
@@ -418,7 +419,7 @@ class InstanceState(interfaces._InspectionAttr):
return PASSIVE_NO_RESULT
toload = self.expired_attributes.\
- intersection(self.unmodified)
+ intersection(self.unmodified)
self.manager.deferred_scalar_loader(self, toload)
@@ -441,7 +442,7 @@ class InstanceState(interfaces._InspectionAttr):
"""Return self.unmodified.intersection(keys)."""
return set(keys).intersection(self.manager).\
- difference(self.committed_state)
+ difference(self.committed_state)
@property
def unloaded(self):
@@ -452,15 +453,15 @@ class InstanceState(interfaces._InspectionAttr):
"""
return set(self.manager).\
- difference(self.committed_state).\
- difference(self.dict)
+ difference(self.committed_state).\
+ difference(self.dict)
@property
def _unloaded_non_object(self):
return self.unloaded.intersection(
- attr for attr in self.manager
- if self.manager[attr].impl.accepts_scalar_loader
- )
+ attr for attr in self.manager
+ if self.manager[attr].impl.accepts_scalar_loader
+ )
@property
def expired_attributes(self):
@@ -477,7 +478,8 @@ class InstanceState(interfaces._InspectionAttr):
def _instance_dict(self):
return None
- def _modified_event(self, dict_, attr, previous, collection=False, force=False):
+ def _modified_event(
+ self, dict_, attr, previous, collection=False, force=False):
if not attr.send_modified_events:
return
if attr.key not in self.committed_state or force:
@@ -508,13 +510,13 @@ class InstanceState(interfaces._InspectionAttr):
if inst is None:
raise orm_exc.ObjectDereferencedError(
- "Can't emit change event for attribute '%s' - "
- "parent object of type %s has been garbage "
- "collected."
- % (
- self.manager[attr.key],
- base.state_class_str(self)
- ))
+ "Can't emit change event for attribute '%s' - "
+ "parent object of type %s has been garbage "
+ "collected."
+ % (
+ self.manager[attr.key],
+ base.state_class_str(self)
+ ))
self.modified = True
def _commit(self, dict_, keys):
@@ -533,8 +535,8 @@ class InstanceState(interfaces._InspectionAttr):
self.expired = False
for key in set(self.callables).\
- intersection(keys).\
- intersection(dict_):
+ intersection(keys).\
+ intersection(dict_):
del self.callables[key]
def _commit_all(self, dict_, instance_dict=None):
@@ -617,7 +619,7 @@ class AttributeState(object):
"""
return self.state.manager[self.key].__get__(
- self.state.obj(), self.state.class_)
+ self.state.obj(), self.state.class_)
@property
def history(self):
@@ -636,7 +638,7 @@ class AttributeState(object):
"""
return self.state.get_history(self.key,
- PASSIVE_NO_INITIALIZE)
+ PASSIVE_NO_INITIALIZE)
def load_history(self):
"""Return the current pre-flush change history for
@@ -655,8 +657,7 @@ class AttributeState(object):
"""
return self.state.get_history(self.key,
- PASSIVE_OFF ^ INIT_OK)
-
+ PASSIVE_OFF ^ INIT_OK)
class PendingCollection(object):
@@ -667,6 +668,7 @@ class PendingCollection(object):
PendingCollection are applied to it to produce the final result.
"""
+
def __init__(self):
self.deleted_items = util.IdentitySet()
self.added_items = util.OrderedIdentitySet()
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index 28130dab5..392f7cec2 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -16,17 +16,20 @@ from .. import exc as sa_exc, inspect
from .base import _is_aliased_class, _class_to_mapper
from . import util as orm_util
from .path_registry import PathRegistry, TokenRegistry, \
- _WILDCARD_TOKEN, _DEFAULT_TOKEN
+ _WILDCARD_TOKEN, _DEFAULT_TOKEN
+
class Load(Generative, MapperOption):
"""Represents loader options which modify the state of a
- :class:`.Query` in order to affect how various mapped attributes are loaded.
+ :class:`.Query` in order to affect how various mapped attributes are
+ loaded.
.. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for
the existing system of loader options, including options such as
- :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In particular,
- it introduces a new method-chained system that replaces the need for
- dot-separated paths as well as "_all()" options such as :func:`.orm.joinedload_all`.
+ :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In
+ particular, it introduces a new method-chained system that replaces the
+ need for dot-separated paths as well as "_all()" options such as
+ :func:`.orm.joinedload_all`.
A :class:`.Load` object can be used directly or indirectly. To use one
directly, instantiate given the parent class. This style of usage is
@@ -41,11 +44,12 @@ class Load(Generative, MapperOption):
session.query(MyClass).options(myopt)
The :class:`.Load` construct is invoked indirectly whenever one makes use
- of the various loader options that are present in ``sqlalchemy.orm``, including
- options such as :func:`.orm.joinedload`, :func:`.orm.defer`, :func:`.orm.subqueryload`,
- and all the rest. These constructs produce an "anonymous" form of the
- :class:`.Load` object which tracks attributes and options, but is not linked
- to a parent class until it is associated with a parent :class:`.Query`::
+ of the various loader options that are present in ``sqlalchemy.orm``,
+ including options such as :func:`.orm.joinedload`, :func:`.orm.defer`,
+ :func:`.orm.subqueryload`, and all the rest. These constructs produce an
+ "anonymous" form of the :class:`.Load` object which tracks attributes and
+ options, but is not linked to a parent class until it is associated with a
+ parent :class:`.Query`::
# produce "unbound" Load object
myopt = joinedload("widgets")
@@ -55,11 +59,12 @@ class Load(Generative, MapperOption):
session.query(MyClass).options(myopt)
Whether the direct or indirect style is used, the :class:`.Load` object
- returned now represents a specific "path" along the entities of a :class:`.Query`.
- This path can be traversed using a standard method-chaining approach.
- Supposing a class hierarchy such as ``User``, ``User.addresses -> Address``,
- ``User.orders -> Order`` and ``Order.items -> Item``, we can specify a variety
- of loader options along each element in the "path"::
+ returned now represents a specific "path" along the entities of a
+ :class:`.Query`. This path can be traversed using a standard
+ method-chaining approach. Supposing a class hierarchy such as ``User``,
+ ``User.addresses -> Address``, ``User.orders -> Order`` and
+ ``Order.items -> Item``, we can specify a variety of loader options along
+ each element in the "path"::
session.query(User).options(
joinedload("addresses"),
@@ -67,11 +72,12 @@ class Load(Generative, MapperOption):
)
Where above, the ``addresses`` collection will be joined-loaded, the
- ``orders`` collection will be subquery-loaded, and within that subquery load
- the ``items`` collection will be joined-loaded.
+ ``orders`` collection will be subquery-loaded, and within that subquery
+ load the ``items`` collection will be joined-loaded.
"""
+
def __init__(self, entity):
insp = inspect(entity)
self.path = insp._path_registry
@@ -106,7 +112,7 @@ class Load(Generative, MapperOption):
if raiseerr and not path.has_entity:
if isinstance(path, TokenRegistry):
raise sa_exc.ArgumentError(
- "Wildcard token cannot be followed by another entity")
+ "Wildcard token cannot be followed by another entity")
else:
raise sa_exc.ArgumentError(
"Attribute '%s' of entity '%s' does not "
@@ -145,8 +151,9 @@ class Load(Generative, MapperOption):
if not prop.parent.common_parent(path.mapper):
if raiseerr:
- raise sa_exc.ArgumentError("Attribute '%s' does not "
- "link from element '%s'" % (attr, path.entity))
+ raise sa_exc.ArgumentError(
+ "Attribute '%s' does not "
+ "link from element '%s'" % (attr, path.entity))
else:
return None
@@ -157,11 +164,11 @@ class Load(Generative, MapperOption):
path_element = ext_info.mapper
if not ext_info.is_aliased_class:
ac = orm_util.with_polymorphic(
- ext_info.mapper.base_mapper,
- ext_info.mapper, aliased=True,
- _use_mapper_path=True)
- path.entity_path[prop].set(self.context,
- "path_with_polymorphic", inspect(ac))
+ ext_info.mapper.base_mapper,
+ ext_info.mapper, aliased=True,
+ _use_mapper_path=True)
+ path.entity_path[prop].set(
+ self.context, "path_with_polymorphic", inspect(ac))
path = path[prop][path_element]
else:
path = path[prop]
@@ -176,7 +183,8 @@ class Load(Generative, MapperOption):
return strategy
@_generative
- def set_relationship_strategy(self, attr, strategy, propagate_to_loaders=True):
+ def set_relationship_strategy(
+ self, attr, strategy, propagate_to_loaders=True):
strategy = self._coerce_strat(strategy)
self.propagate_to_loaders = propagate_to_loaders
@@ -225,14 +233,15 @@ class Load(Generative, MapperOption):
if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
return to_chop
- elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_token.key:
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and \
+ c_token != p_token.key:
return None
if c_token is p_token:
continue
else:
return None
- return to_chop[i+1:]
+ return to_chop[i + 1:]
class _UnboundLoad(Load):
@@ -245,6 +254,7 @@ class _UnboundLoad(Load):
of freestanding options, e.g. ``joinedload('x.y.z')``.
"""
+
def __init__(self):
self.path = ()
self._to_bind = set()
@@ -318,14 +328,15 @@ class _UnboundLoad(Load):
return opt
-
def _chop_path(self, to_chop, path):
i = -1
- for i, (c_token, (p_mapper, p_prop)) in enumerate(zip(to_chop, path.pairs())):
+ for i, (c_token, (p_mapper, p_prop)) in enumerate(
+ zip(to_chop, path.pairs())):
if isinstance(c_token, util.string_types):
if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
return to_chop
- elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_prop.key:
+ elif c_token != 'relationship:%s' % (
+ _WILDCARD_TOKEN,) and c_token != p_prop.key:
return None
elif isinstance(c_token, PropComparator):
if c_token.property is not p_prop:
@@ -335,7 +346,6 @@ class _UnboundLoad(Load):
return to_chop[i:]
-
def _bind_loader(self, query, context, raiseerr):
start_path = self.path
# _current_path implies we're in a
@@ -354,15 +364,15 @@ class _UnboundLoad(Load):
elif isinstance(token, PropComparator):
prop = token.property
entity = self._find_entity_prop_comparator(
- query,
- prop.key,
- token._parententity,
- raiseerr)
+ query,
+ prop.key,
+ token._parententity,
+ raiseerr)
else:
raise sa_exc.ArgumentError(
- "mapper option expects "
- "string key or list of attributes")
+ "mapper option expects "
+ "string key or list of attributes")
if not entity:
return
@@ -378,7 +388,7 @@ class _UnboundLoad(Load):
path = loader.path
for token in start_path:
loader.path = path = loader._generate_path(
- loader.path, token, None, raiseerr)
+ loader.path, token, None, raiseerr)
if path is None:
return
@@ -390,8 +400,8 @@ class _UnboundLoad(Load):
effective_path = loader.path
# prioritize "first class" options over those
- # that were "links in the chain", e.g. "x" and "y" in someload("x.y.z")
- # versus someload("x") / someload("x.y")
+ # that were "links in the chain", e.g. "x" and "y" in
+ # someload("x.y.z") versus someload("x") / someload("x.y")
if self._is_chain_link:
effective_path.setdefault(context, "loader", loader)
else:
@@ -411,7 +421,7 @@ class _UnboundLoad(Load):
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
- % (token, )
+ % (token, )
)
else:
raise sa_exc.ArgumentError(
@@ -419,7 +429,7 @@ class _UnboundLoad(Load):
"specified in this Query. Note the full path "
"from root (%s) to target entity must be specified."
% (token, ",".join(str(x) for
- x in query._mapper_entities))
+ x in query._mapper_entities))
)
else:
return None
@@ -429,9 +439,9 @@ class _UnboundLoad(Load):
if len(list(query._mapper_entities)) != 1:
if raiseerr:
raise sa_exc.ArgumentError(
- "Wildcard loader can only be used with exactly "
- "one entity. Use Load(ent) to specify "
- "specific entities.")
+ "Wildcard loader can only be used with exactly "
+ "one entity. Use Load(ent) to specify "
+ "specific entities.")
elif token.endswith(_DEFAULT_TOKEN):
raiseerr = False
@@ -445,13 +455,12 @@ class _UnboundLoad(Load):
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
- % (token, )
+ % (token, )
)
else:
return None
-
class loader_option(object):
def __init__(self):
pass
@@ -493,6 +502,7 @@ See :func:`.orm.%(name)s` for usage examples.
""" % {"name": self.name}
return self
+
@loader_option()
def contains_eager(loadopt, attr, alias=None):
"""Indicate that the given attribute should be eagerly loaded from
@@ -533,16 +543,19 @@ def contains_eager(loadopt, attr, alias=None):
alias = info.selectable
cloned = loadopt.set_relationship_strategy(
- attr,
- {"lazy": "joined"},
- propagate_to_loaders=False
- )
+ attr,
+ {"lazy": "joined"},
+ propagate_to_loaders=False
+ )
cloned.local_opts['eager_from_alias'] = alias
return cloned
+
@contains_eager._add_unbound_fn
def contains_eager(*keys, **kw):
- return _UnboundLoad()._from_keys(_UnboundLoad.contains_eager, keys, True, kw)
+ return _UnboundLoad()._from_keys(
+ _UnboundLoad.contains_eager, keys, True, kw)
+
@loader_option()
def load_only(loadopt, *attrs):
@@ -559,8 +572,8 @@ def load_only(loadopt, *attrs):
session.query(User).options(load_only("name", "fullname"))
Example - given a relationship ``User.addresses -> Address``, specify
- subquery loading for the ``User.addresses`` collection, but on each ``Address``
- object load only the ``email_address`` attribute::
+ subquery loading for the ``User.addresses`` collection, but on each
+ ``Address`` object load only the ``email_address`` attribute::
session.query(User).options(
subqueryload("addreses").load_only("email_address")
@@ -579,18 +592,20 @@ def load_only(loadopt, *attrs):
"""
cloned = loadopt.set_column_strategy(
- attrs,
- {"deferred": False, "instrument": True}
- )
+ attrs,
+ {"deferred": False, "instrument": True}
+ )
cloned.set_column_strategy("*",
- {"deferred": True, "instrument": True},
- {"undefer_pks": True})
+ {"deferred": True, "instrument": True},
+ {"undefer_pks": True})
return cloned
+
@load_only._add_unbound_fn
def load_only(*attrs):
return _UnboundLoad().load_only(*attrs)
+
@loader_option()
def joinedload(loadopt, attr, innerjoin=None):
"""Indicate that the given attribute should be loaded using joined
@@ -618,22 +633,25 @@ def joinedload(loadopt, attr, innerjoin=None):
If the joined-eager load is chained onto an existing LEFT OUTER JOIN,
``innerjoin=True`` will be bypassed and the join will continue to
- chain as LEFT OUTER JOIN so that the results don't change. As an alternative,
- specify the value ``"nested"``. This will instead nest the join
- on the right side, e.g. using the form "a LEFT OUTER JOIN (b JOIN c)".
+ chain as LEFT OUTER JOIN so that the results don't change. As an
+ alternative, specify the value ``"nested"``. This will instead nest the
+ join on the right side, e.g. using the form "a LEFT OUTER JOIN
+ (b JOIN c)".
.. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support
nesting of eager "inner" joins.
.. note::
- The joins produced by :func:`.orm.joinedload` are **anonymously aliased**.
- The criteria by which the join proceeds cannot be modified, nor can the
- :class:`.Query` refer to these joins in any way, including ordering.
+ The joins produced by :func:`.orm.joinedload` are **anonymously
+ aliased**. The criteria by which the join proceeds cannot be
+ modified, nor can the :class:`.Query` refer to these joins in any way,
+ including ordering.
To produce a specific SQL JOIN which is explicitly available, use
:meth:`.Query.join`. To combine explicit JOINs with eager loading
- of collections, use :func:`.orm.contains_eager`; see :ref:`contains_eager`.
+ of collections, use :func:`.orm.contains_eager`; see
+ :ref:`contains_eager`.
.. seealso::
@@ -647,8 +665,8 @@ def joinedload(loadopt, attr, innerjoin=None):
:paramref:`.relationship.lazy`
- :paramref:`.relationship.innerjoin` - :func:`.relationship`-level version
- of the :paramref:`.joinedload.innerjoin` option.
+ :paramref:`.relationship.innerjoin` - :func:`.relationship`-level
+ version of the :paramref:`.joinedload.innerjoin` option.
"""
loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
@@ -656,15 +674,17 @@ def joinedload(loadopt, attr, innerjoin=None):
loader.local_opts['innerjoin'] = innerjoin
return loader
+
@joinedload._add_unbound_fn
def joinedload(*keys, **kw):
return _UnboundLoad._from_keys(
- _UnboundLoad.joinedload, keys, False, kw)
+ _UnboundLoad.joinedload, keys, False, kw)
+
@joinedload._add_unbound_all_fn
def joinedload_all(*keys, **kw):
return _UnboundLoad._from_keys(
- _UnboundLoad.joinedload, keys, True, kw)
+ _UnboundLoad.joinedload, keys, True, kw)
@loader_option()
@@ -701,14 +721,17 @@ def subqueryload(loadopt, attr):
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"})
+
@subqueryload._add_unbound_fn
def subqueryload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {})
+
@subqueryload._add_unbound_all_fn
def subqueryload_all(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
+
@loader_option()
def lazyload(loadopt, attr):
"""Indicate that the given attribute should be loaded using "lazy"
@@ -724,14 +747,17 @@ def lazyload(loadopt, attr):
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "select"})
+
@lazyload._add_unbound_fn
def lazyload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {})
+
@lazyload._add_unbound_all_fn
def lazyload_all(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {})
+
@loader_option()
def immediateload(loadopt, attr):
"""Indicate that the given attribute should be loaded using
@@ -754,9 +780,11 @@ def immediateload(loadopt, attr):
loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
return loader
+
@immediateload._add_unbound_fn
def immediateload(*keys):
- return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, False, {})
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.immediateload, keys, False, {})
@loader_option()
@@ -773,10 +801,12 @@ def noload(loadopt, attr):
return loadopt.set_relationship_strategy(attr, {"lazy": "noload"})
+
@noload._add_unbound_fn
def noload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {})
+
@loader_option()
def defaultload(loadopt, attr):
"""Indicate an attribute should load using its default loader style.
@@ -797,14 +827,16 @@ def defaultload(loadopt, attr):
"""
return loadopt.set_relationship_strategy(
- attr,
- None
- )
+ attr,
+ None
+ )
+
@defaultload._add_unbound_fn
def defaultload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {})
+
@loader_option()
def defer(loadopt, key):
"""Indicate that the given column-oriented attribute should be deferred, e.g.
@@ -858,19 +890,21 @@ def defer(loadopt, key):
"""
return loadopt.set_column_strategy(
- (key, ),
- {"deferred": True, "instrument": True}
- )
+ (key, ),
+ {"deferred": True, "instrument": True}
+ )
@defer._add_unbound_fn
def defer(key, *addl_attrs):
- return _UnboundLoad._from_keys(_UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+
@loader_option()
def undefer(loadopt, key):
- """Indicate that the given column-oriented attribute should be undeferred, e.g.
- specified within the SELECT statement of the entity as a whole.
+ """Indicate that the given column-oriented attribute should be undeferred,
+ e.g. specified within the SELECT statement of the entity as a whole.
The column being undeferred is typically set up on the mapping as a
:func:`.deferred` attribute.
@@ -884,7 +918,8 @@ def undefer(loadopt, key):
session.query(MyClass).options(undefer("col1"), undefer("col2"))
# undefer all columns specific to a single class using Load + *
- session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))
+ session.query(MyClass, MyOtherClass).options(
+ Load(MyClass).undefer("*"))
:param key: Attribute to be undeferred.
@@ -902,17 +937,21 @@ def undefer(loadopt, key):
"""
return loadopt.set_column_strategy(
- (key, ),
- {"deferred": False, "instrument": True}
- )
+ (key, ),
+ {"deferred": False, "instrument": True}
+ )
+
@undefer._add_unbound_fn
def undefer(key, *addl_attrs):
- return _UnboundLoad._from_keys(_UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+
@loader_option()
def undefer_group(loadopt, name):
- """Indicate that columns within the given deferred group name should be undeferred.
+ """Indicate that columns within the given deferred group name should be
+ undeferred.
The columns being undeferred are set up on the mapping as
:func:`.deferred` attributes and include a "group" name.
@@ -922,9 +961,11 @@ def undefer_group(loadopt, name):
session.query(MyClass).options(undefer_group("large_attrs"))
To undefer a group of attributes on a related entity, the path can be
- spelled out using relationship loader options, such as :func:`.orm.defaultload`::
+ spelled out using relationship loader options, such as
+ :func:`.orm.defaultload`::
- session.query(MyClass).options(defaultload("someattr").undefer_group("large_attrs"))
+ session.query(MyClass).options(
+ defaultload("someattr").undefer_group("large_attrs"))
.. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a
particiular entity load path.
@@ -939,12 +980,12 @@ def undefer_group(loadopt, name):
"""
return loadopt.set_column_strategy(
- "*",
- None,
- {"undefer_group": name}
- )
+ "*",
+ None,
+ {"undefer_group": name}
+ )
+
@undefer_group._add_unbound_fn
def undefer_group(name):
return _UnboundLoad().undefer_group(name)
-
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index dc59bb27b..e1ef85c1d 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -14,7 +14,7 @@ from . import exc, util as orm_util, attributes
def populate(source, source_mapper, dest, dest_mapper,
- synchronize_pairs, uowcommit, flag_cascaded_pks):
+ synchronize_pairs, uowcommit, flag_cascaded_pks):
source_dict = source.dict
dest_dict = dest.dict
@@ -23,7 +23,7 @@ def populate(source, source_mapper, dest, dest_mapper,
# inline of source_mapper._get_state_attr_by_column
prop = source_mapper._columntoproperty[l]
value = source.manager[prop.key].impl.get(source, source_dict,
- attributes.PASSIVE_OFF)
+ attributes.PASSIVE_OFF)
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, dest_mapper, r)
@@ -40,8 +40,8 @@ def populate(source, source_mapper, dest, dest_mapper,
# reasons, since we only need this info for a primary key
# destination.
if flag_cascaded_pks and l.primary_key and \
- r.primary_key and \
- r.references(l):
+ r.primary_key and \
+ r.references(l):
uowcommit.attributes[("pk_cascaded", dest, r)] = True
@@ -49,7 +49,7 @@ def clear(dest, dest_mapper, synchronize_pairs):
for l, r in synchronize_pairs:
if r.primary_key and \
dest_mapper._get_state_attr_by_column(
- dest, dest.dict, r) not in orm_util._none_set:
+ dest, dest.dict, r) not in orm_util._none_set:
raise AssertionError(
"Dependency rule tried to blank-out primary key "
@@ -96,8 +96,8 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
prop = source_mapper._columntoproperty[l]
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, None, r)
- history = uowcommit.get_attribute_history(source, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ history = uowcommit.get_attribute_history(
+ source, prop.key, attributes.PASSIVE_NO_INITIALIZE)
if bool(history.deleted):
return True
else:
@@ -107,16 +107,17 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
def _raise_col_to_prop(isdest, source_mapper, source_column,
dest_mapper, dest_column):
if isdest:
- raise exc.UnmappedColumnError("Can't execute sync rule for "
- "destination column '%s'; mapper '%s' does not map "
- "this column. Try using an explicit `foreign_keys` "
- "collection which does not include this column (or use "
- "a viewonly=True relation)." % (dest_column,
- dest_mapper))
+ raise exc.UnmappedColumnError(
+ "Can't execute sync rule for "
+ "destination column '%s'; mapper '%s' does not map "
+ "this column. Try using an explicit `foreign_keys` "
+ "collection which does not include this column (or use "
+ "a viewonly=True relation)." % (dest_column, dest_mapper))
else:
- raise exc.UnmappedColumnError("Can't execute sync rule for "
- "source column '%s'; mapper '%s' does not map this "
- "column. Try using an explicit `foreign_keys` "
- "collection which does not include destination column "
- "'%s' (or use a viewonly=True relation)."
- % (source_column, source_mapper, dest_column))
+ raise exc.UnmappedColumnError(
+ "Can't execute sync rule for "
+ "source column '%s'; mapper '%s' does not map this "
+ "column. Try using an explicit `foreign_keys` "
+ "collection which does not include destination column "
+ "'%s' (or use a viewonly=True relation)." %
+ (source_column, source_mapper, dest_column))
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index b7f739ec5..71e61827b 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -56,16 +56,16 @@ def track_cascade_events(descriptor, prop):
if sess._warn_on_events:
sess._flush_warning(
- "collection remove"
- if prop.uselist
- else "related attribute delete")
+ "collection remove"
+ if prop.uselist
+ else "related attribute delete")
# expunge pending orphans
item_state = attributes.instance_state(item)
if prop._cascade.delete_orphan and \
item_state in sess._new and \
prop.mapper._is_orphan(item_state):
- sess.expunge(item)
+ sess.expunge(item)
def set_(state, newvalue, oldvalue, initiator):
# process "save_update" cascade rules for when an instance
@@ -83,8 +83,8 @@ def track_cascade_events(descriptor, prop):
if newvalue is not None:
newvalue_state = attributes.instance_state(newvalue)
if prop._cascade.save_update and \
- (prop.cascade_backrefs or key == initiator.key) and \
- not sess._contains_state(newvalue_state):
+ (prop.cascade_backrefs or key == initiator.key) and \
+ not sess._contains_state(newvalue_state):
sess._save_or_update_state(newvalue_state)
if oldvalue is not None and \
@@ -95,7 +95,7 @@ def track_cascade_events(descriptor, prop):
oldvalue_state = attributes.instance_state(oldvalue)
if oldvalue_state in sess._new and \
- prop.mapper._is_orphan(oldvalue_state):
+ prop.mapper._is_orphan(oldvalue_state):
sess.expunge(oldvalue)
return newvalue
@@ -175,7 +175,7 @@ class UOWTransaction(object):
self.states[state] = (isdelete, True)
def get_attribute_history(self, state, key,
- passive=attributes.PASSIVE_NO_INITIALIZE):
+ passive=attributes.PASSIVE_NO_INITIALIZE):
"""facade to attributes.get_state_history(), including
caching of results."""
@@ -191,11 +191,11 @@ class UOWTransaction(object):
# we want non-passive, do a non-passive lookup and re-cache
if not cached_passive & attributes.SQL_OK \
- and passive & attributes.SQL_OK:
+ and passive & attributes.SQL_OK:
impl = state.manager[key].impl
history = impl.get_history(state, state.dict,
- attributes.PASSIVE_OFF |
- attributes.LOAD_AGAINST_COMMITTED)
+ attributes.PASSIVE_OFF |
+ attributes.LOAD_AGAINST_COMMITTED)
if history and impl.uses_objects:
state_history = history.as_state()
else:
@@ -206,13 +206,13 @@ class UOWTransaction(object):
# TODO: store the history as (state, object) tuples
# so we don't have to keep converting here
history = impl.get_history(state, state.dict, passive |
- attributes.LOAD_AGAINST_COMMITTED)
+ attributes.LOAD_AGAINST_COMMITTED)
if history and impl.uses_objects:
state_history = history.as_state()
else:
state_history = history
self.attributes[hashkey] = (history, state_history,
- passive)
+ passive)
return state_history
@@ -225,13 +225,13 @@ class UOWTransaction(object):
self.presort_actions[key] = Preprocess(processor, fromparent)
def register_object(self, state, isdelete=False,
- listonly=False, cancel_delete=False,
- operation=None, prop=None):
+ listonly=False, cancel_delete=False,
+ operation=None, prop=None):
if not self.session._contains_state(state):
if not state.deleted and operation is not None:
util.warn("Object of type %s not in session, %s operation "
- "along '%s' will not proceed" %
- (orm_util.state_class_str(state), operation, prop))
+ "along '%s' will not proceed" %
+ (orm_util.state_class_str(state), operation, prop))
return False
if state not in self.states:
@@ -278,8 +278,8 @@ class UOWTransaction(object):
"""
return util.PopulateDict(
- lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
- )
+ lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
+ )
def filter_states_for_dep(self, dep, states):
"""Filter the given list of InstanceStates to those relevant to the
@@ -314,8 +314,8 @@ class UOWTransaction(object):
# see if the graph of mapper dependencies has cycles.
self.cycles = cycles = topological.find_cycles(
- self.dependencies,
- list(self.postsort_actions.values()))
+ self.dependencies,
+ list(self.postsort_actions.values()))
if cycles:
# if yes, break the per-mapper actions into
@@ -330,8 +330,8 @@ class UOWTransaction(object):
# that were broken up.
for edge in list(self.dependencies):
if None in edge or \
- edge[0].disabled or edge[1].disabled or \
- cycles.issuperset(edge):
+ edge[0].disabled or edge[1].disabled or \
+ cycles.issuperset(edge):
self.dependencies.remove(edge)
elif edge[0] in cycles:
self.dependencies.remove(edge)
@@ -345,30 +345,30 @@ class UOWTransaction(object):
return set([a for a in self.postsort_actions.values()
if not a.disabled
]
- ).difference(cycles)
+ ).difference(cycles)
def execute(self):
postsort_actions = self._generate_actions()
- #sort = topological.sort(self.dependencies, postsort_actions)
- #print "--------------"
- #print "\ndependencies:", self.dependencies
- #print "\ncycles:", self.cycles
- #print "\nsort:", list(sort)
- #print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
+ # sort = topological.sort(self.dependencies, postsort_actions)
+ # print "--------------"
+ # print "\ndependencies:", self.dependencies
+ # print "\ncycles:", self.cycles
+ # print "\nsort:", list(sort)
+ # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
# execute
if self.cycles:
for set_ in topological.sort_as_subsets(
- self.dependencies,
- postsort_actions):
+ self.dependencies,
+ postsort_actions):
while set_:
n = set_.pop()
n.execute_aggregate(self, set_)
else:
for rec in topological.sort(
- self.dependencies,
- postsort_actions):
+ self.dependencies,
+ postsort_actions):
rec.execute(self)
def finalize_flush_changes(self):
@@ -430,11 +430,11 @@ class Preprocess(IterateMappersMixin):
if (delete_states or save_states):
if not self.setup_flush_actions and (
- self.dependency_processor.\
- prop_has_changes(uow, delete_states, True) or
- self.dependency_processor.\
- prop_has_changes(uow, save_states, False)
- ):
+ self.dependency_processor.
+ prop_has_changes(uow, delete_states, True) or
+ self.dependency_processor.
+ prop_has_changes(uow, save_states, False)
+ ):
self.dependency_processor.per_property_flush_actions(uow)
self.setup_flush_actions = True
return True
@@ -451,8 +451,8 @@ class PostSortRec(object):
return uow.postsort_actions[key]
else:
uow.postsort_actions[key] = \
- ret = \
- object.__new__(cls)
+ ret = \
+ object.__new__(cls)
return ret
def execute_aggregate(self, uow, recs):
@@ -471,7 +471,7 @@ class ProcessAll(IterateMappersMixin, PostSortRec):
self.delete = delete
self.fromparent = fromparent
uow.deps[dependency_processor.parent.base_mapper].\
- add(dependency_processor)
+ add(dependency_processor)
def execute(self, uow):
states = self._elements(uow)
@@ -521,13 +521,14 @@ class SaveUpdateAll(PostSortRec):
def execute(self, uow):
persistence.save_obj(self.mapper,
- uow.states_for_mapper_hierarchy(self.mapper, False, False),
- uow
- )
+ uow.states_for_mapper_hierarchy(
+ self.mapper, False, False),
+ uow
+ )
def per_state_flush_actions(self, uow):
states = list(uow.states_for_mapper_hierarchy(
- self.mapper, False, False))
+ self.mapper, False, False))
base_mapper = self.mapper.base_mapper
delete_all = DeleteAll(uow, base_mapper)
for state in states:
@@ -549,13 +550,14 @@ class DeleteAll(PostSortRec):
def execute(self, uow):
persistence.delete_obj(self.mapper,
- uow.states_for_mapper_hierarchy(self.mapper, True, False),
- uow
- )
+ uow.states_for_mapper_hierarchy(
+ self.mapper, True, False),
+ uow
+ )
def per_state_flush_actions(self, uow):
states = list(uow.states_for_mapper_hierarchy(
- self.mapper, True, False))
+ self.mapper, True, False))
base_mapper = self.mapper.base_mapper
save_all = SaveUpdateAll(uow, base_mapper)
for state in states:
@@ -581,9 +583,9 @@ class ProcessState(PostSortRec):
dependency_processor = self.dependency_processor
delete = self.delete
our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.dependency_processor is dependency_processor and
- r.delete is delete]
+ if r.__class__ is cls_ and
+ r.dependency_processor is dependency_processor and
+ r.delete is delete]
recs.difference_update(our_recs)
states = [self.state] + [r.state for r in our_recs]
if delete:
@@ -609,13 +611,13 @@ class SaveUpdateState(PostSortRec):
cls_ = self.__class__
mapper = self.mapper
our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.mapper is mapper]
+ if r.__class__ is cls_ and
+ r.mapper is mapper]
recs.difference_update(our_recs)
persistence.save_obj(mapper,
- [self.state] +
- [r.state for r in our_recs],
- uow)
+ [self.state] +
+ [r.state for r in our_recs],
+ uow)
def __repr__(self):
return "%s(%s)" % (
@@ -633,13 +635,13 @@ class DeleteState(PostSortRec):
cls_ = self.__class__
mapper = self.mapper
our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.mapper is mapper]
+ if r.__class__ is cls_ and
+ r.mapper is mapper]
recs.difference_update(our_recs)
states = [self.state] + [r.state for r in our_recs]
persistence.delete_obj(mapper,
- [s for s in states if uow.states[s][0]],
- uow)
+ [s for s in states if uow.states[s][0]],
+ uow)
def __repr__(self):
return "%s(%s)" % (
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 3626a8a03..215de5f4b 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -13,7 +13,7 @@ from . import attributes
import re
from .base import instance_str, state_str, state_class_str, attribute_str, \
- state_attribute_str, object_mapper, object_state, _none_set
+ state_attribute_str, object_mapper, object_state, _none_set
from .base import class_mapper, _class_to_mapper
from .base import _InspectionAttr
from .path_registry import PathRegistry
@@ -27,23 +27,23 @@ class CascadeOptions(frozenset):
"""Keeps track of the options sent to relationship().cascade"""
_add_w_all_cascades = all_cascades.difference([
- 'all', 'none', 'delete-orphan'])
+ 'all', 'none', 'delete-orphan'])
_allowed_cascades = all_cascades
def __new__(cls, arg):
values = set([
- c for c
- in re.split('\s*,\s*', arg or "")
- if c
- ])
+ c for c
+ in re.split('\s*,\s*', arg or "")
+ if c
+ ])
if values.difference(cls._allowed_cascades):
raise sa_exc.ArgumentError(
- "Invalid cascade option(s): %s" %
- ", ".join([repr(x) for x in
- sorted(
- values.difference(cls._allowed_cascades)
- )])
+ "Invalid cascade option(s): %s" %
+ ", ".join([repr(x) for x in
+ sorted(
+ values.difference(cls._allowed_cascades)
+ )])
)
if "all" in values:
@@ -62,7 +62,7 @@ class CascadeOptions(frozenset):
if self.delete_orphan and not self.delete:
util.warn("The 'delete-orphan' cascade "
- "option requires 'delete'.")
+ "option requires 'delete'.")
return self
def __repr__(self):
@@ -71,8 +71,11 @@ class CascadeOptions(frozenset):
)
-def _validator_events(desc, key, validator, include_removes, include_backrefs):
- """Runs a validation method on an attribute value to be set or appended."""
+def _validator_events(
+ desc, key, validator, include_removes, include_backrefs):
+ """Runs a validation method on an attribute value to be set or
+ appended.
+ """
if not include_backrefs:
def detect_is_backref(state, initiator):
@@ -116,7 +119,7 @@ def _validator_events(desc, key, validator, include_removes, include_backrefs):
def polymorphic_union(table_map, typecolname,
- aliasname='p_union', cast_nulls=True):
+ aliasname='p_union', cast_nulls=True):
"""Create a ``UNION`` statement used by a polymorphic mapper.
See :ref:`concrete_inheritance` for an example of how
@@ -168,10 +171,11 @@ def polymorphic_union(table_map, typecolname,
for type, table in table_map.items():
if typecolname is not None:
result.append(
- sql.select([col(name, table) for name in colnames] +
- [sql.literal_column(sql_util._quote_ddl_expr(type)).
- label(typecolname)],
- from_obj=[table]))
+ sql.select([col(name, table) for name in colnames] +
+ [sql.literal_column(
+ sql_util._quote_ddl_expr(type)).
+ label(typecolname)],
+ from_obj=[table]))
else:
result.append(sql.select([col(name, table) for name in colnames],
from_obj=[table]))
@@ -225,7 +229,8 @@ def identity_key(*args, **kwargs):
E.g.::
- >>> row = engine.execute("select * from table where a=1 and b=2").first()
+ >>> row = engine.execute("select * from table where a=1 and b=2").\
+first()
>>> identity_key(MyClass, row=row)
(<class '__main__.MyClass'>, (1, 2))
@@ -246,11 +251,12 @@ def identity_key(*args, **kwargs):
elif len(args) == 3:
class_, ident = args
else:
- raise sa_exc.ArgumentError("expected up to three "
- "positional arguments, got %s" % len(args))
+ raise sa_exc.ArgumentError(
+ "expected up to three positional arguments, "
+ "got %s" % len(args))
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs))
+ % ", ".join(kwargs))
mapper = class_mapper(class_)
if "ident" in locals():
return mapper.identity_key_from_primary_key(util.to_list(ident))
@@ -258,7 +264,7 @@ def identity_key(*args, **kwargs):
instance = kwargs.pop("instance")
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs.keys))
+ % ", ".join(kwargs.keys))
mapper = object_mapper(instance)
return mapper.identity_key_from_instance(instance)
@@ -270,8 +276,9 @@ class ORMAdapter(sql_util.ColumnAdapter):
and the AliasedClass if any is referenced.
"""
+
def __init__(self, entity, equivalents=None, adapt_required=False,
- chain_to=None):
+ chain_to=None):
info = inspection.inspect(entity)
self.mapper = info.mapper
@@ -292,6 +299,7 @@ class ORMAdapter(sql_util.ColumnAdapter):
else:
return None
+
class AliasedClass(object):
"""Represents an "aliased" form of a mapped class for usage with Query.
@@ -332,30 +340,31 @@ class AliasedClass(object):
argument descriptions.
"""
+
def __init__(self, cls, alias=None,
- name=None,
- flat=False,
- adapt_on_names=False,
- # TODO: None for default here?
- with_polymorphic_mappers=(),
- with_polymorphic_discriminator=None,
- base_alias=None,
- use_mapper_path=False):
+ name=None,
+ flat=False,
+ adapt_on_names=False,
+ # TODO: None for default here?
+ with_polymorphic_mappers=(),
+ with_polymorphic_discriminator=None,
+ base_alias=None,
+ use_mapper_path=False):
mapper = _class_to_mapper(cls)
if alias is None:
alias = mapper._with_polymorphic_selectable.alias(
- name=name, flat=flat)
+ name=name, flat=flat)
self._aliased_insp = AliasedInsp(
self,
mapper,
alias,
name,
with_polymorphic_mappers
- if with_polymorphic_mappers
- else mapper.with_polymorphic_mappers,
+ if with_polymorphic_mappers
+ else mapper.with_polymorphic_mappers,
with_polymorphic_discriminator
- if with_polymorphic_discriminator is not None
- else mapper.polymorphic_on,
+ if with_polymorphic_discriminator is not None
+ else mapper.polymorphic_on,
base_alias,
use_mapper_path,
adapt_on_names
@@ -440,8 +449,8 @@ class AliasedInsp(_InspectionAttr):
"""
def __init__(self, entity, mapper, selectable, name,
- with_polymorphic_mappers, polymorphic_on,
- _base_alias, _use_mapper_path, adapt_on_names):
+ with_polymorphic_mappers, polymorphic_on,
+ _base_alias, _use_mapper_path, adapt_on_names):
self.entity = entity
self.mapper = mapper
self.selectable = selectable
@@ -451,9 +460,9 @@ class AliasedInsp(_InspectionAttr):
self._base_alias = _base_alias or self
self._use_mapper_path = _use_mapper_path
- self._adapter = sql_util.ClauseAdapter(selectable,
- equivalents=mapper._equivalent_columns,
- adapt_on_names=adapt_on_names)
+ self._adapter = sql_util.ClauseAdapter(
+ selectable, equivalents=mapper._equivalent_columns,
+ adapt_on_names=adapt_on_names)
self._adapt_on_names = adapt_on_names
self._target = mapper.class_
@@ -461,9 +470,9 @@ class AliasedInsp(_InspectionAttr):
for poly in self.with_polymorphic_mappers:
if poly is not mapper:
setattr(self.entity, poly.class_.__name__,
- AliasedClass(poly.class_, selectable, base_alias=self,
- adapt_on_names=adapt_on_names,
- use_mapper_path=_use_mapper_path))
+ AliasedClass(poly.class_, selectable, base_alias=self,
+ adapt_on_names=adapt_on_names,
+ use_mapper_path=_use_mapper_path))
is_aliased_class = True
"always returns True"
@@ -511,10 +520,10 @@ class AliasedInsp(_InspectionAttr):
def _adapt_element(self, elem):
return self._adapter.traverse(elem).\
- _annotate({
- 'parententity': self.entity,
- 'parentmapper': self.mapper}
- )
+ _annotate({
+ 'parententity': self.entity,
+ 'parentmapper': self.mapper}
+ )
def _entity_for_mapper(self, mapper):
self_poly = self.with_polymorphic_mappers
@@ -523,7 +532,8 @@ class AliasedInsp(_InspectionAttr):
elif mapper.isa(self.mapper):
return self
else:
- assert False, "mapper %s doesn't correspond to %s" % (mapper, self)
+ assert False, "mapper %s doesn't correspond to %s" % (
+ mapper, self)
def __repr__(self):
return '<AliasedInsp at 0x%x; %s>' % (
@@ -574,11 +584,12 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
attribute name that will be accessible via tuples returned by a
:class:`.Query` object.
- :param flat: Boolean, will be passed through to the :meth:`.FromClause.alias`
- call so that aliases of :class:`.Join` objects don't include an enclosing
- SELECT. This can lead to more efficient queries in many circumstances.
- A JOIN against a nested JOIN will be rewritten as a JOIN against an aliased
- SELECT subquery on backends that don't support this syntax.
+ :param flat: Boolean, will be passed through to the
+ :meth:`.FromClause.alias` call so that aliases of :class:`.Join` objects
+ don't include an enclosing SELECT. This can lead to more efficient
+ queries in many circumstances. A JOIN against a nested JOIN will be
+ rewritten as a JOIN against an aliased SELECT subquery on backends that
+ don't support this syntax.
.. versionadded:: 0.9.0
@@ -624,13 +635,13 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
return element.alias(name, flat=flat)
else:
return AliasedClass(element, alias=alias, flat=flat,
- name=name, adapt_on_names=adapt_on_names)
+ name=name, adapt_on_names=adapt_on_names)
def with_polymorphic(base, classes, selectable=False,
- flat=False,
- polymorphic_on=None, aliased=False,
- innerjoin=False, _use_mapper_path=False):
+ flat=False,
+ polymorphic_on=None, aliased=False,
+ innerjoin=False, _use_mapper_path=False):
"""Produce an :class:`.AliasedClass` construct which specifies
columns for descendant mappers of the given base.
@@ -661,11 +672,12 @@ def with_polymorphic(base, classes, selectable=False,
support parenthesized joins, such as SQLite and older
versions of MySQL.
- :param flat: Boolean, will be passed through to the :meth:`.FromClause.alias`
- call so that aliases of :class:`.Join` objects don't include an enclosing
- SELECT. This can lead to more efficient queries in many circumstances.
- A JOIN against a nested JOIN will be rewritten as a JOIN against an aliased
- SELECT subquery on backends that don't support this syntax.
+ :param flat: Boolean, will be passed through to the
+ :meth:`.FromClause.alias` call so that aliases of :class:`.Join`
+ objects don't include an enclosing SELECT. This can lead to more
+ efficient queries in many circumstances. A JOIN against a nested JOIN
+ will be rewritten as a JOIN against an aliased SELECT subquery on
+ backends that don't support this syntax.
Setting ``flat`` to ``True`` implies the ``aliased`` flag is
also ``True``.
@@ -695,15 +707,15 @@ def with_polymorphic(base, classes, selectable=False,
"""
primary_mapper = _class_to_mapper(base)
mappers, selectable = primary_mapper.\
- _with_polymorphic_args(classes, selectable,
- innerjoin=innerjoin)
+ _with_polymorphic_args(classes, selectable,
+ innerjoin=innerjoin)
if aliased or flat:
selectable = selectable.alias(flat=flat)
return AliasedClass(base,
- selectable,
- with_polymorphic_mappers=mappers,
- with_polymorphic_discriminator=polymorphic_on,
- use_mapper_path=_use_mapper_path)
+ selectable,
+ with_polymorphic_mappers=mappers,
+ with_polymorphic_discriminator=polymorphic_on,
+ use_mapper_path=_use_mapper_path)
def _orm_annotate(element, exclude=None):
@@ -726,8 +738,8 @@ def _orm_deannotate(element):
"""
return sql_util._deep_deannotate(element,
- values=("_orm_adapt", "parententity")
- )
+ values=("_orm_adapt", "parententity")
+ )
def _orm_full_deannotate(element):
@@ -762,18 +774,19 @@ class _ORMJoin(expression.Join):
prop = None
if prop:
- if sql_util.clause_is_present(on_selectable, left_info.selectable):
+ if sql_util.clause_is_present(
+ on_selectable, left_info.selectable):
adapt_from = on_selectable
else:
adapt_from = left_info.selectable
pj, sj, source, dest, \
secondary, target_adapter = prop._create_joins(
- source_selectable=adapt_from,
- dest_selectable=adapt_to,
- source_polymorphic=True,
- dest_polymorphic=True,
- of_type=right_info.mapper)
+ source_selectable=adapt_from,
+ dest_selectable=adapt_to,
+ source_polymorphic=True,
+ dest_polymorphic=True,
+ of_type=right_info.mapper)
if sj is not None:
if isouter:
@@ -886,7 +899,6 @@ def with_parent(instance, prop):
value_is_parent=True)
-
def has_identity(object):
"""Return True if the given object has a database
identity.
@@ -902,6 +914,7 @@ def has_identity(object):
state = attributes.instance_state(object)
return state.has_identity
+
def was_deleted(object):
"""Return True if the given object was deleted
within a session flush.
@@ -914,8 +927,6 @@ def was_deleted(object):
return state.deleted
-
-
def randomize_unitofwork():
"""Use random-ordering sets within the unit of work in order
to detect unit of work sorting issues.
@@ -935,9 +946,9 @@ def randomize_unitofwork():
By calling ``randomize_unitofwork()`` when a script first runs, the
ordering of a key series of sets within the unit of work implementation
- are randomized, so that the script can be minimized down to the fundamental
- mapping and operation that's failing, while still reproducing the issue
- on at least some runs.
+ are randomized, so that the script can be minimized down to the
+ fundamental mapping and operation that's failing, while still reproducing
+ the issue on at least some runs.
This utility is also available when running the test suite via the
``--reversetop`` flag.
@@ -950,5 +961,4 @@ def randomize_unitofwork():
from sqlalchemy.util import topological
from sqlalchemy.testing.util import RandomSet
topological.set = unitofwork.set = session.set = mapper.set = \
- dependency.set = RandomSet
-
+ dependency.set = RandomSet