author     Brian Jarrett <celttechie@gmail.com>    2014-07-20 12:44:40 -0400
committer  Mike Bayer <mike_mp@zzzcomputing.com>   2014-07-20 12:44:40 -0400
commit     cca03097f47f22783d42d1853faac6cf84607c5a (patch)
tree       4fe1a63d03a2d88d1cf37e1167759dfaf84f4ce7
parent     827329a0cca5351094a1a86b6b2be2b9182f0ae2 (diff)
- apply pep8 formatting to sqlalchemy/sql, sqlalchemy/util, sqlalchemy/dialects,
sqlalchemy/orm, sqlalchemy/event, sqlalchemy/testing
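
The diff below is almost entirely mechanical. A hedged before/after sketch
of the main change pattern it applies (abridged from the ``ischema_names``
dict in lib/sqlalchemy/dialects/firebird/base.py; values are simplified to
strings so the snippet stands alone)::

    # before: continuation lines carry extra, inconsistent indentation
    ischema_names = {
            'SHORT': 'SMALLINT',
            'LONG': 'INTEGER',
        }

    # after: four-space hanging indent, closing brace dedented to the
    # opening line's level, long lines wrapped to fit under 80 characters
    ischema_names = {
        'SHORT': 'SMALLINT',
        'LONG': 'INTEGER',
    }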
-rw-r--r-- lib/sqlalchemy/dialects/__init__.py | 3
-rw-r--r-- lib/sqlalchemy/dialects/firebird/base.py | 132
-rw-r--r-- lib/sqlalchemy/dialects/firebird/fdb.py | 20
-rw-r--r-- lib/sqlalchemy/dialects/firebird/kinterbasdb.py | 26
-rw-r--r-- lib/sqlalchemy/dialects/mssql/__init__.py | 2
-rw-r--r-- lib/sqlalchemy/dialects/mssql/adodbapi.py | 4
-rw-r--r-- lib/sqlalchemy/dialects/mssql/base.py | 415
-rw-r--r-- lib/sqlalchemy/dialects/mssql/information_schema.py | 141
-rw-r--r-- lib/sqlalchemy/dialects/mssql/mxodbc.py | 8
-rw-r--r-- lib/sqlalchemy/dialects/mssql/pymssql.py | 5
-rw-r--r-- lib/sqlalchemy/dialects/mssql/pyodbc.py | 62
-rw-r--r-- lib/sqlalchemy/dialects/mssql/zxjdbc.py | 13
-rw-r--r-- lib/sqlalchemy/dialects/mysql/__init__.py | 14
-rw-r--r-- lib/sqlalchemy/dialects/mysql/base.py | 485
-rw-r--r-- lib/sqlalchemy/dialects/mysql/cymysql.py | 6
-rw-r--r-- lib/sqlalchemy/dialects/mysql/gaerdbms.py | 9
-rw-r--r-- lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 14
-rw-r--r-- lib/sqlalchemy/dialects/mysql/mysqldb.py | 34
-rw-r--r-- lib/sqlalchemy/dialects/mysql/oursql.py | 22
-rw-r--r-- lib/sqlalchemy/dialects/mysql/pymysql.py | 9
-rw-r--r-- lib/sqlalchemy/dialects/mysql/pyodbc.py | 3
-rw-r--r-- lib/sqlalchemy/dialects/mysql/zxjdbc.py | 6
-rw-r--r-- lib/sqlalchemy/dialects/oracle/__init__.py | 8
-rw-r--r-- lib/sqlalchemy/dialects/oracle/base.py | 415
-rw-r--r-- lib/sqlalchemy/dialects/oracle/cx_oracle.py | 308
-rw-r--r-- lib/sqlalchemy/dialects/oracle/zxjdbc.py | 47
-rw-r--r-- lib/sqlalchemy/dialects/postgres.py | 7
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/base.py | 543
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/constraints.py | 15
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/hstore.py | 21
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/json.py | 66
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/pg8000.py | 2
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/psycopg2.py | 127
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/pypostgresql.py | 3
-rw-r--r-- lib/sqlalchemy/dialects/postgresql/ranges.py | 7
-rw-r--r-- lib/sqlalchemy/dialects/sqlite/base.py | 114
-rw-r--r-- lib/sqlalchemy/dialects/sqlite/pysqlite.py | 35
-rw-r--r-- lib/sqlalchemy/dialects/sybase/__init__.py | 10
-rw-r--r-- lib/sqlalchemy/dialects/sybase/base.py | 98
-rw-r--r-- lib/sqlalchemy/dialects/sybase/pyodbc.py | 7
-rw-r--r-- lib/sqlalchemy/dialects/sybase/pysybase.py | 11
-rw-r--r-- lib/sqlalchemy/event/api.py | 8
-rw-r--r-- lib/sqlalchemy/event/attr.py | 39
-rw-r--r-- lib/sqlalchemy/event/base.py | 24
-rw-r--r-- lib/sqlalchemy/event/legacy.py | 116
-rw-r--r-- lib/sqlalchemy/event/registry.py | 30
-rw-r--r-- lib/sqlalchemy/orm/__init__.py | 89
-rw-r--r-- lib/sqlalchemy/orm/attributes.py | 310
-rw-r--r-- lib/sqlalchemy/orm/base.py | 193
-rw-r--r-- lib/sqlalchemy/orm/collections.py | 73
-rw-r--r-- lib/sqlalchemy/orm/dependency.py | 570
-rw-r--r-- lib/sqlalchemy/orm/deprecated_interfaces.py | 64
-rw-r--r-- lib/sqlalchemy/orm/descriptor_props.py | 135
-rw-r--r-- lib/sqlalchemy/orm/dynamic.py | 80
-rw-r--r-- lib/sqlalchemy/orm/evaluator.py | 22
-rw-r--r-- lib/sqlalchemy/orm/events.py | 127
-rw-r--r-- lib/sqlalchemy/orm/exc.py | 3
-rw-r--r-- lib/sqlalchemy/orm/identity.py | 6
-rw-r--r-- lib/sqlalchemy/orm/instrumentation.py | 56
-rw-r--r-- lib/sqlalchemy/orm/interfaces.py | 34
-rw-r--r-- lib/sqlalchemy/orm/loading.py | 192
-rw-r--r-- lib/sqlalchemy/orm/mapper.py | 525
-rw-r--r-- lib/sqlalchemy/orm/path_registry.py | 37
-rw-r--r-- lib/sqlalchemy/orm/persistence.py | 385
-rw-r--r-- lib/sqlalchemy/orm/properties.py | 41
-rw-r--r-- lib/sqlalchemy/orm/query.py | 594
-rw-r--r-- lib/sqlalchemy/orm/relationships.py | 1029
-rw-r--r-- lib/sqlalchemy/orm/scoping.py | 8
-rw-r--r-- lib/sqlalchemy/orm/session.py | 323
-rw-r--r-- lib/sqlalchemy/orm/state.py | 52
-rw-r--r-- lib/sqlalchemy/orm/strategy_options.py | 229
-rw-r--r-- lib/sqlalchemy/orm/sync.py | 39
-rw-r--r-- lib/sqlalchemy/orm/unitofwork.py | 126
-rw-r--r-- lib/sqlalchemy/orm/util.py | 184
-rw-r--r-- lib/sqlalchemy/sql/__init__.py | 6
-rw-r--r-- lib/sqlalchemy/sql/annotation.py | 19
-rw-r--r-- lib/sqlalchemy/sql/base.py | 109
-rw-r--r-- lib/sqlalchemy/sql/compiler.py | 1019
-rw-r--r-- lib/sqlalchemy/sql/ddl.py | 109
-rw-r--r-- lib/sqlalchemy/sql/default_comparator.py | 107
-rw-r--r-- lib/sqlalchemy/sql/dml.py | 183
-rw-r--r-- lib/sqlalchemy/sql/elements.py | 542
-rw-r--r-- lib/sqlalchemy/sql/expression.py | 46
-rw-r--r-- lib/sqlalchemy/sql/functions.py | 33
-rw-r--r-- lib/sqlalchemy/sql/naming.py | 22
-rw-r--r-- lib/sqlalchemy/sql/operators.py | 34
-rw-r--r-- lib/sqlalchemy/sql/schema.py | 631
-rw-r--r-- lib/sqlalchemy/sql/selectable.py | 508
-rw-r--r-- lib/sqlalchemy/sql/sqltypes.py | 234
-rw-r--r-- lib/sqlalchemy/sql/type_api.py | 65
-rw-r--r-- lib/sqlalchemy/sql/util.py | 116
-rw-r--r-- lib/sqlalchemy/sql/visitors.py | 12
-rw-r--r-- lib/sqlalchemy/testing/__init__.py | 8
-rw-r--r-- lib/sqlalchemy/testing/assertions.py | 56
-rw-r--r-- lib/sqlalchemy/testing/assertsql.py | 46
-rw-r--r-- lib/sqlalchemy/testing/config.py | 4
-rw-r--r-- lib/sqlalchemy/testing/distutils_run.py | 1
-rw-r--r-- lib/sqlalchemy/testing/engines.py | 37
-rw-r--r-- lib/sqlalchemy/testing/entities.py | 3
-rw-r--r-- lib/sqlalchemy/testing/exclusions.py | 68
-rw-r--r-- lib/sqlalchemy/testing/fixtures.py | 11
-rw-r--r-- lib/sqlalchemy/testing/mock.py | 5
-rw-r--r-- lib/sqlalchemy/testing/pickleable.py | 4
-rw-r--r-- lib/sqlalchemy/testing/plugin/noseplugin.py | 13
-rw-r--r-- lib/sqlalchemy/testing/plugin/plugin_base.py | 156
-rw-r--r-- lib/sqlalchemy/testing/plugin/pytestplugin.py | 39
-rw-r--r-- lib/sqlalchemy/testing/profiling.py | 60
-rw-r--r-- lib/sqlalchemy/testing/requirements.py | 77
-rw-r--r-- lib/sqlalchemy/testing/runner.py | 1
-rw-r--r-- lib/sqlalchemy/testing/schema.py | 9
-rw-r--r-- lib/sqlalchemy/testing/suite/test_ddl.py | 14
-rw-r--r-- lib/sqlalchemy/testing/suite/test_insert.py | 77
-rw-r--r-- lib/sqlalchemy/testing/suite/test_reflection.py | 144
-rw-r--r-- lib/sqlalchemy/testing/suite/test_results.py | 86
-rw-r--r-- lib/sqlalchemy/testing/suite/test_select.py | 23
-rw-r--r-- lib/sqlalchemy/testing/suite/test_sequence.py | 28
-rw-r--r-- lib/sqlalchemy/testing/suite/test_types.py | 144
-rw-r--r-- lib/sqlalchemy/testing/suite/test_update_delete.py | 12
-rw-r--r-- lib/sqlalchemy/testing/util.py | 7
-rw-r--r-- lib/sqlalchemy/testing/warnings.py | 1
-rw-r--r-- lib/sqlalchemy/util/_collections.py | 29
-rw-r--r-- lib/sqlalchemy/util/compat.py | 19
-rw-r--r-- lib/sqlalchemy/util/deprecations.py | 6
-rw-r--r-- lib/sqlalchemy/util/langhelpers.py | 105
-rw-r--r-- lib/sqlalchemy/util/queue.py | 1
-rw-r--r-- lib/sqlalchemy/util/topological.py | 16
126 files changed, 7470 insertions(+), 6655 deletions(-)
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index 31afe1568..74c48820d 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -13,10 +13,11 @@ __all__ = (
'postgresql',
'sqlite',
'sybase',
- )
+)
from .. import util
+
def _auto_fn(name):
"""default dialect importer.
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index c8f081b2d..36229a105 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -119,7 +119,7 @@ RESERVED_WORDS = set([
"union", "unique", "update", "upper", "user", "using", "value",
"values", "varchar", "variable", "varying", "view", "wait", "when",
"where", "while", "with", "work", "write", "year",
- ])
+])
class _StringType(sqltypes.String):
@@ -160,20 +160,20 @@ colspecs = {
}
ischema_names = {
- 'SHORT': SMALLINT,
- 'LONG': INTEGER,
- 'QUAD': FLOAT,
- 'FLOAT': FLOAT,
- 'DATE': DATE,
- 'TIME': TIME,
- 'TEXT': TEXT,
- 'INT64': BIGINT,
- 'DOUBLE': FLOAT,
- 'TIMESTAMP': TIMESTAMP,
+ 'SHORT': SMALLINT,
+ 'LONG': INTEGER,
+ 'QUAD': FLOAT,
+ 'FLOAT': FLOAT,
+ 'DATE': DATE,
+ 'TIME': TIME,
+ 'TEXT': TEXT,
+ 'INT64': BIGINT,
+ 'DOUBLE': FLOAT,
+ 'TIMESTAMP': TIMESTAMP,
'VARYING': VARCHAR,
'CSTRING': CHAR,
- 'BLOB': BLOB,
- }
+ 'BLOB': BLOB,
+}
# TODO: date conversion types (should be implemented as _FBDateTime,
@@ -193,7 +193,7 @@ class FBTypeCompiler(compiler.GenericTypeCompiler):
return "BLOB SUB_TYPE 0"
def _extend_string(self, type_, basic):
- charset = getattr(type_, 'charset', None)
+ charset = getattr(type_, 'charset', None)
if charset is None:
return basic
else:
@@ -206,8 +206,8 @@ class FBTypeCompiler(compiler.GenericTypeCompiler):
def visit_VARCHAR(self, type_):
if not type_.length:
raise exc.CompileError(
- "VARCHAR requires a length on dialect %s" %
- self.dialect.name)
+ "VARCHAR requires a length on dialect %s" %
+ self.dialect.name)
basic = super(FBTypeCompiler, self).visit_VARCHAR(type_)
return self._extend_string(type_, basic)
@@ -217,46 +217,46 @@ class FBCompiler(sql.compiler.SQLCompiler):
ansi_bind_rules = True
- #def visit_contains_op_binary(self, binary, operator, **kw):
- # cant use CONTAINING b.c. it's case insensitive.
+ # def visit_contains_op_binary(self, binary, operator, **kw):
+ # cant use CONTAINING b.c. it's case insensitive.
- #def visit_notcontains_op_binary(self, binary, operator, **kw):
- # cant use NOT CONTAINING b.c. it's case insensitive.
+ # def visit_notcontains_op_binary(self, binary, operator, **kw):
+ # cant use NOT CONTAINING b.c. it's case insensitive.
def visit_now_func(self, fn, **kw):
return "CURRENT_TIMESTAMP"
def visit_startswith_op_binary(self, binary, operator, **kw):
return '%s STARTING WITH %s' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw))
+ binary.left._compiler_dispatch(self, **kw),
+ binary.right._compiler_dispatch(self, **kw))
def visit_notstartswith_op_binary(self, binary, operator, **kw):
return '%s NOT STARTING WITH %s' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw))
+ binary.left._compiler_dispatch(self, **kw),
+ binary.right._compiler_dispatch(self, **kw))
def visit_mod_binary(self, binary, operator, **kw):
return "mod(%s, %s)" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw))
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw))
def visit_alias(self, alias, asfrom=False, **kwargs):
if self.dialect._version_two:
return super(FBCompiler, self).\
- visit_alias(alias, asfrom=asfrom, **kwargs)
+ visit_alias(alias, asfrom=asfrom, **kwargs)
else:
# Override to not use the AS keyword which FB 1.5 does not like
if asfrom:
alias_name = isinstance(alias.name,
- expression._truncated_label) and \
- self._truncated_identifier("alias",
- alias.name) or alias.name
+ expression._truncated_label) and \
+ self._truncated_identifier("alias",
+ alias.name) or alias.name
return self.process(
- alias.original, asfrom=asfrom, **kwargs) + \
- " " + \
- self.preparer.format_alias(alias, alias_name)
+ alias.original, asfrom=asfrom, **kwargs) + \
+ " " + \
+ self.preparer.format_alias(alias, alias_name)
else:
return self.process(alias.original, **kwargs)
@@ -315,9 +315,9 @@ class FBCompiler(sql.compiler.SQLCompiler):
def returning_clause(self, stmt, returning_cols):
columns = [
- self._label_select_column(None, c, True, False, {})
- for c in expression._select_iterables(returning_cols)
- ]
+ self._label_select_column(None, c, True, False, {})
+ for c in expression._select_iterables(returning_cols)
+ ]
return 'RETURNING ' + ', '.join(columns)
@@ -332,34 +332,35 @@ class FBDDLCompiler(sql.compiler.DDLCompiler):
# http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
if create.element.start is not None:
raise NotImplemented(
- "Firebird SEQUENCE doesn't support START WITH")
+ "Firebird SEQUENCE doesn't support START WITH")
if create.element.increment is not None:
raise NotImplemented(
- "Firebird SEQUENCE doesn't support INCREMENT BY")
+ "Firebird SEQUENCE doesn't support INCREMENT BY")
if self.dialect._version_two:
return "CREATE SEQUENCE %s" % \
- self.preparer.format_sequence(create.element)
+ self.preparer.format_sequence(create.element)
else:
return "CREATE GENERATOR %s" % \
- self.preparer.format_sequence(create.element)
+ self.preparer.format_sequence(create.element)
def visit_drop_sequence(self, drop):
"""Generate a ``DROP GENERATOR`` statement for the sequence."""
if self.dialect._version_two:
return "DROP SEQUENCE %s" % \
- self.preparer.format_sequence(drop.element)
+ self.preparer.format_sequence(drop.element)
else:
return "DROP GENERATOR %s" % \
- self.preparer.format_sequence(drop.element)
+ self.preparer.format_sequence(drop.element)
class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
"""Install Firebird specific reserved words."""
reserved_words = RESERVED_WORDS
- illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(['_'])
+ illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(
+ ['_'])
def __init__(self, dialect):
super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
@@ -370,10 +371,10 @@ class FBExecutionContext(default.DefaultExecutionContext):
"""Get the next value from the sequence using ``gen_id()``."""
return self._execute_scalar(
- "SELECT gen_id(%s, 1) FROM rdb$database" %
- self.dialect.identifier_preparer.format_sequence(seq),
- type_
- )
+ "SELECT gen_id(%s, 1) FROM rdb$database" %
+ self.dialect.identifier_preparer.format_sequence(seq),
+ type_
+ )
class FBDialect(default.DefaultDialect):
@@ -411,12 +412,12 @@ class FBDialect(default.DefaultDialect):
def initialize(self, connection):
super(FBDialect, self).initialize(connection)
- self._version_two = ('firebird' in self.server_version_info and \
- self.server_version_info >= (2, )
- ) or \
- ('interbase' in self.server_version_info and \
+ self._version_two = ('firebird' in self.server_version_info and
+ self.server_version_info >= (2, )
+ ) or \
+ ('interbase' in self.server_version_info and
self.server_version_info >= (6, )
- )
+ )
if not self._version_two:
# TODO: whatever other pre < 2.0 stuff goes here
@@ -426,8 +427,8 @@ class FBDialect(default.DefaultDialect):
sqltypes.DateTime: sqltypes.DATE
}
- self.implicit_returning = self._version_two and \
- self.__dict__.get('implicit_returning', True)
+ self.implicit_returning = self._version_two and \
+ self.__dict__.get('implicit_returning', True)
def normalize_name(self, name):
# Remove trailing spaces: FB uses a CHAR() type,
@@ -436,7 +437,7 @@ class FBDialect(default.DefaultDialect):
if name is None:
return None
elif name.upper() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
+ not self.identifier_preparer._requires_quotes(name.lower()):
return name.lower()
else:
return name
@@ -445,7 +446,7 @@ class FBDialect(default.DefaultDialect):
if name is None:
return None
elif name.lower() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
+ not self.identifier_preparer._requires_quotes(name.lower()):
return name.upper()
else:
return name
@@ -539,8 +540,8 @@ class FBDialect(default.DefaultDialect):
@reflection.cache
def get_column_sequence(self, connection,
- table_name, column_name,
- schema=None, **kw):
+ table_name, column_name,
+ schema=None, **kw):
tablename = self.denormalize_name(table_name)
colname = self.denormalize_name(column_name)
# Heuristic-query to determine the generator associated to a PK field
@@ -613,8 +614,8 @@ class FBDialect(default.DefaultDialect):
coltype = sqltypes.NULLTYPE
elif issubclass(coltype, Integer) and row['fprec'] != 0:
coltype = NUMERIC(
- precision=row['fprec'],
- scale=row['fscale'] * -1)
+ precision=row['fprec'],
+ scale=row['fscale'] * -1)
elif colspec in ('VARYING', 'CSTRING'):
coltype = coltype(row['flen'])
elif colspec == 'TEXT':
@@ -636,8 +637,8 @@ class FBDialect(default.DefaultDialect):
# (see also http://tracker.firebirdsql.org/browse/CORE-356)
defexpr = row['fdefault'].lstrip()
assert defexpr[:8].rstrip().upper() == \
- 'DEFAULT', "Unrecognized default value: %s" % \
- defexpr
+ 'DEFAULT', "Unrecognized default value: %s" % \
+ defexpr
defvalue = defexpr[8:].strip()
if defvalue == 'NULL':
# Redundant
@@ -700,9 +701,9 @@ class FBDialect(default.DefaultDialect):
fk['name'] = cname
fk['referred_table'] = self.normalize_name(row['targetrname'])
fk['constrained_columns'].append(
- self.normalize_name(row['fname']))
+ self.normalize_name(row['fname']))
fk['referred_columns'].append(
- self.normalize_name(row['targetfname']))
+ self.normalize_name(row['targetfname']))
return list(fks.values())
@reflection.cache
@@ -732,7 +733,6 @@ class FBDialect(default.DefaultDialect):
indexrec['unique'] = bool(row['unique_flag'])
indexrec['column_names'].append(
- self.normalize_name(row['field_name']))
+ self.normalize_name(row['field_name']))
return list(indexes.values())
-
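
A hedged usage sketch for the ``FBCompiler`` operator handling reformatted
above: on Firebird, ``Column.startswith()`` compiles via
``visit_startswith_op_binary`` to ``STARTING WITH`` rather than ``LIKE``
(the table and column names here are illustrative, not from this commit)::

    from sqlalchemy import Table, Column, String, MetaData, select

    t = Table('t', MetaData(), Column('name', String(50)))
    stmt = select([t]).where(t.c.name.startswith('ab'))
    # compiled against the firebird dialect, this renders roughly:
    #   SELECT t.name FROM t WHERE t.name STARTING WITH :name_1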
diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py
index a691adb53..ddffc80f5 100644
--- a/lib/sqlalchemy/dialects/firebird/fdb.py
+++ b/lib/sqlalchemy/dialects/firebird/fdb.py
@@ -9,7 +9,8 @@
.. dialect:: firebird+fdb
:name: fdb
:dbapi: pyodbc
- :connectstring: firebird+fdb://user:password@host:port/path/to/db[?key=value&key=value...]
+ :connectstring: firebird+fdb://user:password@host:port/path/to/db\
+[?key=value&key=value...]
:url: http://pypi.python.org/pypi/fdb/
fdb is a kinterbasdb compatible DBAPI for Firebird.
@@ -23,8 +24,9 @@
Arguments
----------
-The ``fdb`` dialect is based on the :mod:`sqlalchemy.dialects.firebird.kinterbasdb`
-dialect, however does not accept every argument that Kinterbasdb does.
+The ``fdb`` dialect is based on the
+:mod:`sqlalchemy.dialects.firebird.kinterbasdb` dialect, however does not
+accept every argument that Kinterbasdb does.
* ``enable_rowcount`` - True by default, setting this to False disables
the usage of "cursor.rowcount" with the
@@ -61,8 +63,8 @@ dialect, however does not accept every argument that Kinterbasdb does.
.. seealso::
- http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions - information
- on the "retaining" flag.
+ http://pythonhosted.org/fdb/usage-guide.html#retaining-transactions
+ - information on the "retaining" flag.
"""
@@ -73,14 +75,14 @@ from ... import util
class FBDialect_fdb(FBDialect_kinterbasdb):
def __init__(self, enable_rowcount=True,
- retaining=False, **kwargs):
+ retaining=False, **kwargs):
super(FBDialect_fdb, self).__init__(
- enable_rowcount=enable_rowcount,
- retaining=retaining, **kwargs)
+ enable_rowcount=enable_rowcount,
+ retaining=retaining, **kwargs)
@classmethod
def dbapi(cls):
- return __import__('fdb')
+ return __import__('fdb')
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index cdd1f7e7b..6bd7887f7 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -9,15 +9,16 @@
.. dialect:: firebird+kinterbasdb
:name: kinterbasdb
:dbapi: kinterbasdb
- :connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db[?key=value&key=value...]
+ :connectstring: firebird+kinterbasdb://user:password@host:port/path/to/db\
+[?key=value&key=value...]
:url: http://firebirdsql.org/index.php?op=devel&sub=python
Arguments
----------
The Kinterbasdb backend accepts the ``enable_rowcount`` and ``retaining``
-arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect. In addition, it
-also accepts the following:
+arguments accepted by the :mod:`sqlalchemy.dialects.firebird.fdb` dialect.
+In addition, it also accepts the following:
* ``type_conv`` - select the kind of mapping done on the types: by default
SQLAlchemy uses 200 with Unicode, datetime and decimal support. See
@@ -52,9 +53,11 @@ class _kinterbasdb_numeric(object):
return value
return process
+
class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
pass
+
class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
pass
@@ -63,7 +66,7 @@ class FBExecutionContext_kinterbasdb(FBExecutionContext):
@property
def rowcount(self):
if self.execution_options.get('enable_rowcount',
- self.dialect.enable_rowcount):
+ self.dialect.enable_rowcount):
return self.cursor.rowcount
else:
return -1
@@ -87,8 +90,8 @@ class FBDialect_kinterbasdb(FBDialect):
)
def __init__(self, type_conv=200, concurrency_level=1,
- enable_rowcount=True,
- retaining=False, **kwargs):
+ enable_rowcount=True,
+ retaining=False, **kwargs):
super(FBDialect_kinterbasdb, self).__init__(**kwargs)
self.enable_rowcount = enable_rowcount
self.type_conv = type_conv
@@ -123,7 +126,7 @@ class FBDialect_kinterbasdb(FBDialect):
type_conv = opts.pop('type_conv', self.type_conv)
concurrency_level = opts.pop('concurrency_level',
- self.concurrency_level)
+ self.concurrency_level)
if self.dbapi is not None:
initialized = getattr(self.dbapi, 'initialized', None)
@@ -134,7 +137,7 @@ class FBDialect_kinterbasdb(FBDialect):
initialized = getattr(self.dbapi, '_initialized', False)
if not initialized:
self.dbapi.init(type_conv=type_conv,
- concurrency_level=concurrency_level)
+ concurrency_level=concurrency_level)
return ([], opts)
def _get_server_version_info(self, connection):
@@ -156,10 +159,11 @@ class FBDialect_kinterbasdb(FBDialect):
return self._parse_version_info(version)
def _parse_version_info(self, version):
- m = match('\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version)
+ m = match(
+ '\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version)
if not m:
raise AssertionError(
- "Could not determine version from string '%s'" % version)
+ "Could not determine version from string '%s'" % version)
if m.group(5) != None:
return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird'])
@@ -168,7 +172,7 @@ class FBDialect_kinterbasdb(FBDialect):
def is_disconnect(self, e, connection, cursor):
if isinstance(e, (self.dbapi.OperationalError,
- self.dbapi.ProgrammingError)):
+ self.dbapi.ProgrammingError)):
msg = str(e)
return ('Unable to complete network request to host' in msg or
'Invalid connection state' in msg or
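
A hedged connection sketch per the connectstring and argument documentation
in the two docstrings above (host, port, and database path are
placeholders)::

    from sqlalchemy import create_engine

    # enable_rowcount and retaining are the dialect-level arguments
    # described above; the values shown are their documented defaults
    engine = create_engine(
        "firebird+fdb://user:password@localhost:3050/path/to/db",
        enable_rowcount=True,
        retaining=False,
    )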
diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py
index 4c059ae2f..d0047765e 100644
--- a/lib/sqlalchemy/dialects/mssql/__init__.py
+++ b/lib/sqlalchemy/dialects/mssql/__init__.py
@@ -6,7 +6,7 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from sqlalchemy.dialects.mssql import base, pyodbc, adodbapi, \
- pymssql, zxjdbc, mxodbc
+ pymssql, zxjdbc, mxodbc
base.dialect = pyodbc.dialect
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index d94a4517d..e9927f8ed 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -61,7 +61,7 @@ class MSDialect_adodbapi(MSDialect):
connectors = ["Provider=SQLOLEDB"]
if 'port' in keys:
connectors.append("Data Source=%s, %s" %
- (keys.get("host"), keys.get("port")))
+ (keys.get("host"), keys.get("port")))
else:
connectors.append("Data Source=%s" % keys.get("host"))
connectors.append("Initial Catalog=%s" % keys.get("database"))
@@ -75,6 +75,6 @@ class MSDialect_adodbapi(MSDialect):
def is_disconnect(self, e, connection, cursor):
return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \
- "'connection failure'" in str(e)
+ "'connection failure'" in str(e)
dialect = MSDialect_adodbapi
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 473f7df06..f4264b3d0 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -13,9 +13,9 @@
Auto Increment Behavior
-----------------------
-SQL Server provides so-called "auto incrementing" behavior using the ``IDENTITY``
-construct, which can be placed on an integer primary key. SQLAlchemy
-considers ``IDENTITY`` within its default "autoincrement" behavior,
+SQL Server provides so-called "auto incrementing" behavior using the
+``IDENTITY`` construct, which can be placed on an integer primary key.
+SQLAlchemy considers ``IDENTITY`` within its default "autoincrement" behavior,
described at :paramref:`.Column.autoincrement`; this means
that by default, the first integer primary key column in a :class:`.Table`
will be considered to be the identity column and will generate DDL as such::
@@ -52,24 +52,25 @@ specify ``autoincrement=False`` on all integer primary key columns::
An INSERT statement which refers to an explicit value for such
a column is prohibited by SQL Server, however SQLAlchemy will detect this
and modify the ``IDENTITY_INSERT`` flag accordingly at statement execution
- time. As this is not a high performing process, care should be taken to set
- the ``autoincrement`` flag appropriately for columns that will not actually
- require IDENTITY behavior.
+ time. As this is not a high performing process, care should be taken to
+ set the ``autoincrement`` flag appropriately for columns that will not
+ actually require IDENTITY behavior.
Controlling "Start" and "Increment"
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Specific control over the parameters of the ``IDENTITY`` value is supported
-using the :class:`.schema.Sequence` object. While this object normally represents
-an explicit "sequence" for supporting backends, on SQL Server it is re-purposed
-to specify behavior regarding the identity column, including support
-of the "start" and "increment" values::
+using the :class:`.schema.Sequence` object. While this object normally
+represents an explicit "sequence" for supporting backends, on SQL Server it is
+re-purposed to specify behavior regarding the identity column, including
+support of the "start" and "increment" values::
from sqlalchemy import Table, Integer, Sequence, Column
Table('test', metadata,
Column('id', Integer,
- Sequence('blah', start=100, increment=10), primary_key=True),
+ Sequence('blah', start=100, increment=10),
+ primary_key=True),
Column('name', String(20))
).create(some_engine)
@@ -88,10 +89,10 @@ optional and will default to 1,1.
INSERT behavior
^^^^^^^^^^^^^^^^
-Handling of the ``IDENTITY`` column at INSERT time involves two key techniques.
-The most common is being able to fetch the "last inserted value" for a given
-``IDENTITY`` column, a process which SQLAlchemy performs implicitly in many
-cases, most importantly within the ORM.
+Handling of the ``IDENTITY`` column at INSERT time involves two key
+techniques. The most common is being able to fetch the "last inserted value"
+for a given ``IDENTITY`` column, a process which SQLAlchemy performs
+implicitly in many cases, most importantly within the ORM.
The process for fetching this value has several variants:
@@ -106,9 +107,9 @@ The process for fetching this value has several variants:
``implicit_returning=False``, either the ``scope_identity()`` function or
the ``@@identity`` variable is used; behavior varies by backend:
- * when using PyODBC, the phrase ``; select scope_identity()`` will be appended
- to the end of the INSERT statement; a second result set will be fetched
- in order to receive the value. Given a table as::
+ * when using PyODBC, the phrase ``; select scope_identity()`` will be
+ appended to the end of the INSERT statement; a second result set will be
+ fetched in order to receive the value. Given a table as::
t = Table('t', m, Column('id', Integer, primary_key=True),
Column('x', Integer),
@@ -121,17 +122,18 @@ The process for fetching this value has several variants:
INSERT INTO t (x) VALUES (?); select scope_identity()
* Other dialects such as pymssql will call upon
- ``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT statement.
- If the flag ``use_scope_identity=False`` is passed to :func:`.create_engine`,
- the statement ``SELECT @@identity AS lastrowid`` is used instead.
+ ``SELECT scope_identity() AS lastrowid`` subsequent to an INSERT
+ statement. If the flag ``use_scope_identity=False`` is passed to
+ :func:`.create_engine`, the statement ``SELECT @@identity AS lastrowid``
+ is used instead.
A table that contains an ``IDENTITY`` column will prohibit an INSERT statement
that refers to the identity column explicitly. The SQLAlchemy dialect will
detect when an INSERT construct, created using a core :func:`.insert`
construct (not a plain string SQL), refers to the identity column, and
-in this case will emit ``SET IDENTITY_INSERT ON`` prior to the insert statement
-proceeding, and ``SET IDENTITY_INSERT OFF`` subsequent to the execution.
-Given this example::
+in this case will emit ``SET IDENTITY_INSERT ON`` prior to the insert
+statement proceeding, and ``SET IDENTITY_INSERT OFF`` subsequent to the
+execution. Given this example::
m = MetaData()
t = Table('t', m, Column('id', Integer, primary_key=True),
@@ -250,7 +252,8 @@ To generate a clustered primary key use::
which will render the table, for example, as::
- CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY CLUSTERED (x, y))
+ CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL,
+ PRIMARY KEY CLUSTERED (x, y))
Similarly, we can generate a clustered unique constraint using::
@@ -272,7 +275,8 @@ for :class:`.Index`.
INCLUDE
^^^^^^^
-The ``mssql_include`` option renders INCLUDE(colname) for the given string names::
+The ``mssql_include`` option renders INCLUDE(colname) for the given string
+names::
Index("my_index", table.c.x, mssql_include=['y'])
@@ -364,13 +368,13 @@ import re
from ... import sql, schema as sa_schema, exc, util
from ...sql import compiler, expression, \
- util as sql_util, cast
+ util as sql_util, cast
from ... import engine
from ...engine import reflection, default
from ... import types as sqltypes
from ...types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
- FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
- VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
+ FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
+ VARBINARY, TEXT, VARCHAR, NVARCHAR, CHAR, NCHAR
from ...util import update_wrapper
@@ -409,7 +413,7 @@ RESERVED_WORDS = set(
'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values',
'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with',
'writetext',
- ])
+ ])
class REAL(sqltypes.REAL):
@@ -431,6 +435,7 @@ class TINYINT(sqltypes.Integer):
# not sure about other dialects).
class _MSDate(sqltypes.Date):
+
def bind_processor(self, dialect):
def process(value):
if type(value) == datetime.date:
@@ -447,15 +452,16 @@ class _MSDate(sqltypes.Date):
return value.date()
elif isinstance(value, util.string_types):
return datetime.date(*[
- int(x or 0)
- for x in self._reg.match(value).groups()
- ])
+ int(x or 0)
+ for x in self._reg.match(value).groups()
+ ])
else:
return value
return process
class TIME(sqltypes.TIME):
+
def __init__(self, precision=None, **kwargs):
self.precision = precision
super(TIME, self).__init__()
@@ -466,7 +472,7 @@ class TIME(sqltypes.TIME):
def process(value):
if isinstance(value, datetime.datetime):
value = datetime.datetime.combine(
- self.__zero_date, value.time())
+ self.__zero_date, value.time())
elif isinstance(value, datetime.time):
value = datetime.datetime.combine(self.__zero_date, value)
return value
@@ -480,8 +486,8 @@ class TIME(sqltypes.TIME):
return value.time()
elif isinstance(value, util.string_types):
return datetime.time(*[
- int(x or 0)
- for x in self._reg.match(value).groups()])
+ int(x or 0)
+ for x in self._reg.match(value).groups()])
else:
return value
return process
@@ -489,6 +495,7 @@ _MSTime = TIME
class _DateTimeBase(object):
+
def bind_processor(self, dialect):
def process(value):
if type(value) == datetime.date:
@@ -523,22 +530,21 @@ class DATETIMEOFFSET(sqltypes.TypeEngine):
class _StringType(object):
+
"""Base for MSSQL string types."""
def __init__(self, collation=None):
super(_StringType, self).__init__(collation=collation)
-
-
class NTEXT(sqltypes.UnicodeText):
+
"""MSSQL NTEXT type, for variable-length unicode text up to 2^30
characters."""
__visit_name__ = 'NTEXT'
-
class IMAGE(sqltypes.LargeBinary):
__visit_name__ = 'IMAGE'
@@ -620,6 +626,7 @@ ischema_names = {
class MSTypeCompiler(compiler.GenericTypeCompiler):
+
def _extend(self, spec, type_, length=None):
"""Extend a string-type declaration with standard SQL
COLLATE annotations.
@@ -638,7 +645,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
spec = spec + "(%s)" % length
return ' '.join([c for c in (spec, collation)
- if c is not None])
+ if c is not None])
def visit_FLOAT(self, type_):
precision = getattr(type_, 'precision', None)
@@ -717,9 +724,9 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
def visit_VARBINARY(self, type_):
return self._extend(
- "VARBINARY",
- type_,
- length=type_.length or 'max')
+ "VARBINARY",
+ type_,
+ length=type_.length or 'max')
def visit_boolean(self, type_):
return self.visit_BIT(type_)
@@ -762,20 +769,23 @@ class MSExecutionContext(default.DefaultExecutionContext):
if insert_has_sequence:
self._enable_identity_insert = \
- seq_column.key in self.compiled_parameters[0]
+ seq_column.key in self.compiled_parameters[0]
else:
self._enable_identity_insert = False
self._select_lastrowid = insert_has_sequence and \
- not self.compiled.returning and \
- not self._enable_identity_insert and \
- not self.executemany
+ not self.compiled.returning and \
+ not self._enable_identity_insert and \
+ not self.executemany
if self._enable_identity_insert:
- self.root_connection._cursor_execute(self.cursor,
- self._opt_encode("SET IDENTITY_INSERT %s ON" %
- self.dialect.identifier_preparer.format_table(tbl)),
- (), self)
+ self.root_connection._cursor_execute(
+ self.cursor,
+ self._opt_encode(
+ "SET IDENTITY_INSERT %s ON" %
+ self.dialect.identifier_preparer.format_table(tbl)),
+ (),
+ self)
def post_exec(self):
"""Disable IDENTITY_INSERT if enabled."""
@@ -783,11 +793,14 @@ class MSExecutionContext(default.DefaultExecutionContext):
conn = self.root_connection
if self._select_lastrowid:
if self.dialect.use_scope_identity:
- conn._cursor_execute(self.cursor,
+ conn._cursor_execute(
+ self.cursor,
"SELECT scope_identity() AS lastrowid", (), self)
else:
conn._cursor_execute(self.cursor,
- "SELECT @@identity AS lastrowid", (), self)
+ "SELECT @@identity AS lastrowid",
+ (),
+ self)
# fetchall() ensures the cursor is consumed without closing it
row = self.cursor.fetchall()[0]
self._lastrowid = int(row[0])
@@ -797,11 +810,14 @@ class MSExecutionContext(default.DefaultExecutionContext):
self._result_proxy = engine.FullyBufferedResultProxy(self)
if self._enable_identity_insert:
- conn._cursor_execute(self.cursor,
- self._opt_encode("SET IDENTITY_INSERT %s OFF" %
- self.dialect.identifier_preparer.
- format_table(self.compiled.statement.table)),
- (), self)
+ conn._cursor_execute(
+ self.cursor,
+ self._opt_encode(
+ "SET IDENTITY_INSERT %s OFF" %
+ self.dialect.identifier_preparer. format_table(
+ self.compiled.statement.table)),
+ (),
+ self)
def get_lastrowid(self):
return self._lastrowid
@@ -810,10 +826,10 @@ class MSExecutionContext(default.DefaultExecutionContext):
if self._enable_identity_insert:
try:
self.cursor.execute(
- self._opt_encode("SET IDENTITY_INSERT %s OFF" %
- self.dialect.identifier_preparer.\
- format_table(self.compiled.statement.table))
- )
+ self._opt_encode(
+ "SET IDENTITY_INSERT %s OFF" %
+ self.dialect.identifier_preparer. format_table(
+ self.compiled.statement.table)))
except:
pass
@@ -830,11 +846,11 @@ class MSSQLCompiler(compiler.SQLCompiler):
extract_map = util.update_copy(
compiler.SQLCompiler.extract_map,
{
- 'doy': 'dayofyear',
- 'dow': 'weekday',
- 'milliseconds': 'millisecond',
- 'microseconds': 'microsecond'
- })
+ 'doy': 'dayofyear',
+ 'dow': 'weekday',
+ 'milliseconds': 'millisecond',
+ 'microseconds': 'microsecond'
+ })
def __init__(self, *args, **kwargs):
self.tablealiases = {}
@@ -854,8 +870,8 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_concat_op_binary(self, binary, operator, **kw):
return "%s + %s" % \
- (self.process(binary.left, **kw),
- self.process(binary.right, **kw))
+ (self.process(binary.left, **kw),
+ self.process(binary.right, **kw))
def visit_true(self, expr, **kw):
return '1'
@@ -865,8 +881,8 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_match_op_binary(self, binary, operator, **kw):
return "CONTAINS (%s, %s)" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw))
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw))
def get_select_precolumns(self, select):
""" MS-SQL puts TOP, it's version of LIMIT here """
@@ -902,20 +918,20 @@ class MSSQLCompiler(compiler.SQLCompiler):
"""
if (
- (
- not select._simple_int_limit and
- select._limit_clause is not None
- ) or (
- select._offset_clause is not None and
- not select._simple_int_offset or select._offset
- )
- ) and not getattr(select, '_mssql_visit', None):
+ (
+ not select._simple_int_limit and
+ select._limit_clause is not None
+ ) or (
+ select._offset_clause is not None and
+ not select._simple_int_offset or select._offset
+ )
+ ) and not getattr(select, '_mssql_visit', None):
# to use ROW_NUMBER(), an ORDER BY is required.
if not select._order_by_clause.clauses:
raise exc.CompileError('MSSQL requires an order_by when '
- 'using an OFFSET or a non-simple '
- 'LIMIT clause')
+ 'using an OFFSET or a non-simple '
+ 'LIMIT clause')
_order_by_clauses = select._order_by_clause.clauses
limit_clause = select._limit_clause
@@ -923,20 +939,20 @@ class MSSQLCompiler(compiler.SQLCompiler):
select = select._generate()
select._mssql_visit = True
select = select.column(
- sql.func.ROW_NUMBER().over(order_by=_order_by_clauses)
- .label("mssql_rn")).order_by(None).alias()
+ sql.func.ROW_NUMBER().over(order_by=_order_by_clauses)
+ .label("mssql_rn")).order_by(None).alias()
mssql_rn = sql.column('mssql_rn')
limitselect = sql.select([c for c in select.c if
- c.key != 'mssql_rn'])
+ c.key != 'mssql_rn'])
if offset_clause is not None:
limitselect.append_whereclause(mssql_rn > offset_clause)
if limit_clause is not None:
limitselect.append_whereclause(
- mssql_rn <= (limit_clause + offset_clause))
+ mssql_rn <= (limit_clause + offset_clause))
else:
limitselect.append_whereclause(
- mssql_rn <= (limit_clause))
+ mssql_rn <= (limit_clause))
return self.process(limitselect, iswrapper=True, **kwargs)
else:
return compiler.SQLCompiler.visit_select(self, select, **kwargs)
@@ -968,10 +984,11 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_extract(self, extract, **kw):
field = self.extract_map.get(extract.field, extract.field)
return 'DATEPART("%s", %s)' % \
- (field, self.process(extract.expr, **kw))
+ (field, self.process(extract.expr, **kw))
def visit_savepoint(self, savepoint_stmt):
- return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt)
+ return "SAVE TRANSACTION %s" % \
+ self.preparer.format_savepoint(savepoint_stmt)
def visit_rollback_to_savepoint(self, savepoint_stmt):
return ("ROLLBACK TRANSACTION %s"
@@ -979,25 +996,26 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_column(self, column, add_to_result_map=None, **kwargs):
if column.table is not None and \
- (not self.isupdate and not self.isdelete) or self.is_subquery():
+ (not self.isupdate and not self.isdelete) or \
+ self.is_subquery():
# translate for schema-qualified table aliases
t = self._schema_aliased_table(column.table)
if t is not None:
converted = expression._corresponding_column_or_error(
- t, column)
+ t, column)
if add_to_result_map is not None:
add_to_result_map(
- column.name,
- column.name,
- (column, column.name, column.key),
- column.type
+ column.name,
+ column.name,
+ (column, column.name, column.key),
+ column.type
)
return super(MSSQLCompiler, self).\
- visit_column(converted, **kwargs)
+ visit_column(converted, **kwargs)
return super(MSSQLCompiler, self).visit_column(
- column, add_to_result_map=add_to_result_map, **kwargs)
+ column, add_to_result_map=add_to_result_map, **kwargs)
def visit_binary(self, binary, **kwargs):
"""Move bind parameters to the right-hand side of an operator, where
@@ -1008,12 +1026,12 @@ class MSSQLCompiler(compiler.SQLCompiler):
isinstance(binary.left, expression.BindParameter)
and binary.operator == operator.eq
and not isinstance(binary.right, expression.BindParameter)
- ):
+ ):
return self.process(
- expression.BinaryExpression(binary.right,
- binary.left,
- binary.operator),
- **kwargs)
+ expression.BinaryExpression(binary.right,
+ binary.left,
+ binary.operator),
+ **kwargs)
return super(MSSQLCompiler, self).visit_binary(binary, **kwargs)
def returning_clause(self, stmt, returning_cols):
@@ -1026,10 +1044,10 @@ class MSSQLCompiler(compiler.SQLCompiler):
adapter = sql_util.ClauseAdapter(target)
columns = [
- self._label_select_column(None, adapter.traverse(c),
- True, False, {})
- for c in expression._select_iterables(returning_cols)
- ]
+ self._label_select_column(None, adapter.traverse(c),
+ True, False, {})
+ for c in expression._select_iterables(returning_cols)
+ ]
return 'OUTPUT ' + ', '.join(columns)
@@ -1045,7 +1063,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
return column.label(None)
else:
return super(MSSQLCompiler, self).\
- label_select_column(select, column, asfrom)
+ label_select_column(select, column, asfrom)
def for_update_clause(self, select):
# "FOR UPDATE" is only allowed on "DECLARE CURSOR" which
@@ -1062,9 +1080,9 @@ class MSSQLCompiler(compiler.SQLCompiler):
return ""
def update_from_clause(self, update_stmt,
- from_table, extra_froms,
- from_hints,
- **kw):
+ from_table, extra_froms,
+ from_hints,
+ **kw):
"""Render the UPDATE..FROM clause specific to MSSQL.
In MSSQL, if the UPDATE statement involves an alias of the table to
@@ -1073,12 +1091,13 @@ class MSSQLCompiler(compiler.SQLCompiler):
"""
return "FROM " + ', '.join(
- t._compiler_dispatch(self, asfrom=True,
- fromhints=from_hints, **kw)
- for t in [from_table] + extra_froms)
+ t._compiler_dispatch(self, asfrom=True,
+ fromhints=from_hints, **kw)
+ for t in [from_table] + extra_froms)
class MSSQLStrictCompiler(MSSQLCompiler):
+
"""A subclass of MSSQLCompiler which disables the usage of bind
parameters where not allowed natively by MS-SQL.
@@ -1091,16 +1110,16 @@ class MSSQLStrictCompiler(MSSQLCompiler):
def visit_in_op_binary(self, binary, operator, **kw):
kw['literal_binds'] = True
return "%s IN %s" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw)
- )
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw)
+ )
def visit_notin_op_binary(self, binary, operator, **kw):
kw['literal_binds'] = True
return "%s NOT IN %s" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw)
- )
+ self.process(binary.left, **kw),
+ self.process(binary.right, **kw)
+ )
def render_literal_value(self, value, type_):
"""
@@ -1119,10 +1138,11 @@ class MSSQLStrictCompiler(MSSQLCompiler):
return "'" + str(value) + "'"
else:
return super(MSSQLStrictCompiler, self).\
- render_literal_value(value, type_)
+ render_literal_value(value, type_)
class MSDDLCompiler(compiler.DDLCompiler):
+
def get_column_specification(self, column, **kwargs):
colspec = (self.preparer.format_column(column) + " "
+ self.dialect.type_compiler.process(column.type))
@@ -1136,17 +1156,19 @@ class MSDDLCompiler(compiler.DDLCompiler):
if column.table is None:
raise exc.CompileError(
- "mssql requires Table-bound columns "
- "in order to generate DDL")
+ "mssql requires Table-bound columns "
+ "in order to generate DDL")
- # install an IDENTITY Sequence if we either a sequence or an implicit IDENTITY column
+ # install an IDENTITY Sequence if we either a sequence or an implicit
+ # IDENTITY column
if isinstance(column.default, sa_schema.Sequence):
if column.default.start == 0:
start = 0
else:
start = column.default.start or 1
- colspec += " IDENTITY(%s,%s)" % (start, column.default.increment or 1)
+ colspec += " IDENTITY(%s,%s)" % (start,
+ column.default.increment or 1)
elif column is column.table._autoincrement_column:
colspec += " IDENTITY(1,1)"
else:
@@ -1169,21 +1191,24 @@ class MSDDLCompiler(compiler.DDLCompiler):
text += "CLUSTERED "
text += "INDEX %s ON %s (%s)" \
- % (
- self._prepared_index_name(index,
- include_schema=include_schema),
- preparer.format_table(index.table),
- ', '.join(
- self.sql_compiler.process(expr,
- include_table=False, literal_binds=True) for
- expr in index.expressions)
- )
+ % (
+ self._prepared_index_name(index,
+ include_schema=include_schema),
+ preparer.format_table(index.table),
+ ', '.join(
+ self.sql_compiler.process(expr,
+ include_table=False,
+ literal_binds=True) for
+ expr in index.expressions)
+ )
# handle other included columns
if index.dialect_options['mssql']['include']:
inclusions = [index.table.c[col]
- if isinstance(col, util.string_types) else col
- for col in index.dialect_options['mssql']['include']]
+ if isinstance(col, util.string_types) else col
+ for col in
+ index.dialect_options['mssql']['include']
+ ]
text += " INCLUDE (%s)" \
% ', '.join([preparer.quote(c.name)
@@ -1195,7 +1220,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
return "\nDROP INDEX %s ON %s" % (
self._prepared_index_name(drop.element, include_schema=False),
self.preparer.format_table(drop.element.table)
- )
+ )
def visit_primary_key_constraint(self, constraint):
if len(constraint) == 0:
@@ -1231,6 +1256,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
text += self.define_constraint_deferrability(constraint)
return text
+
class MSIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = RESERVED_WORDS
@@ -1251,7 +1277,7 @@ def _db_plus_owner_listing(fn):
def wrap(dialect, connection, schema=None, **kw):
dbname, owner = _owner_plus_db(dialect, schema)
return _switch_db(dbname, connection, fn, dialect, connection,
- dbname, owner, schema, **kw)
+ dbname, owner, schema, **kw)
return update_wrapper(wrap, fn)
@@ -1259,7 +1285,7 @@ def _db_plus_owner(fn):
def wrap(dialect, connection, tablename, schema=None, **kw):
dbname, owner = _owner_plus_db(dialect, schema)
return _switch_db(dbname, connection, fn, dialect, connection,
- tablename, dbname, owner, schema, **kw)
+ tablename, dbname, owner, schema, **kw)
return update_wrapper(wrap, fn)
@@ -1334,7 +1360,7 @@ class MSDialect(default.DefaultDialect):
self.use_scope_identity = use_scope_identity
self.max_identifier_length = int(max_identifier_length or 0) or \
- self.max_identifier_length
+ self.max_identifier_length
super(MSDialect, self).__init__(**opts)
def do_savepoint(self, connection, name):
@@ -1359,7 +1385,7 @@ class MSDialect(default.DefaultDialect):
"is configured in the FreeTDS configuration." %
".".join(str(x) for x in self.server_version_info))
if self.server_version_info >= MS_2005_VERSION and \
- 'implicit_returning' not in self.__dict__:
+ 'implicit_returning' not in self.__dict__:
self.implicit_returning = True
if self.server_version_info >= MS_2008_VERSION:
self.supports_multivalues_insert = True
@@ -1395,8 +1421,8 @@ class MSDialect(default.DefaultDialect):
@reflection.cache
def get_schema_names(self, connection, **kw):
s = sql.select([ischema.schemata.c.schema_name],
- order_by=[ischema.schemata.c.schema_name]
- )
+ order_by=[ischema.schemata.c.schema_name]
+ )
schema_names = [r[0] for r in connection.execute(s)]
return schema_names
@@ -1405,10 +1431,10 @@ class MSDialect(default.DefaultDialect):
def get_table_names(self, connection, dbname, owner, schema, **kw):
tables = ischema.tables
s = sql.select([tables.c.table_name],
- sql.and_(
- tables.c.table_schema == owner,
- tables.c.table_type == 'BASE TABLE'
- ),
+ sql.and_(
+ tables.c.table_schema == owner,
+ tables.c.table_type == 'BASE TABLE'
+ ),
order_by=[tables.c.table_name]
)
table_names = [r[0] for r in connection.execute(s)]
@@ -1419,10 +1445,10 @@ class MSDialect(default.DefaultDialect):
def get_view_names(self, connection, dbname, owner, schema, **kw):
tables = ischema.tables
s = sql.select([tables.c.table_name],
- sql.and_(
- tables.c.table_schema == owner,
- tables.c.table_type == 'VIEW'
- ),
+ sql.and_(
+ tables.c.table_schema == owner,
+ tables.c.table_type == 'VIEW'
+ ),
order_by=[tables.c.table_name]
)
view_names = [r[0] for r in connection.execute(s)]
@@ -1438,22 +1464,22 @@ class MSDialect(default.DefaultDialect):
rp = connection.execute(
sql.text("select ind.index_id, ind.is_unique, ind.name "
- "from sys.indexes as ind join sys.tables as tab on "
- "ind.object_id=tab.object_id "
- "join sys.schemas as sch on sch.schema_id=tab.schema_id "
- "where tab.name = :tabname "
- "and sch.name=:schname "
- "and ind.is_primary_key=0",
- bindparams=[
- sql.bindparam('tabname', tablename,
- sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', owner,
- sqltypes.String(convert_unicode=True))
- ],
- typemap={
- 'name': sqltypes.Unicode()
- }
- )
+ "from sys.indexes as ind join sys.tables as tab on "
+ "ind.object_id=tab.object_id "
+ "join sys.schemas as sch on sch.schema_id=tab.schema_id "
+ "where tab.name = :tabname "
+ "and sch.name=:schname "
+ "and ind.is_primary_key=0",
+ bindparams=[
+ sql.bindparam('tabname', tablename,
+ sqltypes.String(convert_unicode=True)),
+ sql.bindparam('schname', owner,
+ sqltypes.String(convert_unicode=True))
+ ],
+ typemap={
+ 'name': sqltypes.Unicode()
+ }
+ )
)
indexes = {}
for row in rp:
@@ -1473,15 +1499,15 @@ class MSDialect(default.DefaultDialect):
"join sys.schemas as sch on sch.schema_id=tab.schema_id "
"where tab.name=:tabname "
"and sch.name=:schname",
- bindparams=[
- sql.bindparam('tabname', tablename,
- sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', owner,
- sqltypes.String(convert_unicode=True))
- ],
- typemap={'name': sqltypes.Unicode()}
- ),
- )
+ bindparams=[
+ sql.bindparam('tabname', tablename,
+ sqltypes.String(convert_unicode=True)),
+ sql.bindparam('schname', owner,
+ sqltypes.String(convert_unicode=True))
+ ],
+ typemap={'name': sqltypes.Unicode()}
+ ),
+ )
for row in rp:
if row['index_id'] in indexes:
indexes[row['index_id']]['column_names'].append(row['name'])
@@ -1490,7 +1516,8 @@ class MSDialect(default.DefaultDialect):
@reflection.cache
@_db_plus_owner
- def get_view_definition(self, connection, viewname, dbname, owner, schema, **kw):
+ def get_view_definition(self, connection, viewname,
+ dbname, owner, schema, **kw):
rp = connection.execute(
sql.text(
"select definition from sys.sql_modules as mod, "
@@ -1502,9 +1529,9 @@ class MSDialect(default.DefaultDialect):
"views.name=:viewname and sch.name=:schname",
bindparams=[
sql.bindparam('viewname', viewname,
- sqltypes.String(convert_unicode=True)),
+ sqltypes.String(convert_unicode=True)),
sql.bindparam('schname', owner,
- sqltypes.String(convert_unicode=True))
+ sqltypes.String(convert_unicode=True))
]
)
)
@@ -1524,7 +1551,7 @@ class MSDialect(default.DefaultDialect):
else:
whereclause = columns.c.table_name == tablename
s = sql.select([columns], whereclause,
- order_by=[columns.c.ordinal_position])
+ order_by=[columns.c.ordinal_position])
c = connection.execute(s)
cols = []
@@ -1594,7 +1621,7 @@ class MSDialect(default.DefaultDialect):
ic = col_name
colmap[col_name]['autoincrement'] = True
colmap[col_name]['sequence'] = dict(
- name='%s_identity' % col_name)
+ name='%s_identity' % col_name)
break
cursor.close()
@@ -1603,7 +1630,7 @@ class MSDialect(default.DefaultDialect):
cursor = connection.execute(
"select ident_seed('%s'), ident_incr('%s')"
% (table_fullname, table_fullname)
- )
+ )
row = cursor.first()
if row is not None and row[0] is not None:
@@ -1615,18 +1642,21 @@ class MSDialect(default.DefaultDialect):
@reflection.cache
@_db_plus_owner
- def get_pk_constraint(self, connection, tablename, dbname, owner, schema, **kw):
+ def get_pk_constraint(self, connection, tablename,
+ dbname, owner, schema, **kw):
pkeys = []
TC = ischema.constraints
C = ischema.key_constraints.alias('C')
# Primary key constraints
- s = sql.select([C.c.column_name, TC.c.constraint_type, C.c.constraint_name],
- sql.and_(TC.c.constraint_name == C.c.constraint_name,
- TC.c.table_schema == C.c.table_schema,
- C.c.table_name == tablename,
- C.c.table_schema == owner)
- )
+ s = sql.select([C.c.column_name,
+ TC.c.constraint_type,
+ C.c.constraint_name],
+ sql.and_(TC.c.constraint_name == C.c.constraint_name,
+ TC.c.table_schema == C.c.table_schema,
+ C.c.table_name == tablename,
+ C.c.table_schema == owner)
+ )
c = connection.execute(s)
constraint_name = None
for row in c:
@@ -1638,7 +1668,8 @@ class MSDialect(default.DefaultDialect):
@reflection.cache
@_db_plus_owner
- def get_foreign_keys(self, connection, tablename, dbname, owner, schema, **kw):
+ def get_foreign_keys(self, connection, tablename,
+ dbname, owner, schema, **kw):
RR = ischema.ref_constraints
C = ischema.key_constraints.alias('C')
R = ischema.key_constraints.alias('R')
@@ -1653,11 +1684,11 @@ class MSDialect(default.DefaultDialect):
C.c.table_schema == owner,
C.c.constraint_name == RR.c.constraint_name,
R.c.constraint_name ==
- RR.c.unique_constraint_name,
+ RR.c.unique_constraint_name,
C.c.ordinal_position == R.c.ordinal_position
),
order_by=[RR.c.constraint_name, R.c.ordinal_position]
- )
+ )
# group rows by constraint ID, to handle multi-column FKs
fkeys = []
@@ -1687,8 +1718,8 @@ class MSDialect(default.DefaultDialect):
rec['referred_schema'] = rschema
local_cols, remote_cols = \
- rec['constrained_columns'],\
- rec['referred_columns']
+ rec['constrained_columns'],\
+ rec['referred_columns']
local_cols.append(scol)
remote_cols.append(rcol)
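
A runnable restatement of the IDENTITY example from the mssql docstring
above: on SQL Server the :class:`.schema.Sequence` carries no sequence
semantics and instead supplies the IDENTITY "start" and "increment" values::

    from sqlalchemy import Table, Column, Integer, String, Sequence, MetaData

    metadata = MetaData()

    # renders "id INTEGER NOT NULL IDENTITY(100,10)" in the CREATE TABLE
    t = Table(
        'test', metadata,
        Column('id', Integer,
               Sequence('blah', start=100, increment=10),
               primary_key=True),
        Column('name', String(20)),
    )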
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 77251e61a..371a1edcc 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -5,7 +5,8 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-# TODO: should be using the sys. catalog with SQL Server, not information schema
+# TODO: should be using the sys. catalog with SQL Server, not information
+# schema
from ... import Table, MetaData, Column
from ...types import String, Unicode, UnicodeText, Integer, TypeDecorator
@@ -16,6 +17,7 @@ from ...ext.compiler import compiles
ischema = MetaData()
+
class CoerceUnicode(TypeDecorator):
impl = Unicode
@@ -27,10 +29,12 @@ class CoerceUnicode(TypeDecorator):
def bind_expression(self, bindvalue):
return _cast_on_2005(bindvalue)
+
class _cast_on_2005(expression.ColumnElement):
def __init__(self, bindvalue):
self.bindvalue = bindvalue
+
@compiles(_cast_on_2005)
def _compile(element, compiler, **kw):
from . import base
@@ -40,76 +44,93 @@ def _compile(element, compiler, **kw):
return compiler.process(cast(element.bindvalue, Unicode), **kw)
schemata = Table("SCHEMATA", ischema,
- Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
- Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
- Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
- schema="INFORMATION_SCHEMA")
+ Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
+ Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
+ Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
+ schema="INFORMATION_SCHEMA")
tables = Table("TABLES", ischema,
- Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("TABLE_TYPE", String(convert_unicode=True), key="table_type"),
- schema="INFORMATION_SCHEMA")
+ Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
+ Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
+ Column("TABLE_NAME", CoerceUnicode, key="table_name"),
+ Column(
+ "TABLE_TYPE", String(convert_unicode=True),
+ key="table_type"),
+ schema="INFORMATION_SCHEMA")
columns = Table("COLUMNS", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
- Column("IS_NULLABLE", Integer, key="is_nullable"),
- Column("DATA_TYPE", String, key="data_type"),
- Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
- Column("CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"),
- Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
- Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
- Column("COLUMN_DEFAULT", Integer, key="column_default"),
- Column("COLLATION_NAME", String, key="collation_name"),
- schema="INFORMATION_SCHEMA")
+ Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
+ Column("TABLE_NAME", CoerceUnicode, key="table_name"),
+ Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
+ Column("IS_NULLABLE", Integer, key="is_nullable"),
+ Column("DATA_TYPE", String, key="data_type"),
+ Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
+ Column("CHARACTER_MAXIMUM_LENGTH", Integer,
+ key="character_maximum_length"),
+ Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
+ Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
+ Column("COLUMN_DEFAULT", Integer, key="column_default"),
+ Column("COLLATION_NAME", String, key="collation_name"),
+ schema="INFORMATION_SCHEMA")
constraints = Table("TABLE_CONSTRAINTS", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- Column("CONSTRAINT_TYPE", String(convert_unicode=True), key="constraint_type"),
- schema="INFORMATION_SCHEMA")
+ Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
+ Column("TABLE_NAME", CoerceUnicode, key="table_name"),
+ Column("CONSTRAINT_NAME", CoerceUnicode,
+ key="constraint_name"),
+ Column("CONSTRAINT_TYPE", String(
+ convert_unicode=True), key="constraint_type"),
+ schema="INFORMATION_SCHEMA")
column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- schema="INFORMATION_SCHEMA")
+ Column("TABLE_SCHEMA", CoerceUnicode,
+ key="table_schema"),
+ Column("TABLE_NAME", CoerceUnicode,
+ key="table_name"),
+ Column("COLUMN_NAME", CoerceUnicode,
+ key="column_name"),
+ Column("CONSTRAINT_NAME", CoerceUnicode,
+ key="constraint_name"),
+ schema="INFORMATION_SCHEMA")
key_constraints = Table("KEY_COLUMN_USAGE", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
- schema="INFORMATION_SCHEMA")
+ Column("TABLE_SCHEMA", CoerceUnicode,
+ key="table_schema"),
+ Column("TABLE_NAME", CoerceUnicode,
+ key="table_name"),
+ Column("COLUMN_NAME", CoerceUnicode,
+ key="column_name"),
+ Column("CONSTRAINT_NAME", CoerceUnicode,
+ key="constraint_name"),
+ Column("ORDINAL_POSITION", Integer,
+ key="ordinal_position"),
+ schema="INFORMATION_SCHEMA")
ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
- Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"),
- Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- # TODO: is CATLOG misspelled ?
- Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
- key="unique_constraint_catalog"),
-
- Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
- key="unique_constraint_schema"),
- Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
- key="unique_constraint_name"),
- Column("MATCH_OPTION", String, key="match_option"),
- Column("UPDATE_RULE", String, key="update_rule"),
- Column("DELETE_RULE", String, key="delete_rule"),
- schema="INFORMATION_SCHEMA")
+ Column("CONSTRAINT_CATALOG", CoerceUnicode,
+ key="constraint_catalog"),
+ Column("CONSTRAINT_SCHEMA", CoerceUnicode,
+ key="constraint_schema"),
+ Column("CONSTRAINT_NAME", CoerceUnicode,
+ key="constraint_name"),
+ # TODO: is CATLOG misspelled ?
+ Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
+ key="unique_constraint_catalog"),
+
+ Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
+ key="unique_constraint_schema"),
+ Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
+ key="unique_constraint_name"),
+ Column("MATCH_OPTION", String, key="match_option"),
+ Column("UPDATE_RULE", String, key="update_rule"),
+ Column("DELETE_RULE", String, key="delete_rule"),
+ schema="INFORMATION_SCHEMA")
views = Table("VIEWS", ischema,
- Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
- Column("CHECK_OPTION", String, key="check_option"),
- Column("IS_UPDATABLE", String, key="is_updatable"),
- schema="INFORMATION_SCHEMA")
+ Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
+ Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
+ Column("TABLE_NAME", CoerceUnicode, key="table_name"),
+ Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
+ Column("CHECK_OPTION", String, key="check_option"),
+ Column("IS_UPDATABLE", String, key="is_updatable"),
+ schema="INFORMATION_SCHEMA")
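The table definitions being re-indented above all funnel string binds through
``CoerceUnicode``, a ``TypeDecorator`` whose ``bind_expression()`` wraps each
bound parameter in the ``_cast_on_2005`` element so that the ``@compiles``
hook can emit a CAST only on SQL Server 2005 and later. A minimal,
self-contained sketch of the same technique, with hypothetical names and a
placeholder version tuple standing in for ``base.MS_2005_VERSION``::

    from sqlalchemy import cast, Unicode
    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql import expression
    from sqlalchemy.types import TypeDecorator

    class _MaybeCast(expression.ColumnElement):
        # wraps the bound parameter so compilation can decide on CAST
        def __init__(self, bindvalue):
            self.bindvalue = bindvalue

    @compiles(_MaybeCast)
    def _compile_maybe_cast(element, compiler, **kw):
        # (9,) is a placeholder cutoff; the real module compares the
        # dialect's server_version_info against MS_2005_VERSION
        if compiler.dialect.server_version_info and \
                compiler.dialect.server_version_info < (9, ):
            return compiler.process(element.bindvalue, **kw)
        return compiler.process(cast(element.bindvalue, Unicode), **kw)

    class CoercingUnicode(TypeDecorator):
        impl = Unicode

        def bind_expression(self, bindvalue):
            return _MaybeCast(bindvalue)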
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index ad9e9c2ba..ffe38d8dd 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -47,8 +47,8 @@ from ... import types as sqltypes
from ...connectors.mxodbc import MxODBCConnector
from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc
from .base import (MSDialect,
- MSSQLStrictCompiler,
- _MSDateTime, _MSDate, _MSTime)
+ MSSQLStrictCompiler,
+ _MSDateTime, _MSDate, _MSTime)
class _MSNumeric_mxodbc(_MSNumeric_pyodbc):
@@ -82,7 +82,7 @@ class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
SELECT SCOPE_IDENTITY in cases where OUTPUT clause
does not work (tables with insert triggers).
"""
- #todo - investigate whether the pyodbc execution context
+ # todo - investigate whether the pyodbc execution context
# is really only being used in cases where OUTPUT
# won't work.
@@ -91,7 +91,7 @@ class MSDialect_mxodbc(MxODBCConnector, MSDialect):
# this is only needed if "native ODBC" mode is used,
# which is now disabled by default.
- #statement_compiler = MSSQLStrictCompiler
+ # statement_compiler = MSSQLStrictCompiler
execution_ctx_cls = MSExecutionContext_mxodbc
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index 4b7be1ac4..8f76336ae 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -9,7 +9,8 @@
.. dialect:: mssql+pymssql
:name: pymssql
:dbapi: pymssql
- :connectstring: mssql+pymssql://<username>:<password>@<freetds_name>?charset=utf8
+ :connectstring: mssql+pymssql://<username>:<password>@<freetds_name>?\
+charset=utf8
:url: http://pymssql.org/
pymssql is a Python module that provides a Python DBAPI interface around
@@ -52,7 +53,7 @@ class MSDialect_pymssql(MSDialect):
client_ver = tuple(int(x) for x in module.__version__.split("."))
if client_ver < (1, ):
util.warn("The pymssql dialect expects at least "
- "the 1.0 series of the pymssql DBAPI.")
+ "the 1.0 series of the pymssql DBAPI.")
return module
def __init__(self, **params):
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 31c55f502..1c75fe1ff 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -55,9 +55,9 @@ Examples of pyodbc connection string URLs:
DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123
-* ``mssql+pyodbc://user:pass@host/db?driver=MyDriver`` - connects using a connection
- string that includes a custom
- ODBC driver name. This will create the following connection string::
+* ``mssql+pyodbc://user:pass@host/db?driver=MyDriver`` - connects using a
+ connection string that includes a custom ODBC driver name. This will create
+ the following connection string::
DRIVER={MyDriver};Server=host;Database=db;UID=user;PWD=pass
@@ -85,14 +85,14 @@ Unicode Binds
-------------
The current state of PyODBC on a unix backend with FreeTDS and/or
-EasySoft is poor regarding unicode; different OS platforms and versions of UnixODBC
-versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically
-alter how strings are received. The PyODBC dialect attempts to use all the information
-it knows to determine whether or not a Python unicode literal can be
-passed directly to the PyODBC driver or not; while SQLAlchemy can encode
-these to bytestrings first, some users have reported that PyODBC mis-handles
-bytestrings for certain encodings and requires a Python unicode object,
-while the author has observed widespread cases where a Python unicode
+EasySoft is poor regarding unicode; different OS platforms and versions of
+UnixODBC versus IODBC versus FreeTDS/EasySoft versus PyODBC itself
+dramatically alter how strings are received. The PyODBC dialect attempts to
+use all the information it knows to determine whether or not a Python unicode
+literal can be passed directly to the PyODBC driver or not; while SQLAlchemy
+can encode these to bytestrings first, some users have reported that PyODBC
+mis-handles bytestrings for certain encodings and requires a Python unicode
+object, while the author has observed widespread cases where a Python unicode
is completely misinterpreted by PyODBC, particularly when dealing with
the information schema tables used in table reflection, and the value
must first be encoded to a bytestring.
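The dialect's decision can also be forced from the application side; a hedged
sketch, assuming the ``supports_unicode_binds`` parameter that this section of
the documentation goes on to describe, with a placeholder DSN::

    from sqlalchemy import create_engine

    # assumption: supports_unicode_binds is accepted by create_engine()
    # and forwarded to the pyodbc dialect; False forces encoded
    # bytestrings for bound parameters
    engine = create_engine(
        "mssql+pyodbc://user:pass@mydsn",
        supports_unicode_binds=False)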
@@ -117,6 +117,7 @@ from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal
+
class _ms_numeric_pyodbc(object):
"""Turns Decimals with adjusted() < 0 or > 7 into strings.
@@ -129,7 +130,7 @@ class _ms_numeric_pyodbc(object):
def bind_processor(self, dialect):
super_process = super(_ms_numeric_pyodbc, self).\
- bind_processor(dialect)
+ bind_processor(dialect)
if not dialect._need_decimal_fix:
return super_process
@@ -155,38 +156,41 @@ class _ms_numeric_pyodbc(object):
def _small_dec_to_string(self, value):
return "%s0.%s%s" % (
- (value < 0 and '-' or ''),
- '0' * (abs(value.adjusted()) - 1),
- "".join([str(nint) for nint in value.as_tuple()[1]]))
+ (value < 0 and '-' or ''),
+ '0' * (abs(value.adjusted()) - 1),
+ "".join([str(nint) for nint in value.as_tuple()[1]]))
def _large_dec_to_string(self, value):
_int = value.as_tuple()[1]
if 'E' in str(value):
result = "%s%s%s" % (
- (value < 0 and '-' or ''),
- "".join([str(s) for s in _int]),
- "0" * (value.adjusted() - (len(_int) - 1)))
+ (value < 0 and '-' or ''),
+ "".join([str(s) for s in _int]),
+ "0" * (value.adjusted() - (len(_int) - 1)))
else:
if (len(_int) - 1) > value.adjusted():
result = "%s%s.%s" % (
- (value < 0 and '-' or ''),
- "".join(
- [str(s) for s in _int][0:value.adjusted() + 1]),
- "".join(
- [str(s) for s in _int][value.adjusted() + 1:]))
+ (value < 0 and '-' or ''),
+ "".join(
+ [str(s) for s in _int][0:value.adjusted() + 1]),
+ "".join(
+ [str(s) for s in _int][value.adjusted() + 1:]))
else:
result = "%s%s" % (
- (value < 0 and '-' or ''),
- "".join(
- [str(s) for s in _int][0:value.adjusted() + 1]))
+ (value < 0 and '-' or ''),
+ "".join(
+ [str(s) for s in _int][0:value.adjusted() + 1]))
return result
+
class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
pass
+
class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
pass
+
class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False
@@ -254,9 +258,9 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
self.description_encoding = params.pop('description_encoding')
super(MSDialect_pyodbc, self).__init__(**params)
self.use_scope_identity = self.use_scope_identity and \
- self.dbapi and \
- hasattr(self.dbapi.Cursor, 'nextset')
+ self.dbapi and \
+ hasattr(self.dbapi.Cursor, 'nextset')
self._need_decimal_fix = self.dbapi and \
- self._dbapi_version() < (2, 1, 8)
+ self._dbapi_version() < (2, 1, 8)
dialect = MSDialect_pyodbc
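For background on the ``_ms_numeric_pyodbc`` hunks above: per its docstring,
the mixin turns ``Decimal`` values with ``adjusted() < 0`` or ``> 7`` into
strings, sidestepping older pyodbc releases that mishandled scientific
notation. A standalone illustration of where the boundary falls, independent
of the patch::

    from decimal import Decimal

    # adjusted() is the exponent of the most significant digit
    print(Decimal("0.00001234").adjusted())   # -5 -> _small_dec_to_string
    print(Decimal("12340000000").adjusted())  # 10 -> _large_dec_to_string
    print(Decimal("123.45").adjusted())       # 2  -> passed through unchanged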
diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
index 5377be1ce..b23a010e7 100644
--- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
@@ -9,7 +9,8 @@
.. dialect:: mssql+zxjdbc
:name: zxJDBC for Jython
:dbapi: zxjdbc
- :connectstring: mssql+zxjdbc://user:pass@host:port/dbname[?key=value&key=value...]
+ :connectstring: mssql+zxjdbc://user:pass@host:port/dbname\
+[?key=value&key=value...]
:driverurl: http://jtds.sourceforge.net/
@@ -42,12 +43,12 @@ class MSExecutionContext_zxjdbc(MSExecutionContext):
self._lastrowid = int(row[0])
if (self.isinsert or self.isupdate or self.isdelete) and \
- self.compiled.returning:
+ self.compiled.returning:
self._result_proxy = engine.FullyBufferedResultProxy(self)
if self._enable_identity_insert:
table = self.dialect.identifier_preparer.format_table(
- self.compiled.statement.table)
+ self.compiled.statement.table)
self.cursor.execute("SET IDENTITY_INSERT %s OFF" % table)
@@ -59,8 +60,8 @@ class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect):
def _get_server_version_info(self, connection):
return tuple(
- int(x)
- for x in connection.connection.dbversion.split('.')
- )
+ int(x)
+ for x in connection.connection.dbversion.split('.')
+ )
dialect = MSDialect_zxjdbc
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index a9dbd819e..498603cf7 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -6,8 +6,8 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import base, mysqldb, oursql, \
- pyodbc, zxjdbc, mysqlconnector, pymysql,\
- gaerdbms, cymysql
+ pyodbc, zxjdbc, mysqlconnector, pymysql,\
+ gaerdbms, cymysql
# default dialect
base.dialect = mysqldb.dialect
@@ -22,8 +22,10 @@ from .base import \
VARBINARY, VARCHAR, YEAR, dialect
__all__ = (
-'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE',
-'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT',
-'MEDIUMTEXT', 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP',
-'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR', 'YEAR', 'dialect'
+ 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME',
+ 'DECIMAL', 'DOUBLE', 'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER',
+ 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT', 'MEDIUMTEXT', 'NCHAR',
+ 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME',
+ 'TIMESTAMP', 'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR',
+ 'YEAR', 'dialect'
)
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index ee5747e39..0c00cf530 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -26,9 +26,9 @@ supported in any given server release.
Connection Timeouts
-------------------
-MySQL features an automatic connection close behavior, for connections that have
-been idle for eight hours or more. To circumvent having this issue, use the
-``pool_recycle`` option which controls the maximum age of any connection::
+MySQL features an automatic connection close behavior for connections that
+have been idle for eight hours or more. To avoid this issue, use the
+``pool_recycle`` option, which controls the maximum age of any connection::
engine = create_engine('mysql+mysqldb://...', pool_recycle=3600)
@@ -38,10 +38,12 @@ CREATE TABLE arguments including Storage Engines
------------------------------------------------
MySQL's CREATE TABLE syntax includes a wide array of special options,
-including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``, ``INSERT_METHOD``, and many more.
+including ``ENGINE``, ``CHARSET``, ``MAX_ROWS``, ``ROW_FORMAT``,
+``INSERT_METHOD``, and many more.
To accommodate the rendering of these arguments, specify the form
``mysql_argument_name="value"``. For example, to specify a table with
-``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8``, and ``KEY_BLOCK_SIZE`` of ``1024``::
+``ENGINE`` of ``InnoDB``, ``CHARSET`` of ``utf8``, and ``KEY_BLOCK_SIZE``
+of ``1024``::
Table('mytable', metadata,
Column('data', String(32)),
@@ -50,26 +52,28 @@ To accommodate the rendering of these arguments, specify the form
mysql_key_block_size="1024"
)
-The MySQL dialect will normally transfer any keyword specified as ``mysql_keyword_name``
-to be rendered as ``KEYWORD_NAME`` in the ``CREATE TABLE`` statement. A handful
-of these names will render with a space instead of an underscore; to support this,
-the MySQL dialect has awareness of these particular names, which include
-``DATA DIRECTORY`` (e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g.
-``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g. ``mysql_index_directory``).
-
-The most common argument is ``mysql_engine``, which refers to the storage engine
-for the table. Historically, MySQL server installations would default
+The MySQL dialect will normally transfer any keyword specified as
+``mysql_keyword_name`` to be rendered as ``KEYWORD_NAME`` in the
+``CREATE TABLE`` statement. A handful of these names will render with a space
+instead of an underscore; to support this, the MySQL dialect has awareness of
+these particular names, which include ``DATA DIRECTORY``
+(e.g. ``mysql_data_directory``), ``CHARACTER SET`` (e.g.
+``mysql_character_set``) and ``INDEX DIRECTORY`` (e.g.
+``mysql_index_directory``).
+
+The most common argument is ``mysql_engine``, which refers to the storage
+engine for the table. Historically, MySQL server installations would default
to ``MyISAM`` for this value, although newer versions may be defaulting
to ``InnoDB``. The ``InnoDB`` engine is typically preferred for its support
of transactions and foreign keys.
A :class:`.Table` that is created in a MySQL database with a storage engine
-of ``MyISAM`` will be essentially non-transactional, meaning any INSERT/UPDATE/DELETE
-statement referring to this table will be invoked as autocommit. It also will have no
-support for foreign key constraints; while the ``CREATE TABLE`` statement
-accepts foreign key options, when using the ``MyISAM`` storage engine these
-arguments are discarded. Reflecting such a table will also produce no
-foreign key constraint information.
+of ``MyISAM`` will be essentially non-transactional, meaning any
+INSERT/UPDATE/DELETE statement referring to this table will be invoked as
+autocommit. It also will have no support for foreign key constraints; while
+the ``CREATE TABLE`` statement accepts foreign key options, when using the
+``MyISAM`` storage engine these arguments are discarded. Reflecting such a
+table will also produce no foreign key constraint information.
For fully atomic transactions as well as support for foreign key
constraints, all participating ``CREATE TABLE`` statements must specify a
@@ -118,7 +122,8 @@ AUTO_INCREMENT Behavior
-----------------------
When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on
-the first :class:`.Integer` primary key column which is not marked as a foreign key::
+the first :class:`.Integer` primary key column which is not marked as a
+foreign key::
>>> t = Table('mytable', metadata,
... Column('mytable_id', Integer, primary_key=True)
@@ -129,10 +134,10 @@ the first :class:`.Integer` primary key column which is not marked as a foreign
PRIMARY KEY (id)
)
-You can disable this behavior by passing ``False`` to the :paramref:`~.Column.autoincrement`
-argument of :class:`.Column`. This flag can also be used to enable
-auto-increment on a secondary column in a multi-column key for some storage
-engines::
+You can disable this behavior by passing ``False`` to the
+:paramref:`~.Column.autoincrement` argument of :class:`.Column`. This flag
+can also be used to enable auto-increment on a secondary column in a
+multi-column key for some storage engines::
Table('mytable', metadata,
Column('gid', Integer, primary_key=True, autoincrement=False),
@@ -184,8 +189,8 @@ usual definition of "number of rows matched by an UPDATE or DELETE" statement.
This is in contradiction to the default setting on most MySQL DBAPI drivers,
which is "number of rows actually modified/deleted". For this reason, the
SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag,
-or whatever is equivalent for the DBAPI in use, on connect, unless the flag value
-is overridden using DBAPI-specific options
+or whatever is equivalent for the DBAPI in use, on connect, unless the flag
+value is overridden using DBAPI-specific options
(such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the
OurSQL driver).
@@ -197,14 +202,14 @@ See also:
CAST Support
------------
-MySQL documents the CAST operator as available in version 4.0.2. When using the
-SQLAlchemy :func:`.cast` function, SQLAlchemy
-will not render the CAST token on MySQL before this version, based on server version
-detection, instead rendering the internal expression directly.
+MySQL documents the CAST operator as available in version 4.0.2. When using
+the SQLAlchemy :func:`.cast` function, SQLAlchemy
+will not render the CAST token on MySQL before this version, based on server
+version detection, instead rendering the internal expression directly.
-CAST may still not be desirable on an early MySQL version post-4.0.2, as it didn't
-add all datatype support until 4.1.1. If your application falls into this
-narrow area, the behavior of CAST can be controlled using the
+CAST may still not be desirable on an early MySQL version post-4.0.2, as it
+didn't add all datatype support until 4.1.1. If your application falls into
+this narrow area, the behavior of CAST can be controlled using the
:ref:`sqlalchemy.ext.compiler_toplevel` system, as per the recipe below::
from sqlalchemy.sql.expression import Cast
@@ -241,7 +246,8 @@ become part of the index. SQLAlchemy provides this feature via the
Index('my_index', my_table.c.data, mysql_length=10)
- Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4, 'b': 9})
+ Index('a_b_idx', my_table.c.a, my_table.c.b, mysql_length={'a': 4,
+ 'b': 9})
Prefix lengths are given in characters for nonbinary string types and in bytes
for binary string types. The value passed to the keyword argument *must* be
@@ -289,10 +295,10 @@ Foreign Key Arguments to Avoid
MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY",
or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with
-:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of these keywords being
-rendered in a DDL expression, which will then raise an error on MySQL.
-In order to use these keywords on a foreign key while having them ignored
-on a MySQL backend, use a custom compile rule::
+:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of
+these keywords being rendered in a DDL expression, which will then raise an
+error on MySQL. In order to use these keywords on a foreign key while having
+them ignored on a MySQL backend, use a custom compile rule::
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import ForeignKeyConstraint
@@ -303,19 +309,20 @@ on a MySQL backend, use a custom compile rule::
return compiler.visit_foreign_key_constraint(element, **kw)
.. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores
- the ``deferrable`` or ``initially`` keyword arguments of :class:`.ForeignKeyConstraint`
- and :class:`.ForeignKey`.
+ the ``deferrable`` or ``initially`` keyword arguments of
+ :class:`.ForeignKeyConstraint` and :class:`.ForeignKey`.
The "MATCH" keyword is in fact more insidious, and is explicitly disallowed
-by SQLAlchemy in conjunction with the MySQL backend. This argument is silently
-ignored by MySQL, but in addition has the effect of ON UPDATE and ON DELETE options
-also being ignored by the backend. Therefore MATCH should never be used with the
-MySQL backend; as is the case with DEFERRABLE and INITIALLY, custom compilation
-rules can be used to correct a MySQL ForeignKeyConstraint at DDL definition time.
+by SQLAlchemy in conjunction with the MySQL backend. This argument is
+silently ignored by MySQL, but in addition has the effect of ON UPDATE and ON
+DELETE options also being ignored by the backend. Therefore MATCH should
+never be used with the MySQL backend; as is the case with DEFERRABLE and
+INITIALLY, custom compilation rules can be used to correct a MySQL
+ForeignKeyConstraint at DDL definition time.
-.. versionadded:: 0.9.0 - the MySQL backend will raise a :class:`.CompileError`
- when the ``match`` keyword is used with :class:`.ForeignKeyConstraint`
- or :class:`.ForeignKey`.
+.. versionadded:: 0.9.0 - the MySQL backend will raise a
+ :class:`.CompileError` when the ``match`` keyword is used with
+ :class:`.ForeignKeyConstraint` or :class:`.ForeignKey`.
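The compile-rule recipe whose first and last lines are visible in the hunks
above reads, in full, approximately as follows; the body is reconstructed
around those visible lines and should be treated as a sketch::

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.schema import ForeignKeyConstraint

    @compiles(ForeignKeyConstraint, "mysql")
    def process(element, compiler, **kw):
        # discard the options MySQL would otherwise render and reject
        element.deferrable = element.initially = None
        return compiler.visit_foreign_key_constraint(element, **kw)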
Reflection of Foreign Key Constraints
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -350,7 +357,7 @@ from ...engine import default
from ... import types as sqltypes
from ...util import topological
from ...types import DATE, BOOLEAN, \
- BLOB, BINARY, VARBINARY
+ BLOB, BINARY, VARBINARY
RESERVED_WORDS = set(
['accessible', 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
@@ -364,15 +371,15 @@ RESERVED_WORDS = set(
'deterministic', 'distinct', 'distinctrow', 'div', 'double', 'drop',
'dual', 'each', 'else', 'elseif', 'enclosed', 'escaped', 'exists',
'exit', 'explain', 'false', 'fetch', 'float', 'float4', 'float8',
- 'for', 'force', 'foreign', 'from', 'fulltext', 'grant', 'group', 'having',
- 'high_priority', 'hour_microsecond', 'hour_minute', 'hour_second', 'if',
- 'ignore', 'in', 'index', 'infile', 'inner', 'inout', 'insensitive',
- 'insert', 'int', 'int1', 'int2', 'int3', 'int4', 'int8', 'integer',
- 'interval', 'into', 'is', 'iterate', 'join', 'key', 'keys', 'kill',
- 'leading', 'leave', 'left', 'like', 'limit', 'linear', 'lines', 'load',
- 'localtime', 'localtimestamp', 'lock', 'long', 'longblob', 'longtext',
- 'loop', 'low_priority', 'master_ssl_verify_server_cert', 'match',
- 'mediumblob', 'mediumint', 'mediumtext', 'middleint',
+ 'for', 'force', 'foreign', 'from', 'fulltext', 'grant', 'group',
+ 'having', 'high_priority', 'hour_microsecond', 'hour_minute',
+ 'hour_second', 'if', 'ignore', 'in', 'index', 'infile', 'inner', 'inout',
+ 'insensitive', 'insert', 'int', 'int1', 'int2', 'int3', 'int4', 'int8',
+ 'integer', 'interval', 'into', 'is', 'iterate', 'join', 'key', 'keys',
+ 'kill', 'leading', 'leave', 'left', 'like', 'limit', 'linear', 'lines',
+ 'load', 'localtime', 'localtimestamp', 'lock', 'long', 'longblob',
+ 'longtext', 'loop', 'low_priority', 'master_ssl_verify_server_cert',
+ 'match', 'mediumblob', 'mediumint', 'mediumtext', 'middleint',
'minute_microsecond', 'minute_second', 'mod', 'modifies', 'natural',
'not', 'no_write_to_binlog', 'null', 'numeric', 'on', 'optimize',
'option', 'optionally', 'or', 'order', 'out', 'outer', 'outfile',
@@ -397,9 +404,9 @@ RESERVED_WORDS = set(
'read_only', 'read_write', # 5.1
'general', 'ignore_server_ids', 'master_heartbeat_period', 'maxvalue',
- 'resignal', 'signal', 'slow', # 5.5
+ 'resignal', 'signal', 'slow', # 5.5
- 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot',
+ 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot',
'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6
])
@@ -427,7 +434,8 @@ class _NumericType(object):
def __repr__(self):
return util.generic_repr(self,
- to_inspect=[_NumericType, sqltypes.Numeric])
+ to_inspect=[_NumericType, sqltypes.Numeric])
+
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
@@ -435,16 +443,19 @@ class _FloatType(_NumericType, sqltypes.Float):
(
(precision is None and scale is not None) or
(precision is not None and scale is None)
- ):
+ ):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether.")
- super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
+ super(_FloatType, self).__init__(
+ precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
def __repr__(self):
- return util.generic_repr(self,
- to_inspect=[_FloatType, _NumericType, sqltypes.Float])
+ return util.generic_repr(self, to_inspect=[_FloatType,
+ _NumericType,
+ sqltypes.Float])
+
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
@@ -452,8 +463,10 @@ class _IntegerType(_NumericType, sqltypes.Integer):
super(_IntegerType, self).__init__(**kw)
def __repr__(self):
- return util.generic_repr(self,
- to_inspect=[_IntegerType, _NumericType, sqltypes.Integer])
+ return util.generic_repr(self, to_inspect=[_IntegerType,
+ _NumericType,
+ sqltypes.Integer])
+
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
@@ -474,7 +487,8 @@ class _StringType(sqltypes.String):
def __repr__(self):
return util.generic_repr(self,
- to_inspect=[_StringType, sqltypes.String])
+ to_inspect=[_StringType, sqltypes.String])
+
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
@@ -498,7 +512,7 @@ class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""
super(NUMERIC, self).__init__(precision=precision,
- scale=scale, asdecimal=asdecimal, **kw)
+ scale=scale, asdecimal=asdecimal, **kw)
class DECIMAL(_NumericType, sqltypes.DECIMAL):
@@ -537,10 +551,10 @@ class DOUBLE(_FloatType):
.. note::
The :class:`.DOUBLE` type by default converts from float
- to Decimal, using a truncation that defaults to 10 digits. Specify
- either ``scale=n`` or ``decimal_return_scale=n`` in order to change
- this scale, or ``asdecimal=False`` to return values directly as
- Python floating points.
+ to Decimal, using a truncation that defaults to 10 digits.
+ Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
+ to change this scale, or ``asdecimal=False`` to return values
+ directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -570,10 +584,10 @@ class REAL(_FloatType, sqltypes.REAL):
.. note::
The :class:`.REAL` type by default converts from float
- to Decimal, using a truncation that defaults to 10 digits. Specify
- either ``scale=n`` or ``decimal_return_scale=n`` in order to change
- this scale, or ``asdecimal=False`` to return values directly as
- Python floating points.
+ to Decimal, using a truncation that defaults to 10 digits.
+ Specify either ``scale=n`` or ``decimal_return_scale=n`` in order
+ to change this scale, or ``asdecimal=False`` to return values
+ directly as Python floating points.
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -728,9 +742,9 @@ class SMALLINT(_IntegerType, sqltypes.SMALLINT):
class BIT(sqltypes.TypeEngine):
"""MySQL BIT type.
- This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater for
- MyISAM, MEMORY, InnoDB and BDB. For older versions, use a MSTinyInteger()
- type.
+ This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
+    for MEMORY, InnoDB and BDB. For older versions, use a
+ MSTinyInteger() type.
"""
@@ -1075,11 +1089,12 @@ class CHAR(_StringType, sqltypes.CHAR):
ascii=type_.ascii,
binary=type_.binary,
unicode=type_.unicode,
- national=False # not supported in CAST
+ national=False # not supported in CAST
)
else:
return CHAR(length=type_.length)
+
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
@@ -1149,6 +1164,7 @@ class LONGBLOB(sqltypes._Binary):
__visit_name__ = 'LONGBLOB'
+
class _EnumeratedValues(_StringType):
def _init_values(self, values, kw):
self.quoting = kw.pop('quoting', 'auto')
@@ -1191,6 +1207,7 @@ class _EnumeratedValues(_StringType):
strip_values.append(a)
return strip_values
+
class ENUM(sqltypes.Enum, _EnumeratedValues):
"""MySQL ENUM type."""
@@ -1257,8 +1274,8 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
sqltypes.Enum.__init__(self, *values)
def __repr__(self):
- return util.generic_repr(self,
- to_inspect=[ENUM, _StringType, sqltypes.Enum])
+ return util.generic_repr(
+ self, to_inspect=[ENUM, _StringType, sqltypes.Enum])
def bind_processor(self, dialect):
super_convert = super(ENUM, self).bind_processor(dialect)
@@ -1266,7 +1283,7 @@ class ENUM(sqltypes.Enum, _EnumeratedValues):
def process(value):
if self.strict and value is not None and value not in self.enums:
raise exc.InvalidRequestError('"%s" not a valid value for '
- 'this enum' % value)
+ 'this enum' % value)
if super_convert:
return super_convert(value)
else:
@@ -1360,7 +1377,8 @@ class SET(_EnumeratedValues):
super_convert = super(SET, self).bind_processor(dialect)
def process(value):
- if value is None or isinstance(value, util.int_types + util.string_types):
+ if value is None or isinstance(
+ value, util.int_types + util.string_types):
pass
else:
if None in value:
@@ -1480,11 +1498,11 @@ class MySQLCompiler(compiler.SQLCompiler):
def visit_concat_op_binary(self, binary, operator, **kw):
return "concat(%s, %s)" % (self.process(binary.left),
- self.process(binary.right))
+ self.process(binary.right))
def visit_match_op_binary(self, binary, operator, **kw):
return "MATCH (%s) AGAINST (%s IN BOOLEAN MODE)" % \
- (self.process(binary.left), self.process(binary.right))
+ (self.process(binary.left), self.process(binary.right))
def get_from_hint_text(self, table, text):
return text
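The reindented ``visit_match_op_binary`` above is easiest to read alongside
what it renders; a sketch, assuming a MySQL dialect instance and a
hypothetical table ``t``::

    from sqlalchemy import Column, MetaData, String, Table
    from sqlalchemy.dialects import mysql

    t = Table("t", MetaData(), Column("data", String(50)))
    # renders: MATCH (t.data) AGAINST (%s IN BOOLEAN MODE)
    print(t.c.data.match("term").compile(dialect=mysql.dialect()))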
@@ -1499,16 +1517,17 @@ class MySQLCompiler(compiler.SQLCompiler):
elif isinstance(type_, sqltypes.TIMESTAMP):
return 'DATETIME'
elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime,
- sqltypes.Date, sqltypes.Time)):
+ sqltypes.Date, sqltypes.Time)):
return self.dialect.type_compiler.process(type_)
- elif isinstance(type_, sqltypes.String) and not isinstance(type_, (ENUM, SET)):
+ elif isinstance(type_, sqltypes.String) \
+ and not isinstance(type_, (ENUM, SET)):
adapted = CHAR._adapt_string_for_cast(type_)
return self.dialect.type_compiler.process(adapted)
elif isinstance(type_, sqltypes._Binary):
return 'BINARY'
elif isinstance(type_, sqltypes.NUMERIC):
return self.dialect.type_compiler.process(
- type_).replace('NUMERIC', 'DECIMAL')
+ type_).replace('NUMERIC', 'DECIMAL')
else:
return None
@@ -1569,7 +1588,8 @@ class MySQLCompiler(compiler.SQLCompiler):
# The latter is more readable for offsets but we're stuck with the
# former until we can refine dialects by server revision.
- limit_clause, offset_clause = select._limit_clause, select._offset_clause
+ limit_clause, offset_clause = select._limit_clause, \
+ select._offset_clause
if limit_clause is None and offset_clause is None:
return ''
@@ -1585,12 +1605,12 @@ class MySQLCompiler(compiler.SQLCompiler):
# bound as part of MySQL's "syntax" for OFFSET with
# no LIMIT
return ' \n LIMIT %s, %s' % (
- self.process(offset_clause),
- "18446744073709551615")
+ self.process(offset_clause),
+ "18446744073709551615")
else:
return ' \n LIMIT %s, %s' % (
- self.process(offset_clause),
- self.process(limit_clause))
+ self.process(offset_clause),
+ self.process(limit_clause))
else:
# No offset provided, so just use the limit
return ' \n LIMIT %s' % (self.process(limit_clause),)
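A note on the sentinel in this hunk: MySQL has no standalone OFFSET clause, so
an offset with no limit is rendered with 18446744073709551615, the maximum
unsigned BIGINT, standing in as the limit. A sketch of the rendered form,
reusing the hypothetical table ``t`` from the previous example::

    from sqlalchemy import select

    # renders roughly: SELECT t.data FROM t LIMIT %s, 18446744073709551615
    print(select([t.c.data]).offset(10).compile(dialect=mysql.dialect()))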
@@ -1602,12 +1622,13 @@ class MySQLCompiler(compiler.SQLCompiler):
else:
return None
- def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
+ def update_tables_clause(self, update_stmt, from_table,
+ extra_froms, **kw):
return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw)
- for t in [from_table] + list(extra_froms))
+ for t in [from_table] + list(extra_froms))
def update_from_clause(self, update_stmt, from_table,
- extra_froms, from_hints, **kw):
+ extra_froms, from_hints, **kw):
return None
@@ -1620,11 +1641,12 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
def create_table_constraints(self, table):
"""Get table constraints."""
constraint_string = super(
- MySQLDDLCompiler, self).create_table_constraints(table)
+ MySQLDDLCompiler, self).create_table_constraints(table)
# why self.dialect.name and not 'mysql'? because of drizzle
is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \
- table.dialect_options[self.dialect.name]['engine'].lower() == 'innodb'
+ table.dialect_options[self.dialect.name][
+ 'engine'].lower() == 'innodb'
auto_inc_column = table._autoincrement_column
@@ -1634,11 +1656,11 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
if constraint_string:
constraint_string += ", \n\t"
constraint_string += "KEY %s (%s)" % (
- self.preparer.quote(
- "idx_autoinc_%s" % auto_inc_column.name
- ),
- self.preparer.format_column(auto_inc_column)
- )
+ self.preparer.quote(
+ "idx_autoinc_%s" % auto_inc_column.name
+ ),
+ self.preparer.format_column(auto_inc_column)
+ )
return constraint_string
@@ -1646,7 +1668,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
"""Builds column DDL."""
colspec = [self.preparer.format_column(column),
- self.dialect.type_compiler.process(column.type)
+ self.dialect.type_compiler.process(column.type)
]
default = self.get_column_default_string(column)
@@ -1661,7 +1683,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
colspec.append('NULL')
if column is column.table._autoincrement_column and \
- column.server_default is None:
+ column.server_default is None:
colspec.append('AUTO_INCREMENT')
return ' '.join(colspec)
@@ -1697,7 +1719,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
joiner = '='
if opt in ('TABLESPACE', 'DEFAULT CHARACTER SET',
- 'CHARACTER SET', 'COLLATE', 'PARTITION BY', 'PARTITIONS'):
+ 'CHARACTER SET', 'COLLATE',
+ 'PARTITION BY', 'PARTITIONS'):
joiner = ' '
table_opts.append(joiner.join((opt, arg)))
@@ -1709,8 +1732,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
preparer = self.preparer
table = preparer.format_table(index.table)
columns = [self.sql_compiler.process(expr, include_table=False,
- literal_binds=True)
- for expr in index.expressions]
+ literal_binds=True)
+ for expr in index.expressions]
name = self._prepared_index_name(index)
@@ -1723,8 +1746,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
if length is not None:
if isinstance(length, dict):
- # length value can be a (column_name --> integer value) mapping
- # specifying the prefix length for each column of the index
+ # length value can be a (column_name --> integer value)
+ # mapping specifying the prefix length for each column of the
+ # index
columns = ', '.join(
'%s(%d)' % (expr, length[col.name]) if col.name in length
else
@@ -1763,9 +1787,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
index = drop.element
return "\nDROP INDEX %s ON %s" % (
- self._prepared_index_name(index,
- include_schema=False),
- self.preparer.format_table(index.table))
+ self._prepared_index_name(index,
+ include_schema=False),
+ self.preparer.format_table(index.table))
def visit_drop_constraint(self, drop):
constraint = drop.element
@@ -1782,16 +1806,17 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
qual = ""
const = self.preparer.format_constraint(constraint)
return "ALTER TABLE %s DROP %s%s" % \
- (self.preparer.format_table(constraint.table),
- qual, const)
+ (self.preparer.format_table(constraint.table),
+ qual, const)
def define_constraint_match(self, constraint):
if constraint.match is not None:
raise exc.CompileError(
- "MySQL ignores the 'MATCH' keyword while at the same time "
- "causes ON UPDATE/ON DELETE clauses to be ignored.")
+ "MySQL ignores the 'MATCH' keyword while at the same time "
+ "causes ON UPDATE/ON DELETE clauses to be ignored.")
return ""
+
class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def _extend_numeric(self, type_, spec):
"Extend a numeric-type declaration with MySQL specific extensions."
@@ -1845,78 +1870,78 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
return self._extend_numeric(type_, "NUMERIC")
elif type_.scale is None:
return self._extend_numeric(type_,
- "NUMERIC(%(precision)s)" %
- {'precision': type_.precision})
+ "NUMERIC(%(precision)s)" %
+ {'precision': type_.precision})
else:
return self._extend_numeric(type_,
- "NUMERIC(%(precision)s, %(scale)s)" %
- {'precision': type_.precision,
- 'scale': type_.scale})
+ "NUMERIC(%(precision)s, %(scale)s)" %
+ {'precision': type_.precision,
+ 'scale': type_.scale})
def visit_DECIMAL(self, type_):
if type_.precision is None:
return self._extend_numeric(type_, "DECIMAL")
elif type_.scale is None:
return self._extend_numeric(type_,
- "DECIMAL(%(precision)s)" %
- {'precision': type_.precision})
+ "DECIMAL(%(precision)s)" %
+ {'precision': type_.precision})
else:
return self._extend_numeric(type_,
- "DECIMAL(%(precision)s, %(scale)s)" %
- {'precision': type_.precision,
- 'scale': type_.scale})
+ "DECIMAL(%(precision)s, %(scale)s)" %
+ {'precision': type_.precision,
+ 'scale': type_.scale})
def visit_DOUBLE(self, type_):
if type_.precision is not None and type_.scale is not None:
return self._extend_numeric(type_,
- "DOUBLE(%(precision)s, %(scale)s)" %
- {'precision': type_.precision,
- 'scale': type_.scale})
+ "DOUBLE(%(precision)s, %(scale)s)" %
+ {'precision': type_.precision,
+ 'scale': type_.scale})
else:
return self._extend_numeric(type_, 'DOUBLE')
def visit_REAL(self, type_):
if type_.precision is not None and type_.scale is not None:
return self._extend_numeric(type_,
- "REAL(%(precision)s, %(scale)s)" %
- {'precision': type_.precision,
- 'scale': type_.scale})
+ "REAL(%(precision)s, %(scale)s)" %
+ {'precision': type_.precision,
+ 'scale': type_.scale})
else:
return self._extend_numeric(type_, 'REAL')
def visit_FLOAT(self, type_):
if self._mysql_type(type_) and \
- type_.scale is not None and \
- type_.precision is not None:
- return self._extend_numeric(type_,
- "FLOAT(%s, %s)" % (type_.precision, type_.scale))
+ type_.scale is not None and \
+ type_.precision is not None:
+ return self._extend_numeric(
+ type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale))
elif type_.precision is not None:
return self._extend_numeric(type_,
- "FLOAT(%s)" % (type_.precision,))
+ "FLOAT(%s)" % (type_.precision,))
else:
return self._extend_numeric(type_, "FLOAT")
def visit_INTEGER(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "INTEGER(%(display_width)s)" %
- {'display_width': type_.display_width})
+ return self._extend_numeric(
+ type_, "INTEGER(%(display_width)s)" %
+ {'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "INTEGER")
def visit_BIGINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "BIGINT(%(display_width)s)" %
- {'display_width': type_.display_width})
+ return self._extend_numeric(
+ type_, "BIGINT(%(display_width)s)" %
+ {'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "BIGINT")
def visit_MEDIUMINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "MEDIUMINT(%(display_width)s)" %
- {'display_width': type_.display_width})
+ return self._extend_numeric(
+ type_, "MEDIUMINT(%(display_width)s)" %
+ {'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "MEDIUMINT")
@@ -1930,9 +1955,9 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_SMALLINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
return self._extend_numeric(type_,
- "SMALLINT(%(display_width)s)" %
- {'display_width': type_.display_width}
- )
+ "SMALLINT(%(display_width)s)" %
+ {'display_width': type_.display_width}
+ )
else:
return self._extend_numeric(type_, "SMALLINT")
@@ -1986,16 +2011,17 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_VARCHAR(self, type_):
if type_.length:
- return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length)
+ return self._extend_string(
+ type_, {}, "VARCHAR(%d)" % type_.length)
else:
raise exc.CompileError(
- "VARCHAR requires a length on dialect %s" %
- self.dialect.name)
+ "VARCHAR requires a length on dialect %s" %
+ self.dialect.name)
def visit_CHAR(self, type_):
if type_.length:
return self._extend_string(type_, {}, "CHAR(%(length)s)" %
- {'length': type_.length})
+ {'length': type_.length})
else:
return self._extend_string(type_, {}, "CHAR")
@@ -2003,19 +2029,21 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
# We'll actually generate the equiv. "NATIONAL VARCHAR" instead
# of "NVARCHAR".
if type_.length:
- return self._extend_string(type_, {'national': True},
- "VARCHAR(%(length)s)" % {'length': type_.length})
+ return self._extend_string(
+ type_, {'national': True},
+ "VARCHAR(%(length)s)" % {'length': type_.length})
else:
raise exc.CompileError(
- "NVARCHAR requires a length on dialect %s" %
- self.dialect.name)
+ "NVARCHAR requires a length on dialect %s" %
+ self.dialect.name)
def visit_NCHAR(self, type_):
# We'll actually generate the equiv.
# "NATIONAL CHAR" instead of "NCHAR".
if type_.length:
- return self._extend_string(type_, {'national': True},
- "CHAR(%(length)s)" % {'length': type_.length})
+ return self._extend_string(
+ type_, {'national': True},
+ "CHAR(%(length)s)" % {'length': type_.length})
else:
return self._extend_string(type_, {'national': True}, "CHAR")
@@ -2051,16 +2079,16 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
for e in enumerated_values:
quoted_enums.append("'%s'" % e.replace("'", "''"))
return self._extend_string(type_, {}, "%s(%s)" % (
- name, ",".join(quoted_enums))
- )
+ name, ",".join(quoted_enums))
+ )
def visit_ENUM(self, type_):
return self._visit_enumerated_values("ENUM", type_,
- type_._enumerated_values)
+ type_._enumerated_values)
def visit_SET(self, type_):
return self._visit_enumerated_values("SET", type_,
- type_._enumerated_values)
+ type_._enumerated_values)
def visit_BOOLEAN(self, type):
return "BOOL"
@@ -2077,9 +2105,9 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
quote = '"'
super(MySQLIdentifierPreparer, self).__init__(
- dialect,
- initial_quote=quote,
- escape_quote=quote)
+ dialect,
+ initial_quote=quote,
+ escape_quote=quote)
def _quote_free_identifiers(self, *ids):
"""Unilaterally identifier-quote any number of strings."""
@@ -2089,7 +2117,9 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
@log.class_logger
class MySQLDialect(default.DefaultDialect):
- """Details of the MySQL dialect. Not used directly in application code."""
+ """Details of the MySQL dialect.
+ Not used directly in application code.
+ """
name = 'mysql'
supports_alter = True
@@ -2148,8 +2178,8 @@ class MySQLDialect(default.DefaultDialect):
else:
return None
- _isolation_lookup = set(['SERIALIZABLE',
- 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ'])
+ _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
+ 'READ COMMITTED', 'REPEATABLE READ'])
def set_isolation_level(self, connection, level):
level = level.replace('_', ' ')
@@ -2158,7 +2188,7 @@ class MySQLDialect(default.DefaultDialect):
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
cursor = connection.cursor()
cursor.execute("SET SESSION TRANSACTION ISOLATION LEVEL %s" % level)
cursor.execute("COMMIT")
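``set_isolation_level`` is normally reached through engine configuration
rather than called directly; a hedged example, assuming the standard
``isolation_level`` argument to ``create_engine()`` and a placeholder URL::

    from sqlalchemy import create_engine

    # the value must be one of the entries in _isolation_lookup above
    engine = create_engine(
        "mysql+mysqldb://user:pass@host/db",
        isolation_level="READ COMMITTED")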
@@ -2177,10 +2207,11 @@ class MySQLDialect(default.DefaultDialect):
"""Execute a COMMIT."""
# COMMIT/ROLLBACK were introduced in 3.23.15.
- # Yes, we have at least one user who has to talk to these old versions!
+ # Yes, we have at least one user who has to talk to these old
+ # versions!
#
- # Ignore commit/rollback if support isn't present, otherwise even basic
- # operations via autocommit fail.
+ # Ignore commit/rollback if support isn't present, otherwise even
+ # basic operations via autocommit fail.
try:
dbapi_connection.commit()
except:
@@ -2226,9 +2257,10 @@ class MySQLDialect(default.DefaultDialect):
return [row['data'][0:row['gtrid_length']] for row in resultset]
def is_disconnect(self, e, connection, cursor):
- if isinstance(e, (self.dbapi.OperationalError, self.dbapi.ProgrammingError)):
+ if isinstance(e, (self.dbapi.OperationalError,
+ self.dbapi.ProgrammingError)):
return self._extract_error_code(e) in \
- (2006, 2013, 2014, 2045, 2055)
+ (2006, 2013, 2014, 2045, 2055)
elif isinstance(e, self.dbapi.InterfaceError):
# if underlying connection is closed,
# this is the error you get
@@ -2296,15 +2328,15 @@ class MySQLDialect(default.DefaultDialect):
if self._server_ansiquotes:
# if ansiquotes == True, build a new IdentifierPreparer
# with the new setting
- self.identifier_preparer = self.preparer(self,
- server_ansiquotes=self._server_ansiquotes)
+ self.identifier_preparer = self.preparer(
+ self, server_ansiquotes=self._server_ansiquotes)
default.DefaultDialect.initialize(self, connection)
@property
def _supports_cast(self):
return self.server_version_info is None or \
- self.server_version_info >= (4, 0, 2)
+ self.server_version_info >= (4, 0, 2)
@reflection.cache
def get_schema_names(self, connection, **kw):
@@ -2321,17 +2353,19 @@ class MySQLDialect(default.DefaultDialect):
charset = self._connection_charset
if self.server_version_info < (5, 0, 2):
- rp = connection.execute("SHOW TABLES FROM %s" %
+ rp = connection.execute(
+ "SHOW TABLES FROM %s" %
self.identifier_preparer.quote_identifier(current_schema))
return [row[0] for
- row in self._compat_fetchall(rp, charset=charset)]
+ row in self._compat_fetchall(rp, charset=charset)]
else:
- rp = connection.execute("SHOW FULL TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(current_schema))
+ rp = connection.execute(
+ "SHOW FULL TABLES FROM %s" %
+ self.identifier_preparer.quote_identifier(current_schema))
return [row[0]
- for row in self._compat_fetchall(rp, charset=charset)
- if row[1] == 'BASE TABLE']
+ for row in self._compat_fetchall(rp, charset=charset)
+ if row[1] == 'BASE TABLE']
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
@@ -2342,29 +2376,30 @@ class MySQLDialect(default.DefaultDialect):
if self.server_version_info < (5, 0, 2):
return self.get_table_names(connection, schema)
charset = self._connection_charset
- rp = connection.execute("SHOW FULL TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(schema))
+ rp = connection.execute(
+ "SHOW FULL TABLES FROM %s" %
+ self.identifier_preparer.quote_identifier(schema))
return [row[0]
- for row in self._compat_fetchall(rp, charset=charset)
- if row[1] in ('VIEW', 'SYSTEM VIEW')]
+ for row in self._compat_fetchall(rp, charset=charset)
+ if row[1] in ('VIEW', 'SYSTEM VIEW')]
@reflection.cache
def get_table_options(self, connection, table_name, schema=None, **kw):
parsed_state = self._parsed_state_or_create(
- connection, table_name, schema, **kw)
+ connection, table_name, schema, **kw)
return parsed_state.table_options
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
parsed_state = self._parsed_state_or_create(
- connection, table_name, schema, **kw)
+ connection, table_name, schema, **kw)
return parsed_state.columns
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
parsed_state = self._parsed_state_or_create(
- connection, table_name, schema, **kw)
+ connection, table_name, schema, **kw)
for key in parsed_state.keys:
if key['type'] == 'PRIMARY':
# There can be only one.
@@ -2376,7 +2411,7 @@ class MySQLDialect(default.DefaultDialect):
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
parsed_state = self._parsed_state_or_create(
- connection, table_name, schema, **kw)
+ connection, table_name, schema, **kw)
default_schema = None
fkeys = []
@@ -2384,7 +2419,8 @@ class MySQLDialect(default.DefaultDialect):
for spec in parsed_state.constraints:
# only FOREIGN KEYs
ref_name = spec['table'][-1]
- ref_schema = len(spec['table']) > 1 and spec['table'][-2] or schema
+ ref_schema = len(spec['table']) > 1 and \
+ spec['table'][-2] or schema
if not ref_schema:
if default_schema is None:
@@ -2416,7 +2452,7 @@ class MySQLDialect(default.DefaultDialect):
def get_indexes(self, connection, table_name, schema=None, **kw):
parsed_state = self._parsed_state_or_create(
- connection, table_name, schema, **kw)
+ connection, table_name, schema, **kw)
indexes = []
for spec in parsed_state.keys:
@@ -2466,13 +2502,13 @@ class MySQLDialect(default.DefaultDialect):
return sql
def _parsed_state_or_create(self, connection, table_name,
- schema=None, **kw):
+ schema=None, **kw):
return self._setup_parser(
- connection,
- table_name,
- schema,
- info_cache=kw.get('info_cache', None)
- )
+ connection,
+ table_name,
+ schema,
+ info_cache=kw.get('info_cache', None)
+ )
@util.memoized_property
def _tabledef_parser(self):
@@ -2519,7 +2555,7 @@ class MySQLDialect(default.DefaultDialect):
charset = self._connection_charset
row = self._compat_first(connection.execute(
"SHOW VARIABLES LIKE 'lower_case_table_names'"),
- charset=charset)
+ charset=charset)
if not row:
cs = 0
else:
@@ -2554,7 +2590,7 @@ class MySQLDialect(default.DefaultDialect):
row = self._compat_first(
connection.execute("SHOW VARIABLES LIKE 'sql_mode'"),
- charset=self._connection_charset)
+ charset=self._connection_charset)
if not row:
mode = ''
@@ -2570,7 +2606,6 @@ class MySQLDialect(default.DefaultDialect):
# as of MySQL 5.0.1
self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode
-
def _show_create_table(self, connection, table, charset=None,
full_name=None):
"""Run SHOW CREATE TABLE for a ``Table``."""
@@ -2595,7 +2630,7 @@ class MySQLDialect(default.DefaultDialect):
return sql
def _describe_table(self, connection, table, charset=None,
- full_name=None):
+ full_name=None):
"""Run DESCRIBE for a ``Table`` and return processed rows."""
if full_name is None:
@@ -2687,7 +2722,7 @@ class MySQLTableDefinitionParser(object):
if m:
spec = m.groupdict()
spec['table'] = \
- self.preparer.unformat_identifiers(spec['table'])
+ self.preparer.unformat_identifiers(spec['table'])
spec['local'] = [c[0]
for c in self._parse_keyexprs(spec['local'])]
spec['foreign'] = [c[0]
@@ -2768,7 +2803,7 @@ class MySQLTableDefinitionParser(object):
util.warn("Incomplete reflection of column definition %r" % line)
name, type_, args, notnull = \
- spec['name'], spec['coltype'], spec['arg'], spec['notnull']
+ spec['name'], spec['coltype'], spec['arg'], spec['notnull']
try:
col_type = self.dialect.ischema_names[type_]
@@ -2838,7 +2873,7 @@ class MySQLTableDefinitionParser(object):
buffer = []
for row in columns:
(name, col_type, nullable, default, extra) = \
- [row[i] for i in (0, 1, 2, 4, 5)]
+ [row[i] for i in (0, 1, 2, 4, 5)]
line = [' ']
line.append(self.preparer.quote_identifier(name))
@@ -2917,15 +2952,15 @@ class MySQLTableDefinitionParser(object):
r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
r'(?P<coltype>\w+)'
r'(?:\((?P<arg>(?:\d+|\d+,\d+|'
- r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?'
+ r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?'
r'(?: +(?P<unsigned>UNSIGNED))?'
r'(?: +(?P<zerofill>ZEROFILL))?'
r'(?: +CHARACTER SET +(?P<charset>[\w_]+))?'
r'(?: +COLLATE +(?P<collate>[\w_]+))?'
r'(?: +(?P<notnull>NOT NULL))?'
r'(?: +DEFAULT +(?P<default>'
- r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
- r'(?: +ON UPDATE \w+)?)'
+ r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
+ r'(?: +ON UPDATE \w+)?)'
r'))?'
r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
r'(?: +COMMENT +(?P<comment>(?:\x27\x27|[^\x27])+))?'
@@ -2934,7 +2969,7 @@ class MySQLTableDefinitionParser(object):
r'(?: +(?P<extra>.*))?'
r',?$'
% quotes
- )
+ )
# Fallback, try to parse as little as possible
self._re_column_loose = _re_compile(
@@ -2944,7 +2979,7 @@ class MySQLTableDefinitionParser(object):
r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?'
r'.*?(?P<notnull>NOT NULL)?'
% quotes
- )
+ )
# (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
# (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
@@ -2960,7 +2995,7 @@ class MySQLTableDefinitionParser(object):
r'(?: +WITH PARSER +(?P<parser>\S+))?'
r',?$'
% quotes
- )
+ )
# CONSTRAINT `name` FOREIGN KEY (`local_col`)
# REFERENCES `remote` (`remote_col`)
@@ -2976,13 +3011,14 @@ class MySQLTableDefinitionParser(object):
r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
r'FOREIGN KEY +'
r'\((?P<local>[^\)]+?)\) REFERENCES +'
- r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
+ r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s'
+ r'(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
r'\((?P<foreign>[^\)]+?)\)'
r'(?: +(?P<match>MATCH \w+))?'
r'(?: +ON DELETE (?P<ondelete>%(on)s))?'
r'(?: +ON UPDATE (?P<onupdate>%(on)s))?'
% kw
- )
+ )
# PARTITION
#
@@ -3005,8 +3041,9 @@ class MySQLTableDefinitionParser(object):
self._add_option_regex('UNION', r'\([^\)]+\)')
self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK')
- self._add_option_regex('RAID_TYPE',
- r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')
+ self._add_option_regex(
+ 'RAID_TYPE',
+ r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')
_optional_equals = r'(?:\s*(?:=\s*)|\s+)'
@@ -3014,8 +3051,9 @@ class MySQLTableDefinitionParser(object):
regex = (r'(?P<directive>%s)%s'
r"'(?P<val>(?:[^']|'')*?)'(?!')" %
(re.escape(directive), self._optional_equals))
- self._pr_options.append(_pr_compile(regex, lambda v:
- v.replace("\\\\", "\\").replace("''", "'")))
+ self._pr_options.append(_pr_compile(
+ regex, lambda v: v.replace("\\\\", "\\").replace("''", "'")
+ ))
def _add_option_word(self, directive):
regex = (r'(?P<directive>%s)%s'
@@ -3033,7 +3071,6 @@ _options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
'PASSWORD', 'CONNECTION')
-
class _DecodingRowProxy(object):
"""Return unicode-decoded values based on type inspection.
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index c9f82a0bd..51b63044e 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -10,7 +10,8 @@
.. dialect:: mysql+cymysql
:name: CyMySQL
:dbapi: cymysql
- :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>[?<options>]
+ :connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>\
+[?<options>]
:url: https://github.com/nakagami/CyMySQL
"""
@@ -20,6 +21,7 @@ from .mysqldb import MySQLDialect_mysqldb
from .base import (BIT, MySQLDialect)
from ... import util
+
class _cymysqlBIT(BIT):
def result_processor(self, dialect, coltype):
"""Convert a MySQL's 64 bit, variable length binary string to a long.
@@ -74,7 +76,7 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.OperationalError):
return self._extract_error_code(e) in \
- (2006, 2013, 2014, 2045, 2055)
+ (2006, 2013, 2014, 2045, 2055)
elif isinstance(e, self.dbapi.InterfaceError):
# if underlying connection is closed,
# this is the error you get
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index 6f231198d..0059f5a65 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -9,10 +9,11 @@
:name: Google Cloud SQL
:dbapi: rdbms
:connectstring: mysql+gaerdbms:///<dbname>?instance=<instancename>
- :url: https://developers.google.com/appengine/docs/python/cloud-sql/developers-guide
+ :url: https://developers.google.com/appengine/docs/python/cloud-sql/\
+developers-guide
- This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with minimal
- changes.
+ This dialect is based primarily on the :mod:`.mysql.mysqldb` dialect with
+ minimal changes.
.. versionadded:: 0.7.8
@@ -45,7 +46,7 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):
# from django:
# http://code.google.com/p/googleappengine/source/
# browse/trunk/python/google/storage/speckle/
- # python/django/backend/base.py#118
+ # python/django/backend/base.py#118
# see also [ticket:2649]
# see also http://stackoverflow.com/q/14224679/34549
from google.appengine.api import apiproxy_stub_map
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index 91223e270..e51e80005 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -9,15 +9,16 @@
.. dialect:: mysql+mysqlconnector
:name: MySQL Connector/Python
:dbapi: myconnpy
- :connectstring: mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
+ :connectstring: mysql+mysqlconnector://<user>:<password>@\
+<host>[:<port>]/<dbname>
:url: http://dev.mysql.com/downloads/connector/python/
"""
-from .base import (MySQLDialect,
- MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer,
- BIT)
+from .base import (MySQLDialect, MySQLExecutionContext,
+ MySQLCompiler, MySQLIdentifierPreparer,
+ BIT)
from ... import util
@@ -31,7 +32,7 @@ class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return self.process(binary.left, **kw) + " %% " + \
- self.process(binary.right, **kw)
+ self.process(binary.right, **kw)
def post_process_text(self, text):
return text.replace('%', '%%')
@@ -98,7 +99,8 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
if self.dbapi is not None:
try:
from mysql.connector.constants import ClientFlag
- client_flags = opts.get('client_flags', ClientFlag.get_default())
+ client_flags = opts.get(
+ 'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
except:
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 8ee367a07..73210d67a 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -42,7 +42,7 @@ It is strongly advised to use the latest version of MySQL-Python.
"""
from .base import (MySQLDialect, MySQLExecutionContext,
- MySQLCompiler, MySQLIdentifierPreparer)
+ MySQLCompiler, MySQLIdentifierPreparer)
from .base import TEXT
from ... import sql
from ... import util
@@ -58,14 +58,16 @@ class MySQLExecutionContext_mysqldb(MySQLExecutionContext):
else:
return self.cursor.rowcount
+
class MySQLCompiler_mysqldb(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return self.process(binary.left, **kw) + " %% " + \
- self.process(binary.right, **kw)
+ self.process(binary.right, **kw)
def post_process_text(self, text):
return text.replace('%', '%%')
+
class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
@@ -86,7 +88,6 @@ class MySQLDialect_mysqldb(MySQLDialect):
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer_mysqldb
-
@classmethod
def dbapi(cls):
return __import__('MySQLdb')
@@ -102,23 +103,22 @@ class MySQLDialect_mysqldb(MySQLDialect):
# specific issue w/ the utf8_bin collation and unicode returns
has_utf8_bin = connection.scalar(
- "show collation where %s = 'utf8' and %s = 'utf8_bin'"
- % (
- self.identifier_preparer.quote("Charset"),
- self.identifier_preparer.quote("Collation")
- ))
+ "show collation where %s = 'utf8' and %s = 'utf8_bin'"
+ % (
+ self.identifier_preparer.quote("Charset"),
+ self.identifier_preparer.quote("Collation")
+ ))
if has_utf8_bin:
additional_tests = [
sql.collate(sql.cast(
- sql.literal_column(
+ sql.literal_column(
"'test collated returns'"),
- TEXT(charset='utf8')), "utf8_bin")
+ TEXT(charset='utf8')), "utf8_bin")
]
else:
additional_tests = []
return super(MySQLDialect_mysqldb, self)._check_unicode_returns(
- connection, additional_tests)
-
+ connection, additional_tests)
def create_connect_args(self, url):
opts = url.translate_connect_args(database='db', username='user',
@@ -130,9 +130,9 @@ class MySQLDialect_mysqldb(MySQLDialect):
util.coerce_kw_type(opts, 'read_timeout', int)
util.coerce_kw_type(opts, 'client_flag', int)
util.coerce_kw_type(opts, 'local_infile', int)
- # Note: using either of the below will cause all strings to be returned
- # as Unicode, both in raw SQL operations and with column types like
- # String and MSString.
+ # Note: using either of the below will cause all strings to be
+ # returned as Unicode, both in raw SQL operations and with column
+ # types like String and MSString.
util.coerce_kw_type(opts, 'use_unicode', bool)
util.coerce_kw_type(opts, 'charset', str)
@@ -155,8 +155,8 @@ class MySQLDialect_mysqldb(MySQLDialect):
if self.dbapi is not None:
try:
CLIENT_FLAGS = __import__(
- self.dbapi.__name__ + '.constants.CLIENT'
- ).constants.CLIENT
+ self.dbapi.__name__ + '.constants.CLIENT'
+ ).constants.CLIENT
client_flag |= CLIENT_FLAGS.FOUND_ROWS
except (AttributeError, ImportError):
self.supports_sane_rowcount = False
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index 12136514c..fa127f3b0 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -80,7 +80,8 @@ class MySQLDialect_oursql(MySQLDialect):
return __import__('oursql')
def do_execute(self, cursor, statement, parameters, context=None):
- """Provide an implementation of *cursor.execute(statement, parameters)*."""
+ """Provide an implementation of
+ *cursor.execute(statement, parameters)*."""
if context and context.plain_query:
cursor.execute(statement, plain_query=True)
@@ -95,9 +96,11 @@ class MySQLDialect_oursql(MySQLDialect):
arg = connection.connection._escape_string(xid)
else:
charset = self._connection_charset
- arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
+ arg = connection.connection._escape_string(
+ xid.encode(charset)).decode(charset)
arg = "'%s'" % arg
- connection.execution_options(_oursql_plain_query=True).execute(query % arg)
+ connection.execution_options(
+ _oursql_plain_query=True).execute(query % arg)
# Because mysql is bad, these methods have to be
# reimplemented to use _PlainQuery. Basically, some queries
@@ -127,10 +130,10 @@ class MySQLDialect_oursql(MySQLDialect):
# am i on a newer/older version of OurSQL ?
def has_table(self, connection, table_name, schema=None):
return MySQLDialect.has_table(
- self,
- connection.connect().execution_options(_oursql_plain_query=True),
- table_name,
- schema
+ self,
+ connection.connect().execution_options(_oursql_plain_query=True),
+ table_name,
+ schema
)
def get_table_options(self, connection, table_name, schema=None, **kw):
@@ -190,7 +193,8 @@ class MySQLDialect_oursql(MySQLDialect):
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.ProgrammingError):
- return e.errno is None and 'cursor' not in e.args[1] and e.args[1].endswith('closed')
+ return e.errno is None and 'cursor' not in e.args[1] \
+ and e.args[1].endswith('closed')
else:
return e.errno in (2006, 2013, 2014, 2045, 2055)
@@ -218,7 +222,7 @@ class MySQLDialect_oursql(MySQLDialect):
ssl = {}
for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
- 'ssl_capath', 'ssl_cipher']:
+ 'ssl_capath', 'ssl_cipher']:
if key in opts:
ssl[key[4:]] = opts[key]
util.coerce_kw_type(ssl, key[4:], str)
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index b05c22295..31226cea0 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -10,21 +10,23 @@
.. dialect:: mysql+pymysql
:name: PyMySQL
:dbapi: pymysql
- :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>[?<options>]
+ :connectstring: mysql+pymysql://<username>:<password>@<host>/<dbname>\
+[?<options>]
:url: http://code.google.com/p/pymysql/
MySQL-Python Compatibility
--------------------------
The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
-and targets 100% compatibility. Most behavioral notes for MySQL-python apply to
-the pymysql driver as well.
+and targets 100% compatibility. Most behavioral notes for MySQL-python apply
+to the pymysql driver as well.
"""
from .mysqldb import MySQLDialect_mysqldb
from ...util import py3k
+
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
driver = 'pymysql'
@@ -32,7 +34,6 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb):
if py3k:
supports_unicode_statements = True
-
@classmethod
def dbapi(cls):
return __import__('pymysql')
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index 8b6821643..58e8b30fe 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -67,7 +67,8 @@ class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
if opts.get(key, None):
return opts[key]
- util.warn("Could not detect the connection character set. Assuming latin1.")
+ util.warn("Could not detect the connection character set. "
+ "Assuming latin1.")
return 'latin1'
def _extract_error_code(self, exception):
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 17e062770..0cf92cd13 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -10,7 +10,8 @@
.. dialect:: mysql+zxjdbc
:name: zxjdbc for Jython
:dbapi: zxjdbc
- :connectstring: mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/<database>
+ :connectstring: mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/\
+<database>
:driverurl: http://dev.mysql.com/downloads/connector/j/
Character Sets
@@ -83,7 +84,8 @@ class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
if opts.get(key, None):
return opts[key]
- util.warn("Could not detect the connection character set. Assuming latin1.")
+ util.warn("Could not detect the connection character set. "
+ "Assuming latin1.")
return 'latin1'
def _driver_kwargs(self):
diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py
index 4e57e3cee..fd32f2235 100644
--- a/lib/sqlalchemy/dialects/oracle/__init__.py
+++ b/lib/sqlalchemy/dialects/oracle/__init__.py
@@ -17,8 +17,8 @@ from sqlalchemy.dialects.oracle.base import \
__all__ = (
-'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER',
-'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
-'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL',
-'VARCHAR2', 'NVARCHAR2', 'ROWID'
+ 'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'NUMBER',
+ 'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
+ 'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL',
+ 'VARCHAR2', 'NVARCHAR2', 'ROWID'
)
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 781fc601f..40ba051f7 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -9,31 +9,37 @@
.. dialect:: oracle
:name: Oracle
- Oracle version 8 through current (11g at the time of this writing) are supported.
+ Oracle versions 8 through current (11g at the time of this writing) are
+ supported.
Connect Arguments
-----------------
-The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
-affect the behavior of the dialect regardless of driver in use.
+The dialect supports several :func:`~sqlalchemy.create_engine()` arguments
+which affect the behavior of the dialect regardless of driver in use.
-* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
- to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins.
+* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8).
+ Defaults to ``True``. If ``False``, Oracle-8 compatible constructs are used
+ for joins.
-* ``optimize_limits`` - defaults to ``False``. see the section on LIMIT/OFFSET.
+* ``optimize_limits`` - defaults to ``False``. See the section on
+ LIMIT/OFFSET.
-* ``use_binds_for_limits`` - defaults to ``True``. see the section on LIMIT/OFFSET.
+* ``use_binds_for_limits`` - defaults to ``True``. See the section on
+ LIMIT/OFFSET.
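+
+For example, to render Oracle-8 compatible joins on an engine (a minimal
+sketch; the DSN here is a placeholder)::
+
+    engine = create_engine("oracle://scott:tiger@dsn", use_ansi=False)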
Auto Increment Behavior
-----------------------
-SQLAlchemy Table objects which include integer primary keys are usually assumed to have
-"autoincrementing" behavior, meaning they can generate their own primary key values upon
-INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences
-to produce these values. With the Oracle dialect, *a sequence must always be explicitly
-specified to enable autoincrement*. This is divergent with the majority of documentation
-examples which assume the usage of an autoincrement-capable database. To specify sequences,
-use the sqlalchemy.schema.Sequence object which is passed to a Column construct::
+SQLAlchemy Table objects which include integer primary keys are usually
+assumed to have "autoincrementing" behavior, meaning they can generate their
+own primary key values upon INSERT. Since Oracle has no "autoincrement"
+feature, SQLAlchemy relies upon sequences to produce these values. With the
+Oracle dialect, *a sequence must always be explicitly specified to enable
+autoincrement*. This diverges from the majority of documentation
+examples which assume the usage of an autoincrement-capable database. To
+specify sequences, use the sqlalchemy.schema.Sequence object which is passed
+to a Column construct::
t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
@@ -50,15 +56,16 @@ This step is also required when using table reflection, i.e. autoload=True::
Identifier Casing
-----------------
-In Oracle, the data dictionary represents all case insensitive identifier names
-using UPPERCASE text. SQLAlchemy on the other hand considers an all-lower case identifier
-name to be case insensitive. The Oracle dialect converts all case insensitive identifiers
-to and from those two formats during schema level communication, such as reflection of
-tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a
-case sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches
-against data dictionary data received from Oracle, so unless identifier names have been
-truly created as case sensitive (i.e. using quoted names), all lowercase names should be
-used on the SQLAlchemy side.
+In Oracle, the data dictionary represents all case insensitive identifier
+names using UPPERCASE text. SQLAlchemy on the other hand considers an
+all-lower case identifier name to be case insensitive. The Oracle dialect
+converts all case insensitive identifiers to and from those two formats during
+schema level communication, such as reflection of tables and indexes. Using
+an UPPERCASE name on the SQLAlchemy side indicates a case sensitive
+identifier, and SQLAlchemy will quote the name - this will cause mismatches
+against data dictionary data received from Oracle, so unless identifier names
+have been truly created as case sensitive (i.e. using quoted names), all
+lowercase names should be used on the SQLAlchemy side.
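+
+For example (a sketch, assuming an existing ``engine`` and ``metadata``),
+a lower case name on the SQLAlchemy side reflects against the UPPERCASE
+entry in the data dictionary::
+
+    # matches a table stored as MY_TABLE in Oracle's data dictionary
+    t = Table('my_table', metadata, autoload=True, autoload_with=engine)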
LIMIT/OFFSET Support
@@ -71,44 +78,49 @@ http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html .
There are two options which affect its behavior:
-* the "FIRST ROWS()" optimization keyword is not used by default. To enable the usage of this
- optimization directive, specify ``optimize_limits=True`` to :func:`.create_engine`.
-* the values passed for the limit/offset are sent as bound parameters. Some users have observed
- that Oracle produces a poor query plan when the values are sent as binds and not
- rendered literally. To render the limit/offset values literally within the SQL
- statement, specify ``use_binds_for_limits=False`` to :func:`.create_engine`.
-
-Some users have reported better performance when the entirely different approach of a
-window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note
-that the majority of users don't observe this). To suit this case the
-method used for LIMIT/OFFSET can be replaced entirely. See the recipe at
+* the "FIRST ROWS()" optimization keyword is not used by default. To enable
+ the usage of this optimization directive, specify ``optimize_limits=True``
+ to :func:`.create_engine`.
+* the values passed for the limit/offset are sent as bound parameters. Some
+ users have observed that Oracle produces a poor query plan when the values
+ are sent as binds and not rendered literally. To render the limit/offset
+ values literally within the SQL statement, specify
+ ``use_binds_for_limits=False`` to :func:`.create_engine`.
+
+Some users have reported better performance when the entirely different
+approach of a window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to
+provide LIMIT/OFFSET (note that the majority of users don't observe this).
+To suit this case the method used for LIMIT/OFFSET can be replaced entirely.
+See the recipe at
http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
-which installs a select compiler that overrides the generation of limit/offset with
-a window function.
+which installs a select compiler that overrides the generation of limit/offset
+with a window function.
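+
+For example, both LIMIT/OFFSET options can be set on the engine (a sketch;
+the DSN here is a placeholder)::
+
+    engine = create_engine("oracle://scott:tiger@dsn",
+                           optimize_limits=True,
+                           use_binds_for_limits=False)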
.. _oracle_returning:
RETURNING Support
-----------------
-The Oracle database supports a limited form of RETURNING, in order to retrieve result
-sets of matched rows from INSERT, UPDATE and DELETE statements. Oracle's
-RETURNING..INTO syntax only supports one row being returned, as it relies upon
-OUT parameters in order to function. In addition, supported DBAPIs have further
-limitations (see :ref:`cx_oracle_returning`).
+The Oracle database supports a limited form of RETURNING, in order to retrieve
+result sets of matched rows from INSERT, UPDATE and DELETE statements.
+Oracle's RETURNING..INTO syntax only supports one row being returned, as it
+relies upon OUT parameters in order to function. In addition, supported
+DBAPIs have further limitations (see :ref:`cx_oracle_returning`).
-SQLAlchemy's "implicit returning" feature, which employs RETURNING within an INSERT
-and sometimes an UPDATE statement in order to fetch newly generated primary key values
-and other SQL defaults and expressions, is normally enabled on the Oracle
-backend. By default, "implicit returning" typically only fetches the value of a
-single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment
-a sequence within an INSERT statement and get the value back at the same time.
-To disable this feature across the board, specify ``implicit_returning=False`` to
-:func:`.create_engine`::
+SQLAlchemy's "implicit returning" feature, which employs RETURNING within an
+INSERT and sometimes an UPDATE statement in order to fetch newly generated
+primary key values and other SQL defaults and expressions, is normally enabled
+on the Oracle backend. By default, "implicit returning" typically only
+fetches the value of a single ``nextval(some_seq)`` expression embedded into
+an INSERT in order to increment a sequence within an INSERT statement and get
+the value back at the same time. To disable this feature across the board,
+specify ``implicit_returning=False`` to :func:`.create_engine`::
- engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)
+ engine = create_engine("oracle://scott:tiger@dsn",
+ implicit_returning=False)
-Implicit returning can also be disabled on a table-by-table basis as a table option::
+Implicit returning can also be disabled on a table-by-table basis as a table
+option::
# Core Table
my_table = Table("my_table", metadata, ..., implicit_returning=False)
@@ -121,13 +133,15 @@ Implicit returning can also be disabled on a table-by-table basis as a table opt
.. seealso::
- :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on implicit returning.
+ :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on
+ implicit returning.
ON UPDATE CASCADE
-----------------
-Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution
-is available at http://asktom.oracle.com/tkyte/update_cascade/index.html .
+Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger-based
+solution is available at
+http://asktom.oracle.com/tkyte/update_cascade/index.html .
When using the SQLAlchemy ORM, the ORM has limited ability to manually issue
cascading updates - specify ForeignKey objects using the
@@ -137,29 +151,32 @@ and specify "passive_updates=False" on each relationship().
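A minimal ORM sketch of the ``passive_updates=False`` setting (the mapped
classes here are hypothetical)::

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # disable the ORM's default reliance on database-side cascades
        children = relationship("Child", passive_updates=False)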
Oracle 8 Compatibility
----------------------
-When Oracle 8 is detected, the dialect internally configures itself to the following
-behaviors:
+When Oracle 8 is detected, the dialect internally configures itself to the
+following behaviors:
* the use_ansi flag is set to False. This has the effect of converting all
JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN
makes use of Oracle's (+) operator.
* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when
- the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued
- instead. This because these types don't seem to work correctly on Oracle 8
- even though they are available. The :class:`~sqlalchemy.types.NVARCHAR`
- and :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate NVARCHAR2 and NCLOB.
+ the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are
+ issued instead. This is because these types don't seem to work correctly on
+ Oracle 8 even though they are available. The
+ :class:`~sqlalchemy.types.NVARCHAR` and
+ :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate
+ NVARCHAR2 and NCLOB.
* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy
- encodes all Python unicode objects to "string" before passing in as bind parameters.
+ encodes all Python unicode objects to "string" before passing in as bind
+ parameters.
Synonym/DBLINK Reflection
-------------------------
-When using reflection with Table objects, the dialect can optionally search for tables
-indicated by synonyms, either in local or remote schemas or accessed over DBLINK,
-by passing the flag ``oracle_resolve_synonyms=True`` as a
-keyword argument to the :class:`.Table` construct::
+When using reflection with Table objects, the dialect can optionally search
+for tables indicated by synonyms, either in local or remote schemas or
+accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as
+a keyword argument to the :class:`.Table` construct::
some_table = Table('some_table', autoload=True,
autoload_with=some_engine,
@@ -167,9 +184,10 @@ keyword argument to the :class:`.Table` construct::
When this flag is set, the given name (such as ``some_table`` above) will
be searched not just in the ``ALL_TABLES`` view, but also within the
-``ALL_SYNONYMS`` view to see if this name is actually a synonym to another name.
-If the synonym is located and refers to a DBLINK, the oracle dialect knows
-how to locate the table's information using DBLINK syntax (e.g. ``@dblink``).
+``ALL_SYNONYMS`` view to see if this name is actually a synonym to another
+name. If the synonym is located and refers to a DBLINK, the oracle dialect
+knows how to locate the table's information using DBLINK syntax (e.g.
+``@dblink``).
``oracle_resolve_synonyms`` is accepted wherever reflection arguments are
accepted, including methods such as :meth:`.MetaData.reflect` and
@@ -202,24 +220,25 @@ import re
from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.sql import compiler, visitors, expression
-from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
+from sqlalchemy.sql import (operators as sql_operators,
+ functions as sql_functions)
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
- BLOB, CLOB, TIMESTAMP, FLOAT
+ BLOB, CLOB, TIMESTAMP, FLOAT
RESERVED_WORDS = \
- set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '\
- 'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '\
- 'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE '\
- 'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE '\
- 'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES '\
- 'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS '\
- 'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER '\
- 'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR '\
+ set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '
+ 'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '
+ 'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE '
+ 'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE '
+ 'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES '
+ 'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS '
+ 'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER '
+ 'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR '
'DECIMAL UNION PUBLIC AND START UID COMMENT CURRENT LEVEL'.split())
NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER '
- 'CURRENT_TIME CURRENT_TIMESTAMP'.split())
+ 'CURRENT_TIME CURRENT_TIMESTAMP'.split())
class RAW(sqltypes._Binary):
@@ -244,7 +263,8 @@ class NUMBER(sqltypes.Numeric, sqltypes.Integer):
if asdecimal is None:
asdecimal = bool(scale and scale > 0)
- super(NUMBER, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
+ super(NUMBER, self).__init__(
+ precision=precision, scale=scale, asdecimal=asdecimal)
def adapt(self, impltype):
ret = super(NUMBER, self).adapt(impltype)
@@ -267,7 +287,8 @@ class DOUBLE_PRECISION(sqltypes.Numeric):
if asdecimal is None:
asdecimal = False
- super(DOUBLE_PRECISION, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
+ super(DOUBLE_PRECISION, self).__init__(
+ precision=precision, scale=scale, asdecimal=asdecimal)
class BFILE(sqltypes.LargeBinary):
@@ -277,6 +298,7 @@ class BFILE(sqltypes.LargeBinary):
class LONG(sqltypes.Text):
__visit_name__ = 'LONG'
+
class DATE(sqltypes.DateTime):
"""Provide the oracle DATE type.
@@ -289,7 +311,6 @@ class DATE(sqltypes.DateTime):
"""
__visit_name__ = 'DATE'
-
def _compare_type_affinity(self, other):
return other._type_affinity in (sqltypes.DateTime, sqltypes.Date)
@@ -298,18 +319,19 @@ class INTERVAL(sqltypes.TypeEngine):
__visit_name__ = 'INTERVAL'
def __init__(self,
- day_precision=None,
- second_precision=None):
+ day_precision=None,
+ second_precision=None):
"""Construct an INTERVAL.
Note that only DAY TO SECOND intervals are currently supported.
This is due to a lack of support for YEAR TO MONTH intervals
within available DBAPIs (cx_oracle and zxjdbc).
- :param day_precision: the day precision value. this is the number of digits
- to store for the day field. Defaults to "2"
- :param second_precision: the second precision value. this is the number of digits
- to store for the fractional seconds field. Defaults to "6".
+ :param day_precision: the day precision value. This is the number of
+ digits to store for the day field. Defaults to "2".
+ :param second_precision: the second precision value. This is the
+ number of digits to store for the fractional seconds field.
+ Defaults to "6".
"""
self.day_precision = day_precision
@@ -385,11 +407,11 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def visit_INTERVAL(self, type_):
return "INTERVAL DAY%s TO SECOND%s" % (
type_.day_precision is not None and
- "(%d)" % type_.day_precision or
- "",
+ "(%d)" % type_.day_precision or
+ "",
type_.second_precision is not None and
- "(%d)" % type_.second_precision or
- "",
+ "(%d)" % type_.second_precision or
+ "",
)
def visit_LONG(self, type_):
@@ -483,7 +505,7 @@ class OracleCompiler(compiler.SQLCompiler):
compound_keywords = util.update_copy(
compiler.SQLCompiler.compound_keywords,
{
- expression.CompoundSelect.EXCEPT: 'MINUS'
+ expression.CompoundSelect.EXCEPT: 'MINUS'
}
)
@@ -504,7 +526,7 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_match_op_binary(self, binary, operator, **kw):
return "CONTAINS (%s, %s)" % (self.process(binary.left),
- self.process(binary.right))
+ self.process(binary.right))
def visit_true(self, expr, **kw):
return '1'
@@ -542,8 +564,7 @@ class OracleCompiler(compiler.SQLCompiler):
else:
right = join.right
return self.process(join.left, **kwargs) + \
- ", " + self.process(right, **kwargs)
-
+ ", " + self.process(right, **kwargs)
def _get_nonansi_join_whereclause(self, froms):
clauses = []
@@ -556,8 +577,8 @@ class OracleCompiler(compiler.SQLCompiler):
binary.left = _OuterJoinColumn(binary.left)
elif join.right.is_derived_from(binary.right.table):
binary.right = _OuterJoinColumn(binary.right)
- clauses.append(visitors.cloned_traverse(join.onclause, {},
- {'binary': visit_binary}))
+ clauses.append(visitors.cloned_traverse(
+ join.onclause, {}, {'binary': visit_binary}))
else:
clauses.append(join.onclause)
@@ -580,46 +601,54 @@ class OracleCompiler(compiler.SQLCompiler):
return self.process(vc.column) + "(+)"
def visit_sequence(self, seq):
- return self.dialect.identifier_preparer.format_sequence(seq) + ".nextval"
+ return (self.dialect.identifier_preparer.format_sequence(seq) +
+ ".nextval")
def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
- """Oracle doesn't like ``FROM table AS alias``. Is the AS standard SQL??"""
+ """Oracle doesn't like ``FROM table AS alias``. Is the AS standard
+ SQL??
+ """
if asfrom or ashint:
alias_name = isinstance(alias.name, expression._truncated_label) and \
- self._truncated_identifier("alias", alias.name) or alias.name
+ self._truncated_identifier("alias", alias.name) or alias.name
if ashint:
return alias_name
elif asfrom:
return self.process(alias.original, asfrom=asfrom, **kwargs) + \
- " " + self.preparer.format_alias(alias, alias_name)
+ " " + self.preparer.format_alias(alias, alias_name)
else:
return self.process(alias.original, **kwargs)
def returning_clause(self, stmt, returning_cols):
columns = []
binds = []
- for i, column in enumerate(expression._select_iterables(returning_cols)):
+ for i, column in enumerate(
+ expression._select_iterables(returning_cols)):
if column.type._has_column_expression:
col_expr = column.type.column_expression(column)
else:
col_expr = column
outparam = sql.outparam("ret_%d" % i, type_=column.type)
self.binds[outparam.key] = outparam
- binds.append(self.bindparam_string(self._truncate_bindparam(outparam)))
- columns.append(self.process(col_expr, within_columns_clause=False))
+ binds.append(
+ self.bindparam_string(self._truncate_bindparam(outparam)))
+ columns.append(
+ self.process(col_expr, within_columns_clause=False))
self.result_map[outparam.key] = (
outparam.key,
(column, getattr(column, 'name', None),
- getattr(column, 'key', None)),
+ getattr(column, 'key', None)),
column.type
)
return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
def _TODO_visit_compound_select(self, select):
- """Need to determine how to get ``LIMIT``/``OFFSET`` into a ``UNION`` for Oracle."""
+ """Need to determine how to get ``LIMIT``/``OFFSET`` into a
+ ``UNION`` for Oracle.
+ """
pass
def visit_select(self, select, **kwargs):
@@ -630,7 +659,7 @@ class OracleCompiler(compiler.SQLCompiler):
if not getattr(select, '_oracle_visit', None):
if not self.dialect.use_ansi:
froms = self._display_froms_for_select(
- select, kwargs.get('asfrom', False))
+ select, kwargs.get('asfrom', False))
whereclause = self._get_nonansi_join_whereclause(froms)
if whereclause is not None:
select = select.where(whereclause)
@@ -639,15 +668,17 @@ class OracleCompiler(compiler.SQLCompiler):
limit_clause = select._limit_clause
offset_clause = select._offset_clause
if limit_clause is not None or offset_clause is not None:
- # See http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html
+ # See http://www.oracle.com/technology/oramag/oracle/06-sep/\
+ # o56asktom.html
#
# Generalized form of an Oracle pagination query:
# select ... from (
- # select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from (
- # select distinct ... where ... order by ...
+ # select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from
+ # ( select distinct ... where ... order by ...
# ) where ROWNUM <= :limit+:offset
# ) where ora_rn > :offset
- # Outer select and "ROWNUM as ora_rn" can be dropped if limit=0
+ # Outer select and "ROWNUM as ora_rn" can be dropped if
+ # limit=0
# TODO: use annotations instead of clone + attr set ?
select = select._generate()
@@ -659,8 +690,8 @@ class OracleCompiler(compiler.SQLCompiler):
self.dialect.optimize_limits and \
select._simple_int_limit:
limitselect = limitselect.prefix_with(
- "/*+ FIRST_ROWS(%d) */" %
- select._limit)
+ "/*+ FIRST_ROWS(%d) */" %
+ select._limit)
limitselect._oracle_visit = True
limitselect._is_wrapper = True
@@ -680,7 +711,7 @@ class OracleCompiler(compiler.SQLCompiler):
if offset_clause is not None:
max_row = max_row + offset_clause
limitselect.append_whereclause(
- sql.literal_column("ROWNUM") <= max_row)
+ sql.literal_column("ROWNUM") <= max_row)
# If needed, add the ora_rn, and wrap again with offset.
if offset_clause is None:
@@ -688,20 +719,20 @@ class OracleCompiler(compiler.SQLCompiler):
select = limitselect
else:
limitselect = limitselect.column(
- sql.literal_column("ROWNUM").label("ora_rn"))
+ sql.literal_column("ROWNUM").label("ora_rn"))
limitselect._oracle_visit = True
limitselect._is_wrapper = True
offsetselect = sql.select(
- [c for c in limitselect.c if c.key != 'ora_rn'])
+ [c for c in limitselect.c if c.key != 'ora_rn'])
offsetselect._oracle_visit = True
offsetselect._is_wrapper = True
if not self.dialect.use_binds_for_limits:
offset_clause = sql.literal_column(
- "%d" % select._offset)
+ "%d" % select._offset)
offsetselect.append_whereclause(
- sql.literal_column("ora_rn") > offset_clause)
+ sql.literal_column("ora_rn") > offset_clause)
offsetselect._for_update_arg = select._for_update_arg
select = offsetselect
@@ -720,9 +751,9 @@ class OracleCompiler(compiler.SQLCompiler):
if select._for_update_arg.of:
tmp += ' OF ' + ', '.join(
- self.process(elem) for elem in
- select._for_update_arg.of
- )
+ self.process(elem) for elem in
+ select._for_update_arg.of
+ )
if select._for_update_arg.nowait:
tmp += " NOWAIT"
@@ -738,18 +769,20 @@ class OracleDDLCompiler(compiler.DDLCompiler):
text += " ON DELETE %s" % constraint.ondelete
# oracle has no ON UPDATE CASCADE -
- # its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html
+ # it's only available via triggers
+ # http://asktom.oracle.com/tkyte/update_cascade/index.html
if constraint.onupdate is not None:
util.warn(
"Oracle does not contain native UPDATE CASCADE "
- "functionality - onupdates will not be rendered for foreign keys. "
- "Consider using deferrable=True, initially='deferred' or triggers.")
+ "functionality - onupdates will not be rendered for foreign "
+ "keys. Consider using deferrable=True, initially='deferred' "
+ "or triggers.")
return text
def visit_create_index(self, create, **kw):
return super(OracleDDLCompiler, self).\
- visit_create_index(create, include_schema=True)
+ visit_create_index(create, include_schema=True)
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
@@ -767,14 +800,16 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer):
def format_savepoint(self, savepoint):
name = re.sub(r'^_+', '', savepoint.ident)
- return super(OracleIdentifierPreparer, self).format_savepoint(savepoint, name)
+ return super(
+ OracleIdentifierPreparer, self).format_savepoint(savepoint, name)
class OracleExecutionContext(default.DefaultExecutionContext):
def fire_sequence(self, seq, type_):
- return self._execute_scalar("SELECT " +
- self.dialect.identifier_preparer.format_sequence(seq) +
- ".nextval FROM DUAL", type_)
+ return self._execute_scalar(
+ "SELECT " +
+ self.dialect.identifier_preparer.format_sequence(seq) +
+ ".nextval FROM DUAL", type_)
class OracleDialect(default.DefaultDialect):
@@ -811,10 +846,10 @@ class OracleDialect(default.DefaultDialect):
]
def __init__(self,
- use_ansi=True,
- optimize_limits=False,
- use_binds_for_limits=True,
- **kwargs):
+ use_ansi=True,
+ optimize_limits=False,
+ use_binds_for_limits=True,
+ **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
self.use_ansi = use_ansi
self.optimize_limits = optimize_limits
@@ -823,9 +858,9 @@ class OracleDialect(default.DefaultDialect):
def initialize(self, connection):
super(OracleDialect, self).initialize(connection)
self.implicit_returning = self.__dict__.get(
- 'implicit_returning',
- self.server_version_info > (10, )
- )
+ 'implicit_returning',
+ self.server_version_info > (10, )
+ )
if self._is_oracle_8:
self.colspecs = self.colspecs.copy()
@@ -835,7 +870,7 @@ class OracleDialect(default.DefaultDialect):
@property
def _is_oracle_8(self):
return self.server_version_info and \
- self.server_version_info < (9, )
+ self.server_version_info < (9, )
@property
def _supports_char_length(self):
@@ -855,7 +890,8 @@ class OracleDialect(default.DefaultDialect):
cursor = connection.execute(
sql.text("SELECT table_name FROM all_tables "
"WHERE table_name = :name AND owner = :schema_name"),
- name=self.denormalize_name(table_name), schema_name=self.denormalize_name(schema))
+ name=self.denormalize_name(table_name),
+ schema_name=self.denormalize_name(schema))
return cursor.first() is not None
def has_sequence(self, connection, sequence_name, schema=None):
@@ -863,8 +899,10 @@ class OracleDialect(default.DefaultDialect):
schema = self.default_schema_name
cursor = connection.execute(
sql.text("SELECT sequence_name FROM all_sequences "
- "WHERE sequence_name = :name AND sequence_owner = :schema_name"),
- name=self.denormalize_name(sequence_name), schema_name=self.denormalize_name(schema))
+ "WHERE sequence_name = :name AND "
+ "sequence_owner = :schema_name"),
+ name=self.denormalize_name(sequence_name),
+ schema_name=self.denormalize_name(schema))
return cursor.first() is not None
def normalize_name(self, name):
@@ -873,8 +911,8 @@ class OracleDialect(default.DefaultDialect):
if util.py2k:
if isinstance(name, str):
name = name.decode(self.encoding)
- if name.upper() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
+ if name.upper() == name and not \
+ self.identifier_preparer._requires_quotes(name.lower()):
return name.lower()
else:
return name
@@ -882,7 +920,8 @@ class OracleDialect(default.DefaultDialect):
def denormalize_name(self, name):
if name is None:
return None
- elif name.lower() == name and not self.identifier_preparer._requires_quotes(name.lower()):
+ elif name.lower() == name and not \
+ self.identifier_preparer._requires_quotes(name.lower()):
name = name.upper()
if util.py2k:
if not self.supports_unicode_binds:
@@ -892,18 +931,21 @@ class OracleDialect(default.DefaultDialect):
return name
def _get_default_schema_name(self, connection):
- return self.normalize_name(connection.execute('SELECT USER FROM DUAL').scalar())
+ return self.normalize_name(
+ connection.execute('SELECT USER FROM DUAL').scalar())
- def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
+ def _resolve_synonym(self, connection, desired_owner=None,
+ desired_synonym=None, desired_table=None):
"""search for a local synonym matching the given desired owner/name.
if desired_owner is None, attempts to locate a distinct owner.
- returns the actual name, owner, dblink name, and synonym name if found.
+ returns the actual name, owner, dblink name, and synonym name if
+ found.
"""
q = "SELECT owner, table_owner, table_name, db_link, "\
- "synonym_name FROM all_synonyms WHERE "
+ "synonym_name FROM all_synonyms WHERE "
clauses = []
params = {}
if desired_synonym:
@@ -922,16 +964,20 @@ class OracleDialect(default.DefaultDialect):
if desired_owner:
row = result.first()
if row:
- return row['table_name'], row['table_owner'], row['db_link'], row['synonym_name']
+ return (row['table_name'], row['table_owner'],
+ row['db_link'], row['synonym_name'])
else:
return None, None, None, None
else:
rows = result.fetchall()
if len(rows) > 1:
- raise AssertionError("There are multiple tables visible to the schema, you must specify owner")
+ raise AssertionError(
+ "There are multiple tables visible to the schema, you "
+ "must specify owner")
elif len(rows) == 1:
row = rows[0]
- return row['table_name'], row['table_owner'], row['db_link'], row['synonym_name']
+ return (row['table_name'], row['table_owner'],
+ row['db_link'], row['synonym_name'])
else:
return None, None, None, None
@@ -941,10 +987,10 @@ class OracleDialect(default.DefaultDialect):
if resolve_synonyms:
actual_name, owner, dblink, synonym = self._resolve_synonym(
- connection,
- desired_owner=self.denormalize_name(schema),
- desired_synonym=self.denormalize_name(table_name)
- )
+ connection,
+ desired_owner=self.denormalize_name(schema),
+ desired_synonym=self.denormalize_name(table_name)
+ )
else:
actual_name, owner, dblink, synonym = None, None, None, None
if not actual_name:
@@ -957,8 +1003,8 @@ class OracleDialect(default.DefaultDialect):
# will need to hear from more users if we are doing
# the right thing here. See [ticket:2619]
owner = connection.scalar(
- sql.text("SELECT username FROM user_db_links "
- "WHERE db_link=:link"), link=dblink)
+ sql.text("SELECT username FROM user_db_links "
+ "WHERE db_link=:link"), link=dblink)
dblink = "@" + dblink
elif not owner:
owner = self.denormalize_name(schema or self.default_schema_name)
@@ -980,7 +1026,8 @@ class OracleDialect(default.DefaultDialect):
schema = self.default_schema_name
s = sql.text(
"SELECT table_name FROM all_tables "
- "WHERE nvl(tablespace_name, 'no tablespace') NOT IN ('SYSTEM', 'SYSAUX') "
+ "WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
+ "('SYSTEM', 'SYSAUX') "
"AND OWNER = :owner "
"AND IOT_NAME IS NULL")
cursor = connection.execute(s, owner=schema)
@@ -1021,9 +1068,9 @@ class OracleDialect(default.DefaultDialect):
params = {"table_name": table_name}
text = "SELECT column_name, data_type, %(char_length_col)s, "\
- "data_precision, data_scale, "\
- "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\
- "WHERE table_name = :table_name"
+ "data_precision, data_scale, "\
+ "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\
+ "WHERE table_name = :table_name"
if schema is not None:
params['owner'] = schema
text += " AND owner = :owner "
@@ -1034,7 +1081,8 @@ class OracleDialect(default.DefaultDialect):
for row in c:
(colname, orig_colname, coltype, length, precision, scale, nullable, default) = \
- (self.normalize_name(row[0]), row[0], row[1], row[2], row[3], row[4], row[5] == 'Y', row[6])
+ (self.normalize_name(row[0]), row[0], row[1], row[2],
+ row[3], row[4], row[5] == 'Y', row[6])
if coltype == 'NUMBER':
coltype = NUMBER(precision, scale)
@@ -1121,21 +1169,23 @@ class OracleDialect(default.DefaultDialect):
for rset in rp:
if rset.index_name != last_index_name:
remove_if_primary_key(index)
- index = dict(name=self.normalize_name(rset.index_name), column_names=[])
+ index = dict(name=self.normalize_name(rset.index_name),
+ column_names=[])
indexes.append(index)
index['unique'] = uniqueness.get(rset.uniqueness, False)
# filter out Oracle SYS_NC names. could also do an outer join
# to the all_tab_columns table and check for real col names there.
if not oracle_sys_col.match(rset.column_name):
- index['column_names'].append(self.normalize_name(rset.column_name))
+ index['column_names'].append(
+ self.normalize_name(rset.column_name))
last_index_name = rset.index_name
remove_if_primary_key(index)
return indexes
@reflection.cache
def _get_constraint_data(self, connection, table_name, schema=None,
- dblink='', **kw):
+ dblink='', **kw):
params = {'table_name': table_name}
@@ -1184,9 +1234,9 @@ class OracleDialect(default.DefaultDialect):
info_cache=info_cache)
pkeys = []
constraint_name = None
- constraint_data = self._get_constraint_data(connection, table_name,
- schema, dblink,
- info_cache=kw.get('info_cache'))
+ constraint_data = self._get_constraint_data(
+ connection, table_name, schema, dblink,
+ info_cache=kw.get('info_cache'))
for row in constraint_data:
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
@@ -1219,9 +1269,9 @@ class OracleDialect(default.DefaultDialect):
resolve_synonyms, dblink,
info_cache=info_cache)
- constraint_data = self._get_constraint_data(connection, table_name,
- schema, dblink,
- info_cache=kw.get('info_cache'))
+ constraint_data = self._get_constraint_data(
+ connection, table_name, schema, dblink,
+ info_cache=kw.get('info_cache'))
def fkey_rec():
return {
@@ -1236,7 +1286,7 @@ class OracleDialect(default.DefaultDialect):
for row in constraint_data:
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
- row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
+ row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
if cons_type == 'R':
if remote_table is None:
@@ -1249,23 +1299,28 @@ class OracleDialect(default.DefaultDialect):
rec = fkeys[cons_name]
rec['name'] = cons_name
- local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns']
+ local_cols, remote_cols = \
+ rec['constrained_columns'], rec['referred_columns']
if not rec['referred_table']:
if resolve_synonyms:
ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
- self._resolve_synonym(
- connection,
- desired_owner=self.denormalize_name(remote_owner),
- desired_table=self.denormalize_name(remote_table)
- )
+ self._resolve_synonym(
+ connection,
+ desired_owner=self.denormalize_name(
+ remote_owner),
+ desired_table=self.denormalize_name(
+ remote_table)
+ )
if ref_synonym:
remote_table = self.normalize_name(ref_synonym)
- remote_owner = self.normalize_name(ref_remote_owner)
+ remote_owner = self.normalize_name(
+ ref_remote_owner)
rec['referred_table'] = remote_table
- if requested_schema is not None or self.denormalize_name(remote_owner) != schema:
+ if requested_schema is not None or \
+ self.denormalize_name(remote_owner) != schema:
rec['referred_schema'] = remote_owner
local_cols.append(local_column)
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index bb3c837cc..4a1ceecb1 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -10,7 +10,8 @@
.. dialect:: oracle+cx_oracle
:name: cx-Oracle
:dbapi: cx_oracle
- :connectstring: oracle+cx_oracle://user:pass@host:port/dbname[?key=value&key=value...]
+ :connectstring: oracle+cx_oracle://user:pass@host:port/dbname\
+[?key=value&key=value...]
:url: http://cx-oracle.sourceforge.net/
Additional Connect Arguments
@@ -52,21 +53,21 @@ on the URL, or as keyword arguments to :func:`.create_engine()` are:
.. versionadded:: 0.8 specific DBAPI types can be excluded from the
auto_setinputsizes feature via the exclude_setinputsizes attribute.
-* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or alternatively
- an integer value. This value is only available as a URL query string
- argument.
+* ``mode`` - This is given the string value of SYSDBA or SYSOPER, or
+ alternatively an integer value. This value is only available as a URL query
+ string argument.
-* ``threaded`` - enable multithreaded access to cx_oracle connections. Defaults
- to ``True``. Note that this is the opposite default of the cx_Oracle DBAPI
- itself.
+* ``threaded`` - enable multithreaded access to cx_oracle connections.
+ Defaults to ``True``. Note that this is the opposite default of the
+ cx_Oracle DBAPI itself.
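+
+For example (a sketch; the DSN is a placeholder, with ``mode`` given on the
+URL and ``threaded`` as a keyword argument)::
+
+    engine = create_engine(
+        "oracle+cx_oracle://scott:tiger@dsn?mode=SYSDBA",
+        threaded=False)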
.. _cx_oracle_unicode:
Unicode
-------
-The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the ability
-to return string results as Python unicode objects natively.
+The cx_Oracle DBAPI as of version 5 fully supports unicode, and has the
+ability to return string results as Python unicode objects natively.
When used in Python 3, cx_Oracle returns all strings as Python unicode objects
(that is, plain ``str`` in Python 3). In Python 2, it will return as Python
@@ -74,37 +75,39 @@ unicode those column values that are of type ``NVARCHAR`` or ``NCLOB``. For
column values that are of type ``VARCHAR`` or other non-unicode string types,
it will return values as Python strings (e.g. bytestrings).
-The cx_Oracle SQLAlchemy dialect presents two different options for the use case of
-returning ``VARCHAR`` column values as Python unicode objects under Python 2:
+The cx_Oracle SQLAlchemy dialect presents two different options for the use
+case of returning ``VARCHAR`` column values as Python unicode objects under
+Python 2:
* the cx_Oracle DBAPI has the ability to coerce all string results to Python
unicode objects unconditionally using output type handlers. This has
the advantage that the unicode conversion is global to all statements
at the cx_Oracle driver level, meaning it works with raw textual SQL
statements that have no typing information associated. However, this system
- has been observed to incur signfiicant performance overhead, not only because
- it takes effect for all string values unconditionally, but also because cx_Oracle under
- Python 2 seems to use a pure-Python function call in order to do the
- decode operation, which under cPython can orders of magnitude slower
- than doing it using C functions alone.
-
-* SQLAlchemy has unicode-decoding services built in, and when using SQLAlchemy's
- C extensions, these functions do not use any Python function calls and
- are very fast. The disadvantage to this approach is that the unicode
- conversion only takes effect for statements where the :class:`.Unicode` type
- or :class:`.String` type with ``convert_unicode=True`` is explicitly
- associated with the result column. This is the case for any ORM or Core
- query or SQL expression as well as for a :func:`.text` construct that specifies
- output column types, so in the vast majority of cases this is not an issue.
- However, when sending a completely raw string to :meth:`.Connection.execute`,
- this typing information isn't present, unless the string is handled
- within a :func:`.text` construct that adds typing information.
+ has been observed to incur significant performance overhead, not only
+ because it takes effect for all string values unconditionally, but also
+ because cx_Oracle under Python 2 seems to use a pure-Python function call in
+ order to do the decode operation, which under cPython can be orders of
+ magnitude slower than doing it using C functions alone.
+
+* SQLAlchemy has unicode-decoding services built in, and when using
+ SQLAlchemy's C extensions, these functions do not use any Python function
+ calls and are very fast. The disadvantage to this approach is that the
+ unicode conversion only takes effect for statements where the
+ :class:`.Unicode` type or :class:`.String` type with
+ ``convert_unicode=True`` is explicitly associated with the result column.
+ This is the case for any ORM or Core query or SQL expression as well as for
+ a :func:`.text` construct that specifies output column types, so in the vast
+ majority of cases this is not an issue. However, when sending a completely
+ raw string to :meth:`.Connection.execute`, this typing information isn't
+ present, unless the string is handled within a :func:`.text` construct that
+ adds typing information.
As of version 0.9.2 of SQLAlchemy, the default approach is to use SQLAlchemy's
typing system. This keeps cx_Oracle's expensive Python 2 approach
-disabled unless the user explicitly wants it. Under Python 3, SQLAlchemy detects
-that cx_Oracle is returning unicode objects natively and cx_Oracle's system
-is used.
+disabled unless the user explicitly wants it. Under Python 3, SQLAlchemy
+detects that cx_Oracle is returning unicode objects natively and cx_Oracle's
+system is used.
To re-enable cx_Oracle's output type handler under Python 2, the
``coerce_to_unicode=True`` flag (new in 0.9.4) can be passed to
@@ -117,12 +120,13 @@ as Python unicode under Python 2 without using cx_Oracle's native handlers,
the :func:`.text` feature can be used::
from sqlalchemy import text, Unicode
- result = conn.execute(text("select username from user").columns(username=Unicode))
+ result = conn.execute(
+ text("select username from user").columns(username=Unicode))
-.. versionchanged:: 0.9.2 cx_Oracle's outputtypehandlers are no longer used for
- unicode results of non-unicode datatypes in Python 2, after they were identified as a major
- performance bottleneck. SQLAlchemy's own unicode facilities are used
- instead.
+.. versionchanged:: 0.9.2 cx_Oracle's outputtypehandlers are no longer used
+ for unicode results of non-unicode datatypes in Python 2, after they were
+ identified as a major performance bottleneck. SQLAlchemy's own unicode
+ facilities are used instead.
.. versionadded:: 0.9.4 Added the ``coerce_to_unicode`` flag, to re-enable
cx_Oracle's outputtypehandler and revert to pre-0.9.2 behavior.
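For example (a sketch; the DSN here is a placeholder)::

    engine = create_engine("oracle+cx_oracle://scott:tiger@dsn",
                           coerce_to_unicode=True)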
@@ -132,38 +136,43 @@ the :func:`.text` feature can be used::
RETURNING Support
-----------------
-The cx_oracle DBAPI supports a limited subset of Oracle's already limited RETURNING support.
-Typically, results can only be guaranteed for at most one column being returned;
-this is the typical case when SQLAlchemy uses RETURNING to get just the value of a
-primary-key-associated sequence value. Additional column expressions will
-cause problems in a non-determinative way, due to cx_oracle's lack of support for
-the OCI_DATA_AT_EXEC API which is required for more complex RETURNING scenarios.
+The cx_oracle DBAPI supports a limited subset of Oracle's already limited
+RETURNING support. Typically, results can only be guaranteed for at most one
+column being returned; this is the typical case when SQLAlchemy uses RETURNING
+to get just the value of a primary-key-associated sequence value.
+Additional column expressions will cause problems in a non-deterministic way,
+due to cx_oracle's lack of support for the OCI_DATA_AT_EXEC API which is
+required for more complex RETURNING scenarios.
-For this reason, stability may be enhanced by disabling RETURNING support completely;
-SQLAlchemy otherwise will use RETURNING to fetch newly sequence-generated
-primary keys. As illustrated in :ref:`oracle_returning`::
+For this reason, stability may be enhanced by disabling RETURNING support
+completely; SQLAlchemy otherwise will use RETURNING to fetch newly
+sequence-generated primary keys. As illustrated in :ref:`oracle_returning`::
- engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)
+ engine = create_engine("oracle://scott:tiger@dsn",
+ implicit_returning=False)
.. seealso::
- http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 - OCI documentation for RETURNING
+ http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693
+ - OCI documentation for RETURNING
- http://sourceforge.net/mailarchive/message.php?msg_id=31338136 - cx_oracle developer commentary
+ http://sourceforge.net/mailarchive/message.php?msg_id=31338136
+ - cx_oracle developer commentary
.. _cx_oracle_lob:
LOB Objects
-----------
-cx_oracle returns oracle LOBs using the cx_oracle.LOB object. SQLAlchemy converts
-these to strings so that the interface of the Binary type is consistent with that of
-other backends, and so that the linkage to a live cursor is not needed in scenarios
-like result.fetchmany() and result.fetchall(). This means that by default, LOB
-objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
-cursor is broken.
+cx_oracle returns Oracle LOBs using the cx_oracle.LOB object. SQLAlchemy
+converts these to strings so that the interface of the Binary type is
+consistent with that of other backends, and so that the linkage to a live
+cursor is not needed in scenarios like result.fetchmany() and
+result.fetchall(). This means that by default, LOB objects are fully fetched
+unconditionally by SQLAlchemy, and the linkage to a live cursor is broken.
-To disable this processing, pass ``auto_convert_lobs=False`` to :func:`.create_engine()`.
+To disable this processing, pass ``auto_convert_lobs=False`` to
+:func:`.create_engine()`.
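+
+For example, a minimal sketch, reusing the illustrative DSN from the
+RETURNING example above::
+
+    engine = create_engine("oracle://scott:tiger@dsn",
+                           auto_convert_lobs=False)
+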
Two Phase Transaction Support
-----------------------------
@@ -314,7 +323,7 @@ class _OracleNumeric(sqltypes.Numeric):
if self.precision is None and self.scale is None:
return processors.to_float
elif not getattr(self, '_is_oracle_number', False) \
- and self.scale is not None:
+ and self.scale is not None:
return processors.to_float
else:
return None
@@ -322,7 +331,7 @@ class _OracleNumeric(sqltypes.Numeric):
# cx_oracle 4 behavior, will assume
# floats
return super(_OracleNumeric, self).\
- result_processor(dialect, coltype)
+ result_processor(dialect, coltype)
class _OracleDate(sqltypes.Date):
@@ -363,7 +372,8 @@ class _NativeUnicodeMixin(object):
return unicode(value)
return process
else:
- return super(_NativeUnicodeMixin, self).bind_processor(dialect)
+ return super(
+ _NativeUnicodeMixin, self).bind_processor(dialect)
# we apply a connection output handler that returns
# unicode in all cases, so the "native_unicode" flag
@@ -392,11 +402,13 @@ class _OracleLong(oracle.LONG):
def get_dbapi_type(self, dbapi):
return dbapi.LONG_STRING
+
class _OracleString(_NativeUnicodeMixin, sqltypes.String):
pass
-class _OracleUnicodeText(_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText):
+class _OracleUnicodeText(
+ _LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText):
def get_dbapi_type(self, dbapi):
return dbapi.NCLOB
@@ -405,7 +417,8 @@ class _OracleUnicodeText(_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText):
if lob_processor is None:
return None
- string_processor = sqltypes.UnicodeText.result_processor(self, dialect, coltype)
+ string_processor = sqltypes.UnicodeText.result_processor(
+ self, dialect, coltype)
if string_processor is None:
return lob_processor
@@ -450,7 +463,7 @@ class OracleCompiler_cx_oracle(OracleCompiler):
def bindparam_string(self, name, **kw):
quote = getattr(name, 'quote', None)
if quote is True or quote is not False and \
- self.preparer._bindparam_requires_quotes(name):
+ self.preparer._bindparam_requires_quotes(name):
quoted_name = '"%s"' % name
self._quoted_bind_names[name] = quoted_name
return OracleCompiler.bindparam_string(self, quoted_name, **kw)
@@ -470,12 +483,12 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
# here. so convert names in quoted_bind_names
# to encoded as well.
quoted_bind_names = \
- dict(
- (fromname.encode(self.dialect.encoding),
- toname.encode(self.dialect.encoding))
- for fromname, toname in
- quoted_bind_names.items()
- )
+ dict(
+ (fromname.encode(self.dialect.encoding),
+ toname.encode(self.dialect.encoding))
+ for fromname, toname in
+ quoted_bind_names.items()
+ )
for param in self.parameters:
for fromname, toname in quoted_bind_names.items():
param[toname] = param[fromname]
@@ -485,29 +498,30 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
# cx_oracle really has issues when you setinputsizes
# on String, including that outparams/RETURNING
# breaks for varchars
- self.set_input_sizes(quoted_bind_names,
- exclude_types=self.dialect.exclude_setinputsizes
- )
+ self.set_input_sizes(
+ quoted_bind_names,
+ exclude_types=self.dialect.exclude_setinputsizes
+ )
# if a single execute, check for outparams
if len(self.compiled_parameters) == 1:
for bindparam in self.compiled.binds.values():
if bindparam.isoutparam:
dbtype = bindparam.type.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
+ get_dbapi_type(self.dialect.dbapi)
if not hasattr(self, 'out_parameters'):
self.out_parameters = {}
if dbtype is None:
raise exc.InvalidRequestError(
- "Cannot create out parameter for parameter "
- "%r - its type %r is not supported by"
- " cx_oracle" %
- (bindparam.key, bindparam.type)
- )
+ "Cannot create out parameter for parameter "
+ "%r - its type %r is not supported by"
+ " cx_oracle" %
+ (bindparam.key, bindparam.type)
+ )
name = self.compiled.bind_names[bindparam]
self.out_parameters[name] = self.cursor.var(dbtype)
self.parameters[0][quoted_bind_names.get(name, name)] = \
- self.out_parameters[name]
+ self.out_parameters[name]
def create_cursor(self):
c = self._dbapi_connection.cursor()
@@ -519,9 +533,9 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
def get_result_proxy(self):
if hasattr(self, 'out_parameters') and self.compiled.returning:
returning_params = dict(
- (k, v.getvalue())
- for k, v in self.out_parameters.items()
- )
+ (k, v.getvalue())
+ for k, v in self.out_parameters.items()
+ )
return ReturningResultProxy(self, returning_params)
result = None
@@ -543,25 +557,29 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
if name in self.out_parameters:
type = bind.type
impl_type = type.dialect_impl(self.dialect)
- dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
+ dbapi_type = impl_type.get_dbapi_type(
+ self.dialect.dbapi)
result_processor = impl_type.\
- result_processor(self.dialect,
- dbapi_type)
+ result_processor(self.dialect,
+ dbapi_type)
if result_processor is not None:
out_parameters[name] = \
- result_processor(self.out_parameters[name].getvalue())
+ result_processor(
+ self.out_parameters[name].getvalue())
else:
- out_parameters[name] = self.out_parameters[name].getvalue()
+ out_parameters[name] = self.out_parameters[
+ name].getvalue()
else:
result.out_parameters = dict(
(k, v.getvalue())
- for k, v in self.out_parameters.items()
- )
+ for k, v in self.out_parameters.items()
+ )
return result
-class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle):
+class OracleExecutionContext_cx_oracle_with_unicode(
+ OracleExecutionContext_cx_oracle):
"""Support WITH_UNICODE in Python 2.xx.
WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
@@ -574,17 +592,19 @@ class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_or
passed as Python unicode objects.
"""
+
def __init__(self, *arg, **kw):
OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw)
self.statement = util.text_type(self.statement)
def _execute_scalar(self, stmt):
return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
- _execute_scalar(util.text_type(stmt))
+ _execute_scalar(util.text_type(stmt))
class ReturningResultProxy(_result.FullyBufferedResultProxy):
- """Result proxy which stuffs the _returning clause + outparams into the fetch."""
+ """Result proxy which stuffs the _returning clause + outparams
+ into the fetch."""
def __init__(self, context, returning_params):
self._returning_params = returning_params
@@ -598,8 +618,10 @@ class ReturningResultProxy(_result.FullyBufferedResultProxy):
]
def _buffer_rows(self):
- return collections.deque([tuple(self._returning_params["ret_%d" % i]
- for i, c in enumerate(self._returning_params))])
+ return collections.deque(
+ [tuple(self._returning_params["ret_%d" % i]
+ for i, c in enumerate(self._returning_params))]
+ )
class OracleDialect_cx_oracle(OracleDialect):
@@ -610,7 +632,8 @@ class OracleDialect_cx_oracle(OracleDialect):
colspecs = colspecs = {
sqltypes.Numeric: _OracleNumeric,
- sqltypes.Date: _OracleDate, # generic type, assume datetime.date is desired
+ # generic type, assume datetime.date is desired
+ sqltypes.Date: _OracleDate,
sqltypes.LargeBinary: _OracleBinary,
sqltypes.Boolean: oracle._OracleBoolean,
sqltypes.Interval: _OracleInterval,
@@ -637,50 +660,50 @@ class OracleDialect_cx_oracle(OracleDialect):
execute_sequence_format = list
def __init__(self,
- auto_setinputsizes=True,
- exclude_setinputsizes=("STRING", "UNICODE"),
- auto_convert_lobs=True,
- threaded=True,
- allow_twophase=True,
- coerce_to_decimal=True,
- coerce_to_unicode=False,
- arraysize=50, **kwargs):
+ auto_setinputsizes=True,
+ exclude_setinputsizes=("STRING", "UNICODE"),
+ auto_convert_lobs=True,
+ threaded=True,
+ allow_twophase=True,
+ coerce_to_decimal=True,
+ coerce_to_unicode=False,
+ arraysize=50, **kwargs):
OracleDialect.__init__(self, **kwargs)
self.threaded = threaded
self.arraysize = arraysize
self.allow_twophase = allow_twophase
self.supports_timestamp = self.dbapi is None or \
- hasattr(self.dbapi, 'TIMESTAMP')
+ hasattr(self.dbapi, 'TIMESTAMP')
self.auto_setinputsizes = auto_setinputsizes
self.auto_convert_lobs = auto_convert_lobs
if hasattr(self.dbapi, 'version'):
self.cx_oracle_ver = tuple([int(x) for x in
- self.dbapi.version.split('.')])
+ self.dbapi.version.split('.')])
else:
self.cx_oracle_ver = (0, 0, 0)
def types(*names):
return set(
- getattr(self.dbapi, name, None) for name in names
- ).difference([None])
+ getattr(self.dbapi, name, None) for name in names
+ ).difference([None])
self.exclude_setinputsizes = types(*(exclude_setinputsizes or ()))
self._cx_oracle_string_types = types("STRING", "UNICODE",
- "NCLOB", "CLOB")
+ "NCLOB", "CLOB")
self._cx_oracle_unicode_types = types("UNICODE", "NCLOB")
self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0)
self.coerce_to_unicode = (
- self.cx_oracle_ver >= (5, 0) and
- coerce_to_unicode
- )
+ self.cx_oracle_ver >= (5, 0) and
+ coerce_to_unicode
+ )
self.supports_native_decimal = (
- self.cx_oracle_ver >= (5, 0) and
- coerce_to_decimal
- )
+ self.cx_oracle_ver >= (5, 0) and
+ coerce_to_decimal
+ )
self._cx_oracle_native_nvarchar = self.cx_oracle_ver >= (5, 0)
@@ -688,7 +711,8 @@ class OracleDialect_cx_oracle(OracleDialect):
# this occurs in tests with mock DBAPIs
self._cx_oracle_string_types = set()
self._cx_oracle_with_unicode = False
- elif self.cx_oracle_ver >= (5,) and not hasattr(self.dbapi, 'UNICODE'):
+ elif self.cx_oracle_ver >= (5,) and not \
+ hasattr(self.dbapi, 'UNICODE'):
# cx_Oracle WITH_UNICODE mode. *only* python
# unicode objects accepted for anything
self.supports_unicode_statements = True
@@ -696,32 +720,32 @@ class OracleDialect_cx_oracle(OracleDialect):
self._cx_oracle_with_unicode = True
if util.py2k:
- # There's really no reason to run with WITH_UNICODE under Python 2.x.
- # Give the user a hint.
+ # There's really no reason to run with WITH_UNICODE under
+ # Python 2.x. Give the user a hint.
util.warn(
"cx_Oracle is compiled under Python 2.xx using the "
"WITH_UNICODE flag. Consider recompiling cx_Oracle "
- "without this flag, which is in no way necessary for full "
- "support of Unicode. Otherwise, all string-holding bind "
- "parameters must be explicitly typed using SQLAlchemy's "
- "String type or one of its subtypes,"
+ "without this flag, which is in no way necessary for "
+ "full support of Unicode. Otherwise, all string-holding "
+ "bind parameters must be explicitly typed using "
+                "SQLAlchemy's String type or one of its subtypes, "
"or otherwise be passed as Python unicode. "
"Plain Python strings passed as bind parameters will be "
"silently corrupted by cx_Oracle."
- )
+ )
self.execution_ctx_cls = \
- OracleExecutionContext_cx_oracle_with_unicode
+ OracleExecutionContext_cx_oracle_with_unicode
else:
self._cx_oracle_with_unicode = False
if self.cx_oracle_ver is None or \
- not self.auto_convert_lobs or \
- not hasattr(self.dbapi, 'CLOB'):
+ not self.auto_convert_lobs or \
+ not hasattr(self.dbapi, 'CLOB'):
self.dbapi_type_map = {}
else:
# only use this for LOB objects. using it for strings, dates
- # etc. leads to a little too much magic, reflection doesn't know if it should
- # expect encoded strings or unicodes, etc.
+            # etc. leads to a little too much magic; reflection doesn't know
+ # if it should expect encoded strings or unicodes, etc.
self.dbapi_type_map = {
self.dbapi.CLOB: oracle.CLOB(),
self.dbapi.NCLOB: oracle.NCLOB(),
@@ -764,8 +788,8 @@ class OracleDialect_cx_oracle(OracleDialect):
def output_type_handler(cursor, name, defaultType,
size, precision, scale):
return cursor.var(
- cx_Oracle.STRING,
- 255, arraysize=cursor.arraysize)
+ cx_Oracle.STRING,
+ 255, arraysize=cursor.arraysize)
cursor = conn.cursor()
cursor.outputtypehandler = output_type_handler
@@ -796,17 +820,17 @@ class OracleDialect_cx_oracle(OracleDialect):
cx_Oracle = self.dbapi
def output_type_handler(cursor, name, defaultType,
- size, precision, scale):
+ size, precision, scale):
# convert all NUMBER with precision + positive scale to Decimal
# this almost allows "native decimal" mode.
if self.supports_native_decimal and \
defaultType == cx_Oracle.NUMBER and \
precision and scale > 0:
return cursor.var(
- cx_Oracle.STRING,
- 255,
- outconverter=self._to_decimal,
- arraysize=cursor.arraysize)
+ cx_Oracle.STRING,
+ 255,
+ outconverter=self._to_decimal,
+ arraysize=cursor.arraysize)
# if NUMBER with zero precision and 0 or neg scale, this appears
# to indicate "ambiguous". Use a slower converter that will
# make a decision based on each value received - the type
@@ -816,10 +840,10 @@ class OracleDialect_cx_oracle(OracleDialect):
defaultType == cx_Oracle.NUMBER \
and not precision and scale <= 0:
return cursor.var(
- cx_Oracle.STRING,
- 255,
- outconverter=self._detect_decimal,
- arraysize=cursor.arraysize)
+ cx_Oracle.STRING,
+ 255,
+ outconverter=self._detect_decimal,
+ arraysize=cursor.arraysize)
# allow all strings to come back natively as Unicode
elif self.coerce_to_unicode and \
defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
@@ -856,7 +880,7 @@ class OracleDialect_cx_oracle(OracleDialect):
dsn=dsn,
threaded=self.threaded,
twophase=self.allow_twophase,
- )
+ )
if util.py2k:
if self._cx_oracle_with_unicode:
@@ -882,9 +906,9 @@ class OracleDialect_cx_oracle(OracleDialect):
def _get_server_version_info(self, connection):
return tuple(
- int(x)
- for x in connection.connection.version.split('.')
- )
+ int(x)
+ for x in connection.connection.version.split('.')
+ )
def is_disconnect(self, e, connection, cursor):
error, = e.args
@@ -924,11 +948,11 @@ class OracleDialect_cx_oracle(OracleDialect):
connection.info['cx_oracle_prepared'] = result
def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
+ recover=False):
self.do_rollback(connection.connection)
def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
+ recover=False):
if not is_prepared:
self.do_commit(connection.connection)
else:
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index 19a668a3e..82c8e2f0f 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -10,7 +10,8 @@
:name: zxJDBC for Jython
:dbapi: zxjdbc
:connectstring: oracle+zxjdbc://user:pass@host/dbname
- :driverurl: http://www.oracle.com/technology/software/tech/java/sqlj_jdbc/index.html.
+ :driverurl: http://www.oracle.com/technology/software/tech/java/\
+sqlj_jdbc/index.html.
"""
import decimal
@@ -18,7 +19,9 @@ import re
from sqlalchemy import sql, types as sqltypes, util
from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
-from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, OracleExecutionContext
+from sqlalchemy.dialects.oracle.base import (OracleCompiler,
+ OracleDialect,
+ OracleExecutionContext)
from sqlalchemy.engine import result as _result
from sqlalchemy.sql import expression
import collections
@@ -40,7 +43,7 @@ class _ZxJDBCDate(sqltypes.Date):
class _ZxJDBCNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
- #XXX: does the dialect return Decimal or not???
+ # XXX: does the dialect return Decimal or not???
# if it does (in all cases), we could use a None processor as well as
# the to_float generic processor
if self.asdecimal:
@@ -61,10 +64,12 @@ class _ZxJDBCNumeric(sqltypes.Numeric):
class OracleCompiler_zxjdbc(OracleCompiler):
def returning_clause(self, stmt, returning_cols):
- self.returning_cols = list(expression._select_iterables(returning_cols))
+ self.returning_cols = list(
+ expression._select_iterables(returning_cols))
# within_columns_clause=False so that labels (foo AS bar) don't render
- columns = [self.process(c, within_columns_clause=False, result_map=self.result_map)
+ columns = [self.process(c, within_columns_clause=False,
+ result_map=self.result_map)
for c in self.returning_cols]
if not hasattr(self, 'returning_parameters'):
@@ -72,12 +77,15 @@ class OracleCompiler_zxjdbc(OracleCompiler):
binds = []
for i, col in enumerate(self.returning_cols):
- dbtype = col.type.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
+ dbtype = col.type.dialect_impl(
+ self.dialect).get_dbapi_type(self.dialect.dbapi)
self.returning_parameters.append((i + 1, dbtype))
- bindparam = sql.bindparam("ret_%d" % i, value=ReturningParam(dbtype))
+ bindparam = sql.bindparam(
+ "ret_%d" % i, value=ReturningParam(dbtype))
self.binds[bindparam.key] = bindparam
- binds.append(self.bindparam_string(self._truncate_bindparam(bindparam)))
+ binds.append(
+ self.bindparam_string(self._truncate_bindparam(bindparam)))
return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
@@ -98,13 +106,17 @@ class OracleExecutionContext_zxjdbc(OracleExecutionContext):
rrs = self.statement.__statement__.getReturnResultSet()
next(rrs)
except SQLException as sqle:
- msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode())
+ msg = '%s [SQLCode: %d]' % (
+ sqle.getMessage(), sqle.getErrorCode())
if sqle.getSQLState() is not None:
msg += ' [SQLState: %s]' % sqle.getSQLState()
raise zxJDBC.Error(msg)
else:
- row = tuple(self.cursor.datahandler.getPyObject(rrs, index, dbtype)
- for index, dbtype in self.compiled.returning_parameters)
+ row = tuple(
+ self.cursor.datahandler.getPyObject(
+ rrs, index, dbtype)
+ for index, dbtype in
+ self.compiled.returning_parameters)
return ReturningResultProxy(self, row)
finally:
if rrs is not None:
@@ -165,8 +177,8 @@ class ReturningParam(object):
def __repr__(self):
kls = self.__class__
- return '<%s.%s object at 0x%x type=%s>' % (kls.__module__, kls.__name__, id(self),
- self.type)
+ return '<%s.%s object at 0x%x type=%s>' % (
+ kls.__module__, kls.__name__, id(self), self.type)
class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
@@ -207,13 +219,16 @@ class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
def initialize(self, connection):
super(OracleDialect_zxjdbc, self).initialize(connection)
- self.implicit_returning = connection.connection.driverversion >= '10.2'
+ self.implicit_returning = \
+ connection.connection.driverversion >= '10.2'
def _create_jdbc_url(self, url):
- return 'jdbc:oracle:thin:@%s:%s:%s' % (url.host, url.port or 1521, url.database)
+ return 'jdbc:oracle:thin:@%s:%s:%s' % (
+ url.host, url.port or 1521, url.database)
def _get_server_version_info(self, connection):
- version = re.search(r'Release ([\d\.]+)', connection.connection.dbversion).group(1)
+ version = re.search(
+ r'Release ([\d\.]+)', connection.connection.dbversion).group(1)
return tuple(int(x) for x in version.split('.'))
dialect = OracleDialect_zxjdbc
diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py
index 046be760d..f813e0003 100644
--- a/lib/sqlalchemy/dialects/postgres.py
+++ b/lib/sqlalchemy/dialects/postgres.py
@@ -9,9 +9,10 @@
from sqlalchemy.util import warn_deprecated
warn_deprecated(
- "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'. "
- "The new URL format is postgresql[+driver]://<user>:<pass>@<host>/<dbname>"
- )
+ "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to "
+ "'postgresql'. The new URL format is "
+ "postgresql[+driver]://<user>:<pass>@<host>/<dbname>"
+)
from sqlalchemy.dialects.postgresql import *
from sqlalchemy.dialects.postgresql import base
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 8081f75dd..6f23a497b 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -50,22 +50,25 @@ Transaction Isolation Level
All Postgresql dialects support setting of transaction isolation level
both via a dialect-specific parameter ``isolation_level``
accepted by :func:`.create_engine`,
-as well as the ``isolation_level`` argument as passed to :meth:`.Connection.execution_options`.
-When using a non-psycopg2 dialect, this feature works by issuing the
-command ``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL
-<level>`` for each new connection.
+as well as the ``isolation_level`` argument as passed to
+:meth:`.Connection.execution_options`. When using a non-psycopg2 dialect,
+this feature works by issuing the command
+``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL <level>`` for
+each new connection.
To set isolation level using :func:`.create_engine`::
engine = create_engine(
- "postgresql+pg8000://scott:tiger@localhost/test",
- isolation_level="READ UNCOMMITTED"
- )
+ "postgresql+pg8000://scott:tiger@localhost/test",
+ isolation_level="READ UNCOMMITTED"
+ )
To set using per-connection execution options::
connection = engine.connect()
- connection = connection.execution_options(isolation_level="READ COMMITTED")
+ connection = connection.execution_options(
+ isolation_level="READ COMMITTED"
+ )
Valid values for ``isolation_level`` include:
@@ -93,12 +96,13 @@ The Postgresql dialect can reflect tables from any schema. The
:paramref:`.Table.schema` argument, or alternatively the
:paramref:`.MetaData.reflect.schema` argument determines which schema will
be searched for the table or tables. The reflected :class:`.Table` objects
-will in all cases retain this ``.schema`` attribute as was specified. However,
-with regards to tables which these :class:`.Table` objects refer to via
-foreign key constraint, a decision must be made as to how the ``.schema``
+will in all cases retain this ``.schema`` attribute as was specified.
+However, with regard to tables which these :class:`.Table` objects refer to
+via foreign key constraint, a decision must be made as to how the ``.schema``
is represented in those remote tables, in the case where that remote
schema name is also a member of the current
-`Postgresql search path <http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_.
+`Postgresql search path
+<http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_.
By default, the Postgresql dialect mimics the behavior encouraged by
Postgresql's own ``pg_get_constraintdef()`` builtin procedure. This function
@@ -115,7 +119,8 @@ illustrates this behavior::
CREATE TABLE
test=> SET search_path TO public, test_schema;
test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM
- test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
+ test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n
+ test-> ON n.oid = c.relnamespace
test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid
test-> WHERE c.relname='referring' AND r.contype = 'f'
test-> ;
@@ -124,10 +129,11 @@ illustrates this behavior::
FOREIGN KEY (referred_id) REFERENCES referred(id)
(1 row)
-Above, we created a table ``referred`` as a member of the remote schema ``test_schema``, however
-when we added ``test_schema`` to the PG ``search_path`` and then asked ``pg_get_constraintdef()``
-for the ``FOREIGN KEY`` syntax, ``test_schema`` was not included in the
-output of the function.
+Above, we created a table ``referred`` as a member of the remote schema
+``test_schema``; however, when we added ``test_schema`` to the
+PG ``search_path`` and then asked ``pg_get_constraintdef()`` for the
+``FOREIGN KEY`` syntax, ``test_schema`` was not included in the output of
+the function.
On the other hand, if we set the search path back to the typical default
of ``public``::
@@ -139,7 +145,8 @@ The same query against ``pg_get_constraintdef()`` now returns the fully
schema-qualified name for us::
test=> SELECT pg_catalog.pg_get_constraintdef(r.oid, true) FROM
- test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
+ test-> pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n
+ test-> ON n.oid = c.relnamespace
test-> JOIN pg_catalog.pg_constraint r ON c.oid = r.conrelid
test-> WHERE c.relname='referring' AND r.contype = 'f';
pg_get_constraintdef
@@ -157,7 +164,8 @@ reflection process as follows::
>>> with engine.connect() as conn:
... conn.execute("SET search_path TO test_schema, public")
... meta = MetaData()
- ... referring = Table('referring', meta, autoload=True, autoload_with=conn)
+ ... referring = Table('referring', meta,
+ ... autoload=True, autoload_with=conn)
...
<sqlalchemy.engine.result.ResultProxy object at 0x101612ed0>
@@ -167,16 +175,18 @@ The above process would deliver to the :attr:`.MetaData.tables` collection
>>> meta.tables['referred'].schema is None
True
-To alter the behavior of reflection such that the referred schema is maintained
-regardless of the ``search_path`` setting, use the ``postgresql_ignore_search_path``
-option, which can be specified as a dialect-specific argument to both
-:class:`.Table` as well as :meth:`.MetaData.reflect`::
+To alter the behavior of reflection such that the referred schema is
+maintained regardless of the ``search_path`` setting, use the
+``postgresql_ignore_search_path`` option, which can be specified as a
+dialect-specific argument to both :class:`.Table` and
+:meth:`.MetaData.reflect`::
>>> with engine.connect() as conn:
... conn.execute("SET search_path TO test_schema, public")
... meta = MetaData()
- ... referring = Table('referring', meta, autoload=True, autoload_with=conn,
- ... postgresql_ignore_search_path=True)
+ ... referring = Table('referring', meta, autoload=True,
+ ... autoload_with=conn,
+ ... postgresql_ignore_search_path=True)
...
<sqlalchemy.engine.result.ResultProxy object at 0x1016126d0>
@@ -187,29 +197,33 @@ We will now have ``test_schema.referred`` stored as schema-qualified::
.. sidebar:: Best Practices for Postgresql Schema reflection
- The description of Postgresql schema reflection behavior is complex, and is
- the product of many years of dealing with widely varied use cases and user preferences.
- But in fact, there's no need to understand any of it if you just stick to the simplest
- use pattern: leave the ``search_path`` set to its default of ``public`` only, never refer
- to the name ``public`` as an explicit schema name otherwise, and
- refer to all other schema names explicitly when building
- up a :class:`.Table` object. The options described here are only for those users
- who can't, or prefer not to, stay within these guidelines.
-
-Note that **in all cases**, the "default" schema is always reflected as ``None``.
-The "default" schema on Postgresql is that which is returned by the
-Postgresql ``current_schema()`` function. On a typical Postgresql installation,
-this is the name ``public``. So a table that refers to another which is
-in the ``public`` (i.e. default) schema will always have the ``.schema`` attribute
-set to ``None``.
+ The description of Postgresql schema reflection behavior is complex, and
+ is the product of many years of dealing with widely varied use cases and
+ user preferences. But in fact, there's no need to understand any of it if
+ you just stick to the simplest use pattern: leave the ``search_path`` set
+ to its default of ``public`` only, never refer to the name ``public`` as
+ an explicit schema name otherwise, and refer to all other schema names
+ explicitly when building up a :class:`.Table` object. The options
+ described here are only for those users who can't, or prefer not to, stay
+ within these guidelines.
+
+Note that **in all cases**, the "default" schema is always reflected as
+``None``. The "default" schema on Postgresql is that which is returned by the
+Postgresql ``current_schema()`` function. On a typical Postgresql
+installation, this is the name ``public``. So a table that refers to another
+which is in the ``public`` (i.e. default) schema will always have the
+``.schema`` attribute set to ``None``.
.. versionadded:: 0.9.2 Added the ``postgresql_ignore_search_path``
- dialect-level option accepted by :class:`.Table` and :meth:`.MetaData.reflect`.
+ dialect-level option accepted by :class:`.Table` and
+ :meth:`.MetaData.reflect`.
.. seealso::
- `The Schema Search Path <http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_ - on the Postgresql website.
+ `The Schema Search Path
+ <http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH>`_
+ - on the Postgresql website.
INSERT/UPDATE...RETURNING
-------------------------
@@ -273,19 +287,19 @@ produces a statement equivalent to::
SELECT CAST('some text' AS TSVECTOR) AS anon_1
Full Text Searches in Postgresql are influenced by a combination of: the
-PostgresSQL setting of ``default_text_search_config``, the ``regconfig`` used to
-build the GIN/GiST indexes, and the ``regconfig`` optionally passed in during a
-query.
+Postgresql setting of ``default_text_search_config``, the ``regconfig`` used
+to build the GIN/GiST indexes, and the ``regconfig`` optionally passed in
+during a query.
When performing a Full Text Search against a column that has a GIN or
-GiST index that is already pre-computed (which is common on full text searches)
-one may need to explicitly pass in a particular PostgresSQL ``regconfig`` value
-to ensure the query-planner utilizes the index and does not re-compute the
-column on demand.
+GiST index that is already pre-computed (which is common on full text
+searches) one may need to explicitly pass in a particular Postgresql
+``regconfig`` value to ensure the query-planner utilizes the index and does
+not re-compute the column on demand.
-In order to provide for this explicit query planning, or to use different search
-strategies, the ``match`` method accepts a ``postgresql_regconfig`` keyword
-argument.
+In order to provide for this explicit query planning, or to use different
+search strategies, the ``match`` method accepts a ``postgresql_regconfig``
+keyword argument::
select([mytable.c.id]).where(
mytable.c.title.match('somestring', postgresql_regconfig='english')
@@ -296,8 +310,8 @@ Emits the equivalent of::
SELECT mytable.id FROM mytable
WHERE mytable.title @@ to_tsquery('english', 'somestring')
-One can also specifically pass in a `'regconfig'` value to the ``to_tsvector()``
-command as the initial argument.
+One can also specifically pass in a ``'regconfig'`` value to the
+``to_tsvector()`` function as the initial argument::
select([mytable.c.id]).where(
func.to_tsvector('english', mytable.c.title )\
@@ -310,9 +324,9 @@ produces a statement equivalent to::
WHERE to_tsvector('english', mytable.title) @@
to_tsquery('english', 'somestring')
-It is recommended that you use the ``EXPLAIN ANALYZE...`` tool from PostgresSQL
-to ensure that you are generating queries with SQLAlchemy that take full
-advantage of any indexes you may have created for full text search.
+It is recommended that you use the ``EXPLAIN ANALYZE...`` tool from
+Postgresql to ensure that you are generating queries with SQLAlchemy that
+take full advantage of any indexes you may have created for full text search.
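+
+As a rough sketch (the ``engine``, table and search term here are
+illustrative), such a plan can be fetched directly through SQLAlchemy::
+
+    with engine.connect() as conn:
+        plan = conn.execute(
+            "EXPLAIN ANALYZE SELECT mytable.id FROM mytable "
+            "WHERE mytable.title @@ to_tsquery('english', 'somestring')"
+        )
+        for row in plan:
+            print(row[0])
+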
FROM ONLY ...
------------------------
@@ -402,26 +416,26 @@ except ImportError:
_python_UUID = None
from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \
- CHAR, TEXT, FLOAT, NUMERIC, \
- DATE, BOOLEAN, REAL
+ CHAR, TEXT, FLOAT, NUMERIC, \
+ DATE, BOOLEAN, REAL
RESERVED_WORDS = set(
["all", "analyse", "analyze", "and", "any", "array", "as", "asc",
- "asymmetric", "both", "case", "cast", "check", "collate", "column",
- "constraint", "create", "current_catalog", "current_date",
- "current_role", "current_time", "current_timestamp", "current_user",
- "default", "deferrable", "desc", "distinct", "do", "else", "end",
- "except", "false", "fetch", "for", "foreign", "from", "grant", "group",
- "having", "in", "initially", "intersect", "into", "leading", "limit",
- "localtime", "localtimestamp", "new", "not", "null", "of", "off", "offset",
- "old", "on", "only", "or", "order", "placing", "primary", "references",
- "returning", "select", "session_user", "some", "symmetric", "table",
- "then", "to", "trailing", "true", "union", "unique", "user", "using",
- "variadic", "when", "where", "window", "with", "authorization",
- "between", "binary", "cross", "current_schema", "freeze", "full",
- "ilike", "inner", "is", "isnull", "join", "left", "like", "natural",
- "notnull", "outer", "over", "overlaps", "right", "similar", "verbose"
- ])
+ "asymmetric", "both", "case", "cast", "check", "collate", "column",
+ "constraint", "create", "current_catalog", "current_date",
+ "current_role", "current_time", "current_timestamp", "current_user",
+ "default", "deferrable", "desc", "distinct", "do", "else", "end",
+ "except", "false", "fetch", "for", "foreign", "from", "grant", "group",
+ "having", "in", "initially", "intersect", "into", "leading", "limit",
+ "localtime", "localtimestamp", "new", "not", "null", "of", "off",
+ "offset", "old", "on", "only", "or", "order", "placing", "primary",
+ "references", "returning", "select", "session_user", "some", "symmetric",
+ "table", "then", "to", "trailing", "true", "union", "unique", "user",
+ "using", "variadic", "when", "where", "window", "with", "authorization",
+ "between", "binary", "cross", "current_schema", "freeze", "full",
+ "ilike", "inner", "is", "isnull", "join", "left", "like", "natural",
+ "notnull", "outer", "over", "overlaps", "right", "similar", "verbose"
+ ])
_DECIMAL_TYPES = (1231, 1700)
_FLOAT_TYPES = (700, 701, 1021, 1022)
@@ -452,6 +466,7 @@ PGMacAddr = MACADDR
class OID(sqltypes.TypeEngine):
+
"""Provide the Postgresql OID type.
.. versionadded:: 0.9.5
@@ -461,18 +476,21 @@ class OID(sqltypes.TypeEngine):
class TIMESTAMP(sqltypes.TIMESTAMP):
+
def __init__(self, timezone=False, precision=None):
super(TIMESTAMP, self).__init__(timezone=timezone)
self.precision = precision
class TIME(sqltypes.TIME):
+
def __init__(self, timezone=False, precision=None):
super(TIME, self).__init__(timezone=timezone)
self.precision = precision
class INTERVAL(sqltypes.TypeEngine):
+
"""Postgresql INTERVAL type.
The INTERVAL type may not be supported on all DBAPIs.
@@ -511,6 +529,7 @@ PGBit = BIT
class UUID(sqltypes.TypeEngine):
+
"""Postgresql UUID type.
Represents the UUID column type, interpreting
@@ -534,7 +553,8 @@ class UUID(sqltypes.TypeEngine):
"""
if as_uuid and _python_UUID is None:
raise NotImplementedError(
- "This version of Python does not support the native UUID type."
+ "This version of Python does not support "
+ "the native UUID type."
)
self.as_uuid = as_uuid
@@ -560,7 +580,9 @@ class UUID(sqltypes.TypeEngine):
PGUuid = UUID
+
class TSVECTOR(sqltypes.TypeEngine):
+
"""The :class:`.postgresql.TSVECTOR` type implements the Postgresql
text search type TSVECTOR.
@@ -577,21 +599,21 @@ class TSVECTOR(sqltypes.TypeEngine):
__visit_name__ = 'TSVECTOR'
-
class _Slice(expression.ColumnElement):
__visit_name__ = 'slice'
type = sqltypes.NULLTYPE
def __init__(self, slice_, source_comparator):
self.start = source_comparator._check_literal(
- source_comparator.expr,
- operators.getitem, slice_.start)
+ source_comparator.expr,
+ operators.getitem, slice_.start)
self.stop = source_comparator._check_literal(
- source_comparator.expr,
- operators.getitem, slice_.stop)
+ source_comparator.expr,
+ operators.getitem, slice_.stop)
class Any(expression.ColumnElement):
+
"""Represent the clause ``left operator ANY (right)``. ``right`` must be
an array expression.
@@ -612,6 +634,7 @@ class Any(expression.ColumnElement):
class All(expression.ColumnElement):
+
"""Represent the clause ``left operator ALL (right)``. ``right`` must be
an array expression.
@@ -632,6 +655,7 @@ class All(expression.ColumnElement):
class array(expression.Tuple):
+
"""A Postgresql ARRAY literal.
This is used to produce ARRAY literals in SQL expressions, e.g.::
@@ -673,7 +697,7 @@ class array(expression.Tuple):
def _bind_param(self, operator, obj):
return array(*[
expression.BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
+ _compared_to_type=self.type, unique=True)
for o in obj
])
@@ -682,6 +706,7 @@ class array(expression.Tuple):
class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
+
"""Postgresql ARRAY type.
Represents values as Python lists.
@@ -757,6 +782,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
__visit_name__ = 'ARRAY'
class Comparator(sqltypes.Concatenable.Comparator):
+
"""Define comparison operations for :class:`.ARRAY`."""
def __getitem__(self, index):
@@ -775,7 +801,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
return_type = self.type.item_type
return self._binary_operate(self.expr, operators.getitem, index,
- result_type=return_type)
+ result_type=return_type)
def any(self, other, operator=operators.eq):
"""Return ``other operator ANY (array)`` clause.
@@ -902,7 +928,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
"""
if isinstance(item_type, ARRAY):
raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
- "handles multi-dimensional arrays of basetype")
+ "handles multi-dimensional arrays of basetype")
if isinstance(item_type, type):
item_type = item_type()
self.item_type = item_type
@@ -921,59 +947,60 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
if dim is None:
arr = list(arr)
if dim == 1 or dim is None and (
- # this has to be (list, tuple), or at least
- # not hasattr('__iter__'), since Py3K strings
- # etc. have __iter__
- not arr or not isinstance(arr[0], (list, tuple))):
+ # this has to be (list, tuple), or at least
+ # not hasattr('__iter__'), since Py3K strings
+ # etc. have __iter__
+ not arr or not isinstance(arr[0], (list, tuple))):
if itemproc:
return collection(itemproc(x) for x in arr)
else:
return collection(arr)
else:
return collection(
- self._proc_array(
- x, itemproc,
- dim - 1 if dim is not None else None,
- collection)
- for x in arr
- )
+ self._proc_array(
+ x, itemproc,
+ dim - 1 if dim is not None else None,
+ collection)
+ for x in arr
+ )
def bind_processor(self, dialect):
item_proc = self.item_type.\
- dialect_impl(dialect).\
- bind_processor(dialect)
+ dialect_impl(dialect).\
+ bind_processor(dialect)
def process(value):
if value is None:
return value
else:
return self._proc_array(
- value,
- item_proc,
- self.dimensions,
- list)
+ value,
+ item_proc,
+ self.dimensions,
+ list)
return process
def result_processor(self, dialect, coltype):
item_proc = self.item_type.\
- dialect_impl(dialect).\
- result_processor(dialect, coltype)
+ dialect_impl(dialect).\
+ result_processor(dialect, coltype)
def process(value):
if value is None:
return value
else:
return self._proc_array(
- value,
- item_proc,
- self.dimensions,
- tuple if self.as_tuple else list)
+ value,
+ item_proc,
+ self.dimensions,
+ tuple if self.as_tuple else list)
return process
PGArray = ARRAY
class ENUM(sqltypes.Enum):
+
"""Postgresql ENUM type.
This is a subclass of :class:`.types.Enum` which includes
@@ -1047,7 +1074,8 @@ class ENUM(sqltypes.Enum):
return
if not checkfirst or \
- not bind.dialect.has_type(bind, self.name, schema=self.schema):
+ not bind.dialect.has_type(
+ bind, self.name, schema=self.schema):
bind.execute(CreateEnumType(self))
def drop(self, bind=None, checkfirst=True):
@@ -1069,7 +1097,7 @@ class ENUM(sqltypes.Enum):
return
if not checkfirst or \
- bind.dialect.has_type(bind, self.name, schema=self.schema):
+ bind.dialect.has_type(bind, self.name, schema=self.schema):
bind.execute(DropEnumType(self))
def _check_for_name_in_memos(self, checkfirst, kw):
@@ -1144,7 +1172,7 @@ ischema_names = {
'interval': INTERVAL,
'interval year to month': INTERVAL,
'interval day to second': INTERVAL,
- 'tsvector' : TSVECTOR
+ 'tsvector': TSVECTOR
}
@@ -1155,9 +1183,9 @@ class PGCompiler(compiler.SQLCompiler):
def visit_slice(self, element, **kw):
return "%s:%s" % (
- self.process(element.start, **kw),
- self.process(element.stop, **kw),
- )
+ self.process(element.start, **kw),
+ self.process(element.stop, **kw),
+ )
def visit_any(self, element, **kw):
return "%s%sANY (%s)" % (
@@ -1181,7 +1209,7 @@ class PGCompiler(compiler.SQLCompiler):
def visit_match_op_binary(self, binary, operator, **kw):
if "postgresql_regconfig" in binary.modifiers:
- regconfig = self.render_literal_value(\
+ regconfig = self.render_literal_value(
binary.modifiers['postgresql_regconfig'],
sqltypes.STRINGTYPE)
if regconfig:
@@ -1199,8 +1227,8 @@ class PGCompiler(compiler.SQLCompiler):
escape = binary.modifiers.get("escape", None)
return '%s ILIKE %s' % \
- (self.process(binary.left, **kw),
- self.process(binary.right, **kw)) \
+ (self.process(binary.left, **kw),
+ self.process(binary.right, **kw)) \
+ (
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
@@ -1210,8 +1238,8 @@ class PGCompiler(compiler.SQLCompiler):
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT ILIKE %s' % \
- (self.process(binary.left, **kw),
- self.process(binary.right, **kw)) \
+ (self.process(binary.left, **kw),
+ self.process(binary.right, **kw)) \
+ (
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
@@ -1265,12 +1293,12 @@ class PGCompiler(compiler.SQLCompiler):
if select._for_update_arg.of:
tables = util.OrderedSet(
- c.table if isinstance(c, expression.ColumnClause)
- else c for c in select._for_update_arg.of)
+ c.table if isinstance(c, expression.ColumnClause)
+ else c for c in select._for_update_arg.of)
tmp += " OF " + ", ".join(
- self.process(table, ashint=True)
- for table in tables
- )
+ self.process(table, ashint=True)
+ for table in tables
+ )
if select._for_update_arg.nowait:
tmp += " NOWAIT"
@@ -1280,13 +1308,12 @@ class PGCompiler(compiler.SQLCompiler):
def returning_clause(self, stmt, returning_cols):
columns = [
- self._label_select_column(None, c, True, False, {})
- for c in expression._select_iterables(returning_cols)
- ]
+ self._label_select_column(None, c, True, False, {})
+ for c in expression._select_iterables(returning_cols)
+ ]
return 'RETURNING ' + ', '.join(columns)
-
def visit_substring_func(self, func, **kw):
s = self.process(func.clauses.clauses[0], **kw)
start = self.process(func.clauses.clauses[1], **kw)
@@ -1296,7 +1323,9 @@ class PGCompiler(compiler.SQLCompiler):
else:
return "SUBSTRING(%s FROM %s)" % (s, start)
+
class PGDDLCompiler(compiler.DDLCompiler):
+
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column)
@@ -1335,7 +1364,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
self.preparer.format_type(type_),
", ".join(
self.sql_compiler.process(sql.literal(e), literal_binds=True)
- for e in type_.enums)
+ for e in type_.enums)
)
def visit_drop_enum_type(self, drop):
@@ -1353,10 +1382,10 @@ class PGDDLCompiler(compiler.DDLCompiler):
if index.unique:
text += "UNIQUE "
text += "INDEX %s ON %s " % (
- self._prepared_index_name(index,
- include_schema=False),
- preparer.format_table(index.table)
- )
+ self._prepared_index_name(index,
+ include_schema=False),
+ preparer.format_table(index.table)
+ )
using = index.dialect_options['postgresql']['using']
if using:
@@ -1367,20 +1396,20 @@ class PGDDLCompiler(compiler.DDLCompiler):
% (
', '.join([
self.sql_compiler.process(
- expr.self_group()
- if not isinstance(expr, expression.ColumnClause)
- else expr,
- include_table=False, literal_binds=True) +
+ expr.self_group()
+ if not isinstance(expr, expression.ColumnClause)
+ else expr,
+ include_table=False, literal_binds=True) +
(c.key in ops and (' ' + ops[c.key]) or '')
for expr, c in zip(index.expressions, index.columns)])
- )
+ )
whereclause = index.dialect_options["postgresql"]["where"]
if whereclause is not None:
where_compiled = self.sql_compiler.process(
- whereclause, include_table=False,
- literal_binds=True)
+ whereclause, include_table=False,
+ literal_binds=True)
text += " WHERE " + where_compiled
return text
@@ -1392,17 +1421,19 @@ class PGDDLCompiler(compiler.DDLCompiler):
elements = []
for c in constraint.columns:
op = constraint.operators[c.name]
- elements.append(self.preparer.quote(c.name) + ' WITH '+op)
- text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements))
+ elements.append(self.preparer.quote(c.name) + ' WITH ' + op)
+ text += "EXCLUDE USING %s (%s)" % (constraint.using,
+ ', '.join(elements))
if constraint.where is not None:
text += ' WHERE (%s)' % self.sql_compiler.process(
- constraint.where,
- literal_binds=True)
+ constraint.where,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
class PGTypeCompiler(compiler.GenericTypeCompiler):
+
def visit_TSVECTOR(self, type):
return "TSVECTOR"
@@ -1509,8 +1540,8 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
def visit_ARRAY(self, type_):
return self.process(type_.item_type) + ('[]' * (type_.dimensions
- if type_.dimensions
- is not None else 1))
+ if type_.dimensions
+ is not None else 1))
class PGIdentifierPreparer(compiler.IdentifierPreparer):
@@ -1520,7 +1551,7 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
def _unquote_identifier(self, value):
if value[0] == self.initial_quote:
value = value[1:-1].\
- replace(self.escape_to_quote, self.escape_quote)
+ replace(self.escape_to_quote, self.escape_quote)
return value
def format_type(self, type_, use_schema=True):
@@ -1554,21 +1585,25 @@ class DropEnumType(schema._CreateDropBase):
class PGExecutionContext(default.DefaultExecutionContext):
+
def fire_sequence(self, seq, type_):
- return self._execute_scalar(("select nextval('%s')" % \
- self.dialect.identifier_preparer.format_sequence(seq)), type_)
+ return self._execute_scalar((
+ "select nextval('%s')" %
+ self.dialect.identifier_preparer.format_sequence(seq)), type_)
def get_insert_default(self, column):
- if column.primary_key and column is column.table._autoincrement_column:
+ if column.primary_key and \
+ column is column.table._autoincrement_column:
if column.server_default and column.server_default.has_argument:
# pre-execute passive defaults on primary key columns
return self._execute_scalar("select %s" %
- column.server_default.arg, column.type)
+ column.server_default.arg,
+ column.type)
elif (column.default is None or
- (column.default.is_sequence and
- column.default.optional)):
+ (column.default.is_sequence and
+ column.default.optional)):
# execute the sequence associated with a SERIAL primary
# key column. for non-primary-key SERIAL, the ID just
@@ -1587,10 +1622,10 @@ class PGExecutionContext(default.DefaultExecutionContext):
sch = column.table.schema
if sch is not None:
exc = "select nextval('\"%s\".\"%s\"')" % \
- (sch, seq_name)
+ (sch, seq_name)
else:
exc = "select nextval('\"%s\"')" % \
- (seq_name, )
+ (seq_name, )
return self._execute_scalar(exc, column.type)
@@ -1643,7 +1678,7 @@ class PGDialect(default.DefaultDialect):
_backslash_escapes = True
def __init__(self, isolation_level=None, json_serializer=None,
- json_deserializer=None, **kwargs):
+ json_deserializer=None, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
self.isolation_level = isolation_level
self._json_deserializer = json_deserializer
@@ -1652,7 +1687,7 @@ class PGDialect(default.DefaultDialect):
def initialize(self, connection):
super(PGDialect, self).initialize(connection)
self.implicit_returning = self.server_version_info > (8, 2) and \
- self.__dict__.get('implicit_returning', True)
+ self.__dict__.get('implicit_returning', True)
self.supports_native_enum = self.server_version_info >= (8, 3)
if not self.supports_native_enum:
self.colspecs = self.colspecs.copy()
@@ -1665,9 +1700,9 @@ class PGDialect(default.DefaultDialect):
self.supports_smallserial = self.server_version_info >= (9, 2)
self._backslash_escapes = self.server_version_info < (8, 2) or \
- connection.scalar(
- "show standard_conforming_strings"
- ) == 'off'
+ connection.scalar(
+ "show standard_conforming_strings"
+ ) == 'off'
def on_connect(self):
if self.isolation_level is not None:
@@ -1677,8 +1712,8 @@ class PGDialect(default.DefaultDialect):
else:
return None
- _isolation_lookup = set(['SERIALIZABLE',
- 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ'])
+ _isolation_lookup = set(['SERIALIZABLE', 'READ UNCOMMITTED',
+ 'READ COMMITTED', 'REPEATABLE READ'])
def set_isolation_level(self, connection, level):
level = level.replace('_', ' ')
@@ -1687,7 +1722,7 @@ class PGDialect(default.DefaultDialect):
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
cursor = connection.cursor()
cursor.execute(
"SET SESSION CHARACTERISTICS AS TRANSACTION "
@@ -1709,10 +1744,10 @@ class PGDialect(default.DefaultDialect):
connection.execute("PREPARE TRANSACTION '%s'" % xid)
def do_rollback_twophase(self, connection, xid,
- is_prepared=True, recover=False):
+ is_prepared=True, recover=False):
if is_prepared:
if recover:
- #FIXME: ugly hack to get out of transaction
+ # FIXME: ugly hack to get out of transaction
# context when committing recoverable transactions
# Must find out a way how to make the dbapi not
# open a transaction.
@@ -1724,7 +1759,7 @@ class PGDialect(default.DefaultDialect):
self.do_rollback(connection.connection)
def do_commit_twophase(self, connection, xid,
- is_prepared=True, recover=False):
+ is_prepared=True, recover=False):
if is_prepared:
if recover:
connection.execute("ROLLBACK")
@@ -1736,14 +1771,15 @@ class PGDialect(default.DefaultDialect):
def do_recover_twophase(self, connection):
resultset = connection.execute(
- sql.text("SELECT gid FROM pg_prepared_xacts"))
+ sql.text("SELECT gid FROM pg_prepared_xacts"))
return [row[0] for row in resultset]
def _get_default_schema_name(self, connection):
return connection.scalar("select current_schema()")
def has_schema(self, connection, schema):
- query = "select nspname from pg_namespace where lower(nspname)=:schema"
+ query = ("select nspname from pg_namespace "
+ "where lower(nspname)=:schema")
cursor = connection.execute(
sql.text(
query,
@@ -1761,25 +1797,27 @@ class PGDialect(default.DefaultDialect):
if schema is None:
cursor = connection.execute(
sql.text(
- "select relname from pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where n.nspname=current_schema() and "
- "relname=:name",
- bindparams=[
+ "select relname from pg_class c join pg_namespace n on "
+ "n.oid=c.relnamespace where n.nspname=current_schema() "
+ "and relname=:name",
+ bindparams=[
sql.bindparam('name', util.text_type(table_name),
- type_=sqltypes.Unicode)]
+ type_=sqltypes.Unicode)]
)
)
else:
cursor = connection.execute(
sql.text(
- "select relname from pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where n.nspname=:schema and "
- "relname=:name",
+ "select relname from pg_class c join pg_namespace n on "
+ "n.oid=c.relnamespace where n.nspname=:schema and "
+ "relname=:name",
bindparams=[
sql.bindparam('name',
- util.text_type(table_name), type_=sqltypes.Unicode),
+ util.text_type(table_name),
+ type_=sqltypes.Unicode),
sql.bindparam('schema',
- util.text_type(schema), type_=sqltypes.Unicode)]
+ util.text_type(schema),
+ type_=sqltypes.Unicode)]
)
)
return bool(cursor.first())
@@ -1794,23 +1832,24 @@ class PGDialect(default.DefaultDialect):
"and relname=:name",
bindparams=[
sql.bindparam('name', util.text_type(sequence_name),
- type_=sqltypes.Unicode)
+ type_=sqltypes.Unicode)
]
)
)
else:
cursor = connection.execute(
sql.text(
- "SELECT relname FROM pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where relkind='S' and "
- "n.nspname=:schema and relname=:name",
- bindparams=[
- sql.bindparam('name', util.text_type(sequence_name),
- type_=sqltypes.Unicode),
- sql.bindparam('schema',
- util.text_type(schema), type_=sqltypes.Unicode)
- ]
- )
+ "SELECT relname FROM pg_class c join pg_namespace n on "
+ "n.oid=c.relnamespace where relkind='S' and "
+ "n.nspname=:schema and relname=:name",
+ bindparams=[
+ sql.bindparam('name', util.text_type(sequence_name),
+ type_=sqltypes.Unicode),
+ sql.bindparam('schema',
+ util.text_type(schema),
+ type_=sqltypes.Unicode)
+ ]
+ )
)
return bool(cursor.first())
@@ -1836,14 +1875,14 @@ class PGDialect(default.DefaultDialect):
"""
query = sql.text(query)
query = query.bindparams(
- sql.bindparam('typname',
- util.text_type(type_name), type_=sqltypes.Unicode),
- )
+ sql.bindparam('typname',
+ util.text_type(type_name), type_=sqltypes.Unicode),
+ )
if schema is not None:
query = query.bindparams(
- sql.bindparam('nspname',
- util.text_type(schema), type_=sqltypes.Unicode),
- )
+ sql.bindparam('nspname',
+ util.text_type(schema), type_=sqltypes.Unicode),
+ )
cursor = connection.execute(query)
return bool(cursor.scalar())
@@ -1855,7 +1894,7 @@ class PGDialect(default.DefaultDialect):
v)
if not m:
raise AssertionError(
- "Could not determine version from string '%s'" % v)
+ "Could not determine version from string '%s'" % v)
return tuple([int(x) for x in m.group(1, 2, 3) if x is not None])
@reflection.cache
@@ -1905,11 +1944,11 @@ class PGDialect(default.DefaultDialect):
# what about system tables?
if util.py2k:
- schema_names = [row[0].decode(self.encoding) for row in rp \
- if not row[0].startswith('pg_')]
+ schema_names = [row[0].decode(self.encoding) for row in rp
+ if not row[0].startswith('pg_')]
else:
- schema_names = [row[0] for row in rp \
- if not row[0].startswith('pg_')]
+ schema_names = [row[0] for row in rp
+ if not row[0].startswith('pg_')]
return schema_names
@reflection.cache
@@ -1921,12 +1960,12 @@ class PGDialect(default.DefaultDialect):
result = connection.execute(
sql.text("SELECT relname FROM pg_class c "
- "WHERE relkind = 'r' "
- "AND '%s' = (select nspname from pg_namespace n "
- "where n.oid = c.relnamespace) " %
- current_schema,
- typemap={'relname': sqltypes.Unicode}
- )
+ "WHERE relkind = 'r' "
+ "AND '%s' = (select nspname from pg_namespace n "
+ "where n.oid = c.relnamespace) " %
+ current_schema,
+ typemap={'relname': sqltypes.Unicode}
+ )
)
return [row[0] for row in result]
@@ -1946,7 +1985,7 @@ class PGDialect(default.DefaultDialect):
if util.py2k:
view_names = [row[0].decode(self.encoding)
- for row in connection.execute(s)]
+ for row in connection.execute(s)]
else:
view_names = [row[0] for row in connection.execute(s)]
return view_names
@@ -1991,9 +2030,12 @@ class PGDialect(default.DefaultDialect):
ORDER BY a.attnum
"""
s = sql.text(SQL_COLS,
- bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)],
- typemap={'attname': sqltypes.Unicode, 'default': sqltypes.Unicode}
- )
+ bindparams=[
+ sql.bindparam('table_oid', type_=sqltypes.Integer)],
+ typemap={
+ 'attname': sqltypes.Unicode,
+ 'default': sqltypes.Unicode}
+ )
c = connection.execute(s, table_oid=table_oid)
rows = c.fetchall()
domains = self._load_domains(connection)
@@ -2009,7 +2051,7 @@ class PGDialect(default.DefaultDialect):
def _get_column_info(self, name, format_type, default,
notnull, domains, enums, schema):
- ## strip (*) from character varying(5), timestamp(5)
+ # strip (*) from character varying(5), timestamp(5)
# with time zone, geometry(POLYGON), etc.
attype = re.sub(r'\(.*\)', '', format_type)
@@ -2057,7 +2099,7 @@ class PGDialect(default.DefaultDialect):
else:
args = ()
elif attype in ('interval', 'interval year to month',
- 'interval day to second'):
+ 'interval day to second'):
if charlen:
kwargs['precision'] = int(charlen)
args = ()
@@ -2112,8 +2154,8 @@ class PGDialect(default.DefaultDialect):
# later be enhanced to obey quoting rules /
# "quote schema"
default = match.group(1) + \
- ('"%s"' % sch) + '.' + \
- match.group(2) + match.group(3)
+ ('"%s"' % sch) + '.' + \
+ match.group(2) + match.group(3)
column_info = dict(name=name, type=coltype, nullable=nullable,
default=default, autoincrement=autoincrement)
@@ -2169,7 +2211,7 @@ class PGDialect(default.DefaultDialect):
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None,
- postgresql_ignore_search_path=False, **kw):
+ postgresql_ignore_search_path=False, **kw):
preparer = self.identifier_preparer
table_oid = self.get_table_oid(connection, table_name, schema,
info_cache=kw.get('info_cache'))
@@ -2192,29 +2234,32 @@ class PGDialect(default.DefaultDialect):
FK_REGEX = re.compile(
r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)'
r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?'
- r'[\s]?(ON UPDATE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
- r'[\s]?(ON DELETE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(ON UPDATE '
+ r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(ON DELETE '
+ r'(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?'
r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?'
)
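        # Editorial note (not part of this diff): the constraint text this
        # regex targets, as returned by pg_get_constraintdef(), looks like,
        # e.g.:
        #   FOREIGN KEY (a) REFERENCES remote_schema.remote_table(b)
        #   ON UPDATE CASCADE ON DELETE SET NULL DEFERRABLE INITIALLY DEFERRED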
t = sql.text(FK_SQL, typemap={
- 'conname': sqltypes.Unicode,
- 'condef': sqltypes.Unicode})
+ 'conname': sqltypes.Unicode,
+ 'condef': sqltypes.Unicode})
c = connection.execute(t, table=table_oid)
fkeys = []
for conname, condef, conschema in c.fetchall():
m = re.search(FK_REGEX, condef).groups()
constrained_columns, referred_schema, \
- referred_table, referred_columns, \
- _, match, _, onupdate, _, ondelete, \
- deferrable, _, initially = m
+ referred_table, referred_columns, \
+ _, match, _, onupdate, _, ondelete, \
+ deferrable, _, initially = m
if deferrable is not None:
deferrable = True if deferrable == 'DEFERRABLE' else False
constrained_columns = [preparer._unquote_identifier(x)
- for x in re.split(r'\s*,\s*', constrained_columns)]
+ for x in re.split(
+ r'\s*,\s*', constrained_columns)]
if postgresql_ignore_search_path:
# when ignoring search path, we use the actual schema
@@ -2228,7 +2273,7 @@ class PGDialect(default.DefaultDialect):
# pg_get_constraintdef(). If the schema is in the search
# path, pg_get_constraintdef() will give us None.
referred_schema = \
- preparer._unquote_identifier(referred_schema)
+ preparer._unquote_identifier(referred_schema)
elif schema is not None and schema == conschema:
# If the actual schema matches the schema of the table
# we're reflecting, then we will use that.
@@ -2236,7 +2281,8 @@ class PGDialect(default.DefaultDialect):
referred_table = preparer._unquote_identifier(referred_table)
referred_columns = [preparer._unquote_identifier(x)
- for x in re.split(r'\s*,\s', referred_columns)]
+ for x in
+ re.split(r'\s*,\s', referred_columns)]
fkey_d = {
'name': conname,
'constrained_columns': constrained_columns,
@@ -2263,9 +2309,9 @@ class PGDialect(default.DefaultDialect):
# for now.
# regards, tom lane"
return "(%s)" % " OR ".join(
- "%s[%d] = %s" % (compare_to, ind, col)
- for ind in range(0, 10)
- )
+ "%s[%d] = %s" % (compare_to, ind, col)
+ for ind in range(0, 10)
+ )
else:
return "%s = ANY(%s)" % (col, compare_to)
@@ -2297,12 +2343,12 @@ class PGDialect(default.DefaultDialect):
t.relname,
i.relname
""" % (
- # version 8.3 here was based on observing the
- # cast does not work in PG 8.2.4, does work in 8.3.0.
- # nothing in PG changelogs regarding this.
- "::varchar" if self.server_version_info >= (8, 3) else "",
- self._pg_index_any("a.attnum", "ix.indkey")
- )
+ # version 8.3 here was based on observing the
+ # cast does not work in PG 8.2.4, does work in 8.3.0.
+ # nothing in PG changelogs regarding this.
+ "::varchar" if self.server_version_info >= (8, 3) else "",
+ self._pg_index_any("a.attnum", "ix.indkey")
+ )
t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode})
c = connection.execute(t, table_oid=table_oid)
@@ -2316,16 +2362,16 @@ class PGDialect(default.DefaultDialect):
if expr:
if idx_name != sv_idx_name:
util.warn(
- "Skipped unsupported reflection of "
- "expression-based index %s"
- % idx_name)
+ "Skipped unsupported reflection of "
+ "expression-based index %s"
+ % idx_name)
sv_idx_name = idx_name
continue
if prd and not idx_name == sv_idx_name:
util.warn(
- "Predicate of partial index %s ignored during reflection"
- % idx_name)
+ "Predicate of partial index %s ignored during reflection"
+ % idx_name)
sv_idx_name = idx_name
index = indexes[idx_name]
@@ -2356,7 +2402,8 @@ class PGDialect(default.DefaultDialect):
FROM
pg_catalog.pg_constraint cons
join pg_attribute a
- on cons.conrelid = a.attrelid AND a.attnum = ANY(cons.conkey)
+ on cons.conrelid = a.attrelid AND
+ a.attnum = ANY(cons.conkey)
WHERE
cons.conrelid = :table_oid AND
cons.contype = 'u'
@@ -2381,7 +2428,7 @@ class PGDialect(default.DefaultDialect):
if not self.supports_native_enum:
return {}
- ## Load data types for enums:
+ # Load data types for enums:
SQL_ENUMS = """
SELECT t.typname as "name",
-- no enum defaults in 8.4 at least
@@ -2397,8 +2444,8 @@ class PGDialect(default.DefaultDialect):
"""
s = sql.text(SQL_ENUMS, typemap={
- 'attname': sqltypes.Unicode,
- 'label': sqltypes.Unicode})
+ 'attname': sqltypes.Unicode,
+ 'label': sqltypes.Unicode})
c = connection.execute(s)
enums = {}
@@ -2416,13 +2463,13 @@ class PGDialect(default.DefaultDialect):
enums[name]['labels'].append(enum['label'])
else:
enums[name] = {
- 'labels': [enum['label']],
- }
+ 'labels': [enum['label']],
+ }
return enums
def _load_domains(self, connection):
- ## Load data types for domains:
+ # Load data types for domains:
SQL_DOMAINS = """
SELECT t.typname as "name",
pg_catalog.format_type(t.typbasetype, t.typtypmod) as "attype",
@@ -2440,7 +2487,7 @@ class PGDialect(default.DefaultDialect):
domains = {}
for domain in c.fetchall():
- ## strip (30) from character varying(30)
+ # strip (30) from character varying(30)
attype = re.search('([^\(]+)', domain['attype']).group(1)
if domain['visible']:
# 'visible' just means whether or not the domain is in a
@@ -2452,9 +2499,9 @@ class PGDialect(default.DefaultDialect):
name = "%s.%s" % (domain['schema'], domain['name'])
domains[name] = {
- 'attype': attype,
- 'nullable': domain['nullable'],
- 'default': domain['default']
- }
+ 'attype': attype,
+ 'nullable': domain['nullable'],
+ 'default': domain['default']
+ }
return domains
diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/constraints.py
index 2eed2fb36..e8ebc75dd 100644
--- a/lib/sqlalchemy/dialects/postgresql/constraints.py
+++ b/lib/sqlalchemy/dialects/postgresql/constraints.py
@@ -6,13 +6,15 @@
from sqlalchemy.schema import ColumnCollectionConstraint
from sqlalchemy.sql import expression
+
class ExcludeConstraint(ColumnCollectionConstraint):
"""A table-level EXCLUDE constraint.
Defines an EXCLUDE constraint as described in the `postgres
documentation`__.
- __ http://www.postgresql.org/docs/9.0/static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
+ __ http://www.postgresql.org/docs/9.0/\
+static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
"""
__visit_name__ = 'exclude_constraint'
@@ -52,7 +54,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
name=kw.get('name'),
deferrable=kw.get('deferrable'),
initially=kw.get('initially')
- )
+ )
self.operators = {}
for col_or_string, op in elements:
name = getattr(col_or_string, 'name', col_or_string)
@@ -60,15 +62,14 @@ class ExcludeConstraint(ColumnCollectionConstraint):
self.using = kw.get('using', 'gist')
where = kw.get('where')
if where:
- self.where = expression._literal_as_text(where)
+ self.where = expression._literal_as_text(where)
def copy(self, **kw):
elements = [(col, self.operators[col])
for col in self.columns.keys()]
c = self.__class__(*elements,
- name=self.name,
- deferrable=self.deferrable,
- initially=self.initially)
+ name=self.name,
+ deferrable=self.deferrable,
+ initially=self.initially)
c.dispatch._update(self.dispatch)
return c
-
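As an editorial aside (not part of this diff), a typical declaration of
this constraint pairs a range column with the ``&&`` overlap operator;
the table and column names below are illustrative only::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE

    metadata = MetaData()
    booking = Table(
        'booking', metadata,
        Column('id', Integer, primary_key=True),
        Column('during', TSRANGE),
        # EXCLUDE USING gist (during WITH &&): no two rows may
        # carry overlapping 'during' ranges
        ExcludeConstraint(('during', '&&'), using='gist')
    )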
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index f1fb3d308..9601edc41 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -69,11 +69,13 @@ def _parse_hstore(hstore_str):
pair_match = HSTORE_PAIR_RE.match(hstore_str)
while pair_match is not None:
- key = pair_match.group('key').replace(r'\"', '"').replace("\\\\", "\\")
+ key = pair_match.group('key').replace(r'\"', '"').replace(
+ "\\\\", "\\")
if pair_match.group('value_null'):
value = None
else:
- value = pair_match.group('value').replace(r'\"', '"').replace("\\\\", "\\")
+ value = pair_match.group('value').replace(
+ r'\"', '"').replace("\\\\", "\\")
result[key] = value
pos += pair_match.end()
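As an editorial illustration of the unescaping above (``_parse_hstore``
is a private helper; shown here only to make the escape handling
concrete)::

    from sqlalchemy.dialects.postgresql.hstore import _parse_hstore

    # the key carries an escaped quote, the value an escaped backslash
    _parse_hstore(r'"a\"b"=>"c\\d"')   # -> {'a"b': 'c\\d'}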
@@ -140,15 +142,16 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
data_table.c.data + {"k1": "v1"}
- For a full list of special methods see :class:`.HSTORE.comparator_factory`.
+ For a full list of special methods see
+ :class:`.HSTORE.comparator_factory`.
For usage with the SQLAlchemy ORM, it may be desirable to combine
the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary
now part of the :mod:`sqlalchemy.ext.mutable`
extension. This extension will allow "in-place" changes to the
dictionary, e.g. addition of new keys or replacement/removal of existing
- keys to/from the current dictionary, to produce events which will be detected
- by the unit of work::
+ keys to/from the current dictionary, to produce events which will be
+ detected by the unit of work::
from sqlalchemy.ext.mutable import MutableDict
@@ -167,9 +170,9 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
session.commit()
When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
- will not be alerted to any changes to the contents of an existing dictionary,
- unless that dictionary value is re-assigned to the HSTORE-attribute itself,
- thus generating a change event.
+ will not be alerted to any changes to the contents of an existing
+ dictionary, unless that dictionary value is re-assigned to the
+ HSTORE-attribute itself, thus generating a change event.
.. versionadded:: 0.8
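A compact editorial sketch of that combination (the table name is
illustrative, not taken from this diff)::

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.ext.mutable import MutableDict
    from sqlalchemy.dialects.postgresql import HSTORE

    metadata = MetaData()
    data_table = Table('data_table', metadata,
        Column('id', Integer, primary_key=True),
        # in-place changes to the dict now generate change events
        Column('data', MutableDict.as_mutable(HSTORE))
    )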
@@ -272,6 +275,7 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
def bind_processor(self, dialect):
if util.py2k:
encoding = dialect.encoding
+
def process(value):
if isinstance(value, dict):
return _serialize_hstore(value).encode(encoding)
@@ -288,6 +292,7 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
def result_processor(self, dialect, coltype):
if util.py2k:
encoding = dialect.encoding
+
def process(value):
if value is not None:
return _parse_hstore(value.decode(encoding))
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
index 902d0a80d..25ac342af 100644
--- a/lib/sqlalchemy/dialects/postgresql/json.py
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -27,24 +27,28 @@ class JSONElement(elements.BinaryExpression):
expr = mytable.c.json_data['some_key']
The expression typically compiles to a JSON access such as ``col -> key``.
- Modifiers are then available for typing behavior, including :meth:`.JSONElement.cast`
- and :attr:`.JSONElement.astext`.
+ Modifiers are then available for typing behavior, including
+ :meth:`.JSONElement.cast` and :attr:`.JSONElement.astext`.
"""
- def __init__(self, left, right, astext=False, opstring=None, result_type=None):
+
+ def __init__(self, left, right, astext=False,
+ opstring=None, result_type=None):
self._astext = astext
if opstring is None:
if hasattr(right, '__iter__') and \
- not isinstance(right, util.string_types):
+ not isinstance(right, util.string_types):
opstring = "#>"
- right = "{%s}" % (", ".join(util.text_type(elem) for elem in right))
+ right = "{%s}" % (
+ ", ".join(util.text_type(elem) for elem in right))
else:
opstring = "->"
self._json_opstring = opstring
operator = custom_op(opstring, precedence=5)
right = left._check_literal(left, operator, right)
- super(JSONElement, self).__init__(left, right, operator, type_=result_type)
+ super(JSONElement, self).__init__(
+ left, right, operator, type_=result_type)
@property
def astext(self):
@@ -64,12 +68,12 @@ class JSONElement(elements.BinaryExpression):
return self
else:
return JSONElement(
- self.left,
- self.right,
- astext=True,
- opstring=self._json_opstring + ">",
- result_type=sqltypes.String(convert_unicode=True)
- )
+ self.left,
+ self.right,
+ astext=True,
+ opstring=self._json_opstring + ">",
+ result_type=sqltypes.String(convert_unicode=True)
+ )
def cast(self, type_):
"""Convert this :class:`.JSONElement` to apply both the 'astext' operator
@@ -126,15 +130,16 @@ class JSON(sqltypes.TypeEngine):
* Path index operations returning text (required for text comparison)::
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value'
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\
+ 'some value'
- Index operations return an instance of :class:`.JSONElement`, which represents
- an expression such as ``column -> index``. This element then defines
- methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast`
- for setting up type behavior.
+ Index operations return an instance of :class:`.JSONElement`, which
+ represents an expression such as ``column -> index``. This element then
+ defines methods such as :attr:`.JSONElement.astext` and
+ :meth:`.JSONElement.cast` for setting up type behavior.
- The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect
- in-place mutations to the structure. In order to detect these, the
+ The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not
+ detect in-place mutations to the structure. In order to detect these, the
:mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
allow "in-place" changes to the datastructure to produce events which
will be detected by the unit of work. See the example at :class:`.HSTORE`
@@ -178,6 +183,7 @@ class JSON(sqltypes.TypeEngine):
json_serializer = dialect._json_serializer or json.dumps
if util.py2k:
encoding = dialect.encoding
+
def process(value):
return json_serializer(value).encode(encoding)
else:
@@ -189,6 +195,7 @@ class JSON(sqltypes.TypeEngine):
json_deserializer = dialect._json_deserializer or json.loads
if util.py2k:
encoding = dialect.encoding
+
def process(value):
return json_deserializer(value.decode(encoding))
else:
@@ -200,7 +207,6 @@ class JSON(sqltypes.TypeEngine):
ischema_names['json'] = JSON
-
class JSONB(JSON):
"""Represent the Postgresql JSONB type.
@@ -237,15 +243,16 @@ class JSONB(JSON):
* Path index operations returning text (required for text comparison)::
- data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value'
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == \\
+ 'some value'
- Index operations return an instance of :class:`.JSONElement`, which represents
- an expression such as ``column -> index``. This element then defines
- methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast`
- for setting up type behavior.
+ Index operations return an instance of :class:`.JSONElement`, which
+ represents an expression such as ``column -> index``. This element then
+ defines methods such as :attr:`.JSONElement.astext` and
+ :meth:`.JSONElement.cast` for setting up type behavior.
- The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect
- in-place mutations to the structure. In order to detect these, the
+ The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not
+ detect in-place mutations to the structure. In order to detect these, the
:mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
allow "in-place" changes to the datastructure to produce events which
will be detected by the unit of work. See the example at :class:`.HSTORE`
@@ -280,7 +287,8 @@ class JSONB(JSON):
return JSONElement(self.expr, other)
def _adapt_expression(self, op, other_comparator):
- # How does one do equality?? jsonb also has "=" eg. '[1,2,3]'::jsonb = '[1,2,3]'::jsonb
+ # How does one do equality?? jsonb also has "=" eg.
+ # '[1,2,3]'::jsonb = '[1,2,3]'::jsonb
if isinstance(op, custom_op):
if op.opstring in ['?', '?&', '?|', '@>', '<@']:
return op, sqltypes.Boolean
@@ -317,4 +325,4 @@ class JSONB(JSON):
"""
return self.expr.op('<@')(other)
-ischema_names['jsonb'] = JSONB \ No newline at end of file
+ischema_names['jsonb'] = JSONB
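An editorial sketch of the JSONB-specific operators handled above (table
name hypothetical; ``has_key`` and ``contains`` are the comparator
methods assumed to correspond to the operator list)::

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects.postgresql import JSONB

    metadata = MetaData()
    data_table = Table('data_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', JSONB)
    )

    # data ? 'some_key'
    stmt = select([data_table]).where(
        data_table.c.data.has_key('some_key'))
    # data @> '{"k1": "v1"}'
    stmt = select([data_table]).where(
        data_table.c.data.contains({'k1': 'v1'}))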
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index dc5ed6e73..512f3e1b0 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -165,6 +165,6 @@ class PGDialect_pg8000(PGDialect):
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s or AUTOCOMMIT" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
dialect = PGDialect_pg8000
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 0ab4abb09..e6450c97f 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -9,7 +9,8 @@
.. dialect:: postgresql+psycopg2
:name: psycopg2
:dbapi: psycopg2
- :connectstring: postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]
+ :connectstring: postgresql+psycopg2://user:password@host:port/dbname\
+[?key=value&key=value...]
:url: http://pypi.python.org/pypi/psycopg2/
psycopg2 Connect Arguments
@@ -21,9 +22,9 @@ psycopg2-specific keyword arguments which are accepted by
* ``server_side_cursors``: Enable the usage of "server side cursors" for SQL
statements which support this feature. What this essentially means from a
psycopg2 point of view is that the cursor is created using a name, e.g.
- ``connection.cursor('some name')``, which has the effect that result rows are
- not immediately pre-fetched and buffered after statement execution, but are
- instead left on the server and only retrieved as needed. SQLAlchemy's
+ ``connection.cursor('some name')``, which has the effect that result rows
+ are not immediately pre-fetched and buffered after statement execution, but
+ are instead left on the server and only retrieved as needed. SQLAlchemy's
:class:`~sqlalchemy.engine.ResultProxy` uses special row-buffering
behavior when this feature is enabled, such that groups of 100 rows at a
time are fetched over the wire to reduce conversational overhead.
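An editorial sketch of the two ways to enable this behavior (the
connection string is illustrative)::

    from sqlalchemy import create_engine

    # dialect-wide:
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        server_side_cursors=True)

    # or per-execution, via the stream_results option:
    # result = conn.execution_options(stream_results=True).execute(stmt)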
@@ -54,7 +55,8 @@ using ``host`` as an additional keyword argument::
See also:
-`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
+`PQconnectdbParams <http://www.postgresql.org/docs/9.1/static\
+/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_
Per-Statement/Connection Execution Options
-------------------------------------------
@@ -90,11 +92,13 @@ Typically, this can be changed to ``utf-8``, as a more useful default::
A second way to affect the client encoding is to set it within Psycopg2
locally. SQLAlchemy will call psycopg2's ``set_client_encoding()``
-method (see: http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
+method (see:
+http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::
- engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8')
+ engine = create_engine("postgresql://user:pass@host/dbname",
+ client_encoding='utf8')
This overrides the encoding specified in the Postgresql client configuration.
@@ -128,11 +132,12 @@ Psycopg2 Transaction Isolation Level
As discussed in :ref:`postgresql_isolation_level`,
all Postgresql dialects support setting of transaction isolation level
both via the ``isolation_level`` parameter passed to :func:`.create_engine`,
-as well as the ``isolation_level`` argument used by :meth:`.Connection.execution_options`.
-When using the psycopg2 dialect, these options make use of
-psycopg2's ``set_isolation_level()`` connection method, rather than
-emitting a Postgresql directive; this is because psycopg2's API-level
-setting is always emitted at the start of each transaction in any case.
+as well as the ``isolation_level`` argument used by
+:meth:`.Connection.execution_options`. When using the psycopg2 dialect, these
+options make use of psycopg2's ``set_isolation_level()`` connection method,
+rather than emitting a Postgresql directive; this is because psycopg2's
+API-level setting is always emitted at the start of each transaction in any
+case.
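For instance (an editorial sketch)::

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        isolation_level="REPEATABLE READ")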
The psycopg2 dialect supports these constants for isolation level:
@@ -166,35 +171,38 @@ The psycopg2 dialect will log Postgresql NOTICE messages via the
HSTORE type
------------
-The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of the
-HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension
+The ``psycopg2`` DBAPI includes an extension to natively handle marshalling of
+the HSTORE type. The SQLAlchemy psycopg2 dialect will enable this extension
by default when it is detected that the target database has the HSTORE
type set up for use. In other words, when the dialect makes the first
connection, a sequence like the following is performed:
-1. Request the available HSTORE oids using ``psycopg2.extras.HstoreAdapter.get_oids()``.
- If this function returns a list of HSTORE identifiers, we then determine that
- the ``HSTORE`` extension is present.
+1. Request the available HSTORE oids using
+ ``psycopg2.extras.HstoreAdapter.get_oids()``.
+ If this function returns a list of HSTORE identifiers, we then determine
+ that the ``HSTORE`` extension is present.
2. If the ``use_native_hstore`` flag is at its default of ``True``, and
we've detected that ``HSTORE`` oids are available, the
``psycopg2.extensions.register_hstore()`` extension is invoked for all
connections.
-The ``register_hstore()`` extension has the effect of **all Python dictionaries
-being accepted as parameters regardless of the type of target column in SQL**.
-The dictionaries are converted by this extension into a textual HSTORE expression.
-If this behavior is not desired, disable the
-use of the hstore extension by setting ``use_native_hstore`` to ``False`` as follows::
+The ``register_hstore()`` extension has the effect of **all Python
+dictionaries being accepted as parameters regardless of the type of target
+column in SQL**. The dictionaries are converted by this extension into a
+textual HSTORE expression. If this behavior is not desired, disable the
+use of the hstore extension by setting ``use_native_hstore`` to ``False`` as
+follows::
engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test",
use_native_hstore=False)
-The ``HSTORE`` type is **still supported** when the ``psycopg2.extensions.register_hstore()``
-extension is not used. It merely means that the coercion between Python dictionaries and the HSTORE
+The ``HSTORE`` type is **still supported** when the
+``psycopg2.extensions.register_hstore()`` extension is not used. It merely
+means that the coercion between Python dictionaries and the HSTORE
string format, on both the parameter side and the result side, will take
-place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2`` which
-may be more performant.
+place within SQLAlchemy's own marshalling logic, and not that of ``psycopg2``
+which may be more performant.
"""
from __future__ import absolute_import
@@ -209,9 +217,9 @@ from ...engine import result as _result
from ...sql import expression
from ... import types as sqltypes
from .base import PGDialect, PGCompiler, \
- PGIdentifierPreparer, PGExecutionContext, \
- ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
- _INT_TYPES
+ PGIdentifierPreparer, PGExecutionContext, \
+ ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
+ _INT_TYPES
from .hstore import HSTORE
from .json import JSON
@@ -227,14 +235,14 @@ class _PGNumeric(sqltypes.Numeric):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
return processors.to_decimal_processor_factory(
- decimal.Decimal,
- self._effective_decimal_return_scale)
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
else:
raise exc.InvalidRequestError(
- "Unknown PG numeric type: %d" % coltype)
+ "Unknown PG numeric type: %d" % coltype)
else:
if coltype in _FLOAT_TYPES:
# pg8000 returns float natively for 701
@@ -243,7 +251,7 @@ class _PGNumeric(sqltypes.Numeric):
return processors.to_float
else:
raise exc.InvalidRequestError(
- "Unknown PG numeric type: %d" % coltype)
+ "Unknown PG numeric type: %d" % coltype)
class _PGEnum(ENUM):
@@ -255,6 +263,7 @@ class _PGEnum(ENUM):
self.convert_unicode = "force_nocheck"
return super(_PGEnum, self).result_processor(dialect, coltype)
+
class _PGHStore(HSTORE):
def bind_processor(self, dialect):
if dialect._has_native_hstore:
@@ -293,13 +302,16 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
if self.dialect.server_side_cursors:
is_server_side = \
self.execution_options.get('stream_results', True) and (
- (self.compiled and isinstance(self.compiled.statement, expression.Selectable) \
- or \
- (
+ (self.compiled and isinstance(self.compiled.statement,
+ expression.Selectable)
+ or
+ (
(not self.compiled or
- isinstance(self.compiled.statement, expression.TextClause))
- and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement))
- )
+ isinstance(self.compiled.statement,
+ expression.TextClause))
+ and self.statement and SERVER_SIDE_CURSOR_RE.match(
+ self.statement))
+ )
)
else:
is_server_side = \
@@ -309,7 +321,8 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
if is_server_side:
# use server-side cursors:
# http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
- ident = "c_%s_%s" % (hex(id(self))[2:], hex(_server_side_id())[2:])
+ ident = "c_%s_%s" % (hex(id(self))[2:],
+ hex(_server_side_id())[2:])
return self._dbapi_connection.cursor(ident)
else:
return self._dbapi_connection.cursor()
@@ -336,7 +349,7 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
class PGCompiler_psycopg2(PGCompiler):
def visit_mod_binary(self, binary, operator, **kw):
return self.process(binary.left, **kw) + " %% " + \
- self.process(binary.right, **kw)
+ self.process(binary.right, **kw)
def post_process_text(self, text):
return text.replace('%', '%%')
@@ -354,7 +367,8 @@ class PGDialect_psycopg2(PGDialect):
supports_unicode_statements = False
default_paramstyle = 'pyformat'
- supports_sane_multi_rowcount = False # set to true based on psycopg2 version
+ # set to true based on psycopg2 version
+ supports_sane_multi_rowcount = False
execution_ctx_cls = PGExecutionContext_psycopg2
statement_compiler = PGCompiler_psycopg2
preparer = PGIdentifierPreparer_psycopg2
@@ -375,9 +389,9 @@ class PGDialect_psycopg2(PGDialect):
)
def __init__(self, server_side_cursors=False, use_native_unicode=True,
- client_encoding=None,
- use_native_hstore=True,
- **kwargs):
+ client_encoding=None,
+ use_native_hstore=True,
+ **kwargs):
PGDialect.__init__(self, **kwargs)
self.server_side_cursors = server_side_cursors
self.use_native_unicode = use_native_unicode
@@ -386,18 +400,18 @@ class PGDialect_psycopg2(PGDialect):
self.client_encoding = client_encoding
if self.dbapi and hasattr(self.dbapi, '__version__'):
m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
- self.dbapi.__version__)
+ self.dbapi.__version__)
if m:
self.psycopg2_version = tuple(
- int(x)
- for x in m.group(1, 2, 3)
- if x is not None)
+ int(x)
+ for x in m.group(1, 2, 3)
+ if x is not None)
def initialize(self, connection):
super(PGDialect_psycopg2, self).initialize(connection)
self._has_native_hstore = self.use_native_hstore and \
- self._hstore_oids(connection.connection) \
- is not None
+ self._hstore_oids(connection.connection) \
+ is not None
self._has_native_json = self.psycopg2_version >= (2, 5)
# http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9
@@ -427,7 +441,7 @@ class PGDialect_psycopg2(PGDialect):
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
connection.set_isolation_level(level)
@@ -458,16 +472,17 @@ class PGDialect_psycopg2(PGDialect):
oid, array_oid = hstore_oids
if util.py2k:
extras.register_hstore(conn, oid=oid,
- array_oid=array_oid,
- unicode=True)
+ array_oid=array_oid,
+ unicode=True)
else:
extras.register_hstore(conn, oid=oid,
- array_oid=array_oid)
+ array_oid=array_oid)
fns.append(on_connect)
if self.dbapi and self._json_deserializer:
def on_connect(conn):
- extras.register_default_json(conn, loads=self._json_deserializer)
+ extras.register_default_json(
+ conn, loads=self._json_deserializer)
fns.append(on_connect)
if fns:
diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
index fc785d450..3ebd0135f 100644
--- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
@@ -9,7 +9,8 @@
.. dialect:: postgresql+pypostgresql
:name: py-postgresql
:dbapi: pypostgresql
- :connectstring: postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]
+ :connectstring: postgresql+pypostgresql://user:password@host:port/dbname\
+[?key=value&key=value...]
:url: http://python.projects.pgfoundry.org/
diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py
index 31434743c..28f80d000 100644
--- a/lib/sqlalchemy/dialects/postgresql/ranges.py
+++ b/lib/sqlalchemy/dialects/postgresql/ranges.py
@@ -9,6 +9,7 @@ from ... import types as sqltypes
__all__ = ('INT4RANGE', 'INT8RANGE', 'NUMRANGE')
+
class RangeOperators(object):
"""
This mixin provides functionality for the Range Operators
@@ -94,6 +95,7 @@ class RangeOperators(object):
"""
return self.expr.op('+')(other)
+
class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
"""Represent the Postgresql INT4RANGE type.
@@ -105,6 +107,7 @@ class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
ischema_names['int4range'] = INT4RANGE
+
class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
"""Represent the Postgresql INT8RANGE type.
@@ -116,6 +119,7 @@ class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
ischema_names['int8range'] = INT8RANGE
+
class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
"""Represent the Postgresql NUMRANGE type.
@@ -127,6 +131,7 @@ class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
ischema_names['numrange'] = NUMRANGE
+
class DATERANGE(RangeOperators, sqltypes.TypeEngine):
"""Represent the Postgresql DATERANGE type.
@@ -138,6 +143,7 @@ class DATERANGE(RangeOperators, sqltypes.TypeEngine):
ischema_names['daterange'] = DATERANGE
+
class TSRANGE(RangeOperators, sqltypes.TypeEngine):
"""Represent the Postgresql TSRANGE type.
@@ -149,6 +155,7 @@ class TSRANGE(RangeOperators, sqltypes.TypeEngine):
ischema_names['tsrange'] = TSRANGE
+
class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
"""Represent the Postgresql TSTZRANGE type.
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 8daada528..306f45023 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -39,8 +39,8 @@ Two things to note:
one column, if the table has a composite (i.e. multi-column) primary key.
This is regardless of the AUTOINCREMENT keyword being present or not.
-To specifically render the AUTOINCREMENT keyword on the primary key column when
-rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table
+To specifically render the AUTOINCREMENT keyword on the primary key column
+when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table
construct::
Table('sometable', metadata,
@@ -63,29 +63,29 @@ Database Locking Behavior / Concurrency
Note that SQLite is not designed for a high level of concurrency. The database
itself, being a file, is locked completely during write operations and within
transactions, meaning exactly one connection has exclusive access to the
-database during this period - all other connections will be blocked during this
-time.
-
-The Python DBAPI specification also calls for a connection model that is always
-in a transaction; there is no BEGIN method, only commit and rollback. This
-implies that a SQLite DBAPI driver would technically allow only serialized
-access to a particular database file at all times. The pysqlite driver attempts
-to ameliorate this by deferring the actual BEGIN statement until the first DML
-(INSERT, UPDATE, or DELETE) is received within a transaction. While this breaks
-serializable isolation, it at least delays the exclusive locking inherent in
-SQLite's design.
-
-SQLAlchemy's default mode of usage with the ORM is known as "autocommit=False",
-which means the moment the :class:`.Session` begins to be used, a transaction
-is begun. As the :class:`.Session` is used, the autoflush feature, also on by
-default, will flush out pending changes to the database before each query. The
-effect of this is that a :class:`.Session` used in its default mode will often
-emit DML early on, long before the transaction is actually committed. This
-again will have the effect of serializing access to the SQLite database. If
-highly concurrent reads are desired against the SQLite database, it is advised
-that the autoflush feature be disabled, and potentially even that autocommit be
-re-enabled, which has the effect of each SQL statement and flush committing
-changes immediately.
+database during this period - all other connections will be blocked during
+this time.
+
+The Python DBAPI specification also calls for a connection model that is
+always in a transaction; there is no BEGIN method, only commit and rollback.
+This implies that a SQLite DBAPI driver would technically allow only
+serialized access to a particular database file at all times. The pysqlite
+driver attempts to ameliorate this by deferring the actual BEGIN statement
+until the first DML (INSERT, UPDATE, or DELETE) is received within a
+transaction. While this breaks serializable isolation, it at least delays the
+exclusive locking inherent in SQLite's design.
+
+SQLAlchemy's default mode of usage with the ORM is known as
+"autocommit=False", which means the moment the :class:`.Session` begins to be
+used, a transaction is begun. As the :class:`.Session` is used, the autoflush
+feature, also on by default, will flush out pending changes to the database
+before each query. The effect of this is that a :class:`.Session` used in its
+default mode will often emit DML early on, long before the transaction is
+actually committed. This again will have the effect of serializing access to
+the SQLite database. If highly concurrent reads are desired against the SQLite
+database, it is advised that the autoflush feature be disabled, and
+potentially even that autocommit be re-enabled, which has the effect of each
+SQL statement and flush committing changes immediately.
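A hedged editorial sketch of that more read-friendly configuration::

    from sqlalchemy.orm import sessionmaker

    # no autoflush before queries; with autocommit re-enabled, each
    # flush commits immediately instead of holding a transaction open
    Session = sessionmaker(autoflush=False, autocommit=True)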
For more information on SQLite's lack of concurrency by design, please see
`Situations Where Another RDBMS May Work Better - High Concurrency
@@ -105,8 +105,8 @@ Constraint checking on SQLite has three prerequisites:
* At least version 3.6.19 of SQLite must be in use
* The SQLite library must be compiled *without* the SQLITE_OMIT_FOREIGN_KEY
or SQLITE_OMIT_TRIGGER symbols enabled.
-* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all connections
- before use.
+* The ``PRAGMA foreign_keys = ON`` statement must be emitted on all
+ connections before use.
SQLAlchemy allows for the ``PRAGMA`` statement to be emitted automatically for
new connections through the usage of events::
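    from sqlalchemy import event
    from sqlalchemy.engine import Engine

    # editorial sketch; the handler name is illustrative
    @event.listens_for(Engine, "connect")
    def set_sqlite_pragma(dbapi_connection, connection_record):
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA foreign_keys=ON")
        cursor.close()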
@@ -122,8 +122,8 @@ new connections through the usage of events::
.. seealso::
- `SQLite Foreign Key Support <http://www.sqlite.org/foreignkeys.html>`_ - on
- the SQLite web site.
+ `SQLite Foreign Key Support <http://www.sqlite.org/foreignkeys.html>`_
+ - on the SQLite web site.
:ref:`event_toplevel` - SQLAlchemy event API.
@@ -189,8 +189,9 @@ from ... import util
from ...engine import default, reflection
from ...sql import compiler
-from ...types import (BLOB, BOOLEAN, CHAR, DATE, DECIMAL, FLOAT, INTEGER, REAL,
- NUMERIC, SMALLINT, TEXT, TIMESTAMP, VARCHAR)
+from ...types import (BLOB, BOOLEAN, CHAR, DATE, DECIMAL, FLOAT,
+ INTEGER, REAL, NUMERIC, SMALLINT, TEXT,
+ TIMESTAMP, VARCHAR)
class _DateTimeMixin(object):
@@ -214,6 +215,7 @@ class _DateTimeMixin(object):
def literal_processor(self, dialect):
bp = self.bind_processor(dialect)
+
def process(value):
return "'%s'" % bp(value)
return process
@@ -224,7 +226,8 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
The default string storage format is::
- "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:%(second)02d.%(microsecond)06d"
+ "%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:\
+%(second)02d.%(microsecond)06d"
e.g.::
@@ -237,12 +240,13 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
from sqlalchemy.dialects.sqlite import DATETIME
dt = DATETIME(
- storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d",
+ storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:\
+%(min)02d:%(second)02d",
regexp=r"(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)"
)
- :param storage_format: format string which will be applied to the dict with
- keys year, month, day, hour, minute, second, and microsecond.
+ :param storage_format: format string which will be applied to the dict
+ with keys year, month, day, hour, minute, second, and microsecond.
:param regexp: regular expression which will be applied to incoming result
rows. If the regexp contains named groups, the resulting match dict is
@@ -390,12 +394,13 @@ class TIME(_DateTimeMixin, sqltypes.Time):
from sqlalchemy.dialects.sqlite import TIME
t = TIME(
- storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d",
+ storage_format="%(hour)02d-%(minute)02d-%(second)02d-\
+%(microsecond)06d",
regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?")
)
- :param storage_format: format string which will be applied to the dict with
- keys hour, minute, second, and microsecond.
+ :param storage_format: format string which will be applied to the dict
+ with keys hour, minute, second, and microsecond.
:param regexp: regular expression which will be applied to incoming result
rows. If the regexp contains named groups, the resulting match dict is
@@ -581,8 +586,9 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
if local_table.schema != remote_table.schema:
return None
else:
- return super(SQLiteDDLCompiler, self).visit_foreign_key_constraint(
- constraint)
+ return super(
+ SQLiteDDLCompiler,
+ self).visit_foreign_key_constraint(constraint)
def define_constraint_remote_table(self, constraint, table, preparer):
"""Format the remote table clause of a CREATE CONSTRAINT clause."""
@@ -619,7 +625,7 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
'temporary', 'then', 'to', 'transaction', 'trigger', 'true', 'union',
'unique', 'update', 'using', 'vacuum', 'values', 'view', 'virtual',
'when', 'where',
- ])
+ ])
def format_index(self, index, use_schema=True, name=None):
"""Prepare a quoted index and schema name."""
@@ -630,8 +636,8 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
if (not self.omit_schema and
use_schema and
getattr(index.table, "schema", None)):
- result = self.quote_schema(index.table.schema,
- index.table.quote_schema) + "." + result
+ result = self.quote_schema(
+ index.table.schema, index.table.quote_schema) + "." + result
return result
@@ -641,8 +647,8 @@ class SQLiteExecutionContext(default.DefaultExecutionContext):
return self.execution_options.get("sqlite_raw_colnames", False)
def _translate_colname(self, colname):
- # adjust for dotted column names. SQLite in the case of UNION may store
- # col names as "tablename.colname" in cursor.description
+ # adjust for dotted column names. SQLite in the case of UNION may
+ # store col names as "tablename.colname" in cursor.description
if not self._preserve_raw_colnames and "." in colname:
return colname.split(".")[1], colname
else:
@@ -685,9 +691,10 @@ class SQLiteDialect(default.DefaultDialect):
default.DefaultDialect.__init__(self, **kwargs)
self.isolation_level = isolation_level
- # this flag used by pysqlite dialect, and perhaps others in the future,
- # to indicate the driver is handling date/timestamp conversions (and
- # perhaps datetime/time as well on some hypothetical driver ?)
+ # this flag used by pysqlite dialect, and perhaps others in the
+ # future, to indicate the driver is handling date/timestamp
+ # conversions (and perhaps datetime/time as well on some hypothetical
+ # driver ?)
self.native_datetime = native_datetime
if self.dbapi is not None:
@@ -716,7 +723,7 @@ class SQLiteDialect(default.DefaultDialect):
"Invalid value '%s' for isolation_level. "
"Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
cursor = connection.cursor()
cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level)
cursor.close()
@@ -918,9 +925,9 @@ class SQLiteDialect(default.DefaultDialect):
coltype = coltype(*[int(a) for a in args])
except TypeError:
util.warn(
- "Could not instantiate type %s with "
- "reflected arguments %s; using no arguments." %
- (coltype, args))
+ "Could not instantiate type %s with "
+ "reflected arguments %s; using no arguments." %
+ (coltype, args))
coltype = coltype()
else:
coltype = coltype()
@@ -952,7 +959,8 @@ class SQLiteDialect(default.DefaultDialect):
row = c.fetchone()
if row is None:
break
- (numerical_id, rtbl, lcol, rcol) = (row[0], row[2], row[3], row[4])
+ (numerical_id, rtbl, lcol, rcol) = (
+ row[0], row[2], row[3], row[4])
self._parse_fk(fks, fkeys, numerical_id, rtbl, lcol, rcol)
return fkeys
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index 51e5f0cdf..c67333283 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -88,7 +88,8 @@ nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
can be forced if one configures "native_datetime=True" on create_engine()::
engine = create_engine('sqlite://',
- connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
+ connect_args={'detect_types':
+ sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
native_datetime=True
)
@@ -96,7 +97,8 @@ With this flag enabled, the DATE and TIMESTAMP types (but note - not the
DATETIME or TIME types...confused yet ?) will not perform any bind parameter
or result processing. Execution of "func.current_date()" will return a string.
"func.current_timestamp()" is registered as returning a DATETIME type in
-SQLAlchemy, so this function still receives SQLAlchemy-level result processing.
+SQLAlchemy, so this function still receives SQLAlchemy-level result
+processing.
.. _pysqlite_threading_pooling:
@@ -111,12 +113,12 @@ did not allow a ``:memory:`` database to be used in multiple threads
under any circumstances.
Pysqlite does include a now-undocumented flag known as
-``check_same_thread`` which will disable this check, however note that pysqlite
-connections are still not safe to use in concurrently in multiple threads.
-In particular, any statement execution calls would need to be externally
-mutexed, as Pysqlite does not provide for thread-safe propagation of error
-messages among other things. So while even ``:memory:`` databases can be
-shared among threads in modern SQLite, Pysqlite doesn't provide enough
+``check_same_thread`` which will disable this check, however note that
+pysqlite connections are still not safe to use concurrently in multiple
+threads. In particular, any statement execution calls would need to be
+externally mutexed, as Pysqlite does not provide for thread-safe propagation
+of error messages among other things. So while even ``:memory:`` databases
+can be shared among threads in modern SQLite, Pysqlite doesn't provide enough
thread-safety to make this usage worth it.
SQLAlchemy sets up pooling to work with Pysqlite's default behavior:
@@ -142,8 +144,8 @@ SQLAlchemy sets up pooling to work with Pysqlite's default behavior:
Using a Memory Database in Multiple Threads
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-To use a ``:memory:`` database in a multithreaded scenario, the same connection
-object must be shared among threads, since the database exists
+To use a ``:memory:`` database in a multithreaded scenario, the same
+connection object must be shared among threads, since the database exists
only within the scope of that connection. The
:class:`.StaticPool` implementation will maintain a single connection
globally, and the ``check_same_thread`` flag can be passed to Pysqlite
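A hedged editorial sketch of that combination::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import StaticPool

    engine = create_engine(
        'sqlite://',
        connect_args={'check_same_thread': False},
        poolclass=StaticPool)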
@@ -164,10 +166,10 @@ Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
the temporary table should continue to remain after :meth:`.Session.commit` or
-:meth:`.Session.rollback` is called, a pool which maintains a single connection must
-be used. Use :class:`.SingletonThreadPool` if the scope is only needed
-within the current thread, or :class:`.StaticPool` is scope is needed within
-multiple threads for this case::
+:meth:`.Session.rollback` is called, a pool which maintains a single
+connection must be used. Use :class:`.SingletonThreadPool` if the scope is
+only needed within the current thread, or :class:`.StaticPool` if scope is
+needed within multiple threads for this case::
# maintain the same connection per thread
from sqlalchemy.pool import SingletonThreadPool
@@ -215,7 +217,8 @@ a :meth:`.ConnectionEvents.begin` handler to achieve this::
from sqlalchemy import create_engine, event
- engine = create_engine("sqlite:///myfile.db", isolation_level='SERIALIZABLE')
+ engine = create_engine("sqlite:///myfile.db",
+ isolation_level='SERIALIZABLE')
@event.listens_for(engine, "begin")
def do_begin(conn):
@@ -331,6 +334,6 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
def is_disconnect(self, e, connection, cursor):
return isinstance(e, self.dbapi.ProgrammingError) and \
- "Cannot operate on a closed database." in str(e)
+ "Cannot operate on a closed database." in str(e)
dialect = SQLiteDialect_pysqlite
diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py
index a9263dc3f..eb313592b 100644
--- a/lib/sqlalchemy/dialects/sybase/__init__.py
+++ b/lib/sqlalchemy/dialects/sybase/__init__.py
@@ -11,11 +11,11 @@ from sqlalchemy.dialects.sybase import base, pysybase, pyodbc
base.dialect = pyodbc.dialect
from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
- TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
- BIGINT, INT, INTEGER, SMALLINT, BINARY,\
- VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\
- IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\
- dialect
+ TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
+ BIGINT, INT, INTEGER, SMALLINT, BINARY,\
+ VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\
+ IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\
+ dialect
__all__ = (
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 38f665838..26f5ef04a 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -32,10 +32,10 @@ from sqlalchemy import schema as sa_schema
from sqlalchemy import util, sql, exc
from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
- TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
- BIGINT, INT, INTEGER, SMALLINT, BINARY,\
- VARBINARY, DECIMAL, TIMESTAMP, Unicode,\
- UnicodeText, REAL
+ TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
+ BIGINT, INT, INTEGER, SMALLINT, BINARY,\
+ VARBINARY, DECIMAL, TIMESTAMP, Unicode,\
+ UnicodeText, REAL
RESERVED_WORDS = set([
"add", "all", "alter", "and",
@@ -94,7 +94,7 @@ RESERVED_WORDS = set([
"when", "where", "while", "window",
"with", "with_cube", "with_lparen", "with_rollup",
"within", "work", "writetext",
- ])
+])
class _SybaseUnitypeMixin(object):
@@ -225,7 +225,7 @@ ischema_names = {
'image': IMAGE,
'bit': BIT,
-# not in documentation for ASE 15.7
+ # not in documentation for ASE 15.7
'long varchar': TEXT, # TODO
'timestamp': TIMESTAMP,
'uniqueidentifier': UNIQUEIDENTIFIER,
@@ -268,12 +268,13 @@ class SybaseExecutionContext(default.DefaultExecutionContext):
if insert_has_sequence:
self._enable_identity_insert = \
- seq_column.key in self.compiled_parameters[0]
+ seq_column.key in self.compiled_parameters[0]
else:
self._enable_identity_insert = False
if self._enable_identity_insert:
- self.cursor.execute("SET IDENTITY_INSERT %s ON" %
+ self.cursor.execute(
+ "SET IDENTITY_INSERT %s ON" %
self.dialect.identifier_preparer.format_table(tbl))
if self.isddl:
@@ -282,15 +283,15 @@ class SybaseExecutionContext(default.DefaultExecutionContext):
# include a note about that.
if not self.should_autocommit:
raise exc.InvalidRequestError(
- "The Sybase dialect only supports "
- "DDL in 'autocommit' mode at this time.")
+ "The Sybase dialect only supports "
+ "DDL in 'autocommit' mode at this time.")
self.root_connection.engine.logger.info(
- "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")
+ "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")
self.set_ddl_autocommit(
- self.root_connection.connection.connection,
- True)
+ self.root_connection.connection.connection,
+ True)
def post_exec(self):
if self.isddl:
@@ -298,10 +299,10 @@ class SybaseExecutionContext(default.DefaultExecutionContext):
if self._enable_identity_insert:
self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
- self.dialect.identifier_preparer.
- format_table(self.compiled.statement.table)
- )
+ "SET IDENTITY_INSERT %s OFF" %
+ self.dialect.identifier_preparer.
+ format_table(self.compiled.statement.table)
+ )
def get_lastrowid(self):
cursor = self.create_cursor()
@@ -317,10 +318,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
extract_map = util.update_copy(
compiler.SQLCompiler.extract_map,
{
- 'doy': 'dayofyear',
- 'dow': 'weekday',
- 'milliseconds': 'millisecond'
- })
+ 'doy': 'dayofyear',
+ 'dow': 'weekday',
+ 'milliseconds': 'millisecond'
+ })
def get_select_precolumns(self, select):
s = select._distinct and "DISTINCT " or ""
@@ -328,10 +329,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
# bind params for FIRST / TOP
limit = select._limit
if limit:
- #if select._limit == 1:
- #s += "FIRST "
- #else:
- #s += "TOP %s " % (select._limit,)
+ # if select._limit == 1:
+ # s += "FIRST "
+ # else:
+ # s += "TOP %s " % (select._limit,)
s += "TOP %s " % (limit,)
offset = select._offset
if offset:
@@ -352,7 +353,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
def visit_extract(self, extract, **kw):
field = self.extract_map.get(extract.field, extract.field)
return 'DATEPART("%s", %s)' % (
- field, self.process(extract.expr, **kw))
+ field, self.process(extract.expr, **kw))
def visit_now_func(self, fn, **kw):
return "GETDATE()"
@@ -376,21 +377,21 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
class SybaseDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column) + " " + \
- self.dialect.type_compiler.process(column.type)
+ self.dialect.type_compiler.process(column.type)
if column.table is None:
raise exc.CompileError(
- "The Sybase dialect requires Table-bound "
- "columns in order to generate DDL")
+ "The Sybase dialect requires Table-bound "
+ "columns in order to generate DDL")
seq_col = column.table._autoincrement_column
# install a IDENTITY Sequence if we have an implicit IDENTITY column
if seq_col is column:
sequence = isinstance(column.default, sa_schema.Sequence) \
- and column.default
+ and column.default
if sequence:
start, increment = sequence.start or 1, \
- sequence.increment or 1
+ sequence.increment or 1
else:
start, increment = 1, 1
if (start, increment) == (1, 1):
@@ -416,8 +417,8 @@ class SybaseDDLCompiler(compiler.DDLCompiler):
return "\nDROP INDEX %s.%s" % (
self.preparer.quote_identifier(index.table.name),
self._prepared_index_name(drop.element,
- include_schema=False)
- )
+ include_schema=False)
+ )
class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
@@ -447,14 +448,14 @@ class SybaseDialect(default.DefaultDialect):
def _get_default_schema_name(self, connection):
return connection.scalar(
- text("SELECT user_name() as user_name",
- typemap={'user_name': Unicode})
- )
+ text("SELECT user_name() as user_name",
+ typemap={'user_name': Unicode})
+ )
def initialize(self, connection):
super(SybaseDialect, self).initialize(connection)
if self.server_version_info is not None and\
- self.server_version_info < (15, ):
+ self.server_version_info < (15, ):
self.max_identifier_length = 30
else:
self.max_identifier_length = 255
@@ -520,14 +521,15 @@ class SybaseDialect(default.DefaultDialect):
for (name, type_, nullable, autoincrement, default, precision, scale,
length) in results:
col_info = self._get_column_info(name, type_, bool(nullable),
- bool(autoincrement), default, precision, scale,
- length)
+ bool(autoincrement),
+ default, precision, scale,
+ length)
columns.append(col_info)
return columns
def _get_column_info(self, name, type_, nullable, autoincrement, default,
- precision, scale, length):
+ precision, scale, length):
coltype = self.ischema_names.get(type_, None)
@@ -544,8 +546,8 @@ class SybaseDialect(default.DefaultDialect):
if coltype:
coltype = coltype(*args, **kwargs)
- #is this necessary
- #if is_array:
+ # is this necessary
+ # if is_array:
# coltype = ARRAY(coltype)
else:
util.warn("Did not recognize type '%s' of column '%s'" %
@@ -643,12 +645,12 @@ class SybaseDialect(default.DefaultDialect):
referred_columns.append(reftable_columns[r["refkey%i" % i]])
fk_info = {
- "constrained_columns": constrained_columns,
- "referred_schema": reftable["schema"],
- "referred_table": reftable["name"],
- "referred_columns": referred_columns,
- "name": r["name"]
- }
+ "constrained_columns": constrained_columns,
+ "referred_schema": reftable["schema"],
+ "referred_table": reftable["name"],
+ "referred_columns": referred_columns,
+ "name": r["name"]
+ }
foreign_keys.append(fk_info)
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 3b849a680..cb76d1379 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -9,7 +9,8 @@
.. dialect:: sybase+pyodbc
:name: PyODBC
:dbapi: pyodbc
- :connectstring: sybase+pyodbc://<username>:<password>@<dsnname>[/<database>]
+ :connectstring: sybase+pyodbc://<username>:<password>@<dsnname>\
+[/<database>]
:url: http://pypi.python.org/pypi/pyodbc/
@@ -34,7 +35,7 @@ Currently *not* supported are::
"""
from sqlalchemy.dialects.sybase.base import SybaseDialect,\
- SybaseExecutionContext
+ SybaseExecutionContext
from sqlalchemy.connectors.pyodbc import PyODBCConnector
from sqlalchemy import types as sqltypes, processors
import decimal
@@ -51,7 +52,7 @@ class _SybNumeric_pyodbc(sqltypes.Numeric):
def bind_processor(self, dialect):
super_process = super(_SybNumeric_pyodbc, self).\
- bind_processor(dialect)
+ bind_processor(dialect)
def process(value):
if self.asdecimal and \
diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py
index 678c146d3..6843eb480 100644
--- a/lib/sqlalchemy/dialects/sybase/pysybase.py
+++ b/lib/sqlalchemy/dialects/sybase/pysybase.py
@@ -9,7 +9,8 @@
.. dialect:: sybase+pysybase
:name: Python-Sybase
:dbapi: Sybase
- :connectstring: sybase+pysybase://<username>:<password>@<dsn>/[database name]
+ :connectstring: sybase+pysybase://<username>:<password>@<dsn>/\
+[database name]
:url: http://python-sybase.sourceforge.net/
Unicode Support
@@ -22,7 +23,7 @@ kind at this time.
from sqlalchemy import types as sqltypes, processors
from sqlalchemy.dialects.sybase.base import SybaseDialect, \
- SybaseExecutionContext, SybaseSQLCompiler
+ SybaseExecutionContext, SybaseSQLCompiler
class _SybNumeric(sqltypes.Numeric):
@@ -62,8 +63,8 @@ class SybaseDialect_pysybase(SybaseDialect):
statement_compiler = SybaseSQLCompiler_pysybase
colspecs = {
- sqltypes.Numeric: _SybNumeric,
- sqltypes.Float: sqltypes.Float
+ sqltypes.Numeric: _SybNumeric,
+ sqltypes.Float: sqltypes.Float
}
@classmethod
@@ -90,7 +91,7 @@ class SybaseDialect_pysybase(SybaseDialect):
def is_disconnect(self, e, connection, cursor):
if isinstance(e, (self.dbapi.OperationalError,
- self.dbapi.ProgrammingError)):
+ self.dbapi.ProgrammingError)):
msg = str(e)
return ('Unable to complete network request to host' in msg or
'Invalid connection state' in msg or
diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py
index ddbf05cfe..270e95c9c 100644
--- a/lib/sqlalchemy/event/api.py
+++ b/lib/sqlalchemy/event/api.py
@@ -25,7 +25,8 @@ def _event_key(target, identifier, fn):
return _EventKey(target, identifier, fn, tgt)
else:
raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
- (identifier, target))
+ (identifier, target))
+
def listen(target, identifier, fn, *args, **kw):
"""Register a listener function for the given target.
@@ -114,14 +115,15 @@ def remove(target, identifier, fn):
event.remove(SomeMappedClass, "before_insert", my_listener_function)
Above, the listener function associated with ``SomeMappedClass`` was also
- propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` function
- will revert all of these operations.
+ propagated to subclasses of ``SomeMappedClass``; the :func:`.remove`
+ function will revert all of these operations.
.. versionadded:: 0.9.0
"""
_event_key(target, identifier, fn).remove()
+
def contains(target, identifier, fn):
"""Return True if the given target/ident/fn is set up to listen.
diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py
index 57adc4208..7641b595a 100644
--- a/lib/sqlalchemy/event/attr.py
+++ b/lib/sqlalchemy/event/attr.py
@@ -44,6 +44,7 @@ class RefCollection(object):
def ref(self):
return weakref.ref(self, registry._collection_gced)
+
class _DispatchDescriptor(RefCollection):
"""Class-level attributes on :class:`._Dispatch` classes."""
@@ -53,13 +54,13 @@ class _DispatchDescriptor(RefCollection):
self.arg_names = argspec.args[1:]
self.has_kw = bool(argspec.keywords)
self.legacy_signatures = list(reversed(
- sorted(
- getattr(fn, '_legacy_signatures', []),
- key=lambda s: s[0]
- )
- ))
+ sorted(
+ getattr(fn, '_legacy_signatures', []),
+ key=lambda s: s[0]
+ )
+ ))
self.__doc__ = fn.__doc__ = legacy._augment_fn_docs(
- self, parent_dispatch_cls, fn)
+ self, parent_dispatch_cls, fn)
self._clslevel = weakref.WeakKeyDictionary()
self._empty_listeners = weakref.WeakKeyDictionary()
@@ -83,11 +84,10 @@ class _DispatchDescriptor(RefCollection):
return fn(**argdict)
return wrap_kw
-
def insert(self, event_key, propagate):
target = event_key.dispatch_target
assert isinstance(target, type), \
- "Class-level Event targets must be classes."
+ "Class-level Event targets must be classes."
stack = [target]
while stack:
cls = stack.pop(0)
@@ -103,7 +103,7 @@ class _DispatchDescriptor(RefCollection):
def append(self, event_key, propagate):
target = event_key.dispatch_target
assert isinstance(target, type), \
- "Class-level Event targets must be classes."
+ "Class-level Event targets must be classes."
stack = [target]
while stack:
@@ -170,10 +170,12 @@ class _DispatchDescriptor(RefCollection):
obj.__dict__[self.__name__] = ret
return ret
+
class _HasParentDispatchDescriptor(object):
def _adjust_fn_spec(self, fn, named):
return self.parent._adjust_fn_spec(fn, named)
+
class _EmptyListener(_HasParentDispatchDescriptor):
"""Serves as a class-level interface to the events
served by a _DispatchDescriptor, when there are no
@@ -183,6 +185,7 @@ class _EmptyListener(_HasParentDispatchDescriptor):
events are added.
"""
+
def __init__(self, parent, target_cls):
if target_cls not in parent._clslevel:
parent.update_subclass(target_cls)
@@ -192,7 +195,6 @@ class _EmptyListener(_HasParentDispatchDescriptor):
self.propagate = frozenset()
self.listeners = ()
-
def for_modify(self, obj):
"""Return an event collection which can be modified.
@@ -268,6 +270,7 @@ class _CompoundListener(_HasParentDispatchDescriptor):
__nonzero__ = __bool__
+
class _ListenerCollection(RefCollection, _CompoundListener):
"""Instance-level attributes on instances of :class:`._Dispatch`.
@@ -304,10 +307,10 @@ class _ListenerCollection(RefCollection, _CompoundListener):
existing_listener_set = set(existing_listeners)
self.propagate.update(other.propagate)
other_listeners = [l for l
- in other.listeners
- if l not in existing_listener_set
- and not only_propagate or l in self.propagate
- ]
+ in other.listeners
+ if l not in existing_listener_set
+ and not only_propagate or l in self.propagate
+ ]
existing_listeners.extend(other_listeners)
@@ -346,9 +349,9 @@ class _JoinedDispatchDescriptor(object):
return self
else:
obj.__dict__[self.name] = ret = _JoinedListener(
- obj.parent, self.name,
- getattr(obj.local, self.name)
- )
+ obj.parent, self.name,
+ getattr(obj.local, self.name)
+ )
return ret
@@ -383,5 +386,3 @@ class _JoinedListener(_CompoundListener):
def clear(self):
raise NotImplementedError()
-
-
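One subtlety in the re-indented comprehension above: and binds tighter than or, so its filter parses as (l not in existing_listener_set and not only_propagate) or (l in self.propagate). A small check of that precedence, with toy values:

existing = {1, 2}
propagate = {2, 3}
only_propagate = True

# Parses as (l not in existing and not only_propagate) or (l in propagate).
merged = [l for l in [1, 2, 3, 4]
          if l not in existing
          and not only_propagate or l in propagate]
assert merged == [2, 3]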
diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py
index 3aacee2eb..4925f6ffa 100644
--- a/lib/sqlalchemy/event/base.py
+++ b/lib/sqlalchemy/event/base.py
@@ -18,7 +18,8 @@ instances of ``_Dispatch``.
from __future__ import absolute_import
from .. import util
-from .attr import _JoinedDispatchDescriptor, _EmptyListener, _DispatchDescriptor
+from .attr import _JoinedDispatchDescriptor, \
+ _EmptyListener, _DispatchDescriptor
_registrars = util.defaultdict(list)
@@ -32,6 +33,7 @@ class _UnpickleDispatch(object):
:class:`_Dispatch` given a particular :class:`.Events` subclass.
"""
+
def __call__(self, _parent_cls):
for cls in _parent_cls.__mro__:
if 'dispatch' in cls.__dict__:
@@ -80,9 +82,9 @@ class _Dispatch(object):
"""
if '_joined_dispatch_cls' not in self.__class__.__dict__:
cls = type(
- "Joined%s" % self.__class__.__name__,
- (_JoinedDispatcher, self.__class__), {}
- )
+ "Joined%s" % self.__class__.__name__,
+ (_JoinedDispatcher, self.__class__), {}
+ )
for ls in _event_descriptors(self):
setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))
@@ -131,7 +133,7 @@ def _create_dispatcher_class(cls, classname, bases, dict_):
# of the Event class, this is the straight monkeypatch.
dispatch_base = getattr(cls, 'dispatch', _Dispatch)
dispatch_cls = type("%sDispatch" % classname,
- (dispatch_base, ), {})
+ (dispatch_base, ), {})
cls._set_dispatch(cls, dispatch_cls)
for k in dict_:
@@ -150,6 +152,7 @@ def _remove_dispatcher(cls):
if not _registrars[k]:
del _registrars[k]
+
class Events(util.with_metaclass(_EventMeta, object)):
"""Define event listening functions for a particular target type."""
@@ -163,16 +166,15 @@ class Events(util.with_metaclass(_EventMeta, object)):
cls.dispatch = dispatch_cls
dispatch_cls._events = cls
-
@classmethod
def _accept_with(cls, target):
# Mapper, ClassManager, Session override this to
# also accept classes, scoped_sessions, sessionmakers, etc.
if hasattr(target, 'dispatch') and (
- isinstance(target.dispatch, cls.dispatch) or \
- isinstance(target.dispatch, type) and \
- issubclass(target.dispatch, cls.dispatch)
- ):
+ isinstance(target.dispatch, cls.dispatch) or
+ isinstance(target.dispatch, type) and
+ issubclass(target.dispatch, cls.dispatch)
+ ):
return target
else:
return None
@@ -206,6 +208,7 @@ class dispatcher(object):
instances.
"""
+
def __init__(self, events):
self.dispatch_cls = events.dispatch
self.events = events
@@ -215,4 +218,3 @@ class dispatcher(object):
return self.dispatch_cls
obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
return disp
-
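The base.py import is rewrapped with a backslash; PEP8 allows that, though it generally recommends parentheses for multi-line imports. Both forms, shown on stdlib names so the snippet runs anywhere:

# Backslash continuation, the style used in the patch:
from os.path import join, \
    split, basename

# Parenthesized form, the usual PEP8 recommendation:
from os.path import (
    join,
    split,
    basename,
)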
diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py
index fc3aa288e..3b1519cb6 100644
--- a/lib/sqlalchemy/event/legacy.py
+++ b/lib/sqlalchemy/event/legacy.py
@@ -12,6 +12,7 @@ generation of deprecation notes and docstrings.
from .. import util
+
def _legacy_signature(since, argnames, converter=None):
def leg(fn):
if not hasattr(fn, '_legacy_signatures'):
@@ -20,6 +21,7 @@ def _legacy_signature(since, argnames, converter=None):
return fn
return leg
+
def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
for since, argnames, conv in dispatch_descriptor.legacy_signatures:
if argnames[-1] == "**kw":
@@ -29,10 +31,11 @@ def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
has_kw = False
if len(argnames) == len(argspec.args) \
- and has_kw is bool(argspec.keywords):
+ and has_kw is bool(argspec.keywords):
if conv:
assert not has_kw
+
def wrap_leg(*args):
return fn(*conv(*args))
else:
@@ -47,38 +50,42 @@ def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
else:
return fn
+
def _indent(text, indent):
return "\n".join(
- indent + line
- for line in text.split("\n")
- )
+ indent + line
+ for line in text.split("\n")
+ )
+
def _standard_listen_example(dispatch_descriptor, sample_target, fn):
example_kw_arg = _indent(
- "\n".join(
- "%(arg)s = kw['%(arg)s']" % {"arg": arg}
- for arg in dispatch_descriptor.arg_names[0:2]
- ),
- " ")
+ "\n".join(
+ "%(arg)s = kw['%(arg)s']" % {"arg": arg}
+ for arg in dispatch_descriptor.arg_names[0:2]
+ ),
+ " ")
if dispatch_descriptor.legacy_signatures:
current_since = max(since for since, args, conv
in dispatch_descriptor.legacy_signatures)
else:
current_since = None
text = (
- "from sqlalchemy import event\n\n"
- "# standard decorator style%(current_since)s\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
- "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "\n # ... (event handling logic) ...\n"
+ "from sqlalchemy import event\n\n"
+ "# standard decorator style%(current_since)s\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
+ "def receive_%(event_name)s("
+ "%(named_event_arguments)s%(has_kw_arguments)s):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "\n # ... (event handling logic) ...\n"
)
if len(dispatch_descriptor.arg_names) > 3:
text += (
"\n# named argument style (new in 0.9)\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n"
+ "@event.listens_for("
+ "%(sample_target)s, '%(event_name)s', named=True)\n"
"def receive_%(event_name)s(**kw):\n"
" \"listen for the '%(event_name)s' event\"\n"
"%(example_kw_arg)s\n"
@@ -86,72 +93,77 @@ def _standard_listen_example(dispatch_descriptor, sample_target, fn):
)
text %= {
- "current_since": " (arguments as of %s)" %
- current_since if current_since else "",
- "event_name": fn.__name__,
- "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
- "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
- "example_kw_arg": example_kw_arg,
- "sample_target": sample_target
- }
+ "current_since": " (arguments as of %s)" %
+ current_since if current_since else "",
+ "event_name": fn.__name__,
+ "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
+ "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+ "example_kw_arg": example_kw_arg,
+ "sample_target": sample_target
+ }
return text
+
def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
text = ""
for since, args, conv in dispatch_descriptor.legacy_signatures:
text += (
"\n# legacy calling style (pre-%(since)s)\n"
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
- "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
+ "def receive_%(event_name)s("
+ "%(named_event_arguments)s%(has_kw_arguments)s):\n"
" \"listen for the '%(event_name)s' event\"\n"
"\n # ... (event handling logic) ...\n" % {
"since": since,
"event_name": fn.__name__,
- "has_kw_arguments": " **kw" if dispatch_descriptor.has_kw else "",
+ "has_kw_arguments": " **kw"
+ if dispatch_descriptor.has_kw else "",
"named_event_arguments": ", ".join(args),
"sample_target": sample_target
}
)
return text
+
def _version_signature_changes(dispatch_descriptor):
since, args, conv = dispatch_descriptor.legacy_signatures[0]
return (
- "\n.. versionchanged:: %(since)s\n"
- " The ``%(event_name)s`` event now accepts the \n"
- " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
- " Listener functions which accept the previous argument \n"
- " signature(s) listed above will be automatically \n"
- " adapted to the new signature." % {
- "since": since,
- "event_name": dispatch_descriptor.__name__,
- "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
- "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
- }
- )
+ "\n.. versionchanged:: %(since)s\n"
+ " The ``%(event_name)s`` event now accepts the \n"
+ " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
+ " Listener functions which accept the previous argument \n"
+ " signature(s) listed above will be automatically \n"
+ " adapted to the new signature." % {
+ "since": since,
+ "event_name": dispatch_descriptor.__name__,
+ "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+ "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
+ }
+ )
+
def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
header = ".. container:: event_signatures\n\n"\
- " Example argument forms::\n"\
- "\n"
+ " Example argument forms::\n"\
+ "\n"
sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
text = (
- header +
- _indent(
- _standard_listen_example(
- dispatch_descriptor, sample_target, fn),
- " " * 8)
- )
+ header +
+ _indent(
+ _standard_listen_example(
+ dispatch_descriptor, sample_target, fn),
+ " " * 8)
+ )
if dispatch_descriptor.legacy_signatures:
text += _indent(
- _legacy_listen_examples(
- dispatch_descriptor, sample_target, fn),
- " " * 8)
+ _legacy_listen_examples(
+ dispatch_descriptor, sample_target, fn),
+ " " * 8)
text += _version_signature_changes(dispatch_descriptor)
return util.inject_docstring_text(fn.__doc__,
- text,
- 1
- )
+ text,
+ 1
+ )
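The docstring templates above lean on two details worth seeing in isolation: adjacent string literals concatenate at compile time, and % with a dict fills named placeholders. A runnable reduction of _standard_listen_example:

text = (
    "from sqlalchemy import event\n\n"
    "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
    "def receive_%(event_name)s(%(named_event_arguments)s):\n"
    "    \"listen for the '%(event_name)s' event\"\n"
)
text %= {
    "sample_target": "SomeClass",
    "event_name": "before_insert",
    "named_event_arguments": "mapper, connection, target",
}
assert "def receive_before_insert(mapper, connection, target):" in text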
diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py
index 11f5cb579..a34de3cd7 100644
--- a/lib/sqlalchemy/event/registry.py
+++ b/lib/sqlalchemy/event/registry.py
@@ -47,6 +47,7 @@ ref(listenercollection) -> {
}
"""
+
def _collection_gced(ref):
# defaultdict, so can't get a KeyError
if not _collection_to_key or ref not in _collection_to_key:
@@ -60,6 +61,7 @@ def _collection_gced(ref):
if not dispatch_reg:
_key_to_collection.pop(key)
+
def _stored_in_collection(event_key, owner):
key = event_key._key
@@ -76,6 +78,7 @@ def _stored_in_collection(event_key, owner):
listener_to_key = _collection_to_key[owner_ref]
listener_to_key[listen_ref] = key
+
def _removed_from_collection(event_key, owner):
key = event_key._key
@@ -92,6 +95,7 @@ def _removed_from_collection(event_key, owner):
listener_to_key = _collection_to_key[owner_ref]
listener_to_key.pop(listen_ref)
+
def _stored_in_collection_multi(newowner, oldowner, elements):
if not elements:
return
@@ -113,6 +117,7 @@ def _stored_in_collection_multi(newowner, oldowner, elements):
new_listener_to_key[listen_ref] = key
+
def _clear(owner, elements):
if not elements:
return
@@ -133,8 +138,8 @@ class _EventKey(object):
"""Represent :func:`.listen` arguments.
"""
-
- def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None):
+ def __init__(self, target, identifier,
+ fn, dispatch_target, _fn_wrap=None):
self.target = target
self.identifier = identifier
self.fn = fn
@@ -159,7 +164,7 @@ class _EventKey(object):
self.fn,
self.dispatch_target,
_fn_wrap=fn_wrap
- )
+ )
def with_dispatch_target(self, dispatch_target):
if dispatch_target is self.dispatch_target:
@@ -171,12 +176,13 @@ class _EventKey(object):
self.fn,
dispatch_target,
_fn_wrap=self.fn_wrap
- )
+ )
def listen(self, *args, **kw):
once = kw.pop("once", False)
if once:
- self.with_wrapper(util.only_once(self._listen_fn)).listen(*args, **kw)
+ self.with_wrapper(
+ util.only_once(self._listen_fn)).listen(*args, **kw)
else:
self.dispatch_target.dispatch._listen(self, *args, **kw)
@@ -185,9 +191,9 @@ class _EventKey(object):
if key not in _key_to_collection:
raise exc.InvalidRequestError(
- "No listeners found for event %s / %r / %s " %
- (self.target, self.identifier, self.fn)
- )
+ "No listeners found for event %s / %r / %s " %
+ (self.target, self.identifier, self.fn)
+ )
dispatch_reg = _key_to_collection.pop(key)
for collection_ref, listener_ref in dispatch_reg.items():
@@ -202,7 +208,7 @@ class _EventKey(object):
return self._key in _key_to_collection
def base_listen(self, propagate=False, insert=False,
- named=False):
+ named=False):
target, identifier, fn = \
self.dispatch_target, self.identifier, self._listen_fn
@@ -214,10 +220,10 @@ class _EventKey(object):
if insert:
dispatch_descriptor.\
- for_modify(target.dispatch).insert(self, propagate)
+ for_modify(target.dispatch).insert(self, propagate)
else:
dispatch_descriptor.\
- for_modify(target.dispatch).append(self, propagate)
+ for_modify(target.dispatch).append(self, propagate)
@property
def _listen_fn(self):
@@ -238,5 +244,3 @@ class _EventKey(object):
def prepend_to_list(self, owner, list_):
_stored_in_collection(self, owner)
list_.insert(0, self._listen_fn)
-
-
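The _EventKey.__init__ rewrap above aligns its continuation parameters under the opening parenthesis. PEP8's alternative is a hanging indent with extra indentation to keep parameters visually distinct from the body; both, sketched:

# Aligned with the opening parenthesis, as in the patch:
def __init__(self, target, identifier,
             fn, dispatch_target, _fn_wrap=None):
    ...

# Hanging indent; the doubled indent separates parameters from the body:
def __init__(
        self, target, identifier, fn,
        dispatch_target, _fn_wrap=None):
    ...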
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 6cd9dfcb1..741e79b9d 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -15,40 +15,40 @@ documentation for an overview of how this module is used.
from . import exc
from .mapper import (
- Mapper,
- _mapper_registry,
- class_mapper,
- configure_mappers,
- reconstructor,
- validates
- )
+ Mapper,
+ _mapper_registry,
+ class_mapper,
+ configure_mappers,
+ reconstructor,
+ validates
+)
from .interfaces import (
- EXT_CONTINUE,
- EXT_STOP,
- PropComparator,
- )
+ EXT_CONTINUE,
+ EXT_STOP,
+ PropComparator,
+)
from .deprecated_interfaces import (
- MapperExtension,
- SessionExtension,
- AttributeExtension,
+ MapperExtension,
+ SessionExtension,
+ AttributeExtension,
)
from .util import (
- aliased,
- join,
- object_mapper,
- outerjoin,
- polymorphic_union,
- was_deleted,
- with_parent,
- with_polymorphic,
- )
+ aliased,
+ join,
+ object_mapper,
+ outerjoin,
+ polymorphic_union,
+ was_deleted,
+ with_parent,
+ with_polymorphic,
+)
from .properties import ColumnProperty
from .relationships import RelationshipProperty
from .descriptor_props import (
- ComparableProperty,
- CompositeProperty,
- SynonymProperty,
- )
+ ComparableProperty,
+ CompositeProperty,
+ SynonymProperty,
+)
from .relationships import (
foreign,
remote,
@@ -69,6 +69,7 @@ from ..util.langhelpers import public_factory
from .. import util as _sa_util
from . import strategies as _strategies
+
def create_session(bind=None, **kwargs):
"""Create a new :class:`.Session`
with no automation enabled by default.
@@ -107,6 +108,7 @@ def create_session(bind=None, **kwargs):
relationship = public_factory(RelationshipProperty, ".orm.relationship")
+
def relation(*arg, **kw):
"""A synonym for :func:`relationship`."""
@@ -144,7 +146,8 @@ def backref(name, **kwargs):
Used with the ``backref`` keyword argument to :func:`relationship` in
place of a string argument, e.g.::
- 'items':relationship(SomeItem, backref=backref('parent', lazy='subquery'))
+ 'items':relationship(
+ SomeItem, backref=backref('parent', lazy='subquery'))
"""
return (name, kwargs)
@@ -158,7 +161,8 @@ def deferred(*columns, **kw):
:class:`.Column` object, however a collection is supported in order
to support multiple columns mapped under the same attribute.
- :param \**kw: additional keyword arguments passed to :class:`.ColumnProperty`.
+ :param \**kw: additional keyword arguments passed to
+ :class:`.ColumnProperty`.
.. seealso::
@@ -173,11 +177,11 @@ mapper = public_factory(Mapper, ".orm.mapper")
synonym = public_factory(SynonymProperty, ".orm.synonym")
comparable_property = public_factory(ComparableProperty,
- ".orm.comparable_property")
+ ".orm.comparable_property")
@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
- "is renamed to :func:`.configure_mappers`")
+ "is renamed to :func:`.configure_mappers`")
def compile_mappers():
"""Initialize the inter-mapper relationships of all mappers that have
been defined.
@@ -196,14 +200,14 @@ def clear_mappers():
:func:`.clear_mappers` is *not* for normal use, as there is literally no
valid usage for it outside of very specific testing scenarios. Normally,
mappers are permanent structural components of user-defined classes, and
- are never discarded independently of their class. If a mapped class itself
- is garbage collected, its mapper is automatically disposed of as well. As
- such, :func:`.clear_mappers` is only for usage in test suites that re-use
- the same classes with different mappings, which is itself an extremely rare
- use case - the only such use case is in fact SQLAlchemy's own test suite,
- and possibly the test suites of other ORM extension libraries which
- intend to test various combinations of mapper construction upon a fixed
- set of classes.
+ are never discarded independently of their class. If a mapped class
+ itself is garbage collected, its mapper is automatically disposed of as
+ well. As such, :func:`.clear_mappers` is only for usage in test suites
+ that re-use the same classes with different mappings, which is itself an
+ extremely rare use case - the only such use case is in fact SQLAlchemy's
+ own test suite, and possibly the test suites of other ORM extension
+ libraries which intend to test various combinations of mapper construction
+ upon a fixed set of classes.
"""
mapperlib._CONFIGURE_MUTEX.acquire()
@@ -237,6 +241,7 @@ defaultload = strategy_options.defaultload._unbound_fn
from .strategy_options import Load
+
def eagerload(*args, **kwargs):
"""A synonym for :func:`joinedload()`."""
return joinedload(*args, **kwargs)
@@ -247,12 +252,9 @@ def eagerload_all(*args, **kwargs):
return joinedload_all(*args, **kwargs)
-
-
contains_alias = public_factory(AliasOption, ".orm.contains_alias")
-
def __go(lcls):
global __all__
from .. import util as sa_util
@@ -261,9 +263,8 @@ def __go(lcls):
import inspect as _inspect
__all__ = sorted(name for name, obj in lcls.items()
- if not (name.startswith('_') or _inspect.ismodule(obj)))
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
_sa_util.dependencies.resolve_all("sqlalchemy.orm")
__go(locals())
-
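__go(locals()) builds the package's __all__ from the re-wrapped comprehension above: every binding that is neither private nor an imported module gets exported. A self-contained reduction:

import inspect as _inspect


def _public_names(lcls):
    return sorted(name for name, obj in lcls.items()
                  if not (name.startswith('_') or _inspect.ismodule(obj)))


assert _public_names(
    {'relationship': object(), '_private': 1, 'exc': _inspect}
) == ['relationship']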
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 329367473..67e4dca9b 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -21,16 +21,17 @@ from . import interfaces, collections, exc as orm_exc
from .base import instance_state, instance_dict, manager_of_class
from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\
- NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
- INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
- PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
- PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH
+ NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
+ INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
+ PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
+ PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT, NO_AUTOFLUSH
from .base import state_str, instance_str
+
@inspection._self_inspects
class QueryableAttribute(interfaces._MappedAttribute,
- interfaces._InspectionAttr,
- interfaces.PropComparator):
+ interfaces._InspectionAttr,
+ interfaces.PropComparator):
"""Base class for :term:`descriptor` objects that intercept
attribute events on behalf of a :class:`.MapperProperty`
object. The actual :class:`.MapperProperty` is accessible
@@ -52,8 +53,8 @@ class QueryableAttribute(interfaces._MappedAttribute,
is_attribute = True
def __init__(self, class_, key, impl=None,
- comparator=None, parententity=None,
- of_type=None):
+ comparator=None, parententity=None,
+ of_type=None):
self.class_ = class_
self.key = key
self.impl = impl
@@ -76,13 +77,12 @@ class QueryableAttribute(interfaces._MappedAttribute,
def get_history(self, instance, passive=PASSIVE_OFF):
return self.impl.get_history(instance_state(instance),
- instance_dict(instance), passive)
+ instance_dict(instance), passive)
def __selectable__(self):
# TODO: conditionally attach this method based on clause_element ?
return self
-
@util.memoized_property
def info(self):
"""Return the 'info' dictionary for the underlying SQL element.
@@ -97,22 +97,23 @@ class QueryableAttribute(interfaces._MappedAttribute,
* If the attribute is a :class:`.ColumnProperty` but is mapped to
any other kind of SQL expression other than a :class:`.Column`,
- the attribute will refer to the :attr:`.MapperProperty.info` dictionary
- associated directly with the :class:`.ColumnProperty`, assuming the SQL
- expression itself does not have its own ``.info`` attribute
- (which should be the case, unless a user-defined SQL construct
- has defined one).
-
- * If the attribute refers to any other kind of :class:`.MapperProperty`,
- including :class:`.RelationshipProperty`, the attribute will refer
- to the :attr:`.MapperProperty.info` dictionary associated with
- that :class:`.MapperProperty`.
-
- * To access the :attr:`.MapperProperty.info` dictionary of the :class:`.MapperProperty`
- unconditionally, including for a :class:`.ColumnProperty` that's
- associated directly with a :class:`.schema.Column`, the attribute
- can be referred to using :attr:`.QueryableAttribute.property`
- attribute, as ``MyClass.someattribute.property.info``.
+ the attribute will refer to the :attr:`.MapperProperty.info`
+ dictionary associated directly with the :class:`.ColumnProperty`,
+ assuming the SQL expression itself does not have its own ``.info``
+ attribute (which should be the case, unless a user-defined SQL
+ construct has defined one).
+
+ * If the attribute refers to any other kind of
+ :class:`.MapperProperty`, including :class:`.RelationshipProperty`,
+ the attribute will refer to the :attr:`.MapperProperty.info`
+ dictionary associated with that :class:`.MapperProperty`.
+
+ * To access the :attr:`.MapperProperty.info` dictionary of the
+ :class:`.MapperProperty` unconditionally, including for a
+ :class:`.ColumnProperty` that's associated directly with a
+ :class:`.schema.Column`, the attribute can be referred to using
+ :attr:`.QueryableAttribute.property` attribute, as
+ ``MyClass.someattribute.property.info``.
.. versionadded:: 0.8.0
@@ -152,18 +153,20 @@ class QueryableAttribute(interfaces._MappedAttribute,
def adapt_to_entity(self, adapt_to_entity):
assert not self._of_type
- return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl,
- comparator=self.comparator.adapt_to_entity(adapt_to_entity),
- parententity=adapt_to_entity)
+ return self.__class__(adapt_to_entity.entity,
+ self.key, impl=self.impl,
+ comparator=self.comparator.adapt_to_entity(
+ adapt_to_entity),
+ parententity=adapt_to_entity)
def of_type(self, cls):
return QueryableAttribute(
- self.class_,
- self.key,
- self.impl,
- self.comparator.of_type(cls),
- self._parententity,
- of_type=cls)
+ self.class_,
+ self.key,
+ self.impl,
+ self.comparator.of_type(cls),
+ self._parententity,
+ of_type=cls)
def label(self, name):
return self._query_clause_element().label(name)
@@ -182,8 +185,8 @@ class QueryableAttribute(interfaces._MappedAttribute,
return getattr(self.comparator, key)
except AttributeError:
raise AttributeError(
- 'Neither %r object nor %r object associated with %s '
- 'has an attribute %r' % (
+ 'Neither %r object nor %r object associated with %s '
+ 'has an attribute %r' % (
type(self).__name__,
type(self.comparator).__name__,
self,
@@ -218,7 +221,7 @@ class InstrumentedAttribute(QueryableAttribute):
def __set__(self, instance, value):
self.impl.set(instance_state(instance),
- instance_dict(instance), value, None)
+ instance_dict(instance), value, None)
def __delete__(self, instance):
self.impl.delete(instance_state(instance), instance_dict(instance))
@@ -252,9 +255,9 @@ def create_proxied_attribute(descriptor):
"""
def __init__(self, class_, key, descriptor,
- comparator,
- adapt_to_entity=None, doc=None,
- original_property=None):
+ comparator,
+ adapt_to_entity=None, doc=None,
+ original_property=None):
self.class_ = class_
self.key = key
self.descriptor = descriptor
@@ -273,13 +276,15 @@ def create_proxied_attribute(descriptor):
self._comparator = self._comparator()
if self._adapt_to_entity:
self._comparator = self._comparator.adapt_to_entity(
- self._adapt_to_entity)
+ self._adapt_to_entity)
return self._comparator
def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(adapt_to_entity.entity, self.key, self.descriptor,
- self._comparator,
- adapt_to_entity)
+ return self.__class__(adapt_to_entity.entity,
+ self.key,
+ self.descriptor,
+ self._comparator,
+ adapt_to_entity)
def __get__(self, instance, owner):
if instance is None:
@@ -303,10 +308,10 @@ def create_proxied_attribute(descriptor):
raise AttributeError(
'Neither %r object nor %r object associated with %s '
'has an attribute %r' % (
- type(descriptor).__name__,
- type(self.comparator).__name__,
- self,
- attribute)
+ type(descriptor).__name__,
+ type(self.comparator).__name__,
+ self,
+ attribute)
)
Proxy.__name__ = type(descriptor).__name__ + 'Proxy'
@@ -320,6 +325,7 @@ OP_REMOVE = util.symbol("REMOVE")
OP_APPEND = util.symbol("APPEND")
OP_REPLACE = util.symbol("REPLACE")
+
class Event(object):
"""A token propagated throughout the course of a chain of attribute
events.
@@ -360,6 +366,7 @@ class Event(object):
return isinstance(other, Event) and \
other.impl is self.impl and \
other.op == self.op
+
@property
def key(self):
return self.impl.key
@@ -367,15 +374,16 @@ class Event(object):
def hasparent(self, state):
return self.impl.hasparent(state)
+
class AttributeImpl(object):
"""internal implementation for instrumented attributes."""
def __init__(self, class_, key,
- callable_, dispatch, trackparent=False, extension=None,
- compare_function=None, active_history=False,
- parent_token=None, expire_missing=True,
- send_modified_events=True,
- **kwargs):
+ callable_, dispatch, trackparent=False, extension=None,
+ compare_function=None, active_history=False,
+ parent_token=None, expire_missing=True,
+ send_modified_events=True,
+ **kwargs):
"""Construct an AttributeImpl.
\class_
@@ -419,8 +427,8 @@ class AttributeImpl(object):
for this key.
send_modified_events
- if False, the InstanceState._modified_event method will have no effect;
- this means the attribute will never show up as changed in a
+ if False, the InstanceState._modified_event method will have no
+ effect; this means the attribute will never show up as changed in a
history entry.
"""
self.class_ = class_
@@ -480,7 +488,7 @@ class AttributeImpl(object):
assert self.trackparent, msg
return state.parents.get(id(self.parent_token), optimistic) \
- is not False
+ is not False
def sethasparent(self, state, parent_state, value):
"""Set a boolean flag on the given item corresponding to
@@ -499,7 +507,7 @@ class AttributeImpl(object):
last_parent = state.parents[id_]
if last_parent is not False and \
- last_parent.key != parent_state.key:
+ last_parent.key != parent_state.key:
if last_parent.obj() is None:
raise orm_exc.StaleDataError(
@@ -509,8 +517,8 @@ class AttributeImpl(object):
"has gone stale, can't be sure this "
"is the most recent parent." %
(state_str(state),
- state_str(parent_state),
- self.key))
+ state_str(parent_state),
+ self.key))
return
@@ -591,9 +599,9 @@ class AttributeImpl(object):
except KeyError:
# TODO: no test coverage here.
raise KeyError(
- "Deferred loader for attribute "
- "%r failed to populate "
- "correctly" % key)
+ "Deferred loader for attribute "
+ "%r failed to populate "
+ "correctly" % key)
elif value is not ATTR_EMPTY:
return self.set_committed_value(state, dict_, value)
@@ -608,14 +616,14 @@ class AttributeImpl(object):
def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
self.set(state, dict_, None, initiator,
- passive=passive, check_old=value)
+ passive=passive, check_old=value)
def pop(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
self.set(state, dict_, None, initiator,
- passive=passive, check_old=value, pop=True)
+ passive=passive, check_old=value, pop=True)
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
+ passive=PASSIVE_OFF, check_old=None, pop=False):
raise NotImplementedError()
def get_committed_value(self, state, dict_, passive=PASSIVE_OFF):
@@ -672,7 +680,7 @@ class ScalarAttributeImpl(AttributeImpl):
return History.from_scalar_attribute(self, state, current)
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
+ passive=PASSIVE_OFF, check_old=None, pop=False):
if self.dispatch._active_history:
old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
else:
@@ -680,7 +688,7 @@ class ScalarAttributeImpl(AttributeImpl):
if self.dispatch.set:
value = self.fire_replace_event(state, dict_,
- value, old, initiator)
+ value, old, initiator)
state._modified_event(dict_, self, old)
dict_[self.key] = value
@@ -698,7 +706,8 @@ class ScalarAttributeImpl(AttributeImpl):
def fire_replace_event(self, state, dict_, value, previous, initiator):
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self._replace_token)
+ value = fn(
+ state, value, previous, initiator or self._replace_token)
return value
def fire_remove_event(self, state, dict_, value, initiator):
@@ -767,32 +776,32 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
return ret
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, check_old=None, pop=False):
+ passive=PASSIVE_OFF, check_old=None, pop=False):
"""Set a value on the given InstanceState.
"""
if self.dispatch._active_history:
- old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH)
+ old = self.get(
+ state, dict_, passive=PASSIVE_ONLY_PERSISTENT | NO_AUTOFLUSH)
else:
old = self.get(state, dict_, passive=PASSIVE_NO_FETCH ^ INIT_OK)
if check_old is not None and \
- old is not PASSIVE_NO_RESULT and \
- check_old is not old:
+ old is not PASSIVE_NO_RESULT and \
+ check_old is not old:
if pop:
return
else:
raise ValueError(
"Object %s not associated with %s on attribute '%s'" % (
- instance_str(check_old),
- state_str(state),
- self.key
- ))
+ instance_str(check_old),
+ state_str(state),
+ self.key
+ ))
value = self.fire_replace_event(state, dict_, value, old, initiator)
dict_[self.key] = value
-
def fire_remove_event(self, state, dict_, value, initiator):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), state, False)
@@ -809,7 +818,8 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
self.sethasparent(instance_state(previous), state, False)
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self._replace_token)
+ value = fn(
+ state, value, previous, initiator or self._replace_token)
state._modified_event(dict_, self, previous)
@@ -837,16 +847,16 @@ class CollectionAttributeImpl(AttributeImpl):
collection = True
def __init__(self, class_, key, callable_, dispatch,
- typecallable=None, trackparent=False, extension=None,
- copy_function=None, compare_function=None, **kwargs):
+ typecallable=None, trackparent=False, extension=None,
+ copy_function=None, compare_function=None, **kwargs):
super(CollectionAttributeImpl, self).__init__(
- class_,
- key,
- callable_, dispatch,
- trackparent=trackparent,
- extension=extension,
- compare_function=compare_function,
- **kwargs)
+ class_,
+ key,
+ callable_, dispatch,
+ trackparent=trackparent,
+ extension=extension,
+ compare_function=compare_function,
+ **kwargs)
if copy_function is None:
copy_function = self.__copy
@@ -876,11 +886,11 @@ class CollectionAttributeImpl(AttributeImpl):
original = state.committed_state[self.key]
if original not in (NO_VALUE, NEVER_SET):
current_states = [((c is not None) and
- instance_state(c) or None, c)
- for c in current]
+ instance_state(c) or None, c)
+ for c in current]
original_states = [((c is not None) and
instance_state(c) or None, c)
- for c in original]
+ for c in original]
current_set = dict(current_states)
original_set = dict(original_states)
@@ -953,7 +963,7 @@ class CollectionAttributeImpl(AttributeImpl):
if collection is PASSIVE_NO_RESULT:
value = self.fire_append_event(state, dict_, value, initiator)
assert self.key not in dict_, \
- "Collection was loaded during event handling."
+ "Collection was loaded during event handling."
state._get_pending_mutation(self.key).append(value)
else:
collection.append_with_event(value, initiator)
@@ -963,7 +973,7 @@ class CollectionAttributeImpl(AttributeImpl):
if collection is PASSIVE_NO_RESULT:
self.fire_remove_event(state, dict_, value, initiator)
assert self.key not in dict_, \
- "Collection was loaded during event handling."
+ "Collection was loaded during event handling."
state._get_pending_mutation(self.key).remove(value)
else:
collection.remove_with_event(value, initiator)
@@ -978,7 +988,7 @@ class CollectionAttributeImpl(AttributeImpl):
pass
def set(self, state, dict_, value, initiator,
- passive=PASSIVE_OFF, pop=False):
+ passive=PASSIVE_OFF, pop=False):
"""Set a value on the given object.
"""
@@ -1055,7 +1065,7 @@ class CollectionAttributeImpl(AttributeImpl):
return user_data
def get_collection(self, state, dict_,
- user_data=None, passive=PASSIVE_OFF):
+ user_data=None, passive=PASSIVE_OFF):
"""Retrieve the CollectionAdapter associated with the given state.
Creates a new CollectionAdapter if one does not exist.
@@ -1099,20 +1109,20 @@ def backref_listeners(attribute, key, uselist):
# With lazy=None, there's no guarantee that the full collection is
# present when updating via a backref.
old_state, old_dict = instance_state(oldchild),\
- instance_dict(oldchild)
+ instance_dict(oldchild)
impl = old_state.manager[key].impl
if initiator.impl is not impl or \
initiator.op not in (OP_REPLACE, OP_REMOVE):
impl.pop(old_state,
- old_dict,
- state.obj(),
- parent_impl._append_token,
- passive=PASSIVE_NO_FETCH)
+ old_dict,
+ state.obj(),
+ parent_impl._append_token,
+ passive=PASSIVE_NO_FETCH)
if child is not None:
child_state, child_dict = instance_state(child),\
- instance_dict(child)
+ instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
@@ -1120,11 +1130,11 @@ def backref_listeners(attribute, key, uselist):
elif initiator.impl is not child_impl or \
initiator.op not in (OP_APPEND, OP_REPLACE):
child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_append_event(state, child, initiator):
@@ -1132,7 +1142,7 @@ def backref_listeners(attribute, key, uselist):
return
child_state, child_dict = instance_state(child), \
- instance_dict(child)
+ instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.parent_token is not parent_token and \
@@ -1141,48 +1151,48 @@ def backref_listeners(attribute, key, uselist):
elif initiator.impl is not child_impl or \
initiator.op not in (OP_APPEND, OP_REPLACE):
child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_remove_event(state, child, initiator):
if child is not None:
child_state, child_dict = instance_state(child),\
- instance_dict(child)
+ instance_dict(child)
child_impl = child_state.manager[key].impl
if initiator.impl is not child_impl or \
initiator.op not in (OP_REMOVE, OP_REPLACE):
child_impl.pop(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
if uselist:
event.listen(attribute, "append",
- emit_backref_from_collection_append_event,
- retval=True, raw=True)
+ emit_backref_from_collection_append_event,
+ retval=True, raw=True)
else:
event.listen(attribute, "set",
- emit_backref_from_scalar_set_event,
- retval=True, raw=True)
+ emit_backref_from_scalar_set_event,
+ retval=True, raw=True)
# TODO: need coverage in test/orm/ of remove event
event.listen(attribute, "remove",
- emit_backref_from_collection_remove_event,
- retval=True, raw=True)
+ emit_backref_from_collection_remove_event,
+ retval=True, raw=True)
_NO_HISTORY = util.symbol('NO_HISTORY')
_NO_STATE_SYMBOLS = frozenset([
- id(PASSIVE_NO_RESULT),
- id(NO_VALUE),
- id(NEVER_SET)])
+ id(PASSIVE_NO_RESULT),
+ id(NO_VALUE),
+ id(NEVER_SET)])
History = util.namedtuple("History", [
- "added", "unchanged", "deleted"
+ "added", "unchanged", "deleted"
])
@@ -1222,28 +1232,28 @@ class History(History):
"""
return not bool(
- (self.added or self.deleted)
- or self.unchanged
- )
+ (self.added or self.deleted)
+ or self.unchanged
+ )
def sum(self):
"""Return a collection of added + unchanged + deleted."""
return (self.added or []) +\
- (self.unchanged or []) +\
- (self.deleted or [])
+ (self.unchanged or []) +\
+ (self.deleted or [])
def non_deleted(self):
"""Return a collection of added + unchanged."""
return (self.added or []) +\
- (self.unchanged or [])
+ (self.unchanged or [])
def non_added(self):
"""Return a collection of unchanged + deleted."""
return (self.unchanged or []) +\
- (self.deleted or [])
+ (self.deleted or [])
def has_changes(self):
"""Return True if this :class:`.History` has changes."""
@@ -1261,7 +1271,7 @@ class History(History):
[(c is not None)
and instance_state(c) or None
for c in self.deleted],
- )
+ )
@classmethod
def from_scalar_attribute(cls, attribute, state, current):
@@ -1331,13 +1341,13 @@ class History(History):
else:
current_states = [((c is not None) and instance_state(c)
- or None, c)
- for c in current
- ]
+ or None, c)
+ for c in current
+ ]
original_states = [((c is not None) and instance_state(c)
or None, c)
- for c in original
- ]
+ for c in original
+ ]
current_set = dict(current_states)
original_set = dict(original_states)
@@ -1369,11 +1379,11 @@ def get_history(obj, key, passive=PASSIVE_OFF):
"""
if passive is True:
util.warn_deprecated("Passing True for 'passive' is deprecated. "
- "Use attributes.PASSIVE_NO_INITIALIZE")
+ "Use attributes.PASSIVE_NO_INITIALIZE")
passive = PASSIVE_NO_INITIALIZE
elif passive is False:
util.warn_deprecated("Passing False for 'passive' is "
- "deprecated. Use attributes.PASSIVE_OFF")
+ "deprecated. Use attributes.PASSIVE_OFF")
passive = PASSIVE_OFF
return get_state_history(instance_state(obj), key, passive)
@@ -1395,15 +1405,15 @@ def register_attribute(class_, key, **kw):
parententity = kw.pop('parententity', None)
doc = kw.pop('doc', None)
desc = register_descriptor(class_, key,
- comparator, parententity, doc=doc)
+ comparator, parententity, doc=doc)
register_attribute_impl(class_, key, **kw)
return desc
def register_attribute_impl(class_, key,
- uselist=False, callable_=None,
- useobject=False,
- impl_class=None, backref=None, **kw):
+ uselist=False, callable_=None,
+ useobject=False,
+ impl_class=None, backref=None, **kw):
manager = manager_of_class(class_)
if uselist:
@@ -1422,7 +1432,7 @@ def register_attribute_impl(class_, key,
typecallable=typecallable, **kw)
elif useobject:
impl = ScalarObjectAttributeImpl(class_, key, callable_,
- dispatch, **kw)
+ dispatch, **kw)
else:
impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
@@ -1436,11 +1446,11 @@ def register_attribute_impl(class_, key,
def register_descriptor(class_, key, comparator=None,
- parententity=None, doc=None):
+ parententity=None, doc=None):
manager = manager_of_class(class_)
descriptor = InstrumentedAttribute(class_, key, comparator=comparator,
- parententity=parententity)
+ parententity=parententity)
descriptor.__doc__ = doc
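The History = util.namedtuple(...) line followed by class History(History) is a pattern worth noting: the class subclasses the namedtuple and reuses its name, layering methods onto an immutable triple. A minimal sketch with collections.namedtuple and method bodies matching the ones shown above:

from collections import namedtuple

History = namedtuple("History", ["added", "unchanged", "deleted"])


class History(History):
    def empty(self):
        return not bool((self.added or self.deleted) or self.unchanged)

    def sum(self):
        return (self.added or []) + \
            (self.unchanged or []) + \
            (self.deleted or [])


h = History(added=["x"], unchanged=(), deleted=[])
assert h.sum() == ["x"] and not h.empty()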
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
index de103bf71..a85f59f37 100644
--- a/lib/sqlalchemy/orm/base.py
+++ b/lib/sqlalchemy/orm/base.py
@@ -14,102 +14,127 @@ from ..sql import expression
from . import exc
import operator
-PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
-"""Symbol returned by a loader callable or other attribute/history
-retrieval operation when a value could not be determined, based
-on loader callable flags.
-"""
+PASSIVE_NO_RESULT = util.symbol(
+ 'PASSIVE_NO_RESULT',
+ """Symbol returned by a loader callable or other attribute/history
+ retrieval operation when a value could not be determined, based
+ on loader callable flags.
+ """
)
-ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
-"""Symbol returned by a loader callable to indicate the
-retrieved value, or values, were assigned to their attributes
-on the target object.
-""")
+ATTR_WAS_SET = util.symbol(
+ 'ATTR_WAS_SET',
+ """Symbol returned by a loader callable to indicate the
+ retrieved value, or values, were assigned to their attributes
+ on the target object.
+ """
+)
-ATTR_EMPTY = util.symbol('ATTR_EMPTY',
-"""Symbol used internally to indicate an attribute had no callable.
-""")
+ATTR_EMPTY = util.symbol(
+ 'ATTR_EMPTY',
+ """Symbol used internally to indicate an attribute had no callable."""
+)
-NO_VALUE = util.symbol('NO_VALUE',
-"""Symbol which may be placed as the 'previous' value of an attribute,
-indicating no value was loaded for an attribute when it was modified,
-and flags indicated we were not to load it.
-"""
+NO_VALUE = util.symbol(
+ 'NO_VALUE',
+ """Symbol which may be placed as the 'previous' value of an attribute,
+ indicating no value was loaded for an attribute when it was modified,
+ and flags indicated we were not to load it.
+ """
)
-NEVER_SET = util.symbol('NEVER_SET',
-"""Symbol which may be placed as the 'previous' value of an attribute
-indicating that the attribute had not been assigned to previously.
-"""
+NEVER_SET = util.symbol(
+ 'NEVER_SET',
+ """Symbol which may be placed as the 'previous' value of an attribute
+ indicating that the attribute had not been assigned to previously.
+ """
)
-NO_CHANGE = util.symbol("NO_CHANGE",
-"""No callables or SQL should be emitted on attribute access
-and no state should change""", canonical=0
+NO_CHANGE = util.symbol(
+ "NO_CHANGE",
+ """No callables or SQL should be emitted on attribute access
+ and no state should change
+ """, canonical=0
)
-CALLABLES_OK = util.symbol("CALLABLES_OK",
-"""Loader callables can be fired off if a value
-is not present.""", canonical=1
+CALLABLES_OK = util.symbol(
+ "CALLABLES_OK",
+ """Loader callables can be fired off if a value
+ is not present.
+ """, canonical=1
)
-SQL_OK = util.symbol("SQL_OK",
-"""Loader callables can emit SQL at least on scalar value
-attributes.""", canonical=2)
+SQL_OK = util.symbol(
+ "SQL_OK",
+ """Loader callables can emit SQL at least on scalar value attributes.""",
+ canonical=2
+)
-RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
-"""callables can use SQL to load related objects as well
-as scalar value attributes.
-""", canonical=4
+RELATED_OBJECT_OK = util.symbol(
+ "RELATED_OBJECT_OK",
+ """Callables can use SQL to load related objects as well
+ as scalar value attributes.
+ """, canonical=4
)
-INIT_OK = util.symbol("INIT_OK",
-"""Attributes should be initialized with a blank
-value (None or an empty collection) upon get, if no other
-value can be obtained.
-""", canonical=8
+INIT_OK = util.symbol(
+ "INIT_OK",
+ """Attributes should be initialized with a blank
+ value (None or an empty collection) upon get, if no other
+ value can be obtained.
+ """, canonical=8
)
-NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
-"""callables can be emitted if the parent is not persistent.""",
-canonical=16
+NON_PERSISTENT_OK = util.symbol(
+ "NON_PERSISTENT_OK",
+ """Callables can be emitted if the parent is not persistent.""",
+ canonical=16
)
-LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
-"""callables should use committed values as primary/foreign keys during a load
-""", canonical=32
+LOAD_AGAINST_COMMITTED = util.symbol(
+ "LOAD_AGAINST_COMMITTED",
+ """Callables should use committed values as primary/foreign keys during a
+ load.
+ """, canonical=32
)
-NO_AUTOFLUSH = util.symbol("NO_AUTOFLUSH",
-"""loader callables should disable autoflush.
-""", canonical=64)
+NO_AUTOFLUSH = util.symbol(
+ "NO_AUTOFLUSH",
+ """Loader callables should disable autoflush.""",
+ canonical=64
+)
# pre-packaged sets of flags used as inputs
-PASSIVE_OFF = util.symbol("PASSIVE_OFF",
+PASSIVE_OFF = util.symbol(
+ "PASSIVE_OFF",
"Callables can be emitted in all cases.",
canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
- INIT_OK | CALLABLES_OK | SQL_OK)
+ INIT_OK | CALLABLES_OK | SQL_OK)
)
-PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
- """PASSIVE_OFF ^ INIT_OK""",
- canonical=PASSIVE_OFF ^ INIT_OK
+PASSIVE_RETURN_NEVER_SET = util.symbol(
+ "PASSIVE_RETURN_NEVER_SET",
+ """PASSIVE_OFF ^ INIT_OK""",
+ canonical=PASSIVE_OFF ^ INIT_OK
)
-PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
- "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
- canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
+PASSIVE_NO_INITIALIZE = util.symbol(
+ "PASSIVE_NO_INITIALIZE",
+ "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
+ canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
)
-PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
- "PASSIVE_OFF ^ SQL_OK",
- canonical=PASSIVE_OFF ^ SQL_OK
+PASSIVE_NO_FETCH = util.symbol(
+ "PASSIVE_NO_FETCH",
+ "PASSIVE_OFF ^ SQL_OK",
+ canonical=PASSIVE_OFF ^ SQL_OK
)
-PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
- "PASSIVE_OFF ^ RELATED_OBJECT_OK",
- canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
+PASSIVE_NO_FETCH_RELATED = util.symbol(
+ "PASSIVE_NO_FETCH_RELATED",
+ "PASSIVE_OFF ^ RELATED_OBJECT_OK",
+ canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
)
-PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
- "PASSIVE_OFF ^ NON_PERSISTENT_OK",
- canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
+PASSIVE_ONLY_PERSISTENT = util.symbol(
+ "PASSIVE_ONLY_PERSISTENT",
+ "PASSIVE_OFF ^ NON_PERSISTENT_OK",
+ canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
)
DEFAULT_MANAGER_ATTR = '_sa_class_manager'
@@ -120,7 +145,7 @@ EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')
ONETOMANY = util.symbol('ONETOMANY',
-"""Indicates the one-to-many direction for a :func:`.relationship`.
+ """Indicates the one-to-many direction for a :func:`.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
@@ -128,7 +153,7 @@ certain API features.
""")
MANYTOONE = util.symbol('MANYTOONE',
-"""Indicates the many-to-one direction for a :func:`.relationship`.
+ """Indicates the many-to-one direction for a :func:`.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
@@ -136,7 +161,7 @@ certain API features.
""")
MANYTOMANY = util.symbol('MANYTOMANY',
-"""Indicates the many-to-many direction for a :func:`.relationship`.
+ """Indicates the many-to-many direction for a :func:`.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
@@ -144,7 +169,7 @@ certain API features.
""")
NOT_EXTENSION = util.symbol('NOT_EXTENSION',
-"""Symbol indicating an :class:`_InspectionAttr` that's
+ """Symbol indicating an :class:`_InspectionAttr` that's
not part of sqlalchemy.ext.
Is assigned to the :attr:`._InspectionAttr.extension_type`
@@ -177,11 +202,13 @@ instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
instance_dict = operator.attrgetter('__dict__')
+
def instance_str(instance):
"""Return a string describing an instance."""
return state_str(instance_state(instance))
+
def state_str(state):
"""Return a string describing an instance via its InstanceState."""
@@ -190,8 +217,11 @@ def state_str(state):
else:
return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
+
def state_class_str(state):
- """Return a string describing an instance's class via its InstanceState."""
+ """Return a string describing an instance's class via its
+ InstanceState.
+ """
if state is None:
return "None"
@@ -206,6 +236,7 @@ def attribute_str(instance, attribute):
def state_attribute_str(state, attribute):
return state_str(state) + "." + attribute
+
def object_mapper(instance):
"""Given an object, return the primary Mapper associated with the object
instance.
@@ -261,7 +292,6 @@ def _inspect_mapped_object(instance):
return None
-
def _class_to_mapper(class_or_mapper):
insp = inspection.inspect(class_or_mapper, False)
if insp is not None:
@@ -272,7 +302,8 @@ def _class_to_mapper(class_or_mapper):
def _mapper_or_none(entity):
"""Return the :class:`.Mapper` for the given class or None if the
- class is not mapped."""
+ class is not mapped.
+ """
insp = inspection.inspect(entity, False)
if insp is not None:
@@ -283,7 +314,8 @@ def _mapper_or_none(entity):
def _is_mapped_class(entity):
"""Return True if the given object is a mapped class,
- :class:`.Mapper`, or :class:`.AliasedClass`."""
+ :class:`.Mapper`, or :class:`.AliasedClass`.
+ """
insp = inspection.inspect(entity, False)
return insp is not None and \
@@ -293,6 +325,7 @@ def _is_mapped_class(entity):
or insp.is_aliased_class
)
+
def _attr_as_key(attr):
if hasattr(attr, 'key'):
return attr.key
@@ -300,7 +333,6 @@ def _attr_as_key(attr):
return expression._column_as_key(attr)
-
def _orm_columns(entity):
insp = inspection.inspect(entity, False)
if hasattr(insp, 'selectable'):
@@ -309,7 +341,6 @@ def _orm_columns(entity):
return [entity]
-
def _is_aliased_class(entity):
insp = inspection.inspect(entity, False)
return insp is not None and \
@@ -339,12 +370,13 @@ def _entity_descriptor(entity, key):
return getattr(entity, key)
except AttributeError:
raise sa_exc.InvalidRequestError(
- "Entity '%s' has no property '%s'" %
- (description, key)
- )
+ "Entity '%s' has no property '%s'" %
+ (description, key)
+ )
_state_mapper = util.dottedgetter('manager.mapper')
+
@inspection._inspects(type)
def _inspect_mapped_class(class_, configure=False):
try:
@@ -381,7 +413,7 @@ def class_mapper(class_, configure=True):
if mapper is None:
if not isinstance(class_, type):
raise sa_exc.ArgumentError(
- "Class object expected, got '%r'." % (class_, ))
+ "Class object expected, got '%r'." % (class_, ))
raise exc.UnmappedClassError(class_)
else:
return mapper
@@ -452,6 +484,7 @@ class _InspectionAttr(object):
"""
+
class _MappedAttribute(object):
"""Mixin for attributes which should be replaced by mapper-assigned
attributes.
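The reflowed util.symbol calls make the flag arithmetic in base.py easier to follow: each canonical= value is a power of two, the PASSIVE_OFF preset ORs five flags together, and the other presets XOR individual flags back out. The arithmetic, reduced to plain ints:

CALLABLES_OK = 1
SQL_OK = 2
RELATED_OBJECT_OK = 4
INIT_OK = 8
NON_PERSISTENT_OK = 16

PASSIVE_OFF = (RELATED_OBJECT_OK | NON_PERSISTENT_OK |
               INIT_OK | CALLABLES_OK | SQL_OK)
PASSIVE_NO_FETCH = PASSIVE_OFF ^ SQL_OK
PASSIVE_ONLY_PERSISTENT = PASSIVE_OFF ^ NON_PERSISTENT_OK

assert PASSIVE_NO_FETCH & SQL_OK == 0           # SQL flag removed
assert PASSIVE_NO_FETCH & CALLABLES_OK          # other flags intact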
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 9741895db..698677a0b 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -128,6 +128,7 @@ class _PlainColumnGetter(object):
and some rare caveats.
"""
+
def __init__(self, cols):
self.cols = cols
self.composite = len(cols) > 1
@@ -159,6 +160,7 @@ class _SerializableColumnGetter(object):
Remains here for pickle compatibility with 0.7.6.
"""
+
def __init__(self, colkeys):
self.colkeys = colkeys
self.composite = len(colkeys) > 1
@@ -170,9 +172,9 @@ class _SerializableColumnGetter(object):
state = base.instance_state(value)
m = base._state_mapper(state)
key = [m._get_state_attr_by_column(
- state, state.dict,
- m.mapped_table.columns[k])
- for k in self.colkeys]
+ state, state.dict,
+ m.mapped_table.columns[k])
+ for k in self.colkeys]
if self.composite:
return tuple(key)
else:
@@ -213,8 +215,8 @@ class _SerializableColumnGetterV2(_PlainColumnGetter):
metadata = getattr(mapper.local_table, 'metadata', None)
for (ckey, tkey) in self.colkeys:
if tkey is None or \
- metadata is None or \
- tkey not in metadata:
+ metadata is None or \
+ tkey not in metadata:
cols.append(mapper.local_table.c[ckey])
else:
cols.append(metadata.tables[tkey].c[ckey])
@@ -235,7 +237,7 @@ def column_mapped_collection(mapping_spec):
"""
cols = [expression._only_column_elements(q, "mapping_spec")
- for q in util.to_list(mapping_spec)
+ for q in util.to_list(mapping_spec)
]
keyfunc = _PlainColumnGetter(cols)
return lambda: MappedCollection(keyfunc)
@@ -534,9 +536,9 @@ class collection(object):
def removes_return():
"""Mark the method as removing an entity in the collection.
- Adds "remove from collection" handling to the method. The return value
- of the method, if any, is considered the value to remove. The method
- arguments are not inspected::
+ Adds "remove from collection" handling to the method. The return
+ value of the method, if any, is considered the value to remove. The
+ method arguments are not inspected::
@collection.removes_return()
def pop(self): ...
@@ -594,7 +596,6 @@ class CollectionAdapter(object):
if data._sa_linker:
data._sa_linker(self)
-
def unlink(self, data):
"""Unlink a collection from any adapter"""
@@ -632,7 +633,7 @@ class CollectionAdapter(object):
raise TypeError(
"Incompatible collection type: %s is not %s-like" % (
- given, wanted))
+ given, wanted))
# If the object is an adapted collection, return the (iterable)
# adapter.
@@ -710,9 +711,9 @@ class CollectionAdapter(object):
if self.invalidated:
self._warn_invalidated()
return self.attr.fire_append_event(
- self.owner_state,
- self.owner_state.dict,
- item, initiator)
+ self.owner_state,
+ self.owner_state.dict,
+ item, initiator)
else:
return item
@@ -728,9 +729,9 @@ class CollectionAdapter(object):
if self.invalidated:
self._warn_invalidated()
self.attr.fire_remove_event(
- self.owner_state,
- self.owner_state.dict,
- item, initiator)
+ self.owner_state,
+ self.owner_state.dict,
+ item, initiator)
def fire_pre_remove_event(self, initiator=None):
"""Notify that an entity is about to be removed from the collection.
@@ -742,9 +743,9 @@ class CollectionAdapter(object):
if self.invalidated:
self._warn_invalidated()
self.attr.fire_pre_remove_event(
- self.owner_state,
- self.owner_state.dict,
- initiator=initiator)
+ self.owner_state,
+ self.owner_state.dict,
+ initiator=initiator)
def __getstate__(self):
return {'key': self._key,
@@ -848,6 +849,7 @@ def __converting_factory(specimen_cls, original_factory):
return wrapper
+
def _instrument_class(cls):
"""Modify methods in a class and install instrumentation."""
@@ -906,7 +908,7 @@ def _instrument_class(cls):
for method, decorator in decorators.items():
fn = getattr(cls, method, None)
if (fn and method not in methods and
- not hasattr(fn, '_sa_instrumented')):
+ not hasattr(fn, '_sa_instrumented')):
setattr(cls, method, decorator(fn))
# ensure all roles are present, and apply implicit instrumentation if
@@ -951,11 +953,12 @@ def _instrument_class(cls):
def _instrument_membership_mutator(method, before, argument, after):
- """Route method args and/or return value through the collection adapter."""
+ """Route method args and/or return value through the collection
+ adapter."""
# This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))'
if before:
fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0]))
- if type(argument) is int:
+ if isinstance(argument, int):
pos_arg = argument
named_arg = len(fn_args) > argument and fn_args[argument] or None
else:
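The isinstance() substitution in this hunk is arguably behavioral as well as stylistic: isinstance() accepts int subclasses (bool included) where the exact-type test did not. A quick illustration, independent of the patch:

    class MyInt(int):
        pass

    x = MyInt(3)
    assert isinstance(x, int)       # subclasses satisfy isinstance()
    assert type(x) is not int       # but fail the exact-type test
    assert isinstance(True, int)    # bool is an int subclass too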
@@ -1145,8 +1148,8 @@ def _list_decorators():
def __iadd__(fn):
def __iadd__(self, iterable):
- # list.__iadd__ takes any iterable and seems to let TypeError raise
- # as-is instead of returning NotImplemented
+ # list.__iadd__ takes any iterable and seems to let TypeError
+ # raise as-is instead of returning NotImplemented
for value in iterable:
self.append(value)
return self
@@ -1251,7 +1254,7 @@ def _dict_decorators():
if hasattr(__other, 'keys'):
for key in list(__other):
if (key not in self or
- self[key] is not __other[key]):
+ self[key] is not __other[key]):
self[key] = __other[key]
else:
for key, value in __other:
@@ -1447,23 +1450,23 @@ __canned_instrumentation = {
list: InstrumentedList,
set: InstrumentedSet,
dict: InstrumentedDict,
- }
+}
__interfaces = {
list: (
{'appender': 'append', 'remover': 'remove',
- 'iterator': '__iter__'}, _list_decorators()
- ),
+ 'iterator': '__iter__'}, _list_decorators()
+ ),
set: ({'appender': 'add',
- 'remover': 'remove',
- 'iterator': '__iter__'}, _set_decorators()
- ),
+ 'remover': 'remove',
+ 'iterator': '__iter__'}, _set_decorators()
+ ),
# decorators are required for dicts and object collections.
dict: ({'iterator': 'values'}, _dict_decorators()) if util.py3k
- else ({'iterator': 'itervalues'}, _dict_decorators()),
- }
+ else ({'iterator': 'itervalues'}, _dict_decorators()),
+}
class MappedCollection(dict):
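The __interfaces table reindented above assigns the appender/remover/iterator roles automatically for the built-in types; arbitrary collection classes declare those roles themselves with the collection decorators. A hedged sketch (BagOfThings is illustrative only):

    from sqlalchemy.orm.collections import collection

    class BagOfThings(object):
        def __init__(self):
            self._data = []

        @collection.appender
        def add(self, item):
            self._data.append(item)

        @collection.remover
        def discard(self, item):
            self._data.remove(item)

        @collection.iterator
        def __iter__(self):
            return iter(self._data)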
@@ -1538,7 +1541,7 @@ class MappedCollection(dict):
"Found incompatible key %r for value %r; this "
"collection's "
"keying function requires a key of %r for this value." % (
- incoming_key, value, new_key))
+ incoming_key, value, new_key))
yield value
# ensure instrumentation is associated with
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 3864eb1bd..c1cf66f14 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -11,7 +11,7 @@
from .. import sql, util, exc as sa_exc
from . import attributes, exc, sync, unitofwork, \
- util as mapperutil
+ util as mapperutil
from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
@@ -39,10 +39,10 @@ class DependencyProcessor(object):
self.key = prop.key
if not self.prop.synchronize_pairs:
raise sa_exc.ArgumentError(
- "Can't build a DependencyProcessor for relationship %s. "
- "No target attributes to populate between parent and "
- "child are present" %
- self.prop)
+ "Can't build a DependencyProcessor for relationship %s. "
+ "No target attributes to populate between parent and "
+ "child are present" %
+ self.prop)
@classmethod
def from_relationship(cls, prop):
@@ -70,31 +70,31 @@ class DependencyProcessor(object):
before_delete = unitofwork.ProcessAll(uow, self, True, True)
parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.primary_base_mapper
- )
+ uow,
+ self.parent.primary_base_mapper
+ )
child_saves = unitofwork.SaveUpdateAll(
- uow,
- self.mapper.primary_base_mapper
- )
+ uow,
+ self.mapper.primary_base_mapper
+ )
parent_deletes = unitofwork.DeleteAll(
- uow,
- self.parent.primary_base_mapper
- )
+ uow,
+ self.parent.primary_base_mapper
+ )
child_deletes = unitofwork.DeleteAll(
- uow,
- self.mapper.primary_base_mapper
- )
+ uow,
+ self.mapper.primary_base_mapper
+ )
self.per_property_dependencies(uow,
- parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete
- )
+ parent_saves,
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete
+ )
def per_state_flush_actions(self, uow, states, isdelete):
"""establish actions and dependencies related to a flush.
@@ -141,15 +141,15 @@ class DependencyProcessor(object):
# check if the "parent" side is part of the cycle
if not isdelete:
parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.base_mapper)
+ uow,
+ self.parent.base_mapper)
parent_deletes = before_delete = None
if parent_saves in uow.cycles:
parent_in_cycles = True
else:
parent_deletes = unitofwork.DeleteAll(
- uow,
- self.parent.base_mapper)
+ uow,
+ self.parent.base_mapper)
parent_saves = after_save = None
if parent_deletes in uow.cycles:
parent_in_cycles = True
@@ -162,28 +162,28 @@ class DependencyProcessor(object):
# case of deletes we may try to load missing items here as well.
sum_ = state.manager[self.key].impl.get_all_pending(
state, state.dict,
- self._passive_delete_flag
- if isdelete
- else attributes.PASSIVE_NO_INITIALIZE)
+ self._passive_delete_flag
+ if isdelete
+ else attributes.PASSIVE_NO_INITIALIZE)
if not sum_:
continue
if isdelete:
before_delete = unitofwork.ProcessState(uow,
- self, True, state)
+ self, True, state)
if parent_in_cycles:
parent_deletes = unitofwork.DeleteState(
- uow,
- state,
- parent_base_mapper)
+ uow,
+ state,
+ parent_base_mapper)
else:
after_save = unitofwork.ProcessState(uow, self, False, state)
if parent_in_cycles:
parent_saves = unitofwork.SaveUpdateState(
- uow,
- state,
- parent_base_mapper)
+ uow,
+ state,
+ parent_base_mapper)
if child_in_cycles:
child_actions = []
@@ -194,26 +194,26 @@ class DependencyProcessor(object):
(deleted, listonly) = uow.states[child_state]
if deleted:
child_action = (
- unitofwork.DeleteState(
- uow, child_state,
- child_base_mapper),
- True)
+ unitofwork.DeleteState(
+ uow, child_state,
+ child_base_mapper),
+ True)
else:
child_action = (
- unitofwork.SaveUpdateState(
- uow, child_state,
- child_base_mapper),
- False)
+ unitofwork.SaveUpdateState(
+ uow, child_state,
+ child_base_mapper),
+ False)
child_actions.append(child_action)
# establish dependencies between our possibly per-state
# parent action and our possibly per-state child action.
for child_action, childisdelete in child_actions:
self.per_state_dependencies(uow, parent_saves,
- parent_deletes,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete)
+ parent_deletes,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete)
def presort_deletes(self, uowcommit, states):
return False
@@ -240,9 +240,9 @@ class DependencyProcessor(object):
# to InstanceState which returns: attribute
# has a non-None value, or had one
history = uowcommit.get_attribute_history(
- s,
- self.key,
- passive)
+ s,
+ self.key,
+ passive)
if history and not history.empty():
return True
else:
@@ -253,27 +253,27 @@ class DependencyProcessor(object):
def _verify_canload(self, state):
if self.prop.uselist and state is None:
raise exc.FlushError(
- "Can't flush None value found in "
- "collection %s" % (self.prop, ))
+ "Can't flush None value found in "
+ "collection %s" % (self.prop, ))
elif state is not None and \
- not self.mapper._canload(state,
- allow_subtypes=not self.enable_typechecks):
+ not self.mapper._canload(
+ state, allow_subtypes=not self.enable_typechecks):
if self.mapper._canload(state, allow_subtypes=True):
raise exc.FlushError('Attempting to flush an item of type '
- '%(x)s as a member of collection '
- '"%(y)s". Expected an object of type '
- '%(z)s or a polymorphic subclass of '
- 'this type. If %(x)s is a subclass of '
- '%(z)s, configure mapper "%(zm)s" to '
- 'load this subtype polymorphically, or '
- 'set enable_typechecks=False to allow '
- 'any subtype to be accepted for flush. '
- % {
- 'x': state.class_,
- 'y': self.prop,
- 'z': self.mapper.class_,
- 'zm': self.mapper,
- })
+ '%(x)s as a member of collection '
+ '"%(y)s". Expected an object of type '
+ '%(z)s or a polymorphic subclass of '
+ 'this type. If %(x)s is a subclass of '
+ '%(z)s, configure mapper "%(zm)s" to '
+ 'load this subtype polymorphically, or '
+ 'set enable_typechecks=False to allow '
+ 'any subtype to be accepted for flush. '
+ % {
+ 'x': state.class_,
+ 'y': self.prop,
+ 'z': self.mapper.class_,
+ 'zm': self.mapper,
+ })
else:
raise exc.FlushError(
'Attempting to flush an item of type '
@@ -287,7 +287,7 @@ class DependencyProcessor(object):
})
def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit):
+ clearkeys, uowcommit):
raise NotImplementedError()
def _get_reversed_processed_set(self, uow):
@@ -295,20 +295,20 @@ class DependencyProcessor(object):
return None
process_key = tuple(sorted(
- [self.key] +
- [p.key for p in self.prop._reverse_property]
- ))
+ [self.key] +
+ [p.key for p in self.prop._reverse_property]
+ ))
return uow.memo(
- ('reverse_key', process_key),
- set
- )
+ ('reverse_key', process_key),
+ set
+ )
def _post_update(self, state, uowcommit, related):
for x in related:
if x is not None:
uowcommit.issue_post_update(
- state,
- [r for l, r in self.prop.synchronize_pairs]
+ state,
+ [r for l, r in self.prop.synchronize_pairs]
)
break
@@ -322,21 +322,21 @@ class DependencyProcessor(object):
class OneToManyDP(DependencyProcessor):
def per_property_dependencies(self, uow, parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete,
- ):
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete,
+ ):
if self.post_update:
child_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- False)
+ uow,
+ self.mapper.primary_base_mapper,
+ False)
child_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- True)
+ uow,
+ self.mapper.primary_base_mapper,
+ True)
uow.dependencies.update([
(child_saves, after_save),
@@ -362,22 +362,22 @@ class OneToManyDP(DependencyProcessor):
])
def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
+ save_parent,
+ delete_parent,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete):
if self.post_update:
child_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- False)
+ uow,
+ self.mapper.primary_base_mapper,
+ False)
child_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- True)
+ uow,
+ self.mapper.primary_base_mapper,
+ True)
# TODO: this whole block is not covered
# by any tests
@@ -421,13 +421,13 @@ class OneToManyDP(DependencyProcessor):
# child objects the child objects have to have their
# foreign key to the parent set to NULL
should_null_fks = not self.cascade.delete and \
- not self.passive_deletes == 'all'
+ not self.passive_deletes == 'all'
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.deleted:
if child is not None and self.hasparent(child) is False:
@@ -439,8 +439,8 @@ class OneToManyDP(DependencyProcessor):
if should_null_fks:
for child in history.unchanged:
if child is not None:
- uowcommit.register_object(child,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, operation="delete", prop=self.prop)
def presort_saves(self, uowcommit, states):
children_added = uowcommit.memo(('children_added', self), set)
@@ -454,28 +454,29 @@ class OneToManyDP(DependencyProcessor):
passive = attributes.PASSIVE_OFF
history = uowcommit.get_attribute_history(
- state,
- self.key,
- passive)
+ state,
+ self.key,
+ passive)
if history:
for child in history.added:
if child is not None:
uowcommit.register_object(child, cancel_delete=True,
- operation="add",
- prop=self.prop)
+ operation="add",
+ prop=self.prop)
children_added.update(history.added)
for child in history.deleted:
if not self.cascade.delete_orphan:
uowcommit.register_object(child, isdelete=False,
- operation='delete',
- prop=self.prop)
+ operation='delete',
+ prop=self.prop)
elif self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete', child):
+ 'delete', child):
uowcommit.register_object(
st_,
isdelete=True)
@@ -485,11 +486,11 @@ class OneToManyDP(DependencyProcessor):
for child in history.unchanged:
if child is not None:
uowcommit.register_object(
- child,
- False,
- self.passive_updates,
- operation="pk change",
- prop=self.prop)
+ child,
+ False,
+ self.passive_updates,
+ operation="pk change",
+ prop=self.prop)
def process_deletes(self, uowcommit, states):
# head object is being deleted, and we manage its list of
@@ -503,67 +504,67 @@ class OneToManyDP(DependencyProcessor):
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.deleted:
if child is not None and \
- self.hasparent(child) is False:
+ self.hasparent(child) is False:
self._synchronize(
- state,
- child,
- None, True,
- uowcommit, False)
+ state,
+ child,
+ None, True,
+ uowcommit, False)
if self.post_update and child:
self._post_update(child, uowcommit, [state])
if self.post_update or not self.cascade.delete:
for child in set(history.unchanged).\
- difference(children_added):
+ difference(children_added):
if child is not None:
self._synchronize(
- state,
- child,
- None, True,
- uowcommit, False)
+ state,
+ child,
+ None, True,
+ uowcommit, False)
if self.post_update and child:
self._post_update(child,
- uowcommit,
- [state])
+ uowcommit,
+ [state])
# technically, we can even remove each child from the
# collection here too. but this would be a somewhat
# inconsistent behavior since it wouldn't happen
- #if the old parent wasn't deleted but child was moved.
+ # if the old parent wasn't deleted but child was moved.
def process_saves(self, uowcommit, states):
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ self.key,
+ attributes.PASSIVE_NO_INITIALIZE)
if history:
for child in history.added:
self._synchronize(state, child, None,
- False, uowcommit, False)
+ False, uowcommit, False)
if child is not None and self.post_update:
self._post_update(child, uowcommit, [state])
for child in history.deleted:
if not self.cascade.delete_orphan and \
- not self.hasparent(child):
+ not self.hasparent(child):
self._synchronize(state, child, None, True,
- uowcommit, False)
+ uowcommit, False)
if self._pks_changed(uowcommit, state):
for child in history.unchanged:
self._synchronize(state, child, None,
- False, uowcommit, True)
+ False, uowcommit, True)
def _synchronize(self, state, child,
- associationrow, clearkeys, uowcommit,
- pks_changed):
+ associationrow, clearkeys, uowcommit,
+ pks_changed):
source = state
dest = child
self._verify_canload(child)
@@ -574,15 +575,15 @@ class OneToManyDP(DependencyProcessor):
sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
else:
sync.populate(source, self.parent, dest, self.mapper,
- self.prop.synchronize_pairs, uowcommit,
- self.passive_updates and pks_changed)
+ self.prop.synchronize_pairs, uowcommit,
+ self.passive_updates and pks_changed)
def _pks_changed(self, uowcommit, state):
return sync.source_modified(
- uowcommit,
- state,
- self.parent,
- self.prop.synchronize_pairs)
+ uowcommit,
+ state,
+ self.parent,
+ self.prop.synchronize_pairs)
class ManyToOneDP(DependencyProcessor):
@@ -591,22 +592,22 @@ class ManyToOneDP(DependencyProcessor):
self.mapper._dependency_processors.append(DetectKeySwitch(prop))
def per_property_dependencies(self, uow,
- parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete):
+ parent_saves,
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete):
if self.post_update:
parent_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- False)
+ uow,
+ self.parent.primary_base_mapper,
+ False)
parent_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- True)
+ uow,
+ self.parent.primary_base_mapper,
+ True)
uow.dependencies.update([
(child_saves, after_save),
@@ -627,19 +628,19 @@ class ManyToOneDP(DependencyProcessor):
])
def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
+ save_parent,
+ delete_parent,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete):
if self.post_update:
if not isdelete:
parent_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- False)
+ uow,
+ self.parent.primary_base_mapper,
+ False)
if childisdelete:
uow.dependencies.update([
(after_save, parent_post_updates),
@@ -654,9 +655,9 @@ class ManyToOneDP(DependencyProcessor):
])
else:
parent_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- True)
+ uow,
+ self.parent.primary_base_mapper,
+ True)
uow.dependencies.update([
(before_delete, parent_pre_updates),
@@ -685,9 +686,9 @@ class ManyToOneDP(DependencyProcessor):
if self.cascade.delete or self.cascade.delete_orphan:
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
if self.cascade.delete_orphan:
todelete = history.sum()
@@ -696,8 +697,9 @@ class ManyToOneDP(DependencyProcessor):
for child in todelete:
if child is None:
continue
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
t = self.mapper.cascade_iterator('delete', child)
for c, m, st_, dct_ in t:
uowcommit.register_object(
@@ -708,14 +710,15 @@ class ManyToOneDP(DependencyProcessor):
uowcommit.register_object(state, operation="add", prop=self.prop)
if self.cascade.delete_orphan:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.deleted:
if self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
t = self.mapper.cascade_iterator('delete', child)
for c, m, st_, dct_ in t:
@@ -733,35 +736,35 @@ class ManyToOneDP(DependencyProcessor):
self._synchronize(state, None, None, True, uowcommit)
if state and self.post_update:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
self._post_update(state, uowcommit, history.sum())
def process_saves(self, uowcommit, states):
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ self.key,
+ attributes.PASSIVE_NO_INITIALIZE)
if history:
if history.added:
for child in history.added:
self._synchronize(state, child, None, False,
- uowcommit, "add")
+ uowcommit, "add")
if self.post_update:
self._post_update(state, uowcommit, history.sum())
def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit, operation=None):
+ clearkeys, uowcommit, operation=None):
if state is None or \
- (not self.post_update and uowcommit.is_deleted(state)):
+ (not self.post_update and uowcommit.is_deleted(state)):
return
if operation is not None and \
- child is not None and \
- not uowcommit.session._contains_state(child):
+ child is not None and \
+ not uowcommit.session._contains_state(child):
util.warn(
"Object of type %s not in session, %s "
"operation along '%s' won't proceed" %
@@ -773,10 +776,10 @@ class ManyToOneDP(DependencyProcessor):
else:
self._verify_canload(child)
sync.populate(child, self.mapper, state,
- self.parent,
- self.prop.synchronize_pairs,
- uowcommit,
- False)
+ self.parent,
+ self.prop.synchronize_pairs,
+ uowcommit,
+ False)
class DetectKeySwitch(DependencyProcessor):
@@ -796,16 +799,16 @@ class DetectKeySwitch(DependencyProcessor):
if self.passive_updates:
return
else:
- if False in (prop.passive_updates for \
- prop in self.prop._reverse_property):
+ if False in (prop.passive_updates for
+ prop in self.prop._reverse_property):
return
uow.register_preprocessor(self, False)
def per_property_flush_actions(self, uow):
parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.base_mapper)
+ uow,
+ self.parent.base_mapper)
after_save = unitofwork.ProcessAll(uow, self, False, False)
uow.dependencies.update([
(parent_saves, after_save)
@@ -844,8 +847,8 @@ class DetectKeySwitch(DependencyProcessor):
def _key_switchers(self, uow, states):
switched, notswitched = uow.memo(
('pk_switchers', self),
- lambda: (set(), set())
- )
+ lambda: (set(), set())
+ )
allstates = switched.union(notswitched)
for s in states:
@@ -865,37 +868,35 @@ class DetectKeySwitch(DependencyProcessor):
if not issubclass(state.class_, self.parent.class_):
continue
dict_ = state.dict
- related = state.get_impl(self.key).get(state, dict_,
- passive=self._passive_update_flag)
+ related = state.get_impl(self.key).get(
+ state, dict_, passive=self._passive_update_flag)
if related is not attributes.PASSIVE_NO_RESULT and \
related is not None:
related_state = attributes.instance_state(dict_[self.key])
if related_state in switchers:
uowcommit.register_object(state,
- False,
- self.passive_updates)
+ False,
+ self.passive_updates)
sync.populate(
- related_state,
- self.mapper, state,
- self.parent, self.prop.synchronize_pairs,
- uowcommit, self.passive_updates)
+ related_state,
+ self.mapper, state,
+ self.parent, self.prop.synchronize_pairs,
+ uowcommit, self.passive_updates)
def _pks_changed(self, uowcommit, state):
- return bool(state.key) and sync.source_modified(uowcommit,
- state,
- self.mapper,
- self.prop.synchronize_pairs)
+ return bool(state.key) and sync.source_modified(
+ uowcommit, state, self.mapper, self.prop.synchronize_pairs)
class ManyToManyDP(DependencyProcessor):
def per_property_dependencies(self, uow, parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete
- ):
+ child_saves,
+ parent_deletes,
+ child_deletes,
+ after_save,
+ before_delete
+ ):
uow.dependencies.update([
(parent_saves, after_save),
@@ -915,11 +916,11 @@ class ManyToManyDP(DependencyProcessor):
])
def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
+ save_parent,
+ delete_parent,
+ child_action,
+ after_save, before_delete,
+ isdelete, childisdelete):
if not isdelete:
if childisdelete:
uow.dependencies.update([
@@ -946,9 +947,9 @@ class ManyToManyDP(DependencyProcessor):
# returns True
for state in states:
uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
def presort_saves(self, uowcommit, states):
if not self.passive_updates:
@@ -958,9 +959,9 @@ class ManyToManyDP(DependencyProcessor):
for state in states:
if self._pks_changed(uowcommit, state):
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_OFF)
+ state,
+ self.key,
+ attributes.PASSIVE_OFF)
if not self.cascade.delete_orphan:
return
@@ -969,17 +970,18 @@ class ManyToManyDP(DependencyProcessor):
# if delete_orphan check is turned on.
for state in states:
history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ self.key,
+ attributes.PASSIVE_NO_INITIALIZE)
if history:
for child in history.deleted:
if self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
+ uowcommit.register_object(
+ child, isdelete=True,
+ operation="delete", prop=self.prop)
for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete',
- child):
+ 'delete',
+ child):
uowcommit.register_object(
st_, isdelete=True)
@@ -994,9 +996,9 @@ class ManyToManyDP(DependencyProcessor):
# this history should be cached already, as
# we loaded it in preprocess_deletes
history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
+ state,
+ self.key,
+ self._passive_delete_flag)
if history:
for child in history.non_added():
if child is None or \
@@ -1005,10 +1007,10 @@ class ManyToManyDP(DependencyProcessor):
continue
associationrow = {}
if not self._synchronize(
- state,
- child,
- associationrow,
- False, uowcommit, "delete"):
+ state,
+ child,
+ associationrow,
+ False, uowcommit, "delete"):
continue
secondary_delete.append(associationrow)
@@ -1018,7 +1020,7 @@ class ManyToManyDP(DependencyProcessor):
processed.update(tmp)
self._run_crud(uowcommit, secondary_insert,
- secondary_update, secondary_delete)
+ secondary_update, secondary_delete)
def process_saves(self, uowcommit, states):
secondary_delete = []
@@ -1030,23 +1032,23 @@ class ManyToManyDP(DependencyProcessor):
for state in states:
need_cascade_pks = not self.passive_updates and \
- self._pks_changed(uowcommit, state)
+ self._pks_changed(uowcommit, state)
if need_cascade_pks:
passive = attributes.PASSIVE_OFF
else:
passive = attributes.PASSIVE_NO_INITIALIZE
history = uowcommit.get_attribute_history(state, self.key,
- passive)
+ passive)
if history:
for child in history.added:
if (processed is not None and
- (state, child) in processed):
+ (state, child) in processed):
continue
associationrow = {}
if not self._synchronize(state,
- child,
- associationrow,
- False, uowcommit, "add"):
+ child,
+ associationrow,
+ False, uowcommit, "add"):
continue
secondary_insert.append(associationrow)
for child in history.deleted:
@@ -1055,14 +1057,14 @@ class ManyToManyDP(DependencyProcessor):
continue
associationrow = {}
if not self._synchronize(state,
- child,
- associationrow,
- False, uowcommit, "delete"):
+ child,
+ associationrow,
+ False, uowcommit, "delete"):
continue
secondary_delete.append(associationrow)
tmp.update((c, state)
- for c in history.added + history.deleted)
+ for c in history.added + history.deleted)
if need_cascade_pks:
@@ -1085,45 +1087,45 @@ class ManyToManyDP(DependencyProcessor):
processed.update(tmp)
self._run_crud(uowcommit, secondary_insert,
- secondary_update, secondary_delete)
+ secondary_update, secondary_delete)
def _run_crud(self, uowcommit, secondary_insert,
- secondary_update, secondary_delete):
+ secondary_update, secondary_delete):
connection = uowcommit.transaction.connection(self.mapper)
if secondary_delete:
associationrow = secondary_delete[0]
statement = self.secondary.delete(sql.and_(*[
- c == sql.bindparam(c.key, type_=c.type)
- for c in self.secondary.c
- if c.key in associationrow
- ]))
+ c == sql.bindparam(c.key, type_=c.type)
+ for c in self.secondary.c
+ if c.key in associationrow
+ ]))
result = connection.execute(statement, secondary_delete)
if result.supports_sane_multi_rowcount() and \
- result.rowcount != len(secondary_delete):
+ result.rowcount != len(secondary_delete):
raise exc.StaleDataError(
"DELETE statement on table '%s' expected to delete "
"%d row(s); Only %d were matched." %
(self.secondary.description, len(secondary_delete),
- result.rowcount)
+ result.rowcount)
)
if secondary_update:
associationrow = secondary_update[0]
statement = self.secondary.update(sql.and_(*[
- c == sql.bindparam("old_" + c.key, type_=c.type)
- for c in self.secondary.c
- if c.key in associationrow
- ]))
+ c == sql.bindparam("old_" + c.key, type_=c.type)
+ for c in self.secondary.c
+ if c.key in associationrow
+ ]))
result = connection.execute(statement, secondary_update)
if result.supports_sane_multi_rowcount() and \
- result.rowcount != len(secondary_update):
+ result.rowcount != len(secondary_update):
raise exc.StaleDataError(
"UPDATE statement on table '%s' expected to update "
"%d row(s); Only %d were matched." %
(self.secondary.description, len(secondary_update),
- result.rowcount)
+ result.rowcount)
)
if secondary_insert:
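For context on _run_crud() above: it issues one statement per batch of association rows, matching each secondary-table column to a same-named bindparam, and only trusts rowcount when the DBAPI reports sane multi-row counts. An independent sketch of that pattern (table and rows invented for illustration):

    from sqlalchemy import (MetaData, Table, Column, Integer, and_,
                            bindparam, create_engine)

    metadata = MetaData()
    assoc = Table(
        'assoc', metadata,
        Column('left_id', Integer, primary_key=True),
        Column('right_id', Integer, primary_key=True))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    conn = engine.connect()
    conn.execute(assoc.insert(), [
        {'left_id': 1, 'right_id': 2},
        {'left_id': 1, 'right_id': 3}])

    # one DELETE statement, executed once per parameter set
    stmt = assoc.delete().where(and_(
        assoc.c.left_id == bindparam('left_id'),
        assoc.c.right_id == bindparam('right_id')))
    result = conn.execute(stmt, [
        {'left_id': 1, 'right_id': 2},
        {'left_id': 1, 'right_id': 3}])

    # the StaleDataError guard above applies only under this check
    if result.supports_sane_multi_rowcount():
        assert result.rowcount == 2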
@@ -1131,7 +1133,7 @@ class ManyToManyDP(DependencyProcessor):
connection.execute(statement, secondary_insert)
def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit, operation):
+ clearkeys, uowcommit, operation):
# this checks for None if uselist=True
self._verify_canload(child)
@@ -1150,18 +1152,18 @@ class ManyToManyDP(DependencyProcessor):
return False
sync.populate_dict(state, self.parent, associationrow,
- self.prop.synchronize_pairs)
+ self.prop.synchronize_pairs)
sync.populate_dict(child, self.mapper, associationrow,
- self.prop.secondary_synchronize_pairs)
+ self.prop.secondary_synchronize_pairs)
return True
def _pks_changed(self, uowcommit, state):
return sync.source_modified(
- uowcommit,
- state,
- self.parent,
- self.prop.synchronize_pairs)
+ uowcommit,
+ state,
+ self.parent,
+ self.prop.synchronize_pairs)
_direction_to_processor = {
ONETOMANY: OneToManyDP,
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index cd918cafe..fa693c968 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -8,6 +8,7 @@
from .. import event, util
from .interfaces import EXT_CONTINUE
+
@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class MapperExtension(object):
"""Base implementation for :class:`.Mapper` event hooks.
@@ -64,20 +65,20 @@ class MapperExtension(object):
cls._adapt_listener_methods(
self, listener,
(
- 'init_instance',
- 'init_failed',
- 'translate_row',
- 'create_instance',
- 'append_result',
- 'populate_instance',
- 'reconstruct_instance',
- 'before_insert',
- 'after_insert',
- 'before_update',
- 'after_update',
- 'before_delete',
- 'after_delete'
- ))
+ 'init_instance',
+ 'init_failed',
+ 'translate_row',
+ 'create_instance',
+ 'append_result',
+ 'populate_instance',
+ 'reconstruct_instance',
+ 'before_insert',
+ 'after_insert',
+ 'before_update',
+ 'after_update',
+ 'before_delete',
+ 'after_delete'
+ ))
@classmethod
def _adapt_listener_methods(cls, self, listener, methods):
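Per the adapter reflowed above, each legacy MapperExtension hook is re-registered through the event system; new code listens directly instead. A minimal sketch of the modern equivalent (the handler name is invented):

    from sqlalchemy import event
    from sqlalchemy.orm import mapper

    @event.listens_for(mapper, 'before_insert')
    def receive_before_insert(mapper_, connection, target):
        # fires before the INSERT for any mapped class
        print("inserting %r" % (target,))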
@@ -93,29 +94,30 @@ class MapperExtension(object):
ls_meth(self, instance)
return reconstruct
event.listen(self.class_manager, 'load',
- go(ls_meth), raw=False, propagate=True)
+ go(ls_meth), raw=False, propagate=True)
elif meth == 'init_instance':
def go(ls_meth):
def init_instance(instance, args, kwargs):
ls_meth(self, self.class_,
- self.class_manager.original_init,
- instance, args, kwargs)
+ self.class_manager.original_init,
+ instance, args, kwargs)
return init_instance
event.listen(self.class_manager, 'init',
- go(ls_meth), raw=False, propagate=True)
+ go(ls_meth), raw=False, propagate=True)
elif meth == 'init_failed':
def go(ls_meth):
def init_failed(instance, args, kwargs):
- util.warn_exception(ls_meth, self, self.class_,
- self.class_manager.original_init,
- instance, args, kwargs)
+ util.warn_exception(
+ ls_meth, self, self.class_,
+ self.class_manager.original_init,
+ instance, args, kwargs)
return init_failed
event.listen(self.class_manager, 'init_failure',
- go(ls_meth), raw=False, propagate=True)
+ go(ls_meth), raw=False, propagate=True)
else:
event.listen(self, "%s" % meth, ls_meth,
- raw=False, retval=True, propagate=True)
+ raw=False, retval=True, propagate=True)
def instrument_class(self, mapper, class_):
"""Receive a class when the mapper is first constructed, and has
@@ -198,7 +200,7 @@ class MapperExtension(object):
return EXT_CONTINUE
def append_result(self, mapper, selectcontext, row, instance,
- result, **flags):
+ result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
@@ -232,7 +234,7 @@ class MapperExtension(object):
return EXT_CONTINUE
def populate_instance(self, mapper, selectcontext, row,
- instance, **flags):
+ instance, **flags):
"""Receive an instance before that instance has
its attributes populated.
@@ -555,14 +557,14 @@ class AttributeExtension(object):
@classmethod
def _adapt_listener(cls, self, listener):
event.listen(self, 'append', listener.append,
- active_history=listener.active_history,
- raw=True, retval=True)
+ active_history=listener.active_history,
+ raw=True, retval=True)
event.listen(self, 'remove', listener.remove,
- active_history=listener.active_history,
- raw=True, retval=True)
+ active_history=listener.active_history,
+ raw=True, retval=True)
event.listen(self, 'set', listener.set,
- active_history=listener.active_history,
- raw=True, retval=True)
+ active_history=listener.active_history,
+ raw=True, retval=True)
def append(self, state, value, initiator):
"""Receive a collection append event.
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index 4c335a71c..5ed24b8c0 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -39,7 +39,7 @@ class DescriptorProperty(MapperProperty):
if hasattr(prop, 'get_history'):
def get_history(self, state, dict_,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
return prop.get_history(state, dict_, passive)
if self.descriptor is None:
@@ -63,16 +63,15 @@ class DescriptorProperty(MapperProperty):
fdel=fdel,
)
- proxy_attr = attributes.\
- create_proxied_attribute(self.descriptor)\
- (
- self.parent.class_,
- self.key,
- self.descriptor,
- lambda: self._comparator_factory(mapper),
- doc=self.doc,
- original_property=self
- )
+ proxy_attr = attributes.create_proxied_attribute(
+ self.descriptor)(
+ self.parent.class_,
+ self.key,
+ self.descriptor,
+ lambda: self._comparator_factory(mapper),
+ doc=self.doc,
+ original_property=self
+ )
proxy_attr.impl = _ProxyImpl(self.key)
mapper.class_manager.instrument_attribute(self.key, proxy_attr)
@@ -90,11 +89,12 @@ class CompositeProperty(DescriptorProperty):
:ref:`mapper_composite`
"""
+
def __init__(self, class_, *attrs, **kwargs):
"""Return a composite column-based property for use with a Mapper.
- See the mapping documentation section :ref:`mapper_composite` for a full
- usage example.
+ See the mapping documentation section :ref:`mapper_composite` for a
+ full usage example.
The :class:`.MapperProperty` returned by :func:`.composite`
is the :class:`.CompositeProperty`.
@@ -118,13 +118,14 @@ class CompositeProperty(DescriptorProperty):
A group name for this property when marked as deferred.
:param deferred:
- When True, the column property is "deferred", meaning that it does not
- load immediately, and is instead loaded when the attribute is first
- accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
+ When True, the column property is "deferred", meaning that it does
+ not load immediately, and is instead loaded when the attribute is
+ first accessed on an instance. See also
+ :func:`~sqlalchemy.orm.deferred`.
:param comparator_factory: a class which extends
- :class:`.CompositeProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
+ :class:`.CompositeProperty.Comparator` which provides custom SQL
+ clause generation for comparison operations.
:param doc:
optional string that will be applied as the doc on the
@@ -138,8 +139,8 @@ class CompositeProperty(DescriptorProperty):
:param extension:
an :class:`.AttributeExtension` instance,
or list of extensions, which will be prepended to the list of
- attribute listeners for the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
+ attribute listeners for the resulting descriptor placed on the
+ class. **Deprecated.** Please see :class:`.AttributeEvents`.
"""
@@ -149,14 +150,13 @@ class CompositeProperty(DescriptorProperty):
self.deferred = kwargs.get('deferred', False)
self.group = kwargs.get('group', None)
self.comparator_factory = kwargs.pop('comparator_factory',
- self.__class__.Comparator)
+ self.__class__.Comparator)
if 'info' in kwargs:
self.info = kwargs.pop('info')
util.set_creation_order(self)
self._create_descriptor()
-
def instrument_class(self, mapper):
super(CompositeProperty, self).instrument_class(mapper)
self._setup_event_handlers()
@@ -241,16 +241,17 @@ class CompositeProperty(DescriptorProperty):
props = []
for attr in self.attrs:
if isinstance(attr, str):
- prop = self.parent.get_property(attr, _configure_mappers=False)
+ prop = self.parent.get_property(
+ attr, _configure_mappers=False)
elif isinstance(attr, schema.Column):
prop = self.parent._columntoproperty[attr]
elif isinstance(attr, attributes.InstrumentedAttribute):
prop = attr.property
else:
raise sa_exc.ArgumentError(
- "Composite expects Column objects or mapped "
- "attributes/attribute names as arguments, got: %r"
- % (attr,))
+ "Composite expects Column objects or mapped "
+ "attributes/attribute names as arguments, got: %r"
+ % (attr,))
props.append(prop)
return props
@@ -268,8 +269,8 @@ class CompositeProperty(DescriptorProperty):
if self.deferred:
prop.deferred = self.deferred
prop.strategy_class = prop._strategy_lookup(
- ("deferred", True),
- ("instrument", True))
+ ("deferred", True),
+ ("instrument", True))
prop.group = self.group
def _setup_event_handlers(self):
@@ -288,11 +289,11 @@ class CompositeProperty(DescriptorProperty):
if k not in dict_:
return
- #assert self.key not in dict_
+ # assert self.key not in dict_
dict_[self.key] = self.composite_class(
- *[state.dict[key] for key in
- self._attribute_keys]
- )
+ *[state.dict[key] for key in
+ self._attribute_keys]
+ )
def expire_handler(state, keys):
if keys is None or set(self._attribute_keys).intersection(keys):
@@ -309,15 +310,15 @@ class CompositeProperty(DescriptorProperty):
state.dict.pop(self.key, None)
event.listen(self.parent, 'after_insert',
- insert_update_handler, raw=True)
+ insert_update_handler, raw=True)
event.listen(self.parent, 'after_update',
- insert_update_handler, raw=True)
+ insert_update_handler, raw=True)
event.listen(self.parent, 'load',
- load_handler, raw=True, propagate=True)
+ load_handler, raw=True, propagate=True)
event.listen(self.parent, 'refresh',
- load_handler, raw=True, propagate=True)
+ load_handler, raw=True, propagate=True)
event.listen(self.parent, 'expire',
- expire_handler, raw=True, propagate=True)
+ expire_handler, raw=True, propagate=True)
# TODO: need a deserialize hook here
@@ -368,14 +369,14 @@ class CompositeProperty(DescriptorProperty):
def __init__(self, property, expr):
self.property = property
super(CompositeProperty.CompositeBundle, self).__init__(
- property.key, *expr)
+ property.key, *expr)
def create_row_processor(self, query, procs, labels):
def proc(row, result):
- return self.property.composite_class(*[proc(row, result) for proc in procs])
+ return self.property.composite_class(
+ *[proc(row, result) for proc in procs])
return proc
-
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.CompositeProperty` attributes.
@@ -395,7 +396,6 @@ class CompositeProperty(DescriptorProperty):
"""
-
__hash__ = None
@property
@@ -403,20 +403,22 @@ class CompositeProperty(DescriptorProperty):
return self.__clause_element__()
def __clause_element__(self):
- return expression.ClauseList(group=False, *self._comparable_elements)
+ return expression.ClauseList(
+ group=False, *self._comparable_elements)
def _query_clause_element(self):
- return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__())
+ return CompositeProperty.CompositeBundle(
+ self.prop, self.__clause_element__())
@util.memoized_property
def _comparable_elements(self):
if self._adapt_to_entity:
return [
- getattr(
- self._adapt_to_entity.entity,
- prop.key
- ) for prop in self.prop._comparable_elements
- ]
+ getattr(
+ self._adapt_to_entity.entity,
+ prop.key
+ ) for prop in self.prop._comparable_elements
+ ]
else:
return self.prop._comparable_elements
@@ -471,9 +473,9 @@ class ConcreteInheritedProperty(DescriptorProperty):
def __init__(self):
def warn():
raise AttributeError("Concrete %s does not implement "
- "attribute %r at the instance level. Add this "
- "property explicitly to %s." %
- (self.parent, self.key, self.parent))
+ "attribute %r at the instance level. Add "
+ "this property explicitly to %s." %
+ (self.parent, self.key, self.parent))
class NoninheritedConcreteProp(object):
def __set__(s, obj, value):
@@ -493,8 +495,8 @@ class ConcreteInheritedProperty(DescriptorProperty):
class SynonymProperty(DescriptorProperty):
def __init__(self, name, map_column=None,
- descriptor=None, comparator_factory=None,
- doc=None):
+ descriptor=None, comparator_factory=None,
+ doc=None):
"""Denote an attribute name as a synonym to a mapped property,
in that the attribute will mirror the value and expression behavior
of another attribute.
@@ -523,11 +525,11 @@ class SynonymProperty(DescriptorProperty):
job_status = synonym("_job_status", map_column=True)
The above class ``MyClass`` will now have the ``job_status``
- :class:`.Column` object mapped to the attribute named ``_job_status``,
- and the attribute named ``job_status`` will refer to the synonym
- itself. This feature is typically used in conjunction with the
- ``descriptor`` argument in order to link a user-defined descriptor
- as a "wrapper" for an existing column.
+ :class:`.Column` object mapped to the attribute named
+ ``_job_status``, and the attribute named ``job_status`` will refer
+ to the synonym itself. This feature is typically used in
+ conjunction with the ``descriptor`` argument in order to link a
+ user-defined descriptor as a "wrapper" for an existing column.
:param comparator_factory: A subclass of :class:`.PropComparator`
that will provide custom comparison behavior at the SQL expression
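Reconstructing the docstring's job_status example in full under a standard declarative setup (a sketch; only the synonym line itself is quoted verbatim in the hunk above):

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import synonym

    Base = declarative_base()

    class MyClass(Base):
        __tablename__ = 'my_table'
        id = Column(Integer, primary_key=True)
        job_status = Column(String(50))

        # remaps the Column to "_job_status"; the name "job_status"
        # now refers to the synonym itself
        job_status = synonym("_job_status", map_column=True)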
@@ -580,12 +582,12 @@ class SynonymProperty(DescriptorProperty):
raise sa_exc.ArgumentError(
"Can't compile synonym '%s': no column on table "
"'%s' named '%s'"
- % (self.name, parent.mapped_table.description, self.key))
+ % (self.name, parent.mapped_table.description, self.key))
elif parent.mapped_table.c[self.key] in \
parent._columntoproperty and \
parent._columntoproperty[
- parent.mapped_table.c[self.key]
- ].key == self.name:
+ parent.mapped_table.c[self.key]
+ ].key == self.name:
raise sa_exc.ArgumentError(
"Can't call map_column=True for synonym %r=%r, "
"a ColumnProperty already exists keyed to the name "
@@ -594,9 +596,9 @@ class SynonymProperty(DescriptorProperty):
)
p = properties.ColumnProperty(parent.mapped_table.c[self.key])
parent._configure_property(
- self.name, p,
- init=init,
- setparent=True)
+ self.name, p,
+ init=init,
+ setparent=True)
p._mapped_by_synonym = self.key
self.parent = parent
@@ -646,7 +648,8 @@ class ComparableProperty(DescriptorProperty):
id = Column(Integer, primary_key=True)
word = Column(String)
word_insensitive = comparable_property(lambda prop, mapper:
- CaseInsensitiveComparator(mapper.c.word, mapper)
+ CaseInsensitiveComparator(
+ mapper.c.word, mapper)
)
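The CaseInsensitiveComparator assumed by the docstring fragment above is conventionally built on PropComparator; a hedged reconstruction along the lines of the comparable_property() documentation:

    from sqlalchemy import func
    from sqlalchemy.orm.interfaces import PropComparator

    class CaseInsensitiveComparator(PropComparator):
        def __clause_element__(self):
            return self.prop

        def __eq__(self, other):
            # compare both sides lower-cased at the SQL level
            return func.lower(self.__clause_element__()) == \
                func.lower(other)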
@@ -675,5 +678,3 @@ class ComparableProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
-
-
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 68a09ff8c..51db1b107 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -17,9 +17,10 @@ from ..sql import operators
from . import (
attributes, object_session, util as orm_util, strategies,
object_mapper, exc as orm_exc, properties
- )
+)
from .query import Query
+
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
@@ -30,7 +31,8 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
"On relationship %s, 'dynamic' loaders cannot be used with "
"many-to-one/one-to-one relationships and/or "
"uselist=False." % self.parent_property)
- strategies._register_attribute(self,
+ strategies._register_attribute(
+ self,
mapper,
useobject=True,
uselist=True,
@@ -41,6 +43,7 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
backref=self.parent_property.back_populates,
)
+
class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
@@ -48,10 +51,10 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
collection = False
def __init__(self, class_, key, typecallable,
- dispatch,
- target_mapper, order_by, query_class=None, **kw):
+ dispatch,
+ target_mapper, order_by, query_class=None, **kw):
super(DynamicAttributeImpl, self).\
- __init__(class_, key, typecallable, dispatch, **kw)
+ __init__(class_, key, typecallable, dispatch, **kw)
self.target_mapper = target_mapper
self.order_by = order_by
if not query_class:
@@ -63,16 +66,16 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
if not passive & attributes.SQL_OK:
- return self._get_collection_history(state,
- attributes.PASSIVE_NO_INITIALIZE).added_items
+ return self._get_collection_history(
+ state, attributes.PASSIVE_NO_INITIALIZE).added_items
else:
return self.query_class(self, state)
def get_collection(self, state, dict_, user_data=None,
- passive=attributes.PASSIVE_NO_INITIALIZE):
+ passive=attributes.PASSIVE_NO_INITIALIZE):
if not passive & attributes.SQL_OK:
return self._get_collection_history(state,
- passive).added_items
+ passive).added_items
else:
history = self._get_collection_history(state, passive)
return history.added_plus_unchanged
@@ -86,7 +89,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return attributes.Event(self, attributes.OP_REMOVE)
def fire_append_event(self, state, dict_, value, initiator,
- collection_history=None):
+ collection_history=None):
if collection_history is None:
collection_history = self._modified_event(state, dict_)
@@ -99,7 +102,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
self.sethasparent(attributes.instance_state(value), state, True)
def fire_remove_event(self, state, dict_, value, initiator,
- collection_history=None):
+ collection_history=None):
if collection_history is None:
collection_history = self._modified_event(state, dict_)
@@ -117,8 +120,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
state.committed_state[self.key] = CollectionHistory(self, state)
state._modified_event(dict_,
- self,
- attributes.NEVER_SET)
+ self,
+ attributes.NEVER_SET)
# this is a hack to allow the fixtures.ComparableEntity fixture
# to work
@@ -126,8 +129,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return state.committed_state[self.key]
def set(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF,
- check_old=None, pop=False):
+ passive=attributes.PASSIVE_OFF,
+ check_old=None, pop=False):
if initiator and initiator.parent_token is self.parent_token:
return
@@ -145,7 +148,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
old_collection = collection_history.added_items
else:
old_collection = old_collection.union(
- collection_history.added_items)
+ collection_history.added_items)
idset = util.IdentitySet
constants = old_collection.intersection(new_values)
@@ -155,11 +158,11 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
for member in new_values:
if member in additions:
self.fire_append_event(state, dict_, member, None,
- collection_history=collection_history)
+ collection_history=collection_history)
for member in removals:
self.fire_remove_event(state, dict_, member, None,
- collection_history=collection_history)
+ collection_history=collection_history)
def delete(self, *args, **kwargs):
raise NotImplementedError()
@@ -173,14 +176,14 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return c.as_history()
def get_all_pending(self, state, dict_,
- passive=attributes.PASSIVE_NO_INITIALIZE):
+ passive=attributes.PASSIVE_NO_INITIALIZE):
c = self._get_collection_history(
state, passive)
return [
- (attributes.instance_state(x), x)
- for x in
- c.all_items
- ]
+ (attributes.instance_state(x), x)
+ for x in
+ c.all_items
+ ]
def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
if self.key in state.committed_state:
@@ -194,17 +197,17 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
return c
def append(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
if initiator is not self:
self.fire_append_event(state, dict_, value, initiator)
def remove(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
if initiator is not self:
self.fire_remove_event(state, dict_, value, initiator)
def pop(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
self.remove(state, dict_, value, initiator, passive=passive)
@@ -219,10 +222,10 @@ class AppenderMixin(object):
mapper = object_mapper(instance)
prop = mapper._props[self.attr.key]
self._criterion = prop.compare(
- operators.eq,
- instance,
- value_is_parent=True,
- alias_secondary=False)
+ operators.eq,
+ instance,
+ value_is_parent=True,
+ alias_secondary=False)
if self.attr.order_by:
self._order_by = self.attr.order_by
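For orientation, the criterion built in AppenderMixin above is what makes a lazy='dynamic' relationship return a pre-filtered Query. A self-contained usage sketch (User and Address are invented):

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, relationship

    Base = declarative_base()

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # "dynamic" yields an AppenderQuery instead of loading a list
        addresses = relationship(Address, lazy='dynamic')

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    u = User(id=1)
    u.addresses.append(Address(id=1))
    session.add(u)
    session.commit()
    # filter in SQL without loading the whole collection
    print(u.addresses.filter(Address.id == 1).count())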
@@ -230,7 +233,7 @@ class AppenderMixin(object):
def session(self):
sess = object_session(self.instance)
if sess is not None and self.autoflush and sess.autoflush \
- and self.instance in sess:
+ and self.instance in sess:
sess.flush()
if not orm_util.has_identity(self.instance):
return None
@@ -339,7 +342,7 @@ class CollectionHistory(object):
@property
def all_items(self):
return list(self.added_items.union(
- self.unchanged_items).union(self.deleted_items))
+ self.unchanged_items).union(self.deleted_items))
def as_history(self):
if self._reconcile_collection:
@@ -348,13 +351,13 @@ class CollectionHistory(object):
unchanged = self.unchanged_items.difference(deleted)
else:
added, unchanged, deleted = self.added_items,\
- self.unchanged_items,\
- self.deleted_items
+ self.unchanged_items,\
+ self.deleted_items
return attributes.History(
- list(added),
- list(unchanged),
- list(deleted),
- )
+ list(added),
+ list(unchanged),
+ list(deleted),
+ )
def indexed(self, index):
return list(self.added_items)[index]
@@ -367,4 +370,3 @@ class CollectionHistory(object):
self.added_items.remove(value)
else:
self.deleted_items.add(value)
-
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 3b56ff55a..2026e5d0a 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -14,15 +14,15 @@ class UnevaluatableError(Exception):
_straight_ops = set(getattr(operators, op)
for op in ('add', 'mul', 'sub',
- 'div',
- 'mod', 'truediv',
+ 'div',
+ 'mod', 'truediv',
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
_notimplemented_ops = set(getattr(operators, op)
- for op in ('like_op', 'notlike_op', 'ilike_op',
- 'notilike_op', 'between_op', 'in_op',
- 'notin_op', 'endswith_op', 'concat_op'))
+ for op in ('like_op', 'notlike_op', 'ilike_op',
+ 'notilike_op', 'between_op', 'in_op',
+ 'notin_op', 'endswith_op', 'concat_op'))
class EvaluatorCompiler(object):
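These operator sets are what back Query.update()/delete() with synchronize_session='evaluate': criteria built only from _straight_ops can be re-applied to in-memory objects in Python, while the _notimplemented_ops raise UnevaluatableError. A small self-contained sketch (Tag is invented):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class Tag(Base):
        __tablename__ = 'tag'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    session.add_all([Tag(id=1, name='a'), Tag(id=2, name='b')])
    session.commit()

    # ">" is in _straight_ops, so the criteria evaluate in Python
    session.query(Tag).filter(Tag.id > 1).update(
        {'name': 'updated'}, synchronize_session='evaluate')

    # a .like() filter uses like_op from _notimplemented_ops and
    # could not be synchronized this way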
@@ -55,7 +55,7 @@ class EvaluatorCompiler(object):
self.target_cls, parentmapper.class_):
raise UnevaluatableError(
"Can't evaluate criteria against alternate class %s" %
- parentmapper.class_
+ parentmapper.class_
)
key = parentmapper._columntoproperty[clause].key
else:
@@ -95,7 +95,7 @@ class EvaluatorCompiler(object):
def visit_binary(self, clause):
eval_left, eval_right = list(map(self.process,
- [clause.left, clause.right]))
+ [clause.left, clause.right]))
operator = clause.operator
if operator is operators.is_:
def evaluate(obj):
@@ -112,8 +112,8 @@ class EvaluatorCompiler(object):
return operator(eval_left(obj), eval_right(obj))
else:
raise UnevaluatableError(
- "Cannot evaluate %s with operator %s" %
- (type(clause).__name__, clause.operator))
+ "Cannot evaluate %s with operator %s" %
+ (type(clause).__name__, clause.operator))
return evaluate
def visit_unary(self, clause):
@@ -126,8 +126,8 @@ class EvaluatorCompiler(object):
return not value
return evaluate
raise UnevaluatableError(
- "Cannot evaluate %s with operator %s" %
- (type(clause).__name__, clause.operator))
+ "Cannot evaluate %s with operator %s" %
+ (type(clause).__name__, clause.operator))
def visit_bindparam(self, clause):
val = clause.value
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 0e08a0898..aa99673ba 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -18,6 +18,7 @@ from .session import Session, sessionmaker
from .scoping import scoped_session
from .attributes import QueryableAttribute
+
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
@@ -50,7 +51,6 @@ class InstrumentationEvents(event.Events):
_target_class_doc = "SomeBaseClass"
_dispatch_target = instrumentation.InstrumentationFactory
-
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
@@ -71,10 +71,11 @@ class InstrumentationEvents(event.Events):
return fn(target_cls, *arg)
def remove(ref):
- key = event.registry._EventKey(None, identifier, listen,
- instrumentation._instrumentation_factory)
+ key = event.registry._EventKey(
+ None, identifier, listen,
+ instrumentation._instrumentation_factory)
getattr(instrumentation._instrumentation_factory.dispatch,
- identifier).remove(key)
+ identifier).remove(key)
target = weakref.ref(target.class_, remove)
@@ -107,17 +108,18 @@ class InstrumentationEvents(event.Events):
"""Called when an attribute is instrumented."""
-
class _InstrumentationEventsHold(object):
"""temporary marker object used to transfer from _accept_with() to
_listen() on the InstrumentationEvents class.
"""
+
def __init__(self, class_):
self.class_ = class_
dispatch = event.dispatcher(InstrumentationEvents)
+
class InstanceEvents(event.Events):
"""Define events specific to object lifecycle.
@@ -201,7 +203,8 @@ class InstanceEvents(event.Events):
if propagate:
for mgr in target.subclass_managers(True):
- event_key.with_dispatch_target(mgr).base_listen(propagate=True)
+ event_key.with_dispatch_target(mgr).base_listen(
+ propagate=True)
@classmethod
def _clear(cls):
@@ -330,6 +333,7 @@ class InstanceEvents(event.Events):
"""
+
class _EventsHold(event.RefCollection):
"""Hold onto listeners against unmapped, uninstrumented classes.
@@ -337,6 +341,7 @@ class _EventsHold(event.RefCollection):
those objects are created for that class.
"""
+
def __init__(self, class_):
self.class_ = class_
@@ -387,9 +392,9 @@ class _EventsHold(event.RefCollection):
collection = cls.all_holds[subclass]
for event_key, raw, propagate in collection.values():
if propagate or subclass is class_:
- # since we can't be sure in what order different classes
- # in a hierarchy are triggered with populate(),
- # we rely upon _EventsHold for all event
+ # since we can't be sure in what order different
+ # classes in a hierarchy are triggered with
+ # populate(), we rely upon _EventsHold for all event
# assignment, instead of using the generic propagate
# flag.
event_key.with_dispatch_target(subject).\
@@ -502,16 +507,17 @@ class MapperEvents(event.Events):
return target
@classmethod
- def _listen(cls, event_key, raw=False, retval=False, propagate=False, **kw):
+ def _listen(
+ cls, event_key, raw=False, retval=False, propagate=False, **kw):
target, identifier, fn = \
event_key.dispatch_target, event_key.identifier, event_key.fn
if identifier in ("before_configured", "after_configured") and \
- target is not mapperlib.Mapper:
+ target is not mapperlib.Mapper:
util.warn(
- "'before_configured' and 'after_configured' ORM events "
- "only invoke with the mapper() function or Mapper class "
- "as the target.")
+ "'before_configured' and 'after_configured' ORM events "
+ "only invoke with the mapper() function or Mapper class "
+ "as the target.")
if not raw or not retval:
if not raw:
@@ -536,7 +542,7 @@ class MapperEvents(event.Events):
if propagate:
for mapper in target.self_and_descendants:
event_key.with_dispatch_target(mapper).base_listen(
- propagate=True, **kw)
+ propagate=True, **kw)
else:
event_key.base_listen(**kw)
@@ -722,7 +728,7 @@ class MapperEvents(event.Events):
"""
def append_result(self, mapper, context, row, target,
- result, **flags):
+ result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
@@ -759,7 +765,7 @@ class MapperEvents(event.Events):
"""
def populate_instance(self, mapper, context, row,
- target, **flags):
+ target, **flags):
"""Receive an instance before that instance has
its attributes populated.
@@ -1165,6 +1171,7 @@ class MapperEvents(event.Events):
"""
+
class _MapperEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
@@ -1215,11 +1222,11 @@ class SessionEvents(event.Events):
(
not isinstance(target, type) or
not issubclass(target, Session)
- ):
+ ):
raise exc.ArgumentError(
- "Session event listen on a scoped_session "
- "requires that its creation callable "
- "is associated with the Session class.")
+ "Session event listen on a scoped_session "
+ "requires that its creation callable "
+ "is associated with the Session class.")
if isinstance(target, sessionmaker):
return target.class_
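
Per the check above, a scoped_session is a valid listen target only when its session factory produces Session (or a Session subclass) instances; for example:

    from sqlalchemy import event
    from sqlalchemy.orm import scoped_session, sessionmaker

    Session = scoped_session(sessionmaker())

    def my_before_commit(session):
        # runs once per commit, not once per flush
        print("before commit")

    event.listen(Session, "before_commit", my_before_commit)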
@@ -1284,8 +1291,10 @@ class SessionEvents(event.Events):
The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
- For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
- :meth:`~.SessionEvents.after_flush`, or :meth:`~.SessionEvents.after_flush_postexec`
+ For interception of these events, use the
+ :meth:`~.SessionEvents.before_flush`,
+ :meth:`~.SessionEvents.after_flush`, or
+ :meth:`~.SessionEvents.after_flush_postexec`
events.
:param session: The target :class:`.Session`.
@@ -1310,16 +1319,19 @@ class SessionEvents(event.Events):
The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
- For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
- :meth:`~.SessionEvents.after_flush`, or :meth:`~.SessionEvents.after_flush_postexec`
+ For interception of these events, use the
+ :meth:`~.SessionEvents.before_flush`,
+ :meth:`~.SessionEvents.after_flush`, or
+ :meth:`~.SessionEvents.after_flush_postexec`
events.
.. note::
The :class:`.Session` is not in an active transaction
- when the :meth:`~.SessionEvents.after_commit` event is invoked, and therefore
- can not emit SQL. To emit SQL corresponding to every transaction,
- use the :meth:`~.SessionEvents.before_commit` event.
+ when the :meth:`~.SessionEvents.after_commit` event is invoked,
+ and therefore cannot emit SQL. To emit SQL corresponding to
+ every transaction, use the :meth:`~.SessionEvents.before_commit`
+ event.
:param session: The target :class:`.Session`.
@@ -1467,8 +1479,8 @@ class SessionEvents(event.Events):
This is called before an add, delete or merge causes
the object to be part of the session.
- .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now
- fires off after the item is part of the session.
+ .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach`
+ now fires off after the item is part of the session.
:meth:`.before_attach` is provided for those cases where
the item should not yet be part of the session state.
@@ -1501,12 +1513,12 @@ class SessionEvents(event.Events):
"""
@event._legacy_signature("0.9",
- ["session", "query", "query_context", "result"],
- lambda update_context: (
- update_context.session,
- update_context.query,
- update_context.context,
- update_context.result))
+ ["session", "query", "query_context", "result"],
+ lambda update_context: (
+ update_context.session,
+ update_context.query,
+ update_context.context,
+ update_context.result))
def after_bulk_update(self, update_context):
"""Execute after a bulk update operation to the session.
@@ -1516,8 +1528,8 @@ class SessionEvents(event.Events):
details about the update, including these attributes:
* ``session`` - the :class:`.Session` involved
- * ``query`` -the :class:`.Query` object that this update operation was
- called upon.
+ * ``query`` - the :class:`.Query` object that this update operation
+   was called upon.
* ``context`` - the :class:`.QueryContext` object, corresponding
to the invocation of an ORM query.
* ``result`` - the :class:`.ResultProxy` returned as a result of the
@@ -1527,12 +1539,12 @@ class SessionEvents(event.Events):
"""
@event._legacy_signature("0.9",
- ["session", "query", "query_context", "result"],
- lambda delete_context: (
- delete_context.session,
- delete_context.query,
- delete_context.context,
- delete_context.result))
+ ["session", "query", "query_context", "result"],
+ lambda delete_context: (
+ delete_context.session,
+ delete_context.query,
+ delete_context.context,
+ delete_context.result))
def after_bulk_delete(self, delete_context):
"""Execute after a bulk delete operation to the session.
@@ -1542,8 +1554,8 @@ class SessionEvents(event.Events):
details about the delete, including these attributes:
* ``session`` - the :class:`.Session` involved
- * ``query`` -the :class:`.Query` object that this update operation was
- called upon.
+ * ``query`` - the :class:`.Query` object that this delete operation
+   was called upon.
* ``context`` - the :class:`.QueryContext` object, corresponding
to the invocation of an ORM query.
* ``result`` - the :class:`.ResultProxy` returned as a result of the
@@ -1628,8 +1640,8 @@ class AttributeEvents(event.Events):
@classmethod
def _listen(cls, event_key, active_history=False,
- raw=False, retval=False,
- propagate=False):
+ raw=False, retval=False,
+ propagate=False):
target, identifier, fn = \
event_key.dispatch_target, event_key.identifier, event_key.fn
@@ -1654,7 +1666,8 @@ class AttributeEvents(event.Events):
manager = instrumentation.manager_of_class(target.class_)
for mgr in manager.subclass_managers(True):
- event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True)
+ event_key.with_dispatch_target(
+ mgr[target.key]).base_listen(propagate=True)
def append(self, target, value, initiator):
"""Receive a collection append event.
@@ -1672,8 +1685,9 @@ class AttributeEvents(event.Events):
chained event propagation.
.. versionchanged:: 0.9.0 the ``initiator`` argument is now
- passed as a :class:`.attributes.Event` object, and may be modified
- by backref handlers within a chain of backref-linked events.
+ passed as a :class:`.attributes.Event` object, and may be
+ modified by backref handlers within a chain of backref-linked
+ events.
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
@@ -1693,8 +1707,9 @@ class AttributeEvents(event.Events):
chained event propagation.
.. versionchanged:: 0.9.0 the ``initiator`` argument is now
- passed as a :class:`.attributes.Event` object, and may be modified
- by backref handlers within a chain of backref-linked events.
+ passed as a :class:`.attributes.Event` object, and may be
+ modified by backref handlers within a chain of backref-linked
+ events.
:return: No return value is defined for this event.
"""
@@ -1721,11 +1736,11 @@ class AttributeEvents(event.Events):
chained event propagation.
.. versionchanged:: 0.9.0 the ``initiator`` argument is now
- passed as a :class:`.attributes.Event` object, and may be modified
- by backref handlers within a chain of backref-linked events.
+ passed as a :class:`.attributes.Event` object, and may be
+ modified by backref handlers within a chain of backref-linked
+ events.
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
"""
-
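
The retval=True behavior described in these docstrings lets a listener replace the incoming value; a minimal sketch against a hypothetical mapped class SomeClass with a string collection attribute "items":

    from sqlalchemy import event

    def validate_item(target, value, initiator):
        # 'initiator' is an attributes.Event object as of 0.9; the
        # returned value becomes the effective appended value
        return value.strip()

    event.listen(SomeClass.items, "append", validate_item, retval=True)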
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index 11e69d221..ff0ece411 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -121,7 +121,7 @@ class ObjectDeletedError(sa_exc.InvalidRequestError):
def __init__(self, base, state, msg=None):
if not msg:
msg = "Instance '%s' has been deleted, or its "\
- "row is otherwise not present." % base.state_str(state)
+ "row is otherwise not present." % base.state_str(state)
sa_exc.InvalidRequestError.__init__(self, msg)
@@ -150,6 +150,7 @@ def _safe_cls_name(cls):
cls_name = repr(cls)
return cls_name
+
@util.dependencies("sqlalchemy.orm.base")
def _default_unmapped(base, cls):
try:
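
ObjectDeletedError, whose message is re-wrapped above, surfaces when a refresh finds the row gone; a hedged sketch with hypothetical session and user objects:

    from sqlalchemy.orm.exc import ObjectDeletedError

    try:
        user.name  # touching an expired attribute triggers a refresh
    except ObjectDeletedError:
        session.expunge(user)  # discard the stale instance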
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 745b9d569..d9cdd791f 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -9,6 +9,7 @@ import weakref
from . import attributes
from .. import util
+
class IdentityMap(object):
def __init__(self):
self._dict = {}
@@ -237,7 +238,6 @@ class StrongInstanceDict(IdentityMap):
def items(self):
return self._dict.items()
-
def all_states(self):
return [attributes.instance_state(o) for o in self.values()]
@@ -262,8 +262,8 @@ class StrongInstanceDict(IdentityMap):
if state.key in self:
if attributes.instance_state(self._dict[state.key]) is not state:
raise AssertionError('A conflicting state is already '
- 'present in the identity map for key %r'
- % (state.key, ))
+ 'present in the identity map for key %r'
+ % (state.key, ))
else:
self._dict[state.key] = state.obj()
self._manage_incoming_state(state)
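
The identity-map bookkeeping above is observable through the public Session API; a sketch assuming a hypothetical mapped User class:

    user = session.query(User).get(5)
    # the identity map guarantees one instance per identity key
    assert session.query(User).get(5) is user
    assert user in session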
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index fd74704df..f58b8807f 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -34,6 +34,7 @@ from . import exc, collections, interfaces, state
from .. import util
from . import base
+
class ClassManager(dict):
"""tracks state information at the class level."""
@@ -54,16 +55,16 @@ class ClassManager(dict):
self.originals = {}
self._bases = [mgr for mgr in [
- manager_of_class(base)
- for base in self.class_.__bases__
- if isinstance(base, type)
- ] if mgr is not None]
+ manager_of_class(base)
+ for base in self.class_.__bases__
+ if isinstance(base, type)
+ ] if mgr is not None]
for base in self._bases:
self.update(base)
self.dispatch._events._new_classmanager_instance(class_, self)
- #events._InstanceEventsHold.populate(class_, self)
+ # events._InstanceEventsHold.populate(class_, self)
for basecls in class_.__mro__:
mgr = manager_of_class(basecls)
@@ -74,10 +75,10 @@ class ClassManager(dict):
if '__del__' in class_.__dict__:
util.warn("__del__() method on class %s will "
- "cause unreachable cycles and memory leaks, "
- "as SQLAlchemy instrumentation often creates "
- "reference cycles. Please remove this method." %
- class_)
+ "cause unreachable cycles and memory leaks, "
+ "as SQLAlchemy instrumentation often creates "
+ "reference cycles. Please remove this method." %
+ class_)
def __hash__(self):
return id(self)
@@ -99,7 +100,8 @@ class ClassManager(dict):
implement :class:`._InspectionAttr`.
This includes :class:`.QueryableAttribute` as well as extension
- types such as :class:`.hybrid_property` and :class:`.AssociationProxy`.
+ types such as :class:`.hybrid_property` and
+ :class:`.AssociationProxy`.
"""
if exclude is None:
@@ -111,7 +113,6 @@ class ClassManager(dict):
if isinstance(val, interfaces._InspectionAttr):
yield key, val
-
def _attr_has_impl(self, key):
"""Return True if the given attribute is fully initialized.
@@ -185,7 +186,6 @@ class ClassManager(dict):
def dict_getter(self):
return _default_dict_getter
-
def instrument_attribute(self, key, inst, propagated=False):
if propagated:
if key in self.local_attrs:
@@ -210,7 +210,7 @@ class ClassManager(dict):
def post_configure_attribute(self, key):
_instrumentation_factory.dispatch.\
- attribute_instrument(self.class_, key, self[key])
+ attribute_instrument(self.class_, key, self[key])
def uninstrument_attribute(self, key, propagated=False):
if key not in self:
@@ -284,19 +284,19 @@ class ClassManager(dict):
def attributes(self):
return iter(self.values())
- ## InstanceState management
+ # InstanceState management
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
setattr(instance, self.STATE_ATTR,
- self._state_constructor(instance, self)
- if not state else state)
+ self._state_constructor(instance, self)
+ if not state else state)
return instance
def setup_instance(self, instance, state=None):
setattr(instance, self.STATE_ATTR,
- self._state_constructor(instance, self)
- if not state else state)
+ self._state_constructor(instance, self)
+ if not state else state)
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
@@ -320,7 +320,7 @@ class ClassManager(dict):
# to be constructed, so that it is usable
# in a non-ORM context at least.
return self._subclass_manager(instance.__class__).\
- _new_state_if_none(instance)
+ _new_state_if_none(instance)
else:
state = self._state_constructor(instance, self)
setattr(instance, self.STATE_ATTR, state)
@@ -343,6 +343,7 @@ class ClassManager(dict):
return '<%s of %r at %x>' % (
self.__class__.__name__, self.class_, id(self))
+
class _SerializeManager(object):
"""Provide serialization of a :class:`.ClassManager`.
@@ -350,6 +351,7 @@ class _SerializeManager(object):
and ``__call__()`` on deserialize.
"""
+
def __init__(self, state, d):
self.class_ = state.class_
manager = state.manager
@@ -359,12 +361,12 @@ class _SerializeManager(object):
state.manager = manager = manager_of_class(self.class_)
if manager is None:
raise exc.UnmappedInstanceError(
- inst,
- "Cannot deserialize object of type %r - "
- "no mapper() has "
- "been configured for this class within the current "
- "Python process!" %
- self.class_)
+ inst,
+ "Cannot deserialize object of type %r - "
+ "no mapper() has "
+ "been configured for this class within the current "
+ "Python process!" %
+ self.class_)
elif manager.is_mapped and not manager.mapper.configured:
manager.mapper._configure_all()
@@ -375,6 +377,7 @@ class _SerializeManager(object):
manager.setup_instance(inst, state)
manager.dispatch.unpickle(state, state_dict)
+
class InstrumentationFactory(object):
"""Factory for new ClassManager instances."""
@@ -426,6 +429,7 @@ instance_dict = _default_dict_getter = base.instance_dict
manager_of_class = _default_manager_getter = base.manager_of_class
+
def register_class(class_):
"""Register class instrumentation.
@@ -454,7 +458,7 @@ def is_instrumented(instance, key):
"""
return manager_of_class(instance.__class__).\
- is_instrumented(key, search=True)
+ is_instrumented(key, search=True)
def _generate_init(class_, class_manager):
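
The module-level helpers re-wrapped above can also be used directly; a sketch assuming a mapped User class and instance (hypothetical names):

    from sqlalchemy.orm import instrumentation

    manager = instrumentation.manager_of_class(User)
    assert manager.is_mapped
    # checks the attribute across the class hierarchy (search=True)
    assert instrumentation.is_instrumented(user, "name")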
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index d5e430506..9bc1c3dd0 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -22,7 +22,8 @@ from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
-from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
+from .base import (ONETOMANY, MANYTOONE, MANYTOMANY,
+ EXT_CONTINUE, EXT_STOP, NOT_EXTENSION)
from .base import _InspectionAttr, _MappedAttribute
from .path_registry import PathRegistry
import collections
@@ -43,8 +44,7 @@ __all__ = (
'PropComparator',
'SessionExtension',
'StrategizedProperty',
- )
-
+)
class MapperProperty(_MappedAttribute, _InspectionAttr):
@@ -82,14 +82,14 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
pass
def create_row_processor(self, context, path,
- mapper, row, adapter):
+ mapper, row, adapter):
"""Return a 3-tuple consisting of three row processing functions.
"""
return None, None, None
def cascade_iterator(self, type_, state, visited_instances=None,
- halt_on=None):
+ halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
@@ -200,7 +200,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
return not self.parent.non_primary
def merge(self, session, source_state, source_dict, dest_state,
- dest_dict, load, _recursive):
+ dest_dict, load, _recursive):
"""Merge the attribute represented by this ``MapperProperty``
from source to destination object"""
@@ -223,6 +223,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
self.__class__.__name__,
id(self), getattr(self, 'key', 'no key'))
+
class PropComparator(operators.ColumnOperators):
"""Defines boolean, comparison, and other operators for
:class:`.MapperProperty` objects.
@@ -434,10 +435,10 @@ class StrategizedProperty(MapperProperty):
# search among: exact match, "attr.*", "default" strategy
# if any.
for path_key in (
- search_path._loader_key,
- search_path._wildcard_path_loader_key,
- search_path._default_path_loader_key
- ):
+ search_path._loader_key,
+ search_path._wildcard_path_loader_key,
+ search_path._default_path_loader_key
+ ):
if path_key in context.attributes:
load = context.attributes[path_key]
break
@@ -449,7 +450,8 @@ class StrategizedProperty(MapperProperty):
return self._strategies[key]
except KeyError:
cls = self._strategy_lookup(*key)
- self._strategies[key] = self._strategies[cls] = strategy = cls(self)
+ self._strategies[key] = self._strategies[
+ cls] = strategy = cls(self)
return strategy
def _get_strategy_by_cls(self, cls):
@@ -470,7 +472,7 @@ class StrategizedProperty(MapperProperty):
else:
strat = self.strategy
return strat.create_row_processor(context, path, loader,
- mapper, row, adapter)
+ mapper, row, adapter)
def do_init(self):
self._strategies = {}
@@ -478,10 +480,9 @@ class StrategizedProperty(MapperProperty):
def post_instrument_class(self, mapper):
if self.is_primary() and \
- not mapper.class_manager._attr_has_impl(self.key):
+ not mapper.class_manager._attr_has_impl(self.key):
self.strategy.init_class_attribute(mapper)
-
_strategies = collections.defaultdict(dict)
@classmethod
@@ -527,8 +528,6 @@ class MapperOption(object):
self.process_query(query)
-
-
class LoaderStrategy(object):
"""Describe the loading behavior of a StrategizedProperty object.
@@ -552,6 +551,7 @@ class LoaderStrategy(object):
on a particular mapped instance.
"""
+
def __init__(self, parent):
self.parent_property = parent
self.is_class_level = False
@@ -565,7 +565,7 @@ class LoaderStrategy(object):
pass
def create_row_processor(self, context, path, loadopt, mapper,
- row, adapter):
+ row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
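
A MapperOption participates in query compilation through process_query(); a minimal sketch of a custom option (hypothetical; note that query._attributes is an internal dict):

    from sqlalchemy.orm.interfaces import MapperOption

    class StampOption(MapperOption):
        propagate_to_loaders = False

        def process_query(self, query):
            # mutate the Query in place when applied via .options()
            query._attributes["stamped"] = True

It would be applied as session.query(User).options(StampOption()).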
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 3c152717c..232eb89de 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -31,11 +31,11 @@ def instances(query, cursor, context):
context.runid = _new_runid()
filter_fns = [ent.filter_fn
- for ent in query._entities]
+ for ent in query._entities]
filtered = id in filter_fns
single_entity = len(query._entities) == 1 and \
- query._entities[0].supports_single_entity
+ query._entities[0].supports_single_entity
if filtered:
if single_entity:
@@ -45,14 +45,14 @@ def instances(query, cursor, context):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
custom_rows = single_entity and \
- query._entities[0].custom_rows
+ query._entities[0].custom_rows
(process, labels) = \
- list(zip(*[
- query_entity.row_processor(query,
- context, custom_rows)
- for query_entity in query._entities
- ]))
+ list(zip(*[
+ query_entity.row_processor(query,
+ context, custom_rows)
+ for query_entity in query._entities
+ ]))
while True:
context.progress = {}
@@ -79,9 +79,9 @@ def instances(query, cursor, context):
rows = util.unique_list(rows, filter_fn)
if context.refresh_state and query._only_load_props \
- and context.refresh_state in context.progress:
+ and context.refresh_state in context.progress:
context.refresh_state._commit(
- context.refresh_state.dict, query._only_load_props)
+ context.refresh_state.dict, query._only_load_props)
context.progress.pop(context.refresh_state)
statelib.InstanceState._commit_all_states(
@@ -115,15 +115,15 @@ def merge_result(querylib, query, iterator, load=True):
if single_entity:
if isinstance(query._entities[0], querylib._MapperEntity):
result = [session._merge(
- attributes.instance_state(instance),
- attributes.instance_dict(instance),
- load=load, _recursive={})
- for instance in iterator]
+ attributes.instance_state(instance),
+ attributes.instance_dict(instance),
+ load=load, _recursive={})
+ for instance in iterator]
else:
result = list(iterator)
else:
mapped_entities = [i for i, e in enumerate(query._entities)
- if isinstance(e, querylib._MapperEntity)]
+ if isinstance(e, querylib._MapperEntity)]
result = []
keys = [ent._label_name for ent in query._entities]
for row in iterator:
@@ -131,9 +131,9 @@ def merge_result(querylib, query, iterator, load=True):
for i in mapped_entities:
if newrow[i] is not None:
newrow[i] = session._merge(
- attributes.instance_state(newrow[i]),
- attributes.instance_dict(newrow[i]),
- load=load, _recursive={})
+ attributes.instance_state(newrow[i]),
+ attributes.instance_dict(newrow[i]),
+ load=load, _recursive={})
result.append(util.KeyedTuple(newrow, keys))
return iter(result)
@@ -171,8 +171,8 @@ def get_from_identity(session, key, passive):
def load_on_ident(query, key,
- refresh_state=None, lockmode=None,
- only_load_props=None):
+ refresh_state=None, lockmode=None,
+ only_load_props=None):
"""Load the given identity key from the database."""
if key is not None:
@@ -196,10 +196,10 @@ def load_on_ident(query, key,
if None in ident:
nones = set([
_get_params[col].key for col, value in
- zip(mapper.primary_key, ident) if value is None
+ zip(mapper.primary_key, ident) if value is None
])
_get_clause = sql_util.adapt_criterion_to_null(
- _get_clause, nones)
+ _get_clause, nones)
_get_clause = q._adapt_clause(_get_clause, True, False)
q._criterion = _get_clause
@@ -234,11 +234,10 @@ def load_on_ident(query, key,
def instance_processor(mapper, context, path, adapter,
- polymorphic_from=None,
- only_load_props=None,
- refresh_state=None,
- polymorphic_discriminator=None):
-
+ polymorphic_from=None,
+ only_load_props=None,
+ refresh_state=None,
+ polymorphic_discriminator=None):
"""Produce a mapper level row processor callable
which processes rows into mapped instances."""
@@ -258,10 +257,10 @@ def instance_processor(mapper, context, path, adapter,
else:
polymorphic_on = mapper.polymorphic_on
polymorphic_instances = util.PopulateDict(
- _configure_subclass_mapper(
- mapper,
- context, path, adapter)
- )
+ _configure_subclass_mapper(
+ mapper,
+ context, path, adapter)
+ )
version_id_col = mapper.version_id_col
@@ -279,8 +278,8 @@ def instance_processor(mapper, context, path, adapter,
eager_populators = []
load_path = context.query._current_path + path \
- if context.query._current_path.path \
- else path
+ if context.query._current_path.path \
+ else path
def populate_state(state, dict_, row, isnew, only_load_props):
if isnew:
@@ -291,10 +290,10 @@ def instance_processor(mapper, context, path, adapter,
if not new_populators:
_populators(mapper, context, path, row, adapter,
- new_populators,
- existing_populators,
- eager_populators
- )
+ new_populators,
+ existing_populators,
+ eager_populators
+ )
if isnew:
populators = new_populators
@@ -313,7 +312,7 @@ def instance_processor(mapper, context, path, adapter,
listeners = mapper.dispatch
- ### legacy events - I'd very much like to yank these totally
+ # legacy events - I'd very much like to yank these totally
translate_row = listeners.translate_row or None
create_instance = listeners.create_instance or None
populate_instance = listeners.populate_instance or None
@@ -335,9 +334,9 @@ def instance_processor(mapper, context, path, adapter,
def _instance(row, result):
if not new_populators and invoke_all_eagers:
_populators(mapper, context, path, row, adapter,
- new_populators,
- existing_populators,
- eager_populators)
+ new_populators,
+ existing_populators,
+ eager_populators)
if translate_row:
for fn in translate_row:
@@ -363,9 +362,9 @@ def instance_processor(mapper, context, path, adapter,
identitykey = mapper._identity_key_from_state(refresh_state)
else:
identitykey = (
- identity_class,
- tuple([row[column] for column in pk_cols])
- )
+ identity_class,
+ tuple([row[column] for column in pk_cols])
+ )
instance = session_identity_map.get(identitykey)
@@ -381,19 +380,19 @@ def instance_processor(mapper, context, path, adapter,
version_id_col is not None and \
context.version_check and \
mapper._get_state_attr_by_column(
- state,
- dict_,
- mapper.version_id_col) != \
- row[version_id_col]:
+ state,
+ dict_,
+ mapper.version_id_col) != \
+ row[version_id_col]:
raise orm_exc.StaleDataError(
- "Instance '%s' has version id '%s' which "
- "does not match database-loaded version id '%s'."
- % (state_str(state),
- mapper._get_state_attr_by_column(
- state, dict_,
- mapper.version_id_col),
- row[version_id_col]))
+ "Instance '%s' has version id '%s' which "
+ "does not match database-loaded version id '%s'."
+ % (state_str(state),
+ mapper._get_state_attr_by_column(
+ state, dict_,
+ mapper.version_id_col),
+ row[version_id_col]))
elif refresh_state:
# out of band refresh_state detected (i.e. its not in the
# session.identity_map) honor it anyway. this can happen
@@ -418,10 +417,10 @@ def instance_processor(mapper, context, path, adapter,
if create_instance:
for fn in create_instance:
instance = fn(mapper, context,
- row, mapper.class_)
+ row, mapper.class_)
if instance is not EXT_CONTINUE:
manager = attributes.manager_of_class(
- instance.__class__)
+ instance.__class__)
# TODO: if manager is None, raise a friendly error
# about returning instances of unmapped types
manager.setup_instance(instance)
@@ -449,8 +448,8 @@ def instance_processor(mapper, context, path, adapter,
if populate_instance:
for fn in populate_instance:
ret = fn(mapper, context, row, state,
- only_load_props=only_load_props,
- instancekey=identitykey, isnew=isnew)
+ only_load_props=only_load_props,
+ instancekey=identitykey, isnew=isnew)
if ret is not EXT_CONTINUE:
break
else:
@@ -461,7 +460,8 @@ def instance_processor(mapper, context, path, adapter,
if loaded_instance and load_evt:
state.manager.dispatch.load(state, context)
elif isnew and refresh_evt:
- state.manager.dispatch.refresh(state, context, only_load_props)
+ state.manager.dispatch.refresh(
+ state, context, only_load_props)
elif state in context.partials or state.unloaded or eager_populators:
# state is having a partial set of its attributes
@@ -478,8 +478,8 @@ def instance_processor(mapper, context, path, adapter,
if populate_instance:
for fn in populate_instance:
ret = fn(mapper, context, row, state,
- only_load_props=attrs,
- instancekey=identitykey, isnew=isnew)
+ only_load_props=attrs,
+ instancekey=identitykey, isnew=isnew)
if ret is not EXT_CONTINUE:
break
else:
@@ -498,8 +498,8 @@ def instance_processor(mapper, context, path, adapter,
if append_result:
for fn in append_result:
if fn(mapper, context, row, state,
- result, instancekey=identitykey,
- isnew=isnew) is not EXT_CONTINUE:
+ result, instancekey=identitykey,
+ isnew=isnew) is not EXT_CONTINUE:
break
else:
result.append(instance)
@@ -511,20 +511,20 @@ def instance_processor(mapper, context, path, adapter,
def _populators(mapper, context, path, row, adapter,
- new_populators, existing_populators, eager_populators):
+ new_populators, existing_populators, eager_populators):
"""Produce a collection of attribute level row processor
callables."""
delayed_populators = []
pops = (new_populators, existing_populators, delayed_populators,
- eager_populators)
+ eager_populators)
for prop in mapper._props.values():
for i, pop in enumerate(prop.create_row_processor(
- context,
- path,
- mapper, row, adapter)):
+ context,
+ path,
+ mapper, row, adapter)):
if pop is not None:
pops[i].append((prop.key, pop))
@@ -541,30 +541,30 @@ def _configure_subclass_mapper(mapper, context, path, adapter):
sub_mapper = mapper.polymorphic_map[discriminator]
except KeyError:
raise AssertionError(
- "No such polymorphic_identity %r is defined" %
- discriminator)
+ "No such polymorphic_identity %r is defined" %
+ discriminator)
if sub_mapper is mapper:
return None
return instance_processor(
- sub_mapper,
- context,
- path,
- adapter,
- polymorphic_from=mapper)
+ sub_mapper,
+ context,
+ path,
+ adapter,
+ polymorphic_from=mapper)
return configure_subclass_mapper
def load_scalar_attributes(mapper, state, attribute_names):
"""initiate a column-based attribute refresh operation."""
- #assert mapper is _state_mapper(state)
+ # assert mapper is _state_mapper(state)
session = state.session
if not session:
raise orm_exc.DetachedInstanceError(
- "Instance %s is not bound to a Session; "
- "attribute refresh operation cannot proceed" %
- (state_str(state)))
+ "Instance %s is not bound to a Session; "
+ "attribute refresh operation cannot proceed" %
+ (state_str(state)))
has_key = bool(state.key)
@@ -574,11 +574,11 @@ def load_scalar_attributes(mapper, state, attribute_names):
statement = mapper._optimized_get_statement(state, attribute_names)
if statement is not None:
result = load_on_ident(
- session.query(mapper).from_statement(statement),
- None,
- only_load_props=attribute_names,
- refresh_state=state
- )
+ session.query(mapper).from_statement(statement),
+ None,
+ only_load_props=attribute_names,
+ refresh_state=state
+ )
if result is False:
if has_key:
@@ -592,25 +592,25 @@ def load_scalar_attributes(mapper, state, attribute_names):
for col in mapper.primary_key]
if state.expired_attributes.intersection(pk_attrs):
raise sa_exc.InvalidRequestError(
- "Instance %s cannot be refreshed - it's not "
- " persistent and does not "
- "contain a full primary key." % state_str(state))
+ "Instance %s cannot be refreshed - it's not "
+ " persistent and does not "
+ "contain a full primary key." % state_str(state))
identity_key = mapper._identity_key_from_state(state)
- if (_none_set.issubset(identity_key) and \
+ if (_none_set.issubset(identity_key) and
not mapper.allow_partial_pks) or \
_none_set.issuperset(identity_key):
util.warn("Instance %s to be refreshed doesn't "
- "contain a full primary key - can't be refreshed "
- "(and shouldn't be expired, either)."
- % state_str(state))
+ "contain a full primary key - can't be refreshed "
+ "(and shouldn't be expired, either)."
+ % state_str(state))
return
result = load_on_ident(
- session.query(mapper),
- identity_key,
- refresh_state=state,
- only_load_props=attribute_names)
+ session.query(mapper),
+ identity_key,
+ refresh_state=state,
+ only_load_props=attribute_names)
# if instance is pending, a refresh operation
# may not complete (even if PK attributes are assigned)
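
load_scalar_attributes() above is the code path behind attribute-level refresh; from the public API it is typically reached by expiring attributes and then touching them (sketch, hypothetical session and user):

    session.expire(user, ["name"])  # mark 'name' as expired
    user.name                       # emits a SELECT that refreshes it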
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 6c1b149bb..7e5166393 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -29,7 +29,7 @@ from . import util as orm_util
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
from .base import _class_to_mapper, _state_mapper, class_mapper, \
- state_str, _INSTRUMENTOR
+ state_str, _INSTRUMENTOR
from .path_registry import PathRegistry
import sys
@@ -214,13 +214,14 @@ class Mapper(_InspectionAttr):
:param confirm_deleted_rows: defaults to True; when a DELETE occurs
of one or more rows based on specific primary keys, a warning is
emitted when the number of rows matched does not equal the number
- of rows expected. This parameter may be set to False to handle the case
- where database ON DELETE CASCADE rules may be deleting some of those
- rows automatically. The warning may be changed to an exception
- in a future release.
+ of rows expected. This parameter may be set to False to handle the
+ case where database ON DELETE CASCADE rules may be deleting some of
+ those rows automatically. The warning may be changed to an
+ exception in a future release.
- .. versionadded:: 0.9.4 - added :paramref:`.mapper.confirm_deleted_rows`
- as well as conditional matched row checking on delete.
+ .. versionadded:: 0.9.4 - added
+ :paramref:`.mapper.confirm_deleted_rows` as well as conditional
+ matched row checking on delete.
:param eager_defaults: if True, the ORM will immediately fetch the
value of server-generated default values after an INSERT or UPDATE,
@@ -230,8 +231,8 @@ class Mapper(_InspectionAttr):
this scheme will emit an individual ``SELECT`` statement per row
inserted or updated, which can add significant performance
overhead. However, if the
- target database supports :term:`RETURNING`, the default values will be
- returned inline with the INSERT or UPDATE statement, which can
+ target database supports :term:`RETURNING`, the default values will
+ be returned inline with the INSERT or UPDATE statement, which can
greatly enhance performance for an application that needs frequent
access to just-generated server defaults.
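
A sketch of the eager_defaults behavior described above, in declarative form (names hypothetical, assumes a standard declarative Base and session):

    from sqlalchemy import Column, Integer, DateTime, func

    class Widget(Base):
        __tablename__ = "widget"
        id = Column(Integer, primary_key=True)
        created = Column(DateTime, server_default=func.now())
        __mapper_args__ = {"eager_defaults": True}

    w = Widget()
    session.add(w)
    session.flush()
    w.created  # already fetched post-flush; no extra lazy load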
@@ -269,10 +270,10 @@ class Mapper(_InspectionAttr):
define how the two tables are joined; defaults to a natural join
between the two tables.
- :param inherit_foreign_keys: When ``inherit_condition`` is used and the
- columns present are missing a :class:`.ForeignKey` configuration,
- this parameter can be used to specify which columns are "foreign".
- In most cases can be left as ``None``.
+ :param inherit_foreign_keys: When ``inherit_condition`` is used and
+ the columns present are missing a :class:`.ForeignKey`
+ configuration, this parameter can be used to specify which columns
+ are "foreign". In most cases can be left as ``None``.
:param legacy_is_orphan: Boolean, defaults to ``False``.
When ``True``, specifies that "legacy" orphan consideration
@@ -280,12 +281,12 @@ class Mapper(_InspectionAttr):
that a pending (that is, not persistent) object is auto-expunged
from an owning :class:`.Session` only when it is de-associated
from *all* parents that specify a ``delete-orphan`` cascade towards
- this mapper. The new default behavior is that the object is auto-expunged
- when it is de-associated with *any* of its parents that specify
- ``delete-orphan`` cascade. This behavior is more consistent with
- that of a persistent object, and allows behavior to be consistent
- in more scenarios independently of whether or not an orphanable
- object has been flushed yet or not.
+ this mapper. The new default behavior is that the object is
+ auto-expunged when it is de-associated with *any* of its parents
+ that specify ``delete-orphan`` cascade. This behavior is more
+ consistent with that of a persistent object, and allows behavior to
+ be consistent in more scenarios independently of whether an
+ orphanable object has been flushed yet.
See the change note and example at :ref:`legacy_is_orphan_addition`
for more detail on this change.
@@ -296,9 +297,9 @@ class Mapper(_InspectionAttr):
is expunged from the :class:`.Session` as soon as it is
de-associated from any of its orphan-enabled parents. Previously,
the pending object would be expunged only if de-associated
- from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
- is added to :func:`.orm.mapper` which re-establishes the
- legacy behavior.
+ from all of its orphan-enabled parents. The new flag
+ ``legacy_is_orphan`` is added to :func:`.orm.mapper` which
+ re-establishes the legacy behavior.
:param non_primary: Specify that this :class:`.Mapper` is in addition
to the "primary" mapper, that is, the one used for persistence.
@@ -447,8 +448,8 @@ class Mapper(_InspectionAttr):
based on all those :class:`.MapperProperty` instances declared
in the declared class body.
- :param primary_key: A list of :class:`.Column` objects which define the
- primary key to be used against this mapper's selectable unit.
+ :param primary_key: A list of :class:`.Column` objects which define
+ the primary key to be used against this mapper's selectable unit.
This is normally simply the primary key of the ``local_table``, but
can be overridden here.
@@ -478,13 +479,13 @@ class Mapper(_InspectionAttr):
return next_version
Alternatively, server-side versioning functions such as triggers,
- or programmatic versioning schemes outside of the version id generator
- may be used, by specifying the value ``False``.
+ or programmatic versioning schemes outside of the version id
+ generator may be used, by specifying the value ``False``.
Please see :ref:`server_side_version_counter` for a discussion
of important points when using this option.
- .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
- version number generation.
+ .. versionadded:: 0.9.0 ``version_id_generator`` supports
+ server-side version number generation.
.. seealso::
@@ -505,7 +506,8 @@ class Mapper(_InspectionAttr):
.. seealso::
- :ref:`with_polymorphic` - discussion of polymorphic querying techniques.
+ :ref:`with_polymorphic` - discussion of polymorphic querying
+ techniques.
"""
@@ -547,7 +549,7 @@ class Mapper(_InspectionAttr):
self.eager_defaults = eager_defaults
self.column_prefix = column_prefix
self.polymorphic_on = expression._clause_element_as_expr(
- polymorphic_on)
+ polymorphic_on)
self._dependency_processors = []
self.validators = util.immutabledict()
self.passive_updates = passive_updates
@@ -574,13 +576,13 @@ class Mapper(_InspectionAttr):
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
- )
+ )
if self.with_polymorphic and \
- isinstance(self.with_polymorphic[1],
- expression.SelectBase):
+ isinstance(self.with_polymorphic[1],
+ expression.SelectBase):
self.with_polymorphic = (self.with_polymorphic[0],
- self.with_polymorphic[1].alias())
+ self.with_polymorphic[1].alias())
# our 'polymorphic identity', a string name that when located in a
# result set row indicates this Mapper should be used to construct
@@ -892,20 +894,20 @@ class Mapper(_InspectionAttr):
self.inherits = class_mapper(self.inherits, configure=False)
if not issubclass(self.class_, self.inherits.class_):
raise sa_exc.ArgumentError(
- "Class '%s' does not inherit from '%s'" %
- (self.class_.__name__, self.inherits.class_.__name__))
+ "Class '%s' does not inherit from '%s'" %
+ (self.class_.__name__, self.inherits.class_.__name__))
if self.non_primary != self.inherits.non_primary:
np = not self.non_primary and "primary" or "non-primary"
raise sa_exc.ArgumentError(
- "Inheritance of %s mapper for class '%s' is "
- "only allowed from a %s mapper" %
- (np, self.class_.__name__, np))
+ "Inheritance of %s mapper for class '%s' is "
+ "only allowed from a %s mapper" %
+ (np, self.class_.__name__, np))
# inherit_condition is optional.
if self.local_table is None:
self.local_table = self.inherits.local_table
self.mapped_table = self.inherits.mapped_table
self.single = True
- elif not self.local_table is self.inherits.local_table:
+ elif self.local_table is not self.inherits.local_table:
if self.concrete:
self.mapped_table = self.local_table
for mapper in self.iterate_to_root():
@@ -918,17 +920,18 @@ class Mapper(_InspectionAttr):
# full table which could pull in other stuff we don't
# want (allows test/inheritance.InheritTest4 to pass)
self.inherit_condition = sql_util.join_condition(
- self.inherits.local_table,
- self.local_table)
+ self.inherits.local_table,
+ self.local_table)
self.mapped_table = sql.join(
- self.inherits.mapped_table,
- self.local_table,
- self.inherit_condition)
+ self.inherits.mapped_table,
+ self.local_table,
+ self.inherit_condition)
fks = util.to_set(self.inherit_foreign_keys)
- self._inherits_equated_pairs = sql_util.criterion_as_pairs(
- self.mapped_table.onclause,
- consider_as_foreign_keys=fks)
+ self._inherits_equated_pairs = \
+ sql_util.criterion_as_pairs(
+ self.mapped_table.onclause,
+ consider_as_foreign_keys=fks)
else:
self.mapped_table = self.local_table
@@ -941,7 +944,7 @@ class Mapper(_InspectionAttr):
self.version_id_col = self.inherits.version_id_col
self.version_id_generator = self.inherits.version_id_generator
elif self.inherits.version_id_col is not None and \
- self.version_id_col is not self.inherits.version_id_col:
+ self.version_id_col is not self.inherits.version_id_col:
util.warn(
"Inheriting version_id_col '%s' does not match inherited "
"version_id_col '%s' and will not automatically populate "
@@ -949,12 +952,12 @@ class Mapper(_InspectionAttr):
"version_id_col should only be specified on "
"the base-most mapper that includes versioning." %
(self.version_id_col.description,
- self.inherits.version_id_col.description)
+ self.inherits.version_id_col.description)
)
if self.order_by is False and \
- not self.concrete and \
- self.inherits.order_by is not False:
+ not self.concrete and \
+ self.inherits.order_by is not False:
self.order_by = self.inherits.order_by
self.polymorphic_map = self.inherits.polymorphic_map
@@ -977,14 +980,15 @@ class Mapper(_InspectionAttr):
if self.mapped_table is None:
raise sa_exc.ArgumentError(
- "Mapper '%s' does not have a mapped_table specified."
- % self)
+ "Mapper '%s' does not have a mapped_table specified."
+ % self)
def _set_with_polymorphic(self, with_polymorphic):
if with_polymorphic == '*':
self.with_polymorphic = ('*', None)
elif isinstance(with_polymorphic, (tuple, list)):
- if isinstance(with_polymorphic[0], util.string_types + (tuple, list)):
+ if isinstance(
+ with_polymorphic[0], util.string_types + (tuple, list)):
self.with_polymorphic = with_polymorphic
else:
self.with_polymorphic = (with_polymorphic, None)
@@ -999,13 +1003,13 @@ class Mapper(_InspectionAttr):
"an alias() of the construct instead."
"This because several databases don't allow a "
"SELECT from a subquery that does not have an alias."
- )
+ )
if self.with_polymorphic and \
- isinstance(self.with_polymorphic[1],
- expression.SelectBase):
+ isinstance(self.with_polymorphic[1],
+ expression.SelectBase):
self.with_polymorphic = (self.with_polymorphic[0],
- self.with_polymorphic[1].alias())
+ self.with_polymorphic[1].alias())
if self.configured:
self._expire_memoizations()
@@ -1032,7 +1036,7 @@ class Mapper(_InspectionAttr):
for key, prop in mapper._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
- column=None):
+ column=None):
self._adapt_inherited_property(key, prop, False)
def _set_polymorphic_on(self, polymorphic_on):
@@ -1044,7 +1048,7 @@ class Mapper(_InspectionAttr):
if self.inherits:
self.dispatch._update(self.inherits.dispatch)
super_extensions = set(
- chain(*[m._deprecated_extensions
+ chain(*[m._deprecated_extensions
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
@@ -1056,7 +1060,7 @@ class Mapper(_InspectionAttr):
def _configure_listeners(self):
if self.inherits:
super_extensions = set(
- chain(*[m._deprecated_extensions
+ chain(*[m._deprecated_extensions
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
@@ -1098,7 +1102,7 @@ class Mapper(_InspectionAttr):
"create a non primary Mapper. clear_mappers() will "
"remove *all* current mappers from all classes." %
self.class_)
- #else:
+ # else:
# a ClassManager may already exist as
# ClassManager.instrument_attribute() creates
# new managers for each subclass if they don't yet exist.
@@ -1114,7 +1118,7 @@ class Mapper(_InspectionAttr):
manager.mapper = self
manager.deferred_scalar_loader = util.partial(
- loading.load_scalar_attributes, self)
+ loading.load_scalar_attributes, self)
# The remaining members can be added by any mapper,
# e_name None or not.
@@ -1139,7 +1143,6 @@ class Mapper(_InspectionAttr):
manager.info[_INSTRUMENTOR] = self
-
@classmethod
def _configure_all(cls):
"""Class-level path to the :func:`.configure_mappers` call.
@@ -1167,8 +1170,8 @@ class Mapper(_InspectionAttr):
self._cols_by_table = {}
all_cols = util.column_set(chain(*[
- col.proxy_set for col in
- self._columntoproperty]))
+ col.proxy_set for col in
+ self._columntoproperty]))
pk_cols = util.column_set(c for c in all_cols if c.primary_key)
@@ -1180,11 +1183,11 @@ class Mapper(_InspectionAttr):
# ordering is important since it determines the ordering of
# mapper.primary_key (and therefore query.get())
self._pks_by_table[t] = \
- util.ordered_column_set(t.primary_key).\
- intersection(pk_cols)
+ util.ordered_column_set(t.primary_key).\
+ intersection(pk_cols)
self._cols_by_table[t] = \
- util.ordered_column_set(t.c).\
- intersection(all_cols)
+ util.ordered_column_set(t.c).\
+ intersection(all_cols)
# determine cols that aren't expressed within our tables; mark these
# as "read only" properties which are refreshed upon INSERT/UPDATE
@@ -1204,17 +1207,17 @@ class Mapper(_InspectionAttr):
# otherwise, see that we got a full PK for the mapped table
elif self.mapped_table not in self._pks_by_table or \
- len(self._pks_by_table[self.mapped_table]) == 0:
- raise sa_exc.ArgumentError(
- "Mapper %s could not assemble any primary "
- "key columns for mapped table '%s'" %
- (self, self.mapped_table.description))
+ len(self._pks_by_table[self.mapped_table]) == 0:
+ raise sa_exc.ArgumentError(
+ "Mapper %s could not assemble any primary "
+ "key columns for mapped table '%s'" %
+ (self, self.mapped_table.description))
elif self.local_table not in self._pks_by_table and \
- isinstance(self.local_table, schema.Table):
+ isinstance(self.local_table, schema.Table):
util.warn("Could not assemble any primary "
- "keys for locally mapped table '%s' - "
- "no rows will be persisted in this Table."
- % self.local_table.description)
+ "keys for locally mapped table '%s' - "
+ "no rows will be persisted in this Table."
+ % self.local_table.description)
if self.inherits and \
not self.concrete and \
@@ -1228,12 +1231,12 @@ class Mapper(_InspectionAttr):
if self._primary_key_argument:
primary_key = sql_util.reduce_columns(
[self.mapped_table.corresponding_column(c) for c in
- self._primary_key_argument],
+ self._primary_key_argument],
ignore_nonexistent_tables=True)
else:
primary_key = sql_util.reduce_columns(
- self._pks_by_table[self.mapped_table],
- ignore_nonexistent_tables=True)
+ self._pks_by_table[self.mapped_table],
+ ignore_nonexistent_tables=True)
if len(primary_key) == 0:
raise sa_exc.ArgumentError(
@@ -1267,7 +1270,7 @@ class Mapper(_InspectionAttr):
for key, prop in self.inherits._props.items():
if key not in self._props and \
not self._should_exclude(key, key, local=False,
- column=None):
+ column=None):
self._adapt_inherited_property(key, prop, False)
# create properties for each column in the mapped table,
@@ -1279,10 +1282,10 @@ class Mapper(_InspectionAttr):
column_key = (self.column_prefix or '') + column.key
if self._should_exclude(
- column.key, column_key,
- local=self.local_table.c.contains_column(column),
- column=column
- ):
+ column.key, column_key,
+ local=self.local_table.c.contains_column(column),
+ column=column
+ ):
continue
# adjust the "key" used for this column to that
@@ -1292,9 +1295,9 @@ class Mapper(_InspectionAttr):
column_key = mapper._columntoproperty[column].key
self._configure_property(column_key,
- column,
- init=False,
- setparent=True)
+ column,
+ init=False,
+ setparent=True)
def _configure_polymorphic_setter(self, init=False):
"""Configure an attribute on the mapper representing the
@@ -1319,9 +1322,9 @@ class Mapper(_InspectionAttr):
self.polymorphic_on = self._props[self.polymorphic_on]
except KeyError:
raise sa_exc.ArgumentError(
- "Can't determine polymorphic_on "
- "value '%s' - no attribute is "
- "mapped to this name." % self.polymorphic_on)
+ "Can't determine polymorphic_on "
+ "value '%s' - no attribute is "
+ "mapped to this name." % self.polymorphic_on)
if self.polymorphic_on in self._columntoproperty:
# polymorphic_on is a column that is already mapped
@@ -1334,11 +1337,11 @@ class Mapper(_InspectionAttr):
# polymorphic_on is directly a MapperProperty,
# ensure it's a ColumnProperty
if not isinstance(self.polymorphic_on,
- properties.ColumnProperty):
+ properties.ColumnProperty):
raise sa_exc.ArgumentError(
- "Only direct column-mapped "
- "property or SQL expression "
- "can be passed for polymorphic_on")
+ "Only direct column-mapped "
+ "property or SQL expression "
+ "can be passed for polymorphic_on")
prop = self.polymorphic_on
self.polymorphic_on = prop.columns[0]
polymorphic_key = prop.key
@@ -1357,7 +1360,7 @@ class Mapper(_InspectionAttr):
# 2. a totally standalone SQL expression which we'd
# hope is compatible with this mapper's mapped_table
col = self.mapped_table.corresponding_column(
- self.polymorphic_on)
+ self.polymorphic_on)
if col is None:
# polymorphic_on doesn't derive from any
# column/expression isn't present in the mapped
@@ -1373,15 +1376,14 @@ class Mapper(_InspectionAttr):
instrument = False
col = self.polymorphic_on
if isinstance(col, schema.Column) and (
- self.with_polymorphic is None or \
- self.with_polymorphic[1].\
- corresponding_column(col) is None
- ):
+ self.with_polymorphic is None or
+ self.with_polymorphic[1].
+ corresponding_column(col) is None):
raise sa_exc.InvalidRequestError(
"Could not map polymorphic_on column "
"'%s' to the mapped table - polymorphic "
"loads will not function properly"
- % col.description)
+ % col.description)
else:
# column/expression that polymorphic_on derives from
# is present in our mapped table
@@ -1396,19 +1398,19 @@ class Mapper(_InspectionAttr):
if key:
if self._should_exclude(col.key, col.key, False, col):
raise sa_exc.InvalidRequestError(
- "Cannot exclude or override the "
- "discriminator column %r" %
- col.key)
+ "Cannot exclude or override the "
+ "discriminator column %r" %
+ col.key)
else:
self.polymorphic_on = col = \
- col.label("_sa_polymorphic_on")
+ col.label("_sa_polymorphic_on")
key = col.key
self._configure_property(
- key,
- properties.ColumnProperty(col,
- _instrument=instrument),
- init=init, setparent=True)
+ key,
+ properties.ColumnProperty(col,
+ _instrument=instrument),
+ init=init, setparent=True)
polymorphic_key = key
else:
# no polymorphic_on was set.
@@ -1424,8 +1426,8 @@ class Mapper(_InspectionAttr):
self.polymorphic_on = mapper.polymorphic_on
else:
self.polymorphic_on = \
- self.mapped_table.corresponding_column(
- mapper.polymorphic_on)
+ self.mapped_table.corresponding_column(
+ mapper.polymorphic_on)
# we can use the parent mapper's _set_polymorphic_identity
# directly; it ensures the polymorphic_identity of the
# instance's mapper is used so is portable to subclasses.
@@ -1441,28 +1443,30 @@ class Mapper(_InspectionAttr):
if setter:
def _set_polymorphic_identity(state):
dict_ = state.dict
- state.get_impl(polymorphic_key).set(state, dict_,
- state.manager.mapper.polymorphic_identity, None)
+ state.get_impl(polymorphic_key).set(
+ state, dict_,
+ state.manager.mapper.polymorphic_identity,
+ None)
def _validate_polymorphic_identity(mapper, state, dict_):
if polymorphic_key in dict_ and \
- dict_[polymorphic_key] not in \
- mapper._acceptable_polymorphic_identities:
+ dict_[polymorphic_key] not in \
+ mapper._acceptable_polymorphic_identities:
util.warn(
- "Flushing object %s with "
- "incompatible polymorphic identity %r; the "
- "object may not refresh and/or load correctly" % (
- state_str(state),
- dict_[polymorphic_key]
- )
- )
+ "Flushing object %s with "
+ "incompatible polymorphic identity %r; the "
+ "object may not refresh and/or load correctly" % (
+ state_str(state),
+ dict_[polymorphic_key]
+ )
+ )
self._set_polymorphic_identity = _set_polymorphic_identity
- self._validate_polymorphic_identity = _validate_polymorphic_identity
+ self._validate_polymorphic_identity = \
+ _validate_polymorphic_identity
else:
self._set_polymorphic_identity = None
-
_validate_polymorphic_identity = None
@_memoized_configured_property
@@ -1490,9 +1494,9 @@ class Mapper(_InspectionAttr):
self._configure_property(key, prop, init=False, setparent=False)
elif key not in self._props:
self._configure_property(
- key,
- properties.ConcreteInheritedProperty(),
- init=init, setparent=True)
+ key,
+ properties.ConcreteInheritedProperty(),
+ init=init, setparent=True)
def _configure_property(self, key, prop, init=True, setparent=True):
self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
@@ -1514,7 +1518,7 @@ class Mapper(_InspectionAttr):
for m2 in path:
m2.mapped_table._reset_exported()
col = self.mapped_table.corresponding_column(
- prop.columns[0])
+ prop.columns[0])
break
path.append(m)
@@ -1528,14 +1532,14 @@ class Mapper(_InspectionAttr):
if hasattr(self, '_readonly_props') and \
(not hasattr(col, 'table') or
col.table not in self._cols_by_table):
- self._readonly_props.add(prop)
+ self._readonly_props.add(prop)
else:
# if column is coming in after _cols_by_table was
# initialized, ensure the col is in the right set
if hasattr(self, '_cols_by_table') and \
- col.table in self._cols_by_table and \
- col not in self._cols_by_table[col.table]:
+ col.table in self._cols_by_table and \
+ col not in self._cols_by_table[col.table]:
self._cols_by_table[col.table].add(col)
# if this properties.ColumnProperty represents the "polymorphic
@@ -1543,8 +1547,8 @@ class Mapper(_InspectionAttr):
# columns in SELECT statements.
if not hasattr(prop, '_is_polymorphic_discriminator'):
prop._is_polymorphic_discriminator = \
- (col is self.polymorphic_on or
- prop.columns[0] is self.polymorphic_on)
+ (col is self.polymorphic_on or
+ prop.columns[0] is self.polymorphic_on)
self.columns[key] = col
for col in prop.columns + prop._orig_columns:
@@ -1560,20 +1564,20 @@ class Mapper(_InspectionAttr):
getattr(self._props[key], '_mapped_by_synonym', False):
syn = self._props[key]._mapped_by_synonym
raise sa_exc.ArgumentError(
- "Can't call map_column=True for synonym %r=%r, "
- "a ColumnProperty already exists keyed to the name "
- "%r for column %r" % (syn, key, key, syn)
- )
+ "Can't call map_column=True for synonym %r=%r, "
+ "a ColumnProperty already exists keyed to the name "
+ "%r for column %r" % (syn, key, key, syn)
+ )
if key in self._props and \
not isinstance(prop, properties.ColumnProperty) and \
not isinstance(self._props[key], properties.ColumnProperty):
util.warn("Property %s on %s being replaced with new "
- "property %s; the old property will be discarded" % (
- self._props[key],
- self,
- prop,
- ))
+ "property %s; the old property will be discarded" % (
+ self._props[key],
+ self,
+ prop,
+ ))
self._props[key] = prop
@@ -1600,8 +1604,8 @@ class Mapper(_InspectionAttr):
column = columns[0]
if not expression._is_column(column):
raise sa_exc.ArgumentError(
- "%s=%r is not an instance of MapperProperty or Column"
- % (key, prop))
+ "%s=%r is not an instance of MapperProperty or Column"
+ % (key, prop))
prop = self._props.get(key, None)
@@ -1609,15 +1613,15 @@ class Mapper(_InspectionAttr):
if (
not self._inherits_equated_pairs or
(prop.columns[0], column) not in self._inherits_equated_pairs
- ) and \
+ ) and \
not prop.columns[0].shares_lineage(column) and \
prop.columns[0] is not self.version_id_col and \
column is not self.version_id_col:
warn_only = prop.parent is not self
msg = ("Implicitly combining column %s with column "
- "%s under attribute '%s'. Please configure one "
- "or more attributes for these same-named columns "
- "explicitly." % (prop.columns[-1], column, key))
+ "%s under attribute '%s'. Please configure one "
+ "or more attributes for these same-named columns "
+ "explicitly." % (prop.columns[-1], column, key))
if warn_only:
util.warn(msg)
else:
@@ -1628,10 +1632,10 @@ class Mapper(_InspectionAttr):
prop = prop.copy()
prop.columns.insert(0, column)
self._log("inserting column to existing list "
- "in properties.ColumnProperty %s" % (key))
+ "in properties.ColumnProperty %s" % (key))
return prop
elif prop is None or isinstance(prop,
- properties.ConcreteInheritedProperty):
+ properties.ConcreteInheritedProperty):
mapped_column = []
for c in columns:
mc = self.mapped_table.corresponding_column(c)
@@ -1646,11 +1650,11 @@ class Mapper(_InspectionAttr):
mc = self.mapped_table.corresponding_column(c)
if mc is None:
raise sa_exc.ArgumentError(
- "When configuring property '%s' on %s, "
- "column '%s' is not represented in the mapper's "
- "table. Use the `column_property()` function to "
- "force this column to be mapped as a read-only "
- "attribute." % (key, self, c))
+ "When configuring property '%s' on %s, "
+ "column '%s' is not represented in the mapper's "
+ "table. Use the `column_property()` function to "
+ "force this column to be mapped as a read-only "
+ "attribute." % (key, self, c))
mapped_column.append(mc)
return properties.ColumnProperty(*mapped_column)
else:
@@ -1721,7 +1725,7 @@ class Mapper(_InspectionAttr):
self.local_table.description or
str(self.local_table)) +\
(self.non_primary and
- "|non-primary" or "") + ")"
+ "|non-primary" or "") + ")"
def _log(self, msg, *args):
self.logger.info(
@@ -1752,7 +1756,7 @@ class Mapper(_InspectionAttr):
orphan_possible = True
has_parent = attributes.manager_of_class(cls).has_parent(
- state, key, optimistic=state.has_identity)
+ state, key, optimistic=state.has_identity)
if self.legacy_is_orphan and has_parent:
return False
@@ -1778,7 +1782,7 @@ class Mapper(_InspectionAttr):
return self._props[key]
except KeyError:
raise sa_exc.InvalidRequestError(
- "Mapper '%s' has no property '%s'" % (self, key))
+ "Mapper '%s' has no property '%s'" % (self, key))
def get_property_by_column(self, column):
"""Given a :class:`.Column` object, return the
@@ -1809,8 +1813,8 @@ class Mapper(_InspectionAttr):
m = _class_to_mapper(m)
if not m.isa(self):
raise sa_exc.InvalidRequestError(
- "%r does not inherit from %r" %
- (m, self))
+ "%r does not inherit from %r" %
+ (m, self))
if selectable is None:
mappers.update(m.iterate_to_root())
@@ -1822,7 +1826,7 @@ class Mapper(_InspectionAttr):
if selectable is not None:
tables = set(sql_util.find_tables(selectable,
- include_aliases=True))
+ include_aliases=True))
mappers = [m for m in mappers if m.local_table in tables]
return mappers
@@ -1838,23 +1842,23 @@ class Mapper(_InspectionAttr):
continue
if m.concrete:
raise sa_exc.InvalidRequestError(
- "'with_polymorphic()' requires 'selectable' argument "
- "when concrete-inheriting mappers are used.")
+ "'with_polymorphic()' requires 'selectable' argument "
+ "when concrete-inheriting mappers are used.")
elif not m.single:
if innerjoin:
from_obj = from_obj.join(m.local_table,
- m.inherit_condition)
+ m.inherit_condition)
else:
from_obj = from_obj.outerjoin(m.local_table,
- m.inherit_condition)
+ m.inherit_condition)
return from_obj
@_memoized_configured_property
def _single_table_criterion(self):
if self.single and \
- self.inherits and \
- self.polymorphic_on is not None:
+ self.inherits and \
+ self.polymorphic_on is not None:
return self.polymorphic_on.in_(
m.polymorphic_identity
for m in self.self_and_descendants)
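(Editor's sketch: the `_single_table_criterion` helper above is what produces the implicit IN filter for single-table inheritance. A minimal, hedged illustration using the declarative extension of this era; the `Employee`/`Engineer` classes are assumptions, not part of this commit:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employee'
        id = Column(Integer, primary_key=True)
        type = Column(String(20))
        __mapper_args__ = {'polymorphic_on': type,
                           'polymorphic_identity': 'employee'}

    class Engineer(Employee):
        __mapper_args__ = {'polymorphic_identity': 'engineer'}

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)
    # querying the subclass renders WHERE employee.type IN ('engineer'),
    # built from polymorphic_on.in_(...) as in _single_table_criterion
    print(session.query(Engineer))
)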
@@ -1879,8 +1883,8 @@ class Mapper(_InspectionAttr):
return selectable
else:
return self._selectable_from_mappers(
- self._mappers_from_spec(spec, selectable),
- False)
+ self._mappers_from_spec(spec, selectable),
+ False)
with_polymorphic_mappers = _with_polymorphic_mappers
"""The list of :class:`.Mapper` objects included in the
@@ -1901,7 +1905,7 @@ class Mapper(_InspectionAttr):
return self._with_polymorphic_selectable
def _with_polymorphic_args(self, spec=None, selectable=False,
- innerjoin=False):
+ innerjoin=False):
if self.with_polymorphic:
if not spec:
spec = self.with_polymorphic[0]
@@ -1914,14 +1918,13 @@ class Mapper(_InspectionAttr):
return mappers, selectable
else:
return mappers, self._selectable_from_mappers(mappers,
- innerjoin)
+ innerjoin)
@_memoized_configured_property
def _polymorphic_properties(self):
return list(self._iterate_polymorphic_properties(
self._with_polymorphic_mappers))
-
def _iterate_polymorphic_properties(self, mappers=None):
"""Return an iterator of MapperProperty objects which will render into
a SELECT."""
@@ -1937,14 +1940,14 @@ class Mapper(_InspectionAttr):
# mapper's polymorphic selectable (which we don't want rendered)
for c in util.unique_list(
chain(*[
- list(mapper.iterate_properties) for mapper in
- [self] + mappers
- ])
+ list(mapper.iterate_properties) for mapper in
+ [self] + mappers
+ ])
):
if getattr(c, '_is_polymorphic_discriminator', False) and \
(self.polymorphic_on is None or
- c.columns[0] is not self.polymorphic_on):
- continue
+ c.columns[0] is not self.polymorphic_on):
+ continue
yield c
@util.memoized_property
@@ -1979,15 +1982,16 @@ class Mapper(_InspectionAttr):
"""A namespace of all :class:`._InspectionAttr` attributes associated
with the mapped class.
- These attributes are in all cases Python :term:`descriptors` associated
- with the mapped class or its superclasses.
+ These attributes are in all cases Python :term:`descriptors`
+ associated with the mapped class or its superclasses.
This namespace includes attributes that are mapped to the class
as well as attributes declared by extension modules.
It includes any Python descriptor type that inherits from
- :class:`._InspectionAttr`. This includes :class:`.QueryableAttribute`,
- as well as extension types such as :class:`.hybrid_property`,
- :class:`.hybrid_method` and :class:`.AssociationProxy`.
+ :class:`._InspectionAttr`. This includes
+ :class:`.QueryableAttribute`, as well as extension types such as
+ :class:`.hybrid_property`, :class:`.hybrid_method` and
+ :class:`.AssociationProxy`.
To distinguish between mapped attributes and extension attributes,
the attribute :attr:`._InspectionAttr.extension_type` will refer
@@ -1995,8 +1999,9 @@ class Mapper(_InspectionAttr):
When dealing with a :class:`.QueryableAttribute`, the
:attr:`.QueryableAttribute.property` attribute refers to the
- :class:`.MapperProperty` property, which is what you get when referring
- to the collection of mapped properties via :attr:`.Mapper.attrs`.
+ :class:`.MapperProperty` property, which is what you get when
+ referring to the collection of mapped properties via
+ :attr:`.Mapper.attrs`.
.. versionadded:: 0.8.0
@@ -2006,7 +2011,7 @@ class Mapper(_InspectionAttr):
"""
return util.ImmutableProperties(
- dict(self.class_manager._all_sqla_attributes()))
+ dict(self.class_manager._all_sqla_attributes()))
@_memoized_configured_property
def synonyms(self):
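(Editor's sketch: the namespace documented in the hunk above is exposed as ``Mapper.all_orm_descriptors``. A hedged usage example; ``User`` and its ``fullname`` hybrid are assumptions for illustration:

    from sqlalchemy import inspect

    # assumes ``User`` is a mapped class defining, e.g.:
    #     @hybrid_property
    #     def fullname(self):
    #         return self.firstname + " " + self.lastname
    insp = inspect(User)
    print(list(insp.all_orm_descriptors.keys()))
    # plain mapped attributes report NOT_EXTENSION; the hybrid reports
    # HYBRID_PROPERTY via its extension_type attribute
    print(insp.all_orm_descriptors['fullname'].extension_type)
)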
@@ -2078,7 +2083,7 @@ class Mapper(_InspectionAttr):
params = [(primary_key, sql.bindparam(None, type_=primary_key.type))
for primary_key in self.primary_key]
return sql.and_(*[k == v for (k, v) in params]), \
- util.column_dict(params)
+ util.column_dict(params)
@_memoized_configured_property
def _equivalent_columns(self):
@@ -2116,8 +2121,8 @@ class Mapper(_InspectionAttr):
for mapper in self.base_mapper.self_and_descendants:
if mapper.inherit_condition is not None:
visitors.traverse(
- mapper.inherit_condition, {},
- {'binary': visit_binary})
+ mapper.inherit_condition, {},
+ {'binary': visit_binary})
return result
@@ -2142,13 +2147,13 @@ class Mapper(_InspectionAttr):
# either local or from an inherited class
if local:
if self.class_.__dict__.get(assigned_name, None) is not None \
- and self._is_userland_descriptor(
- self.class_.__dict__[assigned_name]):
+ and self._is_userland_descriptor(
+ self.class_.__dict__[assigned_name]):
return True
else:
if getattr(self.class_, assigned_name, None) is not None \
- and self._is_userland_descriptor(
- getattr(self.class_, assigned_name)):
+ and self._is_userland_descriptor(
+ getattr(self.class_, assigned_name)):
return True
if self.include_properties is not None and \
@@ -2158,10 +2163,10 @@ class Mapper(_InspectionAttr):
return True
if self.exclude_properties is not None and \
- (
- name in self.exclude_properties or \
- (column is not None and column in self.exclude_properties)
- ):
+ (
+ name in self.exclude_properties or
+ (column is not None and column in self.exclude_properties)
+ ):
self._log("excluding property %s" % (name))
return True
@@ -2237,11 +2242,11 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
- :param row: A :class:`.RowProxy` instance. The columns which are mapped
- by this :class:`.Mapper` should be locatable in the row, preferably
- via the :class:`.Column` object directly (as is the case when a
- :func:`.select` construct is executed), or via string names of the form
- ``<tablename>_<colname>``.
+ :param row: A :class:`.RowProxy` instance. The columns which are
+ mapped by this :class:`.Mapper` should be locatable in the row,
+ preferably via the :class:`.Column` object directly (as is the case
+ when a :func:`.select` construct is executed), or via string names of
+ the form ``<tablename>_<colname>``.
"""
pk_cols = self.primary_key
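(Editor's sketch of ``identity_key_from_row()`` as documented above; ``User`` and ``user_table`` are hypothetical names:

    from sqlalchemy import inspect

    # columns are located in the row via the Column objects directly,
    # as is the case for a plain select() against the mapped table
    row = session.execute(user_table.select()).first()
    key = inspect(User).identity_key_from_row(row)
    # -> (User, (1,)) : the identity class plus primary key values
)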
@@ -2249,7 +2254,7 @@ class Mapper(_InspectionAttr):
pk_cols = [adapter.columns[c] for c in pk_cols]
return self._identity_class, \
- tuple(row[column] for column in pk_cols)
+ tuple(row[column] for column in pk_cols)
def identity_key_from_primary_key(self, primary_key):
"""Return an identity-map key for use in storing/retrieving an
@@ -2274,14 +2279,14 @@ class Mapper(_InspectionAttr):
"""
return self.identity_key_from_primary_key(
- self.primary_key_from_instance(instance))
+ self.primary_key_from_instance(instance))
def _identity_key_from_state(self, state):
dict_ = state.dict
manager = state.manager
return self._identity_class, tuple([
- manager[self._columntoproperty[col].key].\
- impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET)
+ manager[self._columntoproperty[col].key].
+ impl.get(state, dict_, attributes.PASSIVE_RETURN_NEVER_SET)
for col in self.primary_key
])
@@ -2302,14 +2307,15 @@ class Mapper(_InspectionAttr):
dict_ = state.dict
manager = state.manager
return [
- manager[self._columntoproperty[col].key].\
- impl.get(state, dict_,
- attributes.PASSIVE_RETURN_NEVER_SET)
+ manager[self._columntoproperty[col].key].
+ impl.get(state, dict_,
+ attributes.PASSIVE_RETURN_NEVER_SET)
for col in self.primary_key
]
- def _get_state_attr_by_column(self, state, dict_, column,
- passive=attributes.PASSIVE_RETURN_NEVER_SET):
+ def _get_state_attr_by_column(
+ self, state, dict_, column,
+ passive=attributes.PASSIVE_RETURN_NEVER_SET):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.get(state, dict_, passive=passive)
@@ -2322,13 +2328,13 @@ class Mapper(_InspectionAttr):
dict_ = attributes.instance_dict(obj)
return self._get_committed_state_attr_by_column(state, dict_, column)
- def _get_committed_state_attr_by_column(self, state, dict_,
- column,
- passive=attributes.PASSIVE_RETURN_NEVER_SET):
+ def _get_committed_state_attr_by_column(
+ self, state, dict_, column,
+ passive=attributes.PASSIVE_RETURN_NEVER_SET):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.\
- get_committed_value(state, dict_, passive=passive)
+ get_committed_value(state, dict_, passive=passive)
def _optimized_get_statement(self, state, attribute_names):
"""assemble a WHERE clause which retrieves a given state by primary
@@ -2343,10 +2349,10 @@ class Mapper(_InspectionAttr):
props = self._props
tables = set(chain(
- *[sql_util.find_tables(c, check_columns=True)
- for key in attribute_names
- for c in props[key].columns]
- ))
+ *[sql_util.find_tables(c, check_columns=True)
+ for key in attribute_names
+ for c in props[key].columns]
+ ))
if self.base_mapper.local_table in tables:
return None
@@ -2362,22 +2368,22 @@ class Mapper(_InspectionAttr):
if leftcol.table not in tables:
leftval = self._get_committed_state_attr_by_column(
- state, state.dict,
- leftcol,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ state, state.dict,
+ leftcol,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
if leftval in orm_util._none_set:
raise ColumnsNotAvailable()
binary.left = sql.bindparam(None, leftval,
type_=binary.right.type)
elif rightcol.table not in tables:
rightval = self._get_committed_state_attr_by_column(
- state, state.dict,
- rightcol,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ state, state.dict,
+ rightcol,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
if rightval in orm_util._none_set:
raise ColumnsNotAvailable()
binary.right = sql.bindparam(None, rightval,
- type_=binary.right.type)
+ type_=binary.right.type)
allconds = []
@@ -2386,15 +2392,16 @@ class Mapper(_InspectionAttr):
for mapper in reversed(list(self.iterate_to_root())):
if mapper.local_table in tables:
start = True
- elif not isinstance(mapper.local_table, expression.TableClause):
+ elif not isinstance(mapper.local_table,
+ expression.TableClause):
return None
if start and not mapper.single:
allconds.append(visitors.cloned_traverse(
- mapper.inherit_condition,
- {},
- {'binary': visit_binary}
- )
- )
+ mapper.inherit_condition,
+ {},
+ {'binary': visit_binary}
+ )
+ )
except ColumnsNotAvailable:
return None
@@ -2425,7 +2432,7 @@ class Mapper(_InspectionAttr):
prp, mpp = object(), object()
visitables = deque([(deque(self._props.values()), prp,
- state, state.dict)])
+ state, state.dict)])
while visitables:
iterator, item_type, parent_state, parent_dict = visitables[-1]
@@ -2437,18 +2444,19 @@ class Mapper(_InspectionAttr):
prop = iterator.popleft()
if type_ not in prop.cascade:
continue
- queue = deque(prop.cascade_iterator(type_, parent_state,
- parent_dict, visited_states, halt_on))
+ queue = deque(prop.cascade_iterator(
+ type_, parent_state, parent_dict,
+ visited_states, halt_on))
if queue:
visitables.append((queue, mpp, None, None))
elif item_type is mpp:
instance, instance_mapper, corresponding_state, \
- corresponding_dict = iterator.popleft()
+ corresponding_dict = iterator.popleft()
yield instance, instance_mapper, \
- corresponding_state, corresponding_dict
+ corresponding_state, corresponding_dict
visitables.append((deque(instance_mapper._props.values()),
- prp, corresponding_state,
- corresponding_dict))
+ prp, corresponding_state,
+ corresponding_dict))
@_memoized_configured_property
def _compiled_cache(self):
@@ -2469,7 +2477,7 @@ class Mapper(_InspectionAttr):
extra_dependencies.extend([
(super_table, table)
for super_table in super_.tables
- ])
+ ])
def skip(fk):
# attempt to skip dependencies that are not
@@ -2486,15 +2494,15 @@ class Mapper(_InspectionAttr):
cols = set(sql_util._find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
cols = cols.union(sql_util._find_columns(
- parent.inherit_condition))
+ parent.inherit_condition))
return fk.parent not in cols and fk.column not in cols
else:
return fk.parent not in cols
return False
sorted_ = sql_util.sort_tables(table_to_mapper,
- skip_fn=skip,
- extra_dependencies=extra_dependencies)
+ skip_fn=skip,
+ extra_dependencies=extra_dependencies)
ret = util.OrderedDict()
for t in sorted_:
@@ -2519,10 +2527,11 @@ class Mapper(_InspectionAttr):
cols = set(table.c)
for m in self.iterate_to_root():
if m._inherits_equated_pairs and \
- cols.intersection(
- util.reduce(set.union,
- [l.proxy_set for l, r in m._inherits_equated_pairs])
- ):
+ cols.intersection(
+ util.reduce(set.union,
+ [l.proxy_set for l, r in
+ m._inherits_equated_pairs])
+ ):
result[table].append((m, m._inherits_equated_pairs))
return result
@@ -2561,10 +2570,10 @@ def configure_mappers():
for mapper in list(_mapper_registry):
if getattr(mapper, '_configure_failed', False):
e = sa_exc.InvalidRequestError(
- "One or more mappers failed to initialize - "
- "can't proceed with initialization of other "
- "mappers. Original exception was: %s"
- % mapper._configure_failed)
+ "One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: %s"
+ % mapper._configure_failed)
e._configure_failed = mapper._configure_failed
raise e
if not mapper.configured:
@@ -2572,7 +2581,7 @@ def configure_mappers():
mapper._post_configure_properties()
mapper._expire_memoizations()
mapper.dispatch.mapper_configured(
- mapper, mapper.class_)
+ mapper, mapper.class_)
except:
exc = sys.exc_info()[1]
if not hasattr(exc, '_configure_failed'):
@@ -2649,9 +2658,9 @@ def validates(*names, **kw):
def wrap(fn):
fn.__sa_validators__ = names
fn.__sa_validation_opts__ = {
- "include_removes": include_removes,
- "include_backrefs": include_backrefs
- }
+ "include_removes": include_removes,
+ "include_backrefs": include_backrefs
+ }
return fn
return wrap
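(Editor's sketch of the ``@validates`` decorator whose wrapper is reformatted above; the ``User`` class is an assumption. With ``include_removes=True`` the function receives an extra boolean flag:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import validates

    class User(Base):                       # assumes a declarative ``Base``
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        email = Column(String(50))

        @validates('email')
        def validate_email(self, key, address):
            # invoked on each assignment to User.email
            assert '@' in address
            return address
)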
@@ -2700,7 +2709,7 @@ def _event_on_resurrect(state):
if instrumenting_mapper:
for col, val in zip(instrumenting_mapper.primary_key, state.key[1]):
instrumenting_mapper._set_state_attr_by_column(
- state, state.dict, col, val)
+ state, state.dict, col, val)
class _ColumnMapping(dict):
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
index fb4f4b986..f10a125a8 100644
--- a/lib/sqlalchemy/orm/path_registry.py
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -14,6 +14,7 @@ from .. import exc
from itertools import chain
from .base import class_mapper
+
def _unreduce_path(path):
return PathRegistry.deserialize(path)
@@ -21,6 +22,7 @@ def _unreduce_path(path):
_WILDCARD_TOKEN = "*"
_DEFAULT_TOKEN = "_sa_default"
+
class PathRegistry(object):
"""Represent query load paths and registry functions.
@@ -81,7 +83,7 @@ class PathRegistry(object):
self.path[i] for i in range(0, len(self.path), 2)
]:
if path_mapper.is_mapper and \
- path_mapper.isa(mapper):
+ path_mapper.isa(mapper):
return True
else:
return False
@@ -105,9 +107,9 @@ class PathRegistry(object):
return None
p = tuple(chain(*[(class_mapper(mcls),
- class_mapper(mcls).attrs[key]
- if key is not None else None)
- for mcls, key in path]))
+ class_mapper(mcls).attrs[key]
+ if key is not None else None)
+ for mcls, key in path]))
if p and p[-1] is None:
p = p[0:-1]
return cls.coerce(p)
@@ -115,8 +117,8 @@ class PathRegistry(object):
@classmethod
def per_mapper(cls, mapper):
return EntityRegistry(
- cls.root, mapper
- )
+ cls.root, mapper
+ )
@classmethod
def coerce(cls, raw):
@@ -132,8 +134,8 @@ class PathRegistry(object):
def __add__(self, other):
return util.reduce(
- lambda prev, next: prev[next],
- other.path, self)
+ lambda prev, next: prev[next],
+ other.path, self)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.path, )
@@ -146,11 +148,13 @@ class RootRegistry(PathRegistry):
"""
path = ()
has_entity = False
+
def __getitem__(self, entity):
return entity._path_registry
PathRegistry.root = RootRegistry()
+
class TokenRegistry(PathRegistry):
def __init__(self, parent, token):
self.token = token
@@ -162,6 +166,7 @@ class TokenRegistry(PathRegistry):
def __getitem__(self, entity):
raise NotImplementedError()
+
class PropRegistry(PathRegistry):
def __init__(self, parent, prop):
# restate this path in terms of the
@@ -171,7 +176,7 @@ class PropRegistry(PathRegistry):
parent = parent.parent[prop.parent]
elif insp.is_aliased_class and insp.with_polymorphic_mappers:
if prop.parent is not insp.mapper and \
- prop.parent in insp.with_polymorphic_mappers:
+ prop.parent in insp.with_polymorphic_mappers:
subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
parent = parent.parent[subclass_entity]
@@ -196,16 +201,18 @@ class PropRegistry(PathRegistry):
"""
return ("loader",
self.parent.token(
- "%s:%s" % (self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
- ).path
+ "%s:%s" % (
+ self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
+ ).path
)
@util.memoized_property
def _default_path_loader_key(self):
return ("loader",
self.parent.token(
- "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN)
- ).path
+ "%s:%s" % (self.prop.strategy_wildcard_key,
+ _DEFAULT_TOKEN)
+ ).path
)
@util.memoized_property
@@ -228,6 +235,7 @@ class PropRegistry(PathRegistry):
self, entity
)
+
class EntityRegistry(PathRegistry, dict):
is_aliased_class = False
has_entity = True
@@ -257,6 +265,3 @@ class EntityRegistry(PathRegistry, dict):
def __missing__(self, key):
self[key] = item = PropRegistry(self, key)
return item
-
-
-
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 6669efc56..295d4a3d0 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -41,18 +41,18 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
return
states_to_insert, states_to_update = _organize_states_for_save(
- base_mapper,
- states,
- uowtransaction)
+ base_mapper,
+ states,
+ uowtransaction)
cached_connections = _cached_connection_dict(base_mapper)
for table, mapper in base_mapper._sorted_tables.items():
insert = _collect_insert_commands(base_mapper, uowtransaction,
- table, states_to_insert)
+ table, states_to_insert)
update = _collect_update_commands(base_mapper, uowtransaction,
- table, states_to_update)
+ table, states_to_update)
if update:
_emit_update_statements(base_mapper, uowtransaction,
@@ -65,7 +65,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
mapper, table, insert)
_finalize_insert_update_commands(base_mapper, uowtransaction,
- states_to_insert, states_to_update)
+ states_to_insert, states_to_update)
def post_update(base_mapper, states, uowtransaction, post_update_cols):
@@ -76,18 +76,18 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
cached_connections = _cached_connection_dict(base_mapper)
states_to_update = _organize_states_for_post_update(
- base_mapper,
- states, uowtransaction)
+ base_mapper,
+ states, uowtransaction)
for table, mapper in base_mapper._sorted_tables.items():
update = _collect_post_update_commands(base_mapper, uowtransaction,
- table, states_to_update,
- post_update_cols)
+ table, states_to_update,
+ post_update_cols)
if update:
_emit_post_update_statements(base_mapper, uowtransaction,
- cached_connections,
- mapper, table, update)
+ cached_connections,
+ mapper, table, update)
def delete_obj(base_mapper, states, uowtransaction):
@@ -101,23 +101,23 @@ def delete_obj(base_mapper, states, uowtransaction):
cached_connections = _cached_connection_dict(base_mapper)
states_to_delete = _organize_states_for_delete(
- base_mapper,
- states,
- uowtransaction)
+ base_mapper,
+ states,
+ uowtransaction)
table_to_mapper = base_mapper._sorted_tables
for table in reversed(list(table_to_mapper.keys())):
delete = _collect_delete_commands(base_mapper, uowtransaction,
- table, states_to_delete)
+ table, states_to_delete)
mapper = table_to_mapper[table]
_emit_delete_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, delete)
+ cached_connections, mapper, table, delete)
for state, state_dict, mapper, has_identity, connection \
- in states_to_delete:
+ in states_to_delete:
mapper.dispatch.after_delete(mapper, connection, state)
@@ -137,8 +137,8 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
states_to_update = []
for state, dict_, mapper, connection in _connections_for_states(
- base_mapper, uowtransaction,
- states):
+ base_mapper, uowtransaction,
+ states):
has_identity = bool(state.key)
instance_key = state.key or mapper._identity_key_from_state(state)
@@ -183,19 +183,19 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
if not has_identity and not row_switch:
states_to_insert.append(
(state, dict_, mapper, connection,
- has_identity, instance_key, row_switch)
+ has_identity, instance_key, row_switch)
)
else:
states_to_update.append(
(state, dict_, mapper, connection,
- has_identity, instance_key, row_switch)
+ has_identity, instance_key, row_switch)
)
return states_to_insert, states_to_update
def _organize_states_for_post_update(base_mapper, states,
- uowtransaction):
+ uowtransaction):
"""Make an initial pass across a set of states for UPDATE
corresponding to post_update.
@@ -205,7 +205,7 @@ def _organize_states_for_post_update(base_mapper, states,
"""
return list(_connections_for_states(base_mapper, uowtransaction,
- states))
+ states))
def _organize_states_for_delete(base_mapper, states, uowtransaction):
@@ -219,25 +219,25 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction):
states_to_delete = []
for state, dict_, mapper, connection in _connections_for_states(
- base_mapper, uowtransaction,
- states):
+ base_mapper, uowtransaction,
+ states):
mapper.dispatch.before_delete(mapper, connection, state)
states_to_delete.append((state, dict_, mapper,
- bool(state.key), connection))
+ bool(state.key), connection))
return states_to_delete
def _collect_insert_commands(base_mapper, uowtransaction, table,
- states_to_insert):
+ states_to_insert):
"""Identify sets of values to use in INSERT statements for a
list of states.
"""
insert = []
for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in states_to_insert:
+ instance_key, row_switch in states_to_insert:
if table not in mapper._pks_by_table:
continue
@@ -250,7 +250,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
has_all_defaults = True
for col in mapper._cols_by_table[table]:
if col is mapper.version_id_col and \
- mapper.version_id_generator is not False:
+ mapper.version_id_generator is not False:
val = mapper.version_id_generator(None)
params[col.key] = val
else:
@@ -263,10 +263,10 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
if col in pks:
has_all_pks = False
elif col.default is None and \
- col.server_default is None:
+ col.server_default is None:
params[col.key] = value
elif col.server_default is not None and \
- mapper.base_mapper.eager_defaults:
+ mapper.base_mapper.eager_defaults:
has_all_defaults = False
elif isinstance(value, sql.ClauseElement):
@@ -275,13 +275,13 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
params[col.key] = value
insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks,
- has_all_defaults))
+ connection, value_params, has_all_pks,
+ has_all_defaults))
return insert
def _collect_update_commands(base_mapper, uowtransaction,
- table, states_to_update):
+ table, states_to_update):
"""Identify sets of values to use in UPDATE statements for a
list of states.
@@ -295,7 +295,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
update = []
for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in states_to_update:
+ instance_key, row_switch in states_to_update:
if table not in mapper._pks_by_table:
continue
@@ -309,10 +309,10 @@ def _collect_update_commands(base_mapper, uowtransaction,
if col is mapper.version_id_col:
params[col._label] = \
mapper._get_committed_state_attr_by_column(
- row_switch or state,
- row_switch and row_switch.dict
- or state_dict,
- col)
+ row_switch or state,
+ row_switch and row_switch.dict
+ or state_dict,
+ col)
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
@@ -331,19 +331,20 @@ def _collect_update_commands(base_mapper, uowtransaction,
# in a different table than the one
# where the version_id_col is.
for prop in mapper._columntoproperty.values():
- history = state.manager[prop.key].impl.get_history(
+ history = (
+ state.manager[prop.key].impl.get_history(
state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE)
+ attributes.PASSIVE_NO_INITIALIZE))
if history.added:
hasdata = True
else:
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
- state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE)
+ state, state_dict,
+ attributes.PASSIVE_NO_INITIALIZE)
if history.added:
if isinstance(history.added[0],
- sql.ClauseElement):
+ sql.ClauseElement):
value_params[col] = history.added[0]
else:
value = history.added[0]
@@ -351,13 +352,13 @@ def _collect_update_commands(base_mapper, uowtransaction,
if col in pks:
if history.deleted and \
- not row_switch:
+ not row_switch:
# if passive_updates and sync detected
# this was a pk->pk sync, use the new
# value to locate the row, since the
# DB would already have set this
if ("pk_cascaded", state, col) in \
- uowtransaction.attributes:
+ uowtransaction.attributes:
value = history.added[0]
params[col._label] = value
else:
@@ -381,7 +382,7 @@ def _collect_update_commands(base_mapper, uowtransaction,
hasdata = True
elif col in pks:
value = state.manager[prop.key].impl.get(
- state, state_dict)
+ state, state_dict)
if value is None:
hasnull = True
params[col._label] = value
@@ -389,16 +390,16 @@ def _collect_update_commands(base_mapper, uowtransaction,
if hasdata:
if hasnull:
raise orm_exc.FlushError(
- "Can't update table "
- "using NULL for primary "
- "key value")
+ "Can't update table "
+ "using NULL for primary "
+ "key value")
update.append((state, state_dict, params, mapper,
- connection, value_params))
+ connection, value_params))
return update
def _collect_post_update_commands(base_mapper, uowtransaction, table,
- states_to_update, post_update_cols):
+ states_to_update, post_update_cols):
"""Identify sets of values to use in UPDATE statements for a
list of states within a post_update operation.
@@ -415,34 +416,34 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
for col in mapper._cols_by_table[table]:
if col in pks:
params[col._label] = \
- mapper._get_state_attr_by_column(
- state,
- state_dict, col)
+ mapper._get_state_attr_by_column(
+ state,
+ state_dict, col)
elif col in post_update_cols:
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
- state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE)
+ state, state_dict,
+ attributes.PASSIVE_NO_INITIALIZE)
if history.added:
value = history.added[0]
params[col.key] = value
hasdata = True
if hasdata:
update.append((state, state_dict, params, mapper,
- connection))
+ connection))
return update
def _collect_delete_commands(base_mapper, uowtransaction, table,
- states_to_delete):
+ states_to_delete):
"""Identify values to use in DELETE statements for a list of
states to be deleted."""
delete = util.defaultdict(list)
for state, state_dict, mapper, has_identity, connection \
- in states_to_delete:
+ in states_to_delete:
if not has_identity or table not in mapper._pks_by_table:
continue
@@ -450,43 +451,44 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
delete[connection].append(params)
for col in mapper._pks_by_table[table]:
params[col.key] = \
- value = \
- mapper._get_committed_state_attr_by_column(
- state, state_dict, col)
+ value = \
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict, col)
if value is None:
raise orm_exc.FlushError(
- "Can't delete from table "
- "using NULL for primary "
- "key value")
+ "Can't delete from table "
+ "using NULL for primary "
+ "key value")
if mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col):
+ table.c.contains_column(mapper.version_id_col):
params[mapper.version_id_col.key] = \
- mapper._get_committed_state_attr_by_column(
- state, state_dict,
- mapper.version_id_col)
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict,
+ mapper.version_id_col)
return delete
def _emit_update_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, update):
+ cached_connections, mapper, table, update):
"""Emit UPDATE statements corresponding to value lists collected
by _collect_update_commands()."""
needs_version_id = mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col)
+ table.c.contains_column(mapper.version_id_col)
def update_stmt():
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
- type_=col.type))
+ type_=col.type))
if needs_version_id:
- clause.clauses.append(mapper.version_id_col ==\
- sql.bindparam(mapper.version_id_col._label,
- type_=mapper.version_id_col.type))
+ clause.clauses.append(
+ mapper.version_id_col == sql.bindparam(
+ mapper.version_id_col._label,
+ type_=mapper.version_id_col.type))
stmt = table.update(clause)
if mapper.base_mapper.eager_defaults:
@@ -500,43 +502,43 @@ def _emit_update_statements(base_mapper, uowtransaction,
rows = 0
for state, state_dict, params, mapper, \
- connection, value_params in update:
+ connection, value_params in update:
if value_params:
c = connection.execute(
- statement.values(value_params),
- params)
+ statement.values(value_params),
+ params)
else:
c = cached_connections[connection].\
- execute(statement, params)
+ execute(statement, params)
_postfetch(
- mapper,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- c.context.compiled_parameters[0],
- value_params)
+ mapper,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ c.context.compiled_parameters[0],
+ value_params)
rows += c.rowcount
if connection.dialect.supports_sane_rowcount:
if rows != len(update):
raise orm_exc.StaleDataError(
- "UPDATE statement on table '%s' expected to "
- "update %d row(s); %d were matched." %
- (table.description, len(update), rows))
+ "UPDATE statement on table '%s' expected to "
+ "update %d row(s); %d were matched." %
+ (table.description, len(update), rows))
elif needs_version_id:
util.warn("Dialect %s does not support updated rowcount "
- "- versioning cannot be verified." %
- c.dialect.dialect_description,
- stacklevel=12)
+ "- versioning cannot be verified." %
+ c.dialect.dialect_description,
+ stacklevel=12)
def _emit_insert_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, insert):
+ cached_connections, mapper, table, insert):
"""Emit INSERT statements corresponding to value lists collected
by _collect_insert_commands()."""
@@ -544,37 +546,37 @@ def _emit_insert_statements(base_mapper, uowtransaction,
for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
records in groupby(insert,
- lambda rec: (rec[4],
- list(rec[2].keys()),
- bool(rec[5]),
- rec[6], rec[7])
- ):
+ lambda rec: (rec[4],
+ list(rec[2].keys()),
+ bool(rec[5]),
+ rec[6], rec[7])
+ ):
if \
- (
- has_all_defaults
- or not base_mapper.eager_defaults
- or not connection.dialect.implicit_returning
- ) and has_all_pks and not hasvalue:
+ (
+ has_all_defaults
+ or not base_mapper.eager_defaults
+ or not connection.dialect.implicit_returning
+ ) and has_all_pks and not hasvalue:
records = list(records)
multiparams = [rec[2] for rec in records]
c = cached_connections[connection].\
- execute(statement, multiparams)
+ execute(statement, multiparams)
for (state, state_dict, params, mapper_rec,
conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
_postfetch(
- mapper_rec,
- uowtransaction,
- table,
- state,
- state_dict,
- c,
- last_inserted_params,
- value_params)
+ mapper_rec,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ last_inserted_params,
+ value_params)
else:
if not has_all_defaults and base_mapper.eager_defaults:
@@ -583,45 +585,45 @@ def _emit_insert_statements(base_mapper, uowtransaction,
statement = statement.return_defaults(mapper.version_id_col)
for state, state_dict, params, mapper_rec, \
- connection, value_params, \
- has_all_pks, has_all_defaults in records:
+ connection, value_params, \
+ has_all_pks, has_all_defaults in records:
if value_params:
result = connection.execute(
- statement.values(value_params),
- params)
+ statement.values(value_params),
+ params)
else:
result = cached_connections[connection].\
- execute(statement, params)
+ execute(statement, params)
primary_key = result.context.inserted_primary_key
if primary_key is not None:
# set primary key attributes
for pk, col in zip(primary_key,
- mapper._pks_by_table[table]):
+ mapper._pks_by_table[table]):
prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
# TODO: would rather say:
- #state_dict[prop.key] = pk
+ # state_dict[prop.key] = pk
mapper_rec._set_state_attr_by_column(
- state,
- state_dict,
- col, pk)
+ state,
+ state_dict,
+ col, pk)
_postfetch(
- mapper_rec,
- uowtransaction,
- table,
- state,
- state_dict,
- result,
- result.context.compiled_parameters[0],
- value_params)
+ mapper_rec,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ result,
+ result.context.compiled_parameters[0],
+ value_params)
def _emit_post_update_statements(base_mapper, uowtransaction,
- cached_connections, mapper, table, update):
+ cached_connections, mapper, table, update):
"""Emit UPDATE statements corresponding to value lists collected
by _collect_post_update_commands()."""
@@ -630,7 +632,7 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
- type_=col.type))
+ type_=col.type))
return table.update(clause)
@@ -645,13 +647,13 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
):
connection = key[0]
multiparams = [params for state, state_dict,
- params, mapper, conn in grouper]
+ params, mapper, conn in grouper]
cached_connections[connection].\
- execute(statement, multiparams)
+ execute(statement, multiparams)
def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
- mapper, table, delete):
+ mapper, table, delete):
"""Emit DELETE statements corresponding to value lists collected
by _collect_delete_commands()."""
@@ -662,14 +664,14 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(
- col == sql.bindparam(col.key, type_=col.type))
+ col == sql.bindparam(col.key, type_=col.type))
if need_version_id:
clause.clauses.append(
mapper.version_id_col ==
sql.bindparam(
- mapper.version_id_col.key,
- type_=mapper.version_id_col.type
+ mapper.version_id_col.key,
+ type_=mapper.version_id_col.type
)
)
@@ -710,7 +712,7 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
connection.execute(statement, del_objects)
if base_mapper.confirm_deleted_rows and \
- rows_matched > -1 and expected != rows_matched:
+ rows_matched > -1 and expected != rows_matched:
if only_warn:
util.warn(
"DELETE statement on table '%s' expected to "
@@ -728,15 +730,16 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
(table.description, expected, rows_matched)
)
+
def _finalize_insert_update_commands(base_mapper, uowtransaction,
- states_to_insert, states_to_update):
+ states_to_insert, states_to_update):
"""finalize state on states that have been inserted or updated,
including calling after_insert/after_update events.
"""
for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in states_to_insert + \
- states_to_update:
+ instance_key, row_switch in states_to_insert + \
+ states_to_update:
if mapper._readonly_props:
readonly = state.unmodified_intersection(
@@ -754,7 +757,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
if base_mapper.eager_defaults:
toload_now.extend(state._unloaded_non_object)
elif mapper.version_id_col is not None and \
- mapper.version_id_generator is False:
+ mapper.version_id_generator is False:
prop = mapper._columntoproperty[mapper.version_id_col]
if prop.key in state.unloaded:
toload_now.extend([prop.key])
@@ -774,7 +777,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
def _postfetch(mapper, uowtransaction, table,
- state, dict_, result, params, value_params):
+ state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
@@ -800,19 +803,19 @@ def _postfetch(mapper, uowtransaction, table,
if postfetch_cols:
state._expire_attributes(state.dict,
- [mapper._columntoproperty[c].key
- for c in postfetch_cols if c in
- mapper._columntoproperty]
- )
+ [mapper._columntoproperty[c].key
+ for c in postfetch_cols if c in
+ mapper._columntoproperty]
+ )
# synchronize newly inserted ids from one table to the next
# TODO: this still goes a little too often. would be nice to
# have definitive list of "columns that changed" here
for m, equated_pairs in mapper._table_to_equated[table]:
sync.populate(state, m, state, m,
- equated_pairs,
- uowtransaction,
- mapper.passive_updates)
+ equated_pairs,
+ uowtransaction,
+ mapper.passive_updates)
def _connections_for_states(base_mapper, uowtransaction, states):
@@ -828,7 +831,7 @@ def _connections_for_states(base_mapper, uowtransaction, states):
# to use for update
if uowtransaction.session.connection_callable:
connection_callable = \
- uowtransaction.session.connection_callable
+ uowtransaction.session.connection_callable
else:
connection = None
connection_callable = None
@@ -838,7 +841,7 @@ def _connections_for_states(base_mapper, uowtransaction, states):
connection = connection_callable(base_mapper, state.obj())
elif not connection:
connection = uowtransaction.transaction.connection(
- base_mapper)
+ base_mapper)
mapper = _state_mapper(state)
@@ -849,8 +852,8 @@ def _cached_connection_dict(base_mapper):
# dictionary of connection->connection_with_cache_options.
return util.PopulateDict(
lambda conn: conn.execution_options(
- compiled_cache=base_mapper._compiled_cache
- ))
+ compiled_cache=base_mapper._compiled_cache
+ ))
def _sort_states(states):
@@ -858,7 +861,7 @@ def _sort_states(states):
persistent = set(s for s in pending if s.key is not None)
pending.difference_update(persistent)
return sorted(pending, key=operator.attrgetter("insert_order")) + \
- sorted(persistent, key=lambda q: q.key[1])
+ sorted(persistent, key=lambda q: q.key[1])
class BulkUD(object):
@@ -877,9 +880,9 @@ class BulkUD(object):
klass = lookup[synchronize_session]
except KeyError:
raise sa_exc.ArgumentError(
- "Valid strategies for session synchronization "
- "are %s" % (", ".join(sorted(repr(x)
- for x in lookup))))
+ "Valid strategies for session synchronization "
+ "are %s" % (", ".join(sorted(repr(x)
+ for x in lookup))))
else:
return klass(*arg)
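(Editor's sketch of the strategies dispatched through this lookup; ``User`` and the open ``session`` are assumptions. The valid ``synchronize_session`` values are ``'evaluate'``, ``'fetch'`` and ``False``:

    # evaluate the criterion in Python against the identity map
    session.query(User).filter(User.name == 'ed').\
        update({"name": "edward"}, synchronize_session='evaluate')

    # or select the matched primary keys before deleting
    session.query(User).filter(User.name == 'jack').\
        delete(synchronize_session='fetch')
)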
@@ -894,12 +897,12 @@ class BulkUD(object):
query = self.query
self.context = context = query._compile_context()
if len(context.statement.froms) != 1 or \
- not isinstance(context.statement.froms[0], schema.Table):
+ not isinstance(context.statement.froms[0], schema.Table):
self.primary_table = query._only_entity_zero(
- "This operation requires only one Table or "
- "entity be specified as the target."
- ).mapper.local_table
+ "This operation requires only one Table or "
+ "entity be specified as the target."
+ ).mapper.local_table
else:
self.primary_table = context.statement.froms[0]
@@ -929,7 +932,7 @@ class BulkEvaluate(BulkUD):
evaluator_compiler = evaluator.EvaluatorCompiler(target_cls)
if query.whereclause is not None:
eval_condition = evaluator_compiler.process(
- query.whereclause)
+ query.whereclause)
else:
def eval_condition(obj):
return True
@@ -938,16 +941,16 @@ class BulkEvaluate(BulkUD):
except evaluator.UnevaluatableError:
raise sa_exc.InvalidRequestError(
- "Could not evaluate current criteria in Python. "
- "Specify 'fetch' or False for the "
- "synchronize_session parameter.")
+ "Could not evaluate current criteria in Python. "
+ "Specify 'fetch' or False for the "
+ "synchronize_session parameter.")
- #TODO: detect when the where clause is a trivial primary key match
+ # TODO: detect when the where clause is a trivial primary key match
self.matched_objects = [
- obj for (cls, pk), obj in
- query.session.identity_map.items()
- if issubclass(cls, target_cls) and
- eval_condition(obj)]
+ obj for (cls, pk), obj in
+ query.session.identity_map.items()
+ if issubclass(cls, target_cls) and
+ eval_condition(obj)]
class BulkFetch(BulkUD):
@@ -957,10 +960,10 @@ class BulkFetch(BulkUD):
query = self.query
session = query.session
select_stmt = self.context.statement.with_only_columns(
- self.primary_table.primary_key)
+ self.primary_table.primary_key)
self.matched_rows = session.execute(
- select_stmt,
- params=query._params).fetchall()
+ select_stmt,
+ params=query._params).fetchall()
class BulkUpdate(BulkUD):
@@ -981,10 +984,10 @@ class BulkUpdate(BulkUD):
def _do_exec(self):
update_stmt = sql.update(self.primary_table,
- self.context.whereclause, self.values)
+ self.context.whereclause, self.values)
self.result = self.query.session.execute(
- update_stmt, params=self.query._params)
+ update_stmt, params=self.query._params)
self.rowcount = self.result.rowcount
def _do_post(self):
@@ -1009,10 +1012,10 @@ class BulkDelete(BulkUD):
def _do_exec(self):
delete_stmt = sql.delete(self.primary_table,
- self.context.whereclause)
+ self.context.whereclause)
self.result = self.query.session.execute(delete_stmt,
- params=self.query._params)
+ params=self.query._params)
self.rowcount = self.result.rowcount
def _do_post(self):
@@ -1029,7 +1032,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
for key, value in self.values.items():
key = _attr_as_key(key)
self.value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
+ expression._literal_as_binds(value))
def _do_post_synchronize(self):
session = self.query.session
@@ -1037,11 +1040,11 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
evaluated_keys = list(self.value_evaluators.keys())
for obj in self.matched_objects:
state, dict_ = attributes.instance_state(obj),\
- attributes.instance_dict(obj)
+ attributes.instance_dict(obj)
# only evaluate unmodified attributes
to_evaluate = state.unmodified.intersection(
- evaluated_keys)
+ evaluated_keys)
for key in to_evaluate:
dict_[key] = self.value_evaluators[key](obj)
@@ -1050,8 +1053,8 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
# expire attributes with pending changes
# (there was no autoflush, so they are overwritten)
state._expire_attributes(dict_,
- set(evaluated_keys).
- difference(to_evaluate))
+ set(evaluated_keys).
+ difference(to_evaluate))
states.add(state)
session._register_altered(states)
@@ -1062,8 +1065,8 @@ class BulkDeleteEvaluate(BulkEvaluate, BulkDelete):
def _do_post_synchronize(self):
self.query.session._remove_newly_deleted(
- [attributes.instance_state(obj)
- for obj in self.matched_objects])
+ [attributes.instance_state(obj)
+ for obj in self.matched_objects])
class BulkUpdateFetch(BulkFetch, BulkUpdate):
@@ -1078,7 +1081,7 @@ class BulkUpdateFetch(BulkFetch, BulkUpdate):
attributes.instance_state(session.identity_map[identity_key])
for identity_key in [
target_mapper.identity_key_from_primary_key(
- list(primary_key))
+ list(primary_key))
for primary_key in self.matched_rows
]
if identity_key in session.identity_map
@@ -1100,7 +1103,7 @@ class BulkDeleteFetch(BulkFetch, BulkDelete):
# TODO: inline this and call remove_newly_deleted
# once
identity_key = target_mapper.identity_key_from_primary_key(
- list(primary_key))
+ list(primary_key))
if identity_key in session.identity_map:
session._remove_newly_deleted(
[attributes.instance_state(
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index fabacba05..62ea93fb3 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -39,12 +39,12 @@ class ColumnProperty(StrategizedProperty):
Column-based properties can normally be applied to the mapper's
``properties`` dictionary using the :class:`.Column` element directly.
- Use this function when the given column is not directly present within the
- mapper's selectable; examples include SQL expressions, functions, and
- scalar SELECT queries.
+ Use this function when the given column is not directly present within
+ the mapper's selectable; examples include SQL expressions, functions,
+ and scalar SELECT queries.
- Columns that aren't present in the mapper's selectable won't be persisted
- by the mapper and are effectively "read-only" attributes.
+ Columns that aren't present in the mapper's selectable won't be
+ persisted by the mapper and are effectively "read-only" attributes.
:param \*cols:
list of Column objects to be mapped.
@@ -63,8 +63,8 @@ class ColumnProperty(StrategizedProperty):
.. versionadded:: 0.6.6
:param comparator_factory: a class which extends
- :class:`.ColumnProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
+ :class:`.ColumnProperty.Comparator` which provides custom SQL
+ clause generation for comparison operations.
:param group:
a group name for this property when marked as deferred.
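(Editor's sketch of ``column_property()`` applied to a SQL expression not present in the table, per the docstring above; the ``User`` class is an assumption:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import column_property
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        firstname = Column(String(50))
        lastname = Column(String(50))
        # an expression column; read-only, never persisted by the mapper
        fullname = column_property(firstname + " " + lastname)
)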
@@ -111,12 +111,12 @@ class ColumnProperty(StrategizedProperty):
"""
self._orig_columns = [expression._labeled(c) for c in columns]
self.columns = [expression._labeled(_orm_full_deannotate(c))
- for c in columns]
+ for c in columns]
self.group = kwargs.pop('group', None)
self.deferred = kwargs.pop('deferred', False)
self.instrument = kwargs.pop('_instrument', True)
self.comparator_factory = kwargs.pop('comparator_factory',
- self.__class__.Comparator)
+ self.__class__.Comparator)
self.descriptor = kwargs.pop('descriptor', None)
self.extension = kwargs.pop('extension', None)
self.active_history = kwargs.pop('active_history', False)
@@ -145,9 +145,9 @@ class ColumnProperty(StrategizedProperty):
util.set_creation_order(self)
self.strategy_class = self._strategy_lookup(
- ("deferred", self.deferred),
- ("instrument", self.instrument)
- )
+ ("deferred", self.deferred),
+ ("instrument", self.instrument)
+ )
@property
def expression(self):
@@ -166,7 +166,7 @@ class ColumnProperty(StrategizedProperty):
comparator=self.comparator_factory(self, mapper),
parententity=mapper,
doc=self.doc
- )
+ )
def do_init(self):
super(ColumnProperty, self).do_init()
@@ -181,18 +181,18 @@ class ColumnProperty(StrategizedProperty):
def copy(self):
return ColumnProperty(
- deferred=self.deferred,
- group=self.group,
- active_history=self.active_history,
- *self.columns)
+ deferred=self.deferred,
+ group=self.group,
+ active_history=self.active_history,
+ *self.columns)
def _getcommitted(self, state, dict_, column,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
return state.get_impl(self.key).\
- get_committed_value(state, dict_, passive=passive)
+ get_committed_value(state, dict_, passive=passive)
def merge(self, session, source_state, source_dict, dest_state,
- dest_dict, load, _recursive):
+ dest_dict, load, _recursive):
if not self.instrument:
return
elif self.key in source_dict:
@@ -257,4 +257,3 @@ class ColumnProperty(StrategizedProperty):
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
-
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index c340e9856..12e11b26c 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -24,19 +24,19 @@ from itertools import chain
from . import (
attributes, interfaces, object_mapper, persistence,
exc as orm_exc, loading
- )
+)
from .base import _entity_descriptor, _is_aliased_class, \
- _is_mapped_class, _orm_columns, _generative
+ _is_mapped_class, _orm_columns, _generative
from .path_registry import PathRegistry
from .util import (
AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
- )
+)
from .. import sql, util, log, exc as sa_exc, inspect, inspection
from ..sql.expression import _interpret_as_from
from ..sql import (
- util as sql_util,
- expression, visitors
- )
+ util as sql_util,
+ expression, visitors
+)
from ..sql.base import ColumnCollection
from . import properties
@@ -45,6 +45,7 @@ __all__ = ['Query', 'QueryContext', 'aliased']
_path_registry = PathRegistry.root
+
@inspection._self_inspects
@log.class_logger
class Query(object):
@@ -124,22 +125,22 @@ class Query(object):
if entity not in d:
ext_info = inspect(entity)
if not ext_info.is_aliased_class and \
- ext_info.mapper.with_polymorphic:
+ ext_info.mapper.with_polymorphic:
if ext_info.mapper.mapped_table not in \
- self._polymorphic_adapters:
+ self._polymorphic_adapters:
self._mapper_loads_polymorphically_with(
ext_info.mapper,
sql_util.ColumnAdapter(
- ext_info.selectable,
- ext_info.mapper._equivalent_columns
+ ext_info.selectable,
+ ext_info.mapper._equivalent_columns
)
)
aliased_adapter = None
elif ext_info.is_aliased_class:
aliased_adapter = sql_util.ColumnAdapter(
- ext_info.selectable,
- ext_info.mapper._equivalent_columns
- )
+ ext_info.selectable,
+ ext_info.mapper._equivalent_columns
+ )
else:
aliased_adapter = None
@@ -163,17 +164,17 @@ class Query(object):
info = inspect(from_obj)
if hasattr(info, 'mapper') and \
- (info.is_mapper or info.is_aliased_class):
+ (info.is_mapper or info.is_aliased_class):
self._select_from_entity = from_obj
if set_base_alias:
raise sa_exc.ArgumentError(
- "A selectable (FromClause) instance is "
- "expected when the base alias is being set.")
+ "A selectable (FromClause) instance is "
+ "expected when the base alias is being set.")
fa.append(info.selectable)
elif not info.is_selectable:
raise sa_exc.ArgumentError(
- "argument is not a mapped class, mapper, "
- "aliased(), or FromClause instance.")
+ "argument is not a mapped class, mapper, "
+ "aliased(), or FromClause instance.")
else:
if isinstance(from_obj, expression.SelectBase):
from_obj = from_obj.alias()
@@ -184,11 +185,11 @@ class Query(object):
self._from_obj = tuple(fa)
if set_base_alias and \
- len(self._from_obj) == 1 and \
- isinstance(select_from_alias, expression.Alias):
+ len(self._from_obj) == 1 and \
+ isinstance(select_from_alias, expression.Alias):
equivs = self.__all_equivs()
self._from_obj_alias = sql_util.ColumnAdapter(
- self._from_obj[0], equivs)
+ self._from_obj[0], equivs)
def _reset_polymorphic_adapter(self, mapper):
for m2 in mapper._with_polymorphic_mappers:
@@ -216,11 +217,11 @@ class Query(object):
def _adapt_col_list(self, cols):
return [
- self._adapt_clause(
- expression._literal_as_text(o),
- True, True)
- for o in cols
- ]
+ self._adapt_clause(
+ expression._literal_as_text(o),
+ True, True)
+ for o in cols
+ ]
@_generative()
def _adapt_all_clauses(self):
@@ -270,18 +271,18 @@ class Query(object):
# if 'orm only', look for ORM annotations
# in the element before adapting.
if not _orm_only or \
- '_orm_adapt' in elem._annotations or \
- "parententity" in elem._annotations:
+ '_orm_adapt' in elem._annotations or \
+ "parententity" in elem._annotations:
e = adapter(elem)
if e is not None:
return e
return visitors.replacement_traverse(
- clause,
- {},
- replace
- )
+ clause,
+ {},
+ replace
+ )
def _entity_zero(self):
return self._entities[0]
@@ -311,26 +312,26 @@ class Query(object):
def _only_mapper_zero(self, rationale=None):
if len(self._entities) > 1:
raise sa_exc.InvalidRequestError(
- rationale or
- "This operation requires a Query "
- "against a single mapper."
- )
+ rationale or
+ "This operation requires a Query "
+ "against a single mapper."
+ )
return self._mapper_zero()
def _only_full_mapper_zero(self, methname):
if self._entities != [self._primary_entity]:
raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
+ "%s() can only be used against "
+ "a single mapped class." % methname)
return self._primary_entity.entity_zero
def _only_entity_zero(self, rationale=None):
if len(self._entities) > 1:
raise sa_exc.InvalidRequestError(
- rationale or
- "This operation requires a Query "
- "against a single mapper."
- )
+ rationale or
+ "This operation requires a Query "
+ "against a single mapper."
+ )
return self._entity_zero()
def __all_equivs(self):
@@ -340,7 +341,8 @@ class Query(object):
return equivs
def _get_condition(self):
- return self._no_criterion_condition("get", order_by=False, distinct=False)
+ return self._no_criterion_condition(
+ "get", order_by=False, distinct=False)
def _get_existing_condition(self):
self._no_criterion_assertion("get", order_by=False, distinct=False)
@@ -354,8 +356,8 @@ class Query(object):
self._group_by or (order_by and self._order_by) or \
(distinct and self._distinct):
raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a "
- "Query with existing criterion. " % meth)
+ "Query.%s() being called on a "
+ "Query with existing criterion. " % meth)
def _no_criterion_condition(self, meth, order_by=True, distinct=True):
self._no_criterion_assertion(meth, order_by, distinct)
@@ -369,8 +371,8 @@ class Query(object):
return
if self._order_by:
raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a "
- "Query with existing criterion. " % meth)
+ "Query.%s() being called on a "
+ "Query with existing criterion. " % meth)
self._no_criterion_condition(meth)
def _no_statement_condition(self, meth):
@@ -411,9 +413,9 @@ class Query(object):
)
def _get_options(self, populate_existing=None,
- version_check=None,
- only_load_props=None,
- refresh_state=None):
+ version_check=None,
+ only_load_props=None,
+ refresh_state=None):
if populate_existing:
self._populate_existing = populate_existing
if version_check:
@@ -441,11 +443,10 @@ class Query(object):
"""
stmt = self._compile_context(labels=self._with_labels).\
- statement
+ statement
if self._params:
stmt = stmt.params(self._params)
-
# TODO: there's no tests covering effects of
# the annotation not being there
return stmt._annotate({'no_replacement_traverse': True})
@@ -662,9 +663,9 @@ class Query(object):
@_generative(_no_clauseelement_condition)
def with_polymorphic(self,
- cls_or_mappers,
- selectable=None,
- polymorphic_on=None):
+ cls_or_mappers,
+ selectable=None,
+ polymorphic_on=None):
"""Load columns for inheriting classes.
:meth:`.Query.with_polymorphic` applies transformations
@@ -692,13 +693,13 @@ class Query(object):
if not self._primary_entity:
raise sa_exc.InvalidRequestError(
- "No primary mapper set up for this Query.")
+ "No primary mapper set up for this Query.")
entity = self._entities[0]._clone()
self._entities = [entity] + self._entities[1:]
entity.set_with_polymorphic(self,
- cls_or_mappers,
- selectable=selectable,
- polymorphic_on=polymorphic_on)
+ cls_or_mappers,
+ selectable=selectable,
+ polymorphic_on=polymorphic_on)
@_generative()
def yield_per(self, count):
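(Editor's sketch of ``Query.with_polymorphic()`` as reformatted above, assuming the usual ``Employee``/``Engineer``/``Manager`` hierarchy from the documentation:

    eng_plus_mgr = session.query(Employee).with_polymorphic(
        [Engineer, Manager])
    # subclass tables are OUTER JOINed so that Engineer and Manager
    # columns are loaded up front in a single SELECT
    eng_plus_mgr.all()
)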
@@ -717,15 +718,16 @@ class Query(object):
Also note that while :meth:`~sqlalchemy.orm.query.Query.yield_per`
will set the ``stream_results`` execution option to True, currently
- this is only understood by :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect
- which will stream results using server side cursors instead of pre-buffer
- all rows for this query. Other DBAPIs pre-buffer all rows before
- making them available.
+ this is only understood by the
+ :mod:`~sqlalchemy.dialects.postgresql.psycopg2` dialect, which will
+ stream results using server side cursors instead of pre-buffering
+ all rows for this query. Other DBAPIs pre-buffer all rows before
+ making them available.
"""
self._yield_per = count
self._execution_options = self._execution_options.union(
- {"stream_results": True})
+ {"stream_results": True})
def get(self, ident):
"""Return an instance based on the given primary key identifier,
@@ -795,9 +797,9 @@ class Query(object):
if len(ident) != len(mapper.primary_key):
raise sa_exc.InvalidRequestError(
- "Incorrect number of values in identifier to formulate "
- "primary key for query.get(); primary key columns are %s" %
- ','.join("'%s'" % c for c in mapper.primary_key))
+ "Incorrect number of values in identifier to formulate "
+ "primary key for query.get(); primary key columns are %s" %
+ ','.join("'%s'" % c for c in mapper.primary_key))
key = mapper.identity_key_from_primary_key(ident)
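A sketch of :meth:`.Query.get` against single-column and composite
primary keys (``User`` and ``UserKeyword`` are illustrative mapped
classes)::

    user = session.query(User).get(5)
    # composite primary keys take a tuple, one value per column
    assoc = session.query(UserKeyword).get((5, 10))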
@@ -839,9 +841,9 @@ class Query(object):
"""
self._correlate = self._correlate.union(
- _interpret_as_from(s)
- if s is not None else None
- for s in args)
+ _interpret_as_from(s)
+ if s is not None else None
+ for s in args)
@_generative()
def autoflush(self, setting):
@@ -900,17 +902,17 @@ class Query(object):
for prop in mapper.iterate_properties:
if isinstance(prop, properties.RelationshipProperty) and \
- prop.mapper is self._mapper_zero():
+ prop.mapper is self._mapper_zero():
property = prop
break
else:
raise sa_exc.InvalidRequestError(
- "Could not locate a property which relates instances "
- "of class '%s' to instances of class '%s'" %
- (
- self._mapper_zero().class_.__name__,
- instance.__class__.__name__)
- )
+ "Could not locate a property which relates instances "
+ "of class '%s' to instances of class '%s'" %
+ (
+ self._mapper_zero().class_.__name__,
+ instance.__class__.__name__)
+ )
return self.filter(with_parent(instance, property))
@@ -943,8 +945,8 @@ class Query(object):
"""
fromclause = self.with_labels().enable_eagerloads(False).\
- _set_enable_single_crit(False).\
- statement.correlate(None)
+ _set_enable_single_crit(False).\
+ statement.correlate(None)
q = self._from_selectable(fromclause)
if entities:
q._set_entities(entities)
@@ -1039,8 +1041,9 @@ class Query(object):
self._set_entity_selectables(self._entities[l:])
@util.pending_deprecation("0.7",
- ":meth:`.add_column` is superseded by :meth:`.add_columns`",
- False)
+ ":meth:`.add_column` is superseded "
+ "by :meth:`.add_columns`",
+ False)
def add_column(self, column):
"""Add a column expression to the list of result columns to be
returned.
@@ -1205,8 +1208,8 @@ class Query(object):
kwargs.update(args[0])
elif len(args) > 0:
raise sa_exc.ArgumentError(
- "params() takes zero or one positional argument, "
- "which is a dictionary.")
+ "params() takes zero or one positional argument, "
+ "which is a dictionary.")
self._params = self._params.copy()
self._params.update(kwargs)
@@ -1246,7 +1249,6 @@ class Query(object):
else:
self._criterion = criterion
-
def filter_by(self, **kwargs):
"""apply the given filtering criterion to a copy
of this :class:`.Query`, using keyword expressions.
@@ -1271,7 +1273,7 @@ class Query(object):
"""
clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value
- for key, value in kwargs.items()]
+ for key, value in kwargs.items()]
return self.filter(sql.and_(*clauses))
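A sketch of the keyword-to-clause translation above (``User`` is an
illustrative mapped class)::

    # equivalent to .filter(User.name == 'ed'), built via
    # _entity_descriptor against the current joinpoint
    q = session.query(User).filter_by(name='ed')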
@_generative(_no_statement_condition, _no_limit_offset)
@@ -1324,7 +1326,8 @@ class Query(object):
"""apply a HAVING criterion to the query and return the
newly resulting :class:`.Query`.
- :meth:`~.Query.having` is used in conjunction with :meth:`~.Query.group_by`.
+ :meth:`~.Query.having` is used in conjunction with
+ :meth:`~.Query.group_by`.
HAVING criterion makes it possible to use filters on aggregate
functions like COUNT, SUM, AVG, MAX, and MIN, e.g.::
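# illustrative sketch (assumes mapped User/Address classes and
# ``from sqlalchemy import func``): users with more than two addresses
q = session.query(User.id).\
    join(User.addresses).\
    group_by(User.id).\
    having(func.count(Address.id) > 2)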
@@ -1342,8 +1345,8 @@ class Query(object):
if criterion is not None and \
not isinstance(criterion, sql.ClauseElement):
raise sa_exc.ArgumentError(
- "having() argument must be of type "
- "sqlalchemy.sql.ClauseElement or string")
+ "having() argument must be of type "
+ "sqlalchemy.sql.ClauseElement or string")
criterion = self._adapt_clause(criterion, True, True)
@@ -1391,7 +1394,7 @@ class Query(object):
"""
return self._from_selectable(
- expression.union(*([self] + list(q))))
+ expression.union(*([self] + list(q))))
def union_all(self, *q):
"""Produce a UNION ALL of this Query against one or more queries.
@@ -1401,8 +1404,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.union_all(*([self] + list(q)))
- )
+ expression.union_all(*([self] + list(q)))
+ )
def intersect(self, *q):
"""Produce an INTERSECT of this Query against one or more queries.
@@ -1412,8 +1415,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.intersect(*([self] + list(q)))
- )
+ expression.intersect(*([self] + list(q)))
+ )
def intersect_all(self, *q):
"""Produce an INTERSECT ALL of this Query against one or more queries.
@@ -1423,8 +1426,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.intersect_all(*([self] + list(q)))
- )
+ expression.intersect_all(*([self] + list(q)))
+ )
def except_(self, *q):
"""Produce an EXCEPT of this Query against one or more queries.
@@ -1434,8 +1437,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.except_(*([self] + list(q)))
- )
+ expression.except_(*([self] + list(q)))
+ )
def except_all(self, *q):
"""Produce an EXCEPT ALL of this Query against one or more queries.
@@ -1445,8 +1448,8 @@ class Query(object):
"""
return self._from_selectable(
- expression.except_all(*([self] + list(q)))
- )
+ expression.except_all(*([self] + list(q)))
+ )
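A sketch of the set operations defined above (``User`` and
``session`` are illustrative)::

    q1 = session.query(User).filter(User.name == 'ed')
    q2 = session.query(User).filter(User.name == 'fred')
    q1.union(q2)        # SELECT ... UNION SELECT ...
    q1.except_(q2)      # SELECT ... EXCEPT SELECT ...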
def join(self, *props, **kwargs):
"""Create a SQL JOIN against this :class:`.Query` object's criterion
@@ -1456,8 +1459,8 @@ class Query(object):
Consider a mapping between two classes ``User`` and ``Address``,
with a relationship ``User.addresses`` representing a collection
- of ``Address`` objects associated with each ``User``. The most common
- usage of :meth:`~.Query.join` is to create a JOIN along this
+ of ``Address`` objects associated with each ``User``. The most
+ common usage of :meth:`~.Query.join` is to create a JOIN along this
relationship, using the ``User.addresses`` attribute as an indicator
for how this should occur::
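# sketch of the relationship-based join described above
# (User.addresses is the illustrative relationship attribute)
q = session.query(User).join(User.addresses)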
@@ -1683,8 +1686,8 @@ class Query(object):
:ref:`ormtutorial_joins` in the ORM tutorial.
- :ref:`inheritance_toplevel` for details on how :meth:`~.Query.join`
- is used for inheritance relationships.
+ :ref:`inheritance_toplevel` for details on how
+ :meth:`~.Query.join` is used for inheritance relationships.
:func:`.orm.join` - a standalone ORM-level join function,
used internally by :meth:`.Query.join`, which in previous
@@ -1692,13 +1695,13 @@ class Query(object):
"""
aliased, from_joinpoint = kwargs.pop('aliased', False),\
- kwargs.pop('from_joinpoint', False)
+ kwargs.pop('from_joinpoint', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
- ','.join(kwargs.keys))
+ ','.join(kwargs.keys()))
return self._join(props,
- outerjoin=False, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
+ outerjoin=False, create_aliases=aliased,
+ from_joinpoint=from_joinpoint)
def outerjoin(self, *props, **kwargs):
"""Create a left outer join against this ``Query`` object's criterion
@@ -1708,13 +1711,13 @@ class Query(object):
"""
aliased, from_joinpoint = kwargs.pop('aliased', False), \
- kwargs.pop('from_joinpoint', False)
+ kwargs.pop('from_joinpoint', False)
if kwargs:
raise TypeError("unknown arguments: %s" %
- ','.join(kwargs))
+ ','.join(kwargs))
return self._join(props,
- outerjoin=True, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
+ outerjoin=True, create_aliases=aliased,
+ from_joinpoint=from_joinpoint)
def _update_joinpoint(self, jp):
self._joinpoint = jp
@@ -1740,9 +1743,9 @@ class Query(object):
if len(keys) == 2 and \
isinstance(keys[0], (expression.FromClause,
- type, AliasedClass)) and \
+ type, AliasedClass)) and \
isinstance(keys[1], (str, expression.ClauseElement,
- interfaces.PropComparator)):
+ interfaces.PropComparator)):
# detect 2-arg form of join and
# convert to a tuple.
keys = (keys,)
@@ -1761,7 +1764,8 @@ class Query(object):
# is a little bit of legacy behavior still at work here
# which means they might be in either order. may possibly
# lock this down to (right_entity, onclause) in 0.6.
- if isinstance(arg1, (interfaces.PropComparator, util.string_types)):
+ if isinstance(
+ arg1, (interfaces.PropComparator, util.string_types)):
right_entity, onclause = arg2, arg1
else:
right_entity, onclause = arg1, arg2
@@ -1777,7 +1781,7 @@ class Query(object):
# check for q.join(Class.propname, from_joinpoint=True)
# and Class is that of the current joinpoint
elif from_joinpoint and \
- isinstance(onclause, interfaces.PropComparator):
+ isinstance(onclause, interfaces.PropComparator):
left_entity = onclause._parententity
info = inspect(self._joinpoint_zero())
@@ -1789,7 +1793,7 @@ class Query(object):
if left_mapper is left_entity:
left_entity = self._joinpoint_zero()
descriptor = _entity_descriptor(left_entity,
- onclause.key)
+ onclause.key)
onclause = descriptor
if isinstance(onclause, interfaces.PropComparator):
@@ -1804,7 +1808,7 @@ class Query(object):
left_entity = onclause._parententity
prop = onclause.property
- if not isinstance(onclause, attributes.QueryableAttribute):
+ if not isinstance(onclause, attributes.QueryableAttribute):
onclause = prop
if not create_aliases:
@@ -1829,10 +1833,9 @@ class Query(object):
raise NotImplementedError("query.join(a==b) not supported.")
self._join_left_to_right(
- left_entity,
- right_entity, onclause,
- outerjoin, create_aliases, prop)
-
+ left_entity,
+ right_entity, onclause,
+ outerjoin, create_aliases, prop)
def _join_left_to_right(self, left, right,
onclause, outerjoin, create_aliases, prop):
@@ -1848,48 +1851,49 @@ class Query(object):
if left is None:
raise sa_exc.InvalidRequestError(
- "Don't know how to join from %s; please use "
- "select_from() to establish the left "
- "entity/selectable of this join" % self._entities[0])
+ "Don't know how to join from %s; please use "
+ "select_from() to establish the left "
+ "entity/selectable of this join" % self._entities[0])
if left is right and \
not create_aliases:
raise sa_exc.InvalidRequestError(
- "Can't construct a join from %s to %s, they "
- "are the same entity" %
- (left, right))
+ "Can't construct a join from %s to %s, they "
+ "are the same entity" %
+ (left, right))
l_info = inspect(left)
r_info = inspect(right)
-
overlap = False
if not create_aliases:
right_mapper = getattr(r_info, "mapper", None)
# if the target is a joined inheritance mapping,
# be more liberal about auto-aliasing.
if right_mapper and (
- right_mapper.with_polymorphic or
- isinstance(right_mapper.mapped_table, expression.Join)
- ):
+ right_mapper.with_polymorphic or
+ isinstance(right_mapper.mapped_table, expression.Join)
+ ):
for from_obj in self._from_obj or [l_info.selectable]:
- if sql_util.selectables_overlap(l_info.selectable, from_obj) and \
- sql_util.selectables_overlap(from_obj, r_info.selectable):
+ if sql_util.selectables_overlap(
+ l_info.selectable, from_obj) and \
+ sql_util.selectables_overlap(
+ from_obj, r_info.selectable):
overlap = True
break
- elif sql_util.selectables_overlap(l_info.selectable, r_info.selectable):
+ elif sql_util.selectables_overlap(l_info.selectable,
+ r_info.selectable):
overlap = True
-
if overlap and l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
- "Can't join table/selectable '%s' to itself" %
- l_info.selectable)
+ "Can't join table/selectable '%s' to itself" %
+ l_info.selectable)
right, onclause = self._prepare_right_side(
- r_info, right, onclause,
- create_aliases,
- prop, overlap)
+ r_info, right, onclause,
+ create_aliases,
+ prop, overlap)
# if joining on a MapperProperty path,
# track the path to prevent redundant joins
@@ -1904,7 +1908,7 @@ class Query(object):
self._join_to_left(l_info, left, right, onclause, outerjoin)
def _prepare_right_side(self, r_info, right, onclause, create_aliases,
- prop, overlap):
+ prop, overlap):
info = r_info
right_mapper, right_selectable, right_is_aliased = \
@@ -1918,8 +1922,8 @@ class Query(object):
if right_mapper and prop and \
not right_mapper.common_parent(prop.mapper):
raise sa_exc.InvalidRequestError(
- "Join target %s does not correspond to "
- "the right side of join condition %s" % (right, onclause)
+ "Join target %s does not correspond to "
+ "the right side of join condition %s" % (right, onclause)
)
if not right_mapper and prop:
@@ -1929,11 +1933,11 @@ class Query(object):
if right_mapper and right is right_selectable:
if not right_selectable.is_derived_from(
- right_mapper.mapped_table):
+ right_mapper.mapped_table):
raise sa_exc.InvalidRequestError(
"Selectable '%s' is not derived from '%s'" %
(right_selectable.description,
- right_mapper.mapped_table.description))
+ right_mapper.mapped_table.description))
if isinstance(right_selectable, expression.SelectBase):
# TODO: this isn't even covered now!
@@ -1943,16 +1947,16 @@ class Query(object):
right = aliased(right_mapper, right_selectable)
aliased_entity = right_mapper and \
- not right_is_aliased and \
- (
- right_mapper.with_polymorphic and isinstance(
- right_mapper._with_polymorphic_selectable,
- expression.Alias)
- or
- overlap # test for overlap:
- # orm/inheritance/relationships.py
- # SelfReferentialM2MTest
- )
+ not right_is_aliased and \
+ (
+ right_mapper.with_polymorphic and isinstance(
+ right_mapper._with_polymorphic_selectable,
+ expression.Alias)
+ or
+ overlap # test for overlap:
+ # orm/inheritance/relationships.py
+ # SelfReferentialM2MTest
+ )
if not need_adapter and (create_aliases or aliased_entity):
right = aliased(right, flat=True)
@@ -1962,10 +1966,11 @@ class Query(object):
# apply an adapter to all subsequent filter() calls
# until reset_joinpoint() is called.
if need_adapter:
- self._filter_aliases = ORMAdapter(right,
- equivalents=right_mapper and
- right_mapper._equivalent_columns or {},
- chain_to=self._filter_aliases)
+ self._filter_aliases = ORMAdapter(
+ right,
+ equivalents=right_mapper and
+ right_mapper._equivalent_columns or {},
+ chain_to=self._filter_aliases)
# if the onclause is a ClauseElement, adapt it with any
# adapters that are in place right now
@@ -1978,12 +1983,12 @@ class Query(object):
# set are also adapted.
if aliased_entity and not create_aliases:
self._mapper_loads_polymorphically_with(
- right_mapper,
- ORMAdapter(
- right,
- equivalents=right_mapper._equivalent_columns
- )
- )
+ right_mapper,
+ ORMAdapter(
+ right,
+ equivalents=right_mapper._equivalent_columns
+ )
+ )
return right, onclause
@@ -1994,22 +1999,22 @@ class Query(object):
if self._from_obj:
replace_clause_index, clause = sql_util.find_join_source(
- self._from_obj,
- left_selectable)
+ self._from_obj,
+ left_selectable)
if clause is not None:
try:
clause = orm_join(clause,
- right,
- onclause, isouter=outerjoin)
+ right,
+ onclause, isouter=outerjoin)
except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from. "
- "Tried joining to %s, but got: %s" % (right, ae))
+ "Could not find a FROM clause to join from. "
+ "Tried joining to %s, but got: %s" % (right, ae))
self._from_obj = \
- self._from_obj[:replace_clause_index] + \
- (clause, ) + \
- self._from_obj[replace_clause_index + 1:]
+ self._from_obj[:replace_clause_index] + \
+ (clause, ) + \
+ self._from_obj[replace_clause_index + 1:]
return
if left_mapper:
@@ -2027,8 +2032,8 @@ class Query(object):
clause = orm_join(clause, right, onclause, isouter=outerjoin)
except sa_exc.ArgumentError as ae:
raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from. "
- "Tried joining to %s, but got: %s" % (right, ae))
+ "Could not find a FROM clause to join from. "
+ "Tried joining to %s, but got: %s" % (right, ae))
self._from_obj = self._from_obj + (clause,)
def _reset_joinpoint(self):
@@ -2186,14 +2191,14 @@ class Query(object):
start, stop, step = util.decode_slice(item)
if isinstance(stop, int) and \
- isinstance(start, int) and \
- stop - start <= 0:
+ isinstance(start, int) and \
+ stop - start <= 0:
return []
# perhaps we should execute a count() here so that we
# can still use LIMIT/OFFSET ?
elif (isinstance(start, int) and start < 0) \
- or (isinstance(stop, int) and stop < 0):
+ or (isinstance(stop, int) and stop < 0):
return list(self)[item]
res = self.slice(start, stop)
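A sketch of the slice handling above (``User`` is an illustrative
mapped class)::

    # ordinary slices become LIMIT/OFFSET via Query.slice()
    session.query(User).order_by(User.id)[5:10]
    # negative bounds fall back to loading the full list first
    session.query(User).order_by(User.id)[-5:]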
@@ -2310,11 +2315,11 @@ class Query(object):
statement = sql.text(statement)
if not isinstance(statement,
- (expression.TextClause,
- expression.SelectBase)):
+ (expression.TextClause,
+ expression.SelectBase)):
raise sa_exc.ArgumentError(
- "from_statement accepts text(), select(), "
- "and union() objects only.")
+ "from_statement accepts text(), select(), "
+ "and union() objects only.")
self._statement = statement
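A sketch of :meth:`.Query.from_statement` with a textual statement
(the ``users`` table and ``User`` class are illustrative)::

    from sqlalchemy import text

    q = session.query(User).from_statement(
        text("SELECT * FROM users WHERE name=:name")).params(name='ed')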
@@ -2408,16 +2413,16 @@ class Query(object):
def _connection_from_session(self, **kw):
conn = self.session.connection(
- **kw)
+ **kw)
if self._execution_options:
conn = conn.execution_options(**self._execution_options)
return conn
def _execute_and_instances(self, querycontext):
conn = self._connection_from_session(
- mapper=self._mapper_zero_or_none(),
- clause=querycontext.statement,
- close_with_result=True)
+ mapper=self._mapper_zero_or_none(),
+ clause=querycontext.statement,
+ close_with_result=True)
result = conn.execute(querycontext.statement, self._params)
return loading.instances(self, result, querycontext)
@@ -2553,7 +2558,7 @@ class Query(object):
# .with_only_columns() after we have a core select() so that
# we get just "SELECT 1" without any entities.
return sql.exists(self.add_columns('1').with_labels().
- statement.with_only_columns(['1']))
+ statement.with_only_columns(['1']))
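A sketch of the EXISTS construction above (``User`` is illustrative)::

    q = session.query(User).filter(User.name == 'fred')
    # renders SELECT EXISTS (SELECT 1 FROM users WHERE ...)
    session.query(q.exists()).scalar()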
def count(self):
"""Return a count of rows this Query would return.
@@ -2629,10 +2634,11 @@ class Query(object):
This method has several key caveats:
- * The method does **not** offer in-Python cascading of relationships - it
- is assumed that ON DELETE CASCADE/SET NULL/etc. is configured for any foreign key
- references which require it, otherwise the database may emit an
- integrity violation if foreign key references are being enforced.
+ * The method does **not** offer in-Python cascading of relationships
+ - it is assumed that ON DELETE CASCADE/SET NULL/etc. is configured
+ for any foreign key references which require it, otherwise the
+ database may emit an integrity violation if foreign key references
+ are being enforced.
After the DELETE, dependent objects in the :class:`.Session` which
were impacted by an ON DELETE may not contain the current
@@ -2641,8 +2647,8 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`. Accessing an expired object
whose row has been deleted will invoke a SELECT to locate the
- row; when the row is not found, an :class:`~sqlalchemy.orm.exc.ObjectDeletedError`
- is raised.
+ row; when the row is not found, an
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
* The :meth:`.MapperEvents.before_delete` and
:meth:`.MapperEvents.after_delete`
@@ -2657,10 +2663,10 @@ class Query(object):
:ref:`inserts_and_updates` - Core SQL tutorial
"""
- #TODO: cascades need handling.
+ # TODO: cascades need handling.
delete_op = persistence.BulkDelete.factory(
- self, synchronize_session)
+ self, synchronize_session)
delete_op.exec_()
return delete_op.rowcount
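A sketch of a bulk delete (``User`` is an illustrative mapped class;
no Python-side cascades are applied, per the caveats above)::

    affected = session.query(User).\
        filter(User.name == 'squidward').\
        delete(synchronize_session=False)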
@@ -2698,9 +2704,9 @@ class Query(object):
This method has several key caveats:
- * The method does **not** offer in-Python cascading of relationships - it
- is assumed that ON UPDATE CASCADE is configured for any foreign key
- references which require it, otherwise the database may emit an
+ * The method does **not** offer in-Python cascading of relationships
+ - it is assumed that ON UPDATE CASCADE is configured for any foreign
+ key references which require it, otherwise the database may emit an
integrity violation if foreign key references are being enforced.
After the UPDATE, dependent objects in the :class:`.Session` which
@@ -2709,16 +2715,16 @@ class Query(object):
which normally occurs upon :meth:`.Session.commit` or can be forced
by using :meth:`.Session.expire_all`.
- * As of 0.8, this method will support multiple table updates, as detailed
- in :ref:`multi_table_updates`, and this behavior does extend to support
- updates of joined-inheritance and other multiple table mappings. However,
- the **join condition of an inheritance mapper is currently not
- automatically rendered**.
- Care must be taken in any multiple-table update to explicitly include
- the joining condition between those tables, even in mappings where
- this is normally automatic.
- E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of the
- ``Engineer`` local table using criteria against the ``Employee``
+ * As of 0.8, this method will support multiple table updates, as
+ detailed in :ref:`multi_table_updates`, and this behavior does
+ extend to support updates of joined-inheritance and other multiple
+ table mappings. However, the **join condition of an inheritance
+ mapper is currently not automatically rendered**.
+ Care must be taken in any multiple-table update to explicitly
+ include the joining condition between those tables, even in mappings
+ where this is normally automatic.
+ E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of
+ the ``Engineer`` local table using criteria against the ``Employee``
local table might look like::
session.query(Engineer).\\
@@ -2740,18 +2746,17 @@ class Query(object):
"""
- #TODO: value keys need to be mapped to corresponding sql cols and
+ # TODO: value keys need to be mapped to corresponding sql cols and
# instr.attr.s to string keys
- #TODO: updates of manytoone relationships need to be converted to
+ # TODO: updates of manytoone relationships need to be converted to
# fk assignments
- #TODO: cascades need handling.
+ # TODO: cascades need handling.
update_op = persistence.BulkUpdate.factory(
- self, synchronize_session, values)
+ self, synchronize_session, values)
update_op.exec_()
return update_op.rowcount
-
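A sketch of a bulk update (``User`` is an illustrative mapped class)::

    affected = session.query(User).\
        filter(User.name == 'ed').\
        update({"fullname": "Ed Jones"}, synchronize_session='fetch')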
def _compile_context(self, labels=True):
context = QueryContext(self)
@@ -2784,13 +2789,13 @@ class Query(object):
if not context.primary_columns:
if self._only_load_props:
raise sa_exc.InvalidRequestError(
- "No column-based properties specified for "
- "refresh operation. Use session.expire() "
- "to reload collections and related items.")
+ "No column-based properties specified for "
+ "refresh operation. Use session.expire() "
+ "to reload collections and related items.")
else:
raise sa_exc.InvalidRequestError(
- "Query contains no columns with which to "
- "SELECT from.")
+ "Query contains no columns with which to "
+ "SELECT from.")
if context.multi_row_eager_loaders and self._should_nest_selectable:
context.statement = self._compound_eager_statement(context)
@@ -2805,26 +2810,26 @@ class Query(object):
if context.order_by:
order_by_col_expr = list(
- chain(*[
- sql_util.unwrap_order_by(o)
- for o in context.order_by
- ])
- )
+ chain(*[
+ sql_util.unwrap_order_by(o)
+ for o in context.order_by
+ ])
+ )
else:
context.order_by = None
order_by_col_expr = []
inner = sql.select(
- context.primary_columns + order_by_col_expr,
- context.whereclause,
- from_obj=context.froms,
- use_labels=context.labels,
- # TODO: this order_by is only needed if
- # LIMIT/OFFSET is present in self._select_args,
- # else the application on the outside is enough
- order_by=context.order_by,
- **self._select_args
- )
+ context.primary_columns + order_by_col_expr,
+ context.whereclause,
+ from_obj=context.froms,
+ use_labels=context.labels,
+ # TODO: this order_by is only needed if
+ # LIMIT/OFFSET is present in self._select_args,
+ # else the application on the outside is enough
+ order_by=context.order_by,
+ **self._select_args
+ )
for hint in self._with_hints:
inner = inner.with_hint(*hint)
@@ -2839,8 +2844,8 @@ class Query(object):
context.adapter = sql_util.ColumnAdapter(inner, equivs)
statement = sql.select(
- [inner] + context.secondary_columns,
- use_labels=context.labels)
+ [inner] + context.secondary_columns,
+ use_labels=context.labels)
statement._for_update_arg = context._for_update_arg
@@ -2850,8 +2855,8 @@ class Query(object):
# giving us a marker as to where the "splice point" of
# the join should be
from_clause = sql_util.splice_joins(
- from_clause,
- eager_join, eager_join.stop_on)
+ from_clause,
+ eager_join, eager_join.stop_on)
statement.append_from(from_clause)
@@ -2871,24 +2876,24 @@ class Query(object):
if self._distinct and context.order_by:
order_by_col_expr = list(
- chain(*[
- sql_util.unwrap_order_by(o)
- for o in context.order_by
- ])
- )
+ chain(*[
+ sql_util.unwrap_order_by(o)
+ for o in context.order_by
+ ])
+ )
context.primary_columns += order_by_col_expr
context.froms += tuple(context.eager_joins.values())
statement = sql.select(
- context.primary_columns +
- context.secondary_columns,
- context.whereclause,
- from_obj=context.froms,
- use_labels=context.labels,
- order_by=context.order_by,
- **self._select_args
- )
+ context.primary_columns +
+ context.secondary_columns,
+ context.whereclause,
+ from_obj=context.froms,
+ use_labels=context.labels,
+ order_by=context.order_by,
+ **self._select_args
+ )
statement._for_update_arg = context._for_update_arg
for hint in self._with_hints:
@@ -2920,14 +2925,15 @@ class Query(object):
single_crit = adapter.traverse(single_crit)
single_crit = self._adapt_clause(single_crit, False, False)
context.whereclause = sql.and_(
- sql.True_._ifnone(context.whereclause),
- single_crit)
+ sql.True_._ifnone(context.whereclause),
+ single_crit)
def __str__(self):
return str(self._compile_context().statement)
from ..sql.selectable import ForUpdateArg
+
class LockmodeArg(ForUpdateArg):
@classmethod
def parse_legacy_query(self, mode):
@@ -2944,10 +2950,11 @@ class LockmodeArg(ForUpdateArg):
read = False
else:
raise sa_exc.ArgumentError(
- "Unknown with_lockmode argument: %r" % mode)
+ "Unknown with_lockmode argument: %r" % mode)
return LockmodeArg(read=read, nowait=nowait)
+
class _QueryEntity(object):
"""represent an entity column returned within a Query result."""
@@ -2955,7 +2962,7 @@ class _QueryEntity(object):
if cls is _QueryEntity:
entity = args[1]
if not isinstance(entity, util.string_types) and \
- _is_mapped_class(entity):
+ _is_mapped_class(entity):
cls = _MapperEntity
elif isinstance(entity, Bundle):
cls = _BundleEntity
@@ -2989,7 +2996,7 @@ class _MapperEntity(_QueryEntity):
self.is_aliased_class = ext_info.is_aliased_class
self._with_polymorphic = ext_info.with_polymorphic_mappers
self._polymorphic_discriminator = \
- ext_info.polymorphic_on
+ ext_info.polymorphic_on
self.entity_zero = ext_info
if ext_info.is_aliased_class:
self._label_name = self.entity_zero.name
@@ -2999,7 +3006,7 @@ class _MapperEntity(_QueryEntity):
self.custom_rows = bool(self.mapper.dispatch.append_result)
def set_with_polymorphic(self, query, cls_or_mappers,
- selectable, polymorphic_on):
+ selectable, polymorphic_on):
"""Receive an update from a call to query.with_polymorphic().
Note the newer style of using a free-standing with_polymorphic()
@@ -3010,23 +3017,23 @@ class _MapperEntity(_QueryEntity):
if self.is_aliased_class:
# TODO: invalidrequest ?
raise NotImplementedError(
- "Can't use with_polymorphic() against "
- "an Aliased object"
- )
+ "Can't use with_polymorphic() against "
+ "an Aliased object"
+ )
if cls_or_mappers is None:
query._reset_polymorphic_adapter(self.mapper)
return
mappers, from_obj = self.mapper._with_polymorphic_args(
- cls_or_mappers, selectable)
+ cls_or_mappers, selectable)
self._with_polymorphic = mappers
self._polymorphic_discriminator = polymorphic_on
self.selectable = from_obj
- query._mapper_loads_polymorphically_with(self.mapper,
- sql_util.ColumnAdapter(from_obj,
- self.mapper._equivalent_columns))
+ query._mapper_loads_polymorphically_with(
+ self.mapper, sql_util.ColumnAdapter(
+ from_obj, self.mapper._equivalent_columns))
filter_fn = id
@@ -3115,7 +3122,7 @@ class _MapperEntity(_QueryEntity):
def setup_context(self, query, context):
adapter = self._get_entity_clauses(query, context)
- #if self._adapted_selectable is None:
+ # if self._adapted_selectable is None:
context.froms += (self.selectable,)
if context.order_by is False and self.mapper.order_by:
@@ -3124,10 +3131,10 @@ class _MapperEntity(_QueryEntity):
# apply adaptation to the mapper's order_by if needed.
if adapter:
context.order_by = adapter.adapt_list(
- util.to_list(
- context.order_by
- )
- )
+ util.to_list(
+ context.order_by
+ )
+ )
if self._with_polymorphic:
poly_properties = self.mapper._iterate_polymorphic_properties(
@@ -3161,6 +3168,7 @@ class _MapperEntity(_QueryEntity):
def __str__(self):
return str(self.mapper)
+
@inspection._self_inspects
class Bundle(object):
"""A grouping of SQL expressions that are returned by a :class:`.Query`
@@ -3192,7 +3200,8 @@ class Bundle(object):
bn = Bundle("mybundle", MyClass.x, MyClass.y)
- for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
+ for row in session.query(bn).filter(
+ bn.c.x == 5).filter(bn.c.y == 4):
print(row.mybundle.x, row.mybundle.y)
:param name: name of the bundle.
@@ -3206,7 +3215,7 @@ class Bundle(object):
self.exprs = exprs
self.c = self.columns = ColumnCollection()
self.columns.update((getattr(col, "key", col._label), col)
- for col in exprs)
+ for col in exprs)
self.single_entity = kw.pop('single_entity', self.single_entity)
columns = None
@@ -3225,7 +3234,8 @@ class Bundle(object):
Bundle('b3', MyClass.x, MyClass.y)
)
- q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
+ q = sess.query(b1).filter(
+ b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
.. seealso::
@@ -3266,7 +3276,8 @@ class Bundle(object):
"""
def proc(row, result):
- return util.KeyedTuple([proc(row, None) for proc in procs], labels)
+ return util.KeyedTuple(
+ [proc(row, None) for proc in procs], labels)
return proc
@@ -3318,9 +3329,9 @@ class _BundleEntity(_QueryEntity):
def adapt_to_selectable(self, query, sel):
c = _BundleEntity(query, self.bundle, setup_entities=False)
- #c._label_name = self._label_name
- #c.entity_zero = self.entity_zero
- #c.entities = self.entities
+ # c._label_name = self._label_name
+ # c.entity_zero = self.entity_zero
+ # c.entities = self.entities
for ent in self._entities:
ent.adapt_to_selectable(c, sel)
@@ -3335,14 +3346,15 @@ class _BundleEntity(_QueryEntity):
def row_processor(self, query, context, custom_rows):
procs, labels = zip(
- *[ent.row_processor(query, context, custom_rows)
- for ent in self._entities]
- )
+ *[ent.row_processor(query, context, custom_rows)
+ for ent in self._entities]
+ )
proc = self.bundle.create_row_processor(query, procs, labels)
return proc, self._label_name
+
class _ColumnEntity(_QueryEntity):
"""Column/expression based entity."""
@@ -3354,16 +3366,16 @@ class _ColumnEntity(_QueryEntity):
column = sql.literal_column(column)
self._label_name = column.name
elif isinstance(column, (
- attributes.QueryableAttribute,
- interfaces.PropComparator
- )):
+ attributes.QueryableAttribute,
+ interfaces.PropComparator
+ )):
self._label_name = column.key
column = column._query_clause_element()
else:
self._label_name = getattr(column, 'key', None)
if not isinstance(column, expression.ColumnElement) and \
- hasattr(column, '_select_iterable'):
+ hasattr(column, '_select_iterable'):
for c in column._select_iterable:
if c is column:
break
@@ -3414,7 +3426,7 @@ class _ColumnEntity(_QueryEntity):
for elem in visitors.iterate(column, {})
if 'parententity' in elem._annotations
and actual_froms.intersection(elem._from_objects)
- )
+ )
if self.entities:
self.entity_zero = list(self.entities)[0]
@@ -3456,14 +3468,14 @@ class _ColumnEntity(_QueryEntity):
return entity is self.entity_zero
else:
return not _is_aliased_class(self.entity_zero) and \
- entity.common_parent(self.entity_zero)
+ entity.common_parent(self.entity_zero)
def _resolve_expr_against_query_aliases(self, query, expr, context):
return query._adapt_clause(expr, False, True)
def row_processor(self, query, context, custom_rows):
column = self._resolve_expr_against_query_aliases(
- query, self.column, context)
+ query, self.column, context)
if context.adapter:
column = context.adapter.columns[column]
@@ -3475,7 +3487,7 @@ class _ColumnEntity(_QueryEntity):
def setup_context(self, query, context):
column = self._resolve_expr_against_query_aliases(
- query, self.column, context)
+ query, self.column, context)
context.froms += tuple(self.froms)
context.primary_columns.append(column)
@@ -3493,8 +3505,8 @@ class QueryContext(object):
if query._statement is not None:
if isinstance(query._statement, expression.SelectBase) and \
- not query._statement._textual and \
- not query._statement.use_labels:
+ not query._statement._textual and \
+ not query._statement.use_labels:
self.statement = query._statement.apply_labels()
else:
self.statement = query._statement
@@ -3516,7 +3528,7 @@ class QueryContext(object):
self.eager_joins = {}
self.create_eager_joins = []
self.propagate_options = set(o for o in query._with_options if
- o.propagate_to_loaders)
+ o.propagate_to_loaders)
self.attributes = query._attributes.copy()
@@ -3563,5 +3575,3 @@ class AliasOption(interfaces.MapperOption):
else:
alias = self.alias
query._from_obj_alias = sql_util.ColumnAdapter(alias)
-
-
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index a5327e52e..c2debda03 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -23,13 +23,15 @@ from ..sql.util import (
ClauseAdapter,
join_condition, _shallow_annotate, visit_binary_product,
_deep_deannotate, selectables_overlap
- )
+)
from ..sql import operators, expression, visitors
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY, StrategizedProperty, PropComparator
+from .interfaces import (MANYTOMANY, MANYTOONE, ONETOMANY,
+ StrategizedProperty, PropComparator)
from ..inspection import inspect
from . import mapper as mapperlib
import collections
+
def remote(expr):
"""Annotate a portion of a primaryjoin expression
with a 'remote' annotation.
@@ -47,7 +49,7 @@ def remote(expr):
"""
return _annotate_columns(expression._clause_element_as_expr(expr),
- {"remote": True})
+ {"remote": True})
def foreign(expr):
@@ -68,7 +70,7 @@ def foreign(expr):
"""
return _annotate_columns(expression._clause_element_as_expr(expr),
- {"foreign": True})
+ {"foreign": True})
@log.class_logger
@@ -90,33 +92,34 @@ class RelationshipProperty(StrategizedProperty):
_dependency_processor = None
def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- back_populates=None,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- single_parent=False, innerjoin=False,
- distinct_target_key=None,
- doc=None,
- active_history=False,
- cascade_backrefs=True,
- load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None,
- query_class=None,
- info=None):
+ secondary=None, primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ uselist=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ post_update=False,
+ cascade=False, extension=None,
+ viewonly=False, lazy=True,
+ collection_class=None, passive_deletes=False,
+ passive_updates=True, remote_side=None,
+ enable_typechecks=True, join_depth=None,
+ comparator_factory=None,
+ single_parent=False, innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=False,
+ cascade_backrefs=True,
+ load_on_pending=False,
+ strategy_class=None, _local_remote_pairs=None,
+ query_class=None,
+ info=None):
"""Provide a relationship between two mapped classes.
- This corresponds to a parent-child or associative table relationship. The
- constructed class is an instance of :class:`.RelationshipProperty`.
+ This corresponds to a parent-child or associative table relationship.
+ The constructed class is an instance of
+ :class:`.RelationshipProperty`.
A typical :func:`.relationship`, used in a classical mapping::
@@ -127,10 +130,11 @@ class RelationshipProperty(StrategizedProperty):
Some arguments accepted by :func:`.relationship` optionally accept a
callable function, which when called produces the desired value.
The callable is invoked by the parent :class:`.Mapper` at "mapper
- initialization" time, which happens only when mappers are first used, and
- is assumed to be after all mappings have been constructed. This can be
- used to resolve order-of-declaration and other dependency issues, such as
- if ``Child`` is declared below ``Parent`` in the same file::
+ initialization" time, which happens only when mappers are first used,
+ and is assumed to be after all mappings have been constructed. This
+ can be used to resolve order-of-declaration and other dependency
+ issues, such as if ``Child`` is declared below ``Parent`` in the same
+ file::
mapper(Parent, properties={
"children":relationship(lambda: Child,
@@ -138,12 +142,12 @@ class RelationshipProperty(StrategizedProperty):
})
When using the :ref:`declarative_toplevel` extension, the Declarative
- initializer allows string arguments to be passed to :func:`.relationship`.
- These string arguments are converted into callables that evaluate
- the string as Python code, using the Declarative
- class-registry as a namespace. This allows the lookup of related
- classes to be automatic via their string name, and removes the need to
- import related classes at all into the local module space::
+ initializer allows string arguments to be passed to
+ :func:`.relationship`. These string arguments are converted into
+ callables that evaluate the string as Python code, using the
+ Declarative class-registry as a namespace. This allows the lookup of
+ related classes to be automatic via their string name, and removes the
+ need to import related classes at all into the local module space::
from sqlalchemy.ext.declarative import declarative_base
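# continuation sketch (Column/Integer imports and a mapped ``Child``
# class are assumed for illustration)
Base = declarative_base()

class Parent(Base):
    __tablename__ = 'parent'
    id = Column(Integer, primary_key=True)
    children = relationship("Child", order_by="Child.id")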
@@ -156,18 +160,18 @@ class RelationshipProperty(StrategizedProperty):
.. seealso::
- :ref:`relationship_config_toplevel` - Full introductory and reference
- documentation for :func:`.relationship`.
+ :ref:`relationship_config_toplevel` - Full introductory and
+ reference documentation for :func:`.relationship`.
:ref:`orm_tutorial_relationship` - ORM tutorial introduction.
:param argument:
- a mapped class, or actual :class:`.Mapper` instance, representing the
- target of the relationship.
+ a mapped class, or actual :class:`.Mapper` instance, representing
+ the target of the relationship.
- :paramref:`~.relationship.argument` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.argument` may also be passed as a callable
+ function which is evaluated at mapper initialization time, and may
+ be passed as a Python-evaluable string when using Declarative.
.. seealso::
@@ -187,35 +191,37 @@ class RelationshipProperty(StrategizedProperty):
present in the :class:`.MetaData` collection associated with the
parent-mapped :class:`.Table`.
- The :paramref:`~.relationship.secondary` keyword argument is typically
- applied in the case where the intermediary :class:`.Table` is not
- otherwise expressed in any direct class mapping. If the "secondary" table
- is also explicitly mapped elsewhere
- (e.g. as in :ref:`association_pattern`), one should consider applying
- the :paramref:`~.relationship.viewonly` flag so that this :func:`.relationship`
- is not used for persistence operations which may conflict with those
- of the association object pattern.
+ The :paramref:`~.relationship.secondary` keyword argument is
+ typically applied in the case where the intermediary :class:`.Table`
+ is not otherwise expressed in any direct class mapping. If the
+ "secondary" table is also explicitly mapped elsewhere (e.g. as in
+ :ref:`association_pattern`), one should consider applying the
+ :paramref:`~.relationship.viewonly` flag so that this
+ :func:`.relationship` is not used for persistence operations which
+ may conflict with those of the association object pattern.
.. seealso::
- :ref:`relationships_many_to_many` - Reference example of "many to many".
+ :ref:`relationships_many_to_many` - Reference example of "many
+ to many".
:ref:`orm_tutorial_many_to_many` - ORM tutorial introduction to
many-to-many relationships.
- :ref:`self_referential_many_to_many` - Specifics on using many-to-many
- in a self-referential case.
+ :ref:`self_referential_many_to_many` - Specifics on using
+ many-to-many in a self-referential case.
:ref:`declarative_many_to_many` - Additional options when using
Declarative.
- :ref:`association_pattern` - an alternative to :paramref:`~.relationship.secondary`
- when composing association table relationships, allowing additional
- attributes to be specified on the association table.
+ :ref:`association_pattern` - an alternative to
+ :paramref:`~.relationship.secondary` when composing association
+ table relationships, allowing additional attributes to be
+ specified on the association table.
- :ref:`composite_secondary_join` - a lesser-used pattern which in some
- cases can enable complex :func:`.relationship` SQL conditions
- to be used.
+ :ref:`composite_secondary_join` - a lesser-used pattern which
+ in some cases can enable complex :func:`.relationship` SQL
+ conditions to be used.
.. versionadded:: 0.9.2 :paramref:`~.relationship.secondary` works
more effectively when referring to a :class:`.Join` instance.
@@ -251,11 +257,13 @@ class RelationshipProperty(StrategizedProperty):
:param back_populates:
- Takes a string name and has the same meaning as :paramref:`~.relationship.backref`,
- except the complementing property is **not** created automatically,
- and instead must be configured explicitly on the other mapper. The
- complementing property should also indicate :paramref:`~.relationship.back_populates`
- to this relationship to ensure proper functioning.
+ Takes a string name and has the same meaning as
+ :paramref:`~.relationship.backref`, except the complementing
+ property is **not** created automatically, and instead must be
+ configured explicitly on the other mapper. The complementing
+ property should also indicate
+ :paramref:`~.relationship.back_populates` to this relationship to
+ ensure proper functioning.
.. seealso::
@@ -309,8 +317,9 @@ class RelationshipProperty(StrategizedProperty):
examples.
:param comparator_factory:
- a class which extends :class:`.RelationshipProperty.Comparator` which
- provides custom SQL clause generation for comparison operations.
+ a class which extends :class:`.RelationshipProperty.Comparator`
+ which provides custom SQL clause generation for comparison
+ operations.
.. seealso::
@@ -325,20 +334,21 @@ class RelationshipProperty(StrategizedProperty):
keyword to the innermost SELECT statement. When left as ``None``,
the DISTINCT keyword will be applied in those cases when the target
columns do not comprise the full primary key of the target table.
- When set to ``True``, the DISTINCT keyword is applied to the innermost
- SELECT unconditionally.
+ When set to ``True``, the DISTINCT keyword is applied to the
+ innermost SELECT unconditionally.
It may be desirable to set this flag to False when the DISTINCT is
reducing performance of the innermost subquery beyond that of what
duplicate innermost rows may be causing.
- .. versionadded:: 0.8.3 - :paramref:`~.relationship.distinct_target_key`
- allows the
+ .. versionadded:: 0.8.3 -
+ :paramref:`~.relationship.distinct_target_key` allows the
subquery eager loader to apply a DISTINCT modifier to the
innermost SELECT.
- .. versionchanged:: 0.9.0 - :paramref:`~.relationship.distinct_target_key`
- now defaults to ``None``, so that the feature enables itself automatically for
+ .. versionchanged:: 0.9.0 -
+ :paramref:`~.relationship.distinct_target_key` now defaults to
+ ``None``, so that the feature enables itself automatically for
those cases where the innermost query targets a non-unique
key.
@@ -387,8 +397,9 @@ class RelationshipProperty(StrategizedProperty):
.. versionchanged:: 0.8
A multiple-foreign key join ambiguity can be resolved by
- setting the :paramref:`~.relationship.foreign_keys` parameter alone, without the
- need to explicitly set :paramref:`~.relationship.primaryjoin` as well.
+ setting the :paramref:`~.relationship.foreign_keys`
+ parameter alone, without the need to explicitly set
+ :paramref:`~.relationship.primaryjoin` as well.
2. The :class:`.Table` being mapped does not actually have
:class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
@@ -396,10 +407,11 @@ class RelationshipProperty(StrategizedProperty):
was reflected from a database that does not support foreign key
reflection (MySQL MyISAM).
- 3. The :paramref:`~.relationship.primaryjoin` argument is used to construct a non-standard
- join condition, which makes use of columns or expressions that do
- not normally refer to their "parent" column, such as a join condition
- expressed by a complex comparison using a SQL function.
+ 3. The :paramref:`~.relationship.primaryjoin` argument is used to
+ construct a non-standard join condition, which makes use of
+ columns or expressions that do not normally refer to their
+ "parent" column, such as a join condition expressed by a
+ complex comparison using a SQL function.
The :func:`.relationship` construct will raise informative
error messages that suggest the use of the
@@ -409,9 +421,10 @@ class RelationshipProperty(StrategizedProperty):
:paramref:`~.relationship.foreign_keys` parameter is usually
not needed.
- :paramref:`~.relationship.foreign_keys` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.foreign_keys` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. seealso::
@@ -419,14 +432,16 @@ class RelationshipProperty(StrategizedProperty):
:ref:`relationship_custom_foreign`
- :func:`.foreign` - allows direct annotation of the "foreign" columns
- within a :paramref:`~.relationship.primaryjoin` condition.
+ :func:`.foreign` - allows direct annotation of the "foreign"
+ columns within a :paramref:`~.relationship.primaryjoin` condition.
.. versionadded:: 0.8
The :func:`.foreign` annotation can also be applied
- directly to the :paramref:`~.relationship.primaryjoin` expression, which is an alternate,
- more specific system of describing which columns in a particular
- :paramref:`~.relationship.primaryjoin` should be considered "foreign".
+ directly to the :paramref:`~.relationship.primaryjoin`
+ expression, which is an alternate, more specific system of
+ describing which columns in a particular
+ :paramref:`~.relationship.primaryjoin` should be considered
+ "foreign".
:param info: Optional data dictionary which will be populated into the
:attr:`.MapperProperty.info` attribute of this object.
@@ -440,18 +455,19 @@ class RelationshipProperty(StrategizedProperty):
generally perform better than outer joins.
This flag can be set to ``True`` when the relationship references an
- object via many-to-one using local foreign keys that are not nullable,
- or when the reference is one-to-one or a collection that is guaranteed
- to have one or at least one entry.
+ object via many-to-one using local foreign keys that are not
+ nullable, or when the reference is one-to-one or a collection that
+ is guaranteed to have one or at least one entry.
- If the joined-eager load is chained onto an existing LEFT OUTER JOIN,
- ``innerjoin=True`` will be bypassed and the join will continue to
- chain as LEFT OUTER JOIN so that the results don't change. As an alternative,
- specify the value ``"nested"``. This will instead nest the join
- on the right side, e.g. using the form "a LEFT OUTER JOIN (b JOIN c)".
+ If the joined-eager load is chained onto an existing LEFT OUTER
+ JOIN, ``innerjoin=True`` will be bypassed and the join will continue
+ to chain as LEFT OUTER JOIN so that the results don't change. As an
+ alternative, specify the value ``"nested"``. This will instead nest
+ the join on the right side, e.g. using the form "a LEFT OUTER JOIN
+ (b JOIN c)".
- .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support
- nesting of eager "inner" joins.
+ .. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to
+ support nesting of eager "inner" joins.
.. seealso::
@@ -479,8 +495,8 @@ class RelationshipProperty(StrategizedProperty):
how the related items should be loaded. Default value is
``select``. Values include:
- * ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement, or identity map
+ * ``select`` - items should be loaded lazily when the property is
+ first accessed, using a separate SELECT statement, or identity map
fetch for simple many-to-one references.
* ``immediate`` - items should be loaded as the parents are loaded,
@@ -493,8 +509,9 @@ class RelationshipProperty(StrategizedProperty):
:paramref:`~.relationship.innerjoin` parameter.
* ``subquery`` - items should be loaded "eagerly" as the parents are
- loaded, using one additional SQL statement, which issues a JOIN to a
- subquery of the original statement, for each collection requested.
+ loaded, using one additional SQL statement, which issues a JOIN to
+ a subquery of the original statement, for each collection
+ requested.
* ``noload`` - no loading should occur at any time. This is to
support "write-only" attributes, or attributes which are
@@ -523,35 +540,35 @@ class RelationshipProperty(StrategizedProperty):
Indicates loading behavior for transient or pending parent objects.
When set to ``True``, causes the lazy-loader to
- issue a query for a parent object that is not persistent, meaning it has
- never been flushed. This may take effect for a pending object when
- autoflush is disabled, or for a transient object that has been
+ issue a query for a parent object that is not persistent, meaning it
+ has never been flushed. This may take effect for a pending object
+ when autoflush is disabled, or for a transient object that has been
"attached" to a :class:`.Session` but is not part of its pending
collection.
- The :paramref:`~.relationship.load_on_pending` flag does not improve behavior
- when the ORM is used normally - object references should be constructed
- at the object level, not at the foreign key level, so that they
- are present in an ordinary way before a flush proceeds. This flag
- is not intended for general use.
+ The :paramref:`~.relationship.load_on_pending` flag does not improve
+ behavior when the ORM is used normally - object references should be
+ constructed at the object level, not at the foreign key level, so
+ that they are present in an ordinary way before a flush proceeds.
+ This flag is not intended for general use.
.. seealso::
- :meth:`.Session.enable_relationship_loading` - this method establishes
- "load on pending" behavior for the whole object, and also allows
- loading on objects that remain transient or detached.
+ :meth:`.Session.enable_relationship_loading` - this method
+ establishes "load on pending" behavior for the whole object, and
+ also allows loading on objects that remain transient or
+ detached.
:param order_by:
indicates the ordering that should be applied when loading these
- items. :paramref:`~.relationship.order_by` is expected to refer to one
- of the :class:`.Column`
- objects to which the target class is mapped, or
- the attribute itself bound to the target class which refers
- to the column.
+ items. :paramref:`~.relationship.order_by` is expected to refer to
+ one of the :class:`.Column` objects to which the target class is
+ mapped, or the attribute itself bound to the target class which
+ refers to the column.
- :paramref:`~.relationship.order_by` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.order_by` may also be passed as a callable
+ function which is evaluated at mapper initialization time, and may
+ be passed as a Python-evaluable string when using Declarative.
:param passive_deletes=False:
Indicates loading behavior during delete operations.
@@ -640,12 +657,13 @@ class RelationshipProperty(StrategizedProperty):
join of this child object against the parent object, or in a
many-to-many relationship the join of the primary object to the
association table. By default, this value is computed based on the
- foreign key relationships of the parent and child tables (or association
- table).
+ foreign key relationships of the parent and child tables (or
+ association table).
- :paramref:`~.relationship.primaryjoin` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.primaryjoin` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. seealso::
@@ -655,15 +673,16 @@ class RelationshipProperty(StrategizedProperty):
used for self-referential relationships, indicates the column or
list of columns that form the "remote side" of the relationship.
- :paramref:`.relationship.remote_side` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`.relationship.remote_side` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. versionchanged:: 0.8
The :func:`.remote` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "remote".
+ directly to the ``primaryjoin`` expression, which is an
+ alternate, more specific system of describing which columns in a
+ particular ``primaryjoin`` should be considered "remote".
.. seealso::
@@ -671,8 +690,8 @@ class RelationshipProperty(StrategizedProperty):
:paramref:`~.relationship.remote_side`
is used to configure self-referential relationships.
- :func:`.remote` - an annotation function that accomplishes the same
- purpose as :paramref:`~.relationship.remote_side`, typically
+ :func:`.remote` - an annotation function that accomplishes the
+ same purpose as :paramref:`~.relationship.remote_side`, typically
when a custom :paramref:`~.relationship.primaryjoin` condition
is used.
@@ -685,18 +704,19 @@ class RelationshipProperty(StrategizedProperty):
.. seealso::
- :ref:`dynamic_relationship` - Introduction to "dynamic" relationship
- loaders.
+ :ref:`dynamic_relationship` - Introduction to "dynamic"
+ relationship loaders.
:param secondaryjoin:
a SQL expression that will be used as the join of
an association table to the child object. By default, this value is
- computed based on the foreign key relationships of the association and
- child tables.
+ computed based on the foreign key relationships of the association
+ and child tables.
- :paramref:`~.relationship.secondaryjoin` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
+ :paramref:`~.relationship.secondaryjoin` may also be passed as a
+ callable function which is evaluated at mapper initialization time,
+ and may be passed as a Python-evaluable string when using
+ Declarative.
.. seealso::
@@ -709,9 +729,9 @@ class RelationshipProperty(StrategizedProperty):
should be treated either as one-to-one or one-to-many. Its usage
is optional, except for :func:`.relationship` constructs which
are many-to-one or many-to-many and also
- specify the ``delete-orphan`` cascade option. The :func:`.relationship`
- construct itself will raise an error instructing when this option
- is required.
+ specify the ``delete-orphan`` cascade option. The
+ :func:`.relationship` construct itself will raise an error
+ instructing when this option is required.
.. seealso::
@@ -726,33 +746,35 @@ class RelationshipProperty(StrategizedProperty):
of the relationship - one to many forms a list, many to one
forms a scalar, many to many is a list. If a scalar is desired
where normally a list would be present, such as a bi-directional
- one-to-one relationship, set :paramref:`~.relationship.uselist` to False.
+ one-to-one relationship, set :paramref:`~.relationship.uselist` to
+ False.
The :paramref:`~.relationship.uselist` flag is also available on an
- existing :func:`.relationship` construct as a read-only attribute, which
- can be used to determine if this :func:`.relationship` deals with
- collections or scalar attributes::
+ existing :func:`.relationship` construct as a read-only attribute,
+ which can be used to determine if this :func:`.relationship` deals
+ with collections or scalar attributes::
>>> User.addresses.property.uselist
True
.. seealso::
- :ref:`relationships_one_to_one` - Introduction to the "one to one"
- relationship pattern, which is typically when the
+ :ref:`relationships_one_to_one` - Introduction to the "one to
+ one" relationship pattern, which is typically when the
:paramref:`~.relationship.uselist` flag is needed.
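A sketch of the bi-directional one-to-one case, with hypothetical
``Parent``/``Child`` classes::

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)

        # scalar attribute on both sides instead of a list
        child = relationship("Child", uselist=False,
                             backref="parent")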
:param viewonly=False:
when set to True, the relationship is used only for loading objects,
and not for any persistence operation. A :func:`.relationship`
which specifies :paramref:`~.relationship.viewonly` can work
- with a wider range of SQL operations within the :paramref:`~.relationship.primaryjoin`
- condition, including operations that feature the use of
- a variety of comparison operators as well as SQL functions such
- as :func:`~.sql.expression.cast`. The :paramref:`~.relationship.viewonly`
- flag is also of general use when defining any kind of :func:`~.relationship`
- that doesn't represent the full set of related objects, to prevent
- modifications of the collection from resulting in persistence operations.
+ with a wider range of SQL operations within the
+ :paramref:`~.relationship.primaryjoin` condition, including
+ operations that feature the use of a variety of comparison operators
+ as well as SQL functions such as :func:`~.sql.expression.cast`. The
+ :paramref:`~.relationship.viewonly` flag is also of general use when
+ defining any kind of :func:`~.relationship` that doesn't represent
+ the full set of related objects, to prevent modifications of the
+ collection from resulting in persistence operations.
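As one illustration, a load-only collection whose join condition uses
a comparison operator that could not be mirrored by persistence
operations (names hypothetical)::

    recent_addresses = relationship(
        "Address",
        viewonly=True,
        primaryjoin="and_(User.id == Address.user_id, "
                    "Address.id > 100)")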
"""
@@ -784,7 +806,7 @@ class RelationshipProperty(StrategizedProperty):
self.extension = extension
self.load_on_pending = load_on_pending
self.comparator_factory = comparator_factory or \
- RelationshipProperty.Comparator
+ RelationshipProperty.Comparator
self.comparator = self.comparator_factory(self, None)
util.set_creation_order(self)
@@ -799,7 +821,7 @@ class RelationshipProperty(StrategizedProperty):
self._reverse_property = set()
self.cascade = cascade if cascade is not False \
- else "save-update, merge"
+ else "save-update, merge"
self.order_by = order_by
@@ -808,8 +830,8 @@ class RelationshipProperty(StrategizedProperty):
if self.back_populates:
if backref:
raise sa_exc.ArgumentError(
- "backref and back_populates keyword arguments "
- "are mutually exclusive")
+ "backref and back_populates keyword arguments "
+ "are mutually exclusive")
self.backref = None
else:
self.backref = backref
@@ -821,14 +843,14 @@ class RelationshipProperty(StrategizedProperty):
comparator=self.comparator_factory(self, mapper),
parententity=mapper,
doc=self.doc,
- )
+ )
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.RelationshipProperty` attributes.
- See the documentation for :class:`.PropComparator` for a brief overview
- of ORM level operator definition.
+ See the documentation for :class:`.PropComparator` for a brief
+ overview of ORM level operator definition.
See also:
@@ -846,7 +868,8 @@ class RelationshipProperty(StrategizedProperty):
_of_type = None
- def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
+ def __init__(
+ self, prop, parentmapper, adapt_to_entity=None, of_type=None):
"""Construction of :class:`.RelationshipProperty.Comparator`
is internal to the ORM's attribute mechanics.
@@ -860,7 +883,7 @@ class RelationshipProperty(StrategizedProperty):
def adapt_to_entity(self, adapt_to_entity):
return self.__class__(self.property, self._parentmapper,
adapt_to_entity=adapt_to_entity,
- of_type=self._of_type)
+ of_type=self._of_type)
@util.memoized_property
def mapper(self):
@@ -891,10 +914,10 @@ class RelationshipProperty(StrategizedProperty):
of_type = None
pj, sj, source, dest, \
- secondary, target_adapter = self.property._create_joins(
- source_selectable=adapt_from,
- source_polymorphic=True,
- of_type=of_type)
+ secondary, target_adapter = self.property._create_joins(
+ source_selectable=adapt_from,
+ source_polymorphic=True,
+ of_type=of_type)
if sj is not None:
return pj & sj
else:
@@ -909,10 +932,10 @@ class RelationshipProperty(StrategizedProperty):
"""
return RelationshipProperty.Comparator(
- self.property,
- self._parentmapper,
- adapt_to_entity=self._adapt_to_entity,
- of_type=cls)
+ self.property,
+ self._parentmapper,
+ adapt_to_entity=self._adapt_to_entity,
+ of_type=cls)
def in_(self, other):
"""Produce an IN clause - this is not implemented
@@ -920,8 +943,9 @@ class RelationshipProperty(StrategizedProperty):
"""
raise NotImplementedError('in_() not yet supported for '
- 'relationships. For a simple many-to-one, use '
- 'in_() against the set of foreign key values.')
+ 'relationships. For a simple '
+ 'many-to-one, use in_() against '
+ 'the set of foreign key values.')
__hash__ = None
@@ -967,21 +991,23 @@ class RelationshipProperty(StrategizedProperty):
return ~self._criterion_exists()
else:
return _orm_annotate(self.property._optimized_compare(
- None, adapt_source=self.adapter))
+ None, adapt_source=self.adapter))
elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a colle"
- "ction to an object or collection; use "
- "contains() to test for membership.")
+ raise sa_exc.InvalidRequestError(
+ "Can't compare a collection to an object or collection; "
+ "use contains() to test for membership.")
else:
- return _orm_annotate(self.property._optimized_compare(other,
- adapt_source=self.adapter))
+ return _orm_annotate(
+ self.property._optimized_compare(
+ other, adapt_source=self.adapter))
def _criterion_exists(self, criterion=None, **kwargs):
if getattr(self, '_of_type', None):
info = inspect(self._of_type)
target_mapper, to_selectable, is_aliased_class = \
info.mapper, info.selectable, info.is_aliased_class
- if self.property._is_self_referential and not is_aliased_class:
+ if self.property._is_self_referential and not \
+ is_aliased_class:
to_selectable = to_selectable.alias()
single_crit = target_mapper._single_table_criterion
@@ -1000,9 +1026,10 @@ class RelationshipProperty(StrategizedProperty):
source_selectable = None
pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
+ self.property._create_joins(
+ dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
for k in kwargs:
crit = getattr(self.property.mapper.class_, k) == kwargs[k]
@@ -1019,7 +1046,8 @@ class RelationshipProperty(StrategizedProperty):
else:
j = _orm_annotate(pj, exclude=self.property.remote_side)
- if criterion is not None and target_adapter and not is_aliased_class:
+ if criterion is not None and target_adapter and not \
+ is_aliased_class:
# limit this adapter to annotated only?
criterion = target_adapter.traverse(criterion)
@@ -1082,9 +1110,9 @@ class RelationshipProperty(StrategizedProperty):
"""
if not self.property.uselist:
raise sa_exc.InvalidRequestError(
- "'any()' not implemented for scalar "
- "attributes. Use has()."
- )
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
return self._criterion_exists(criterion, **kwargs)
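Typical usage of ``any()`` and its scalar counterpart ``has()``,
assuming hypothetical ``User``/``Address`` mappings::

    # collection side: renders EXISTS with a correlated subquery
    session.query(User).filter(
        User.addresses.any(Address.email.like('%@example.com')))

    # scalar (many-to-one) side
    session.query(Address).filter(
        Address.user.has(User.name == 'ed'))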
@@ -1118,8 +1146,8 @@ class RelationshipProperty(StrategizedProperty):
"""
if self.property.uselist:
raise sa_exc.InvalidRequestError(
- "'has()' not implemented for collections. "
- "Use any().")
+ "'has()' not implemented for collections. "
+ "Use any().")
return self._criterion_exists(criterion, **kwargs)
def contains(self, other, **kwargs):
@@ -1180,10 +1208,10 @@ class RelationshipProperty(StrategizedProperty):
"""
if not self.property.uselist:
raise sa_exc.InvalidRequestError(
- "'contains' not implemented for scalar "
- "attributes. Use ==")
- clause = self.property._optimized_compare(other,
- adapt_source=self.adapter)
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
+ clause = self.property._optimized_compare(
+ other, adapt_source=self.adapter)
if self.property.secondaryjoin is not None:
clause.negation_clause = \
@@ -1197,8 +1225,10 @@ class RelationshipProperty(StrategizedProperty):
def state_bindparam(x, state, col):
o = state.obj() # strong ref
- return sql.bindparam(x, unique=True, callable_=lambda: \
- self.property.mapper._get_committed_attr_by_column(o, col))
+ return sql.bindparam(
+ x, unique=True, callable_=lambda:
+ self.property.mapper.
+ _get_committed_attr_by_column(o, col))
def adapt(col):
if self.adapter:
@@ -1214,12 +1244,12 @@ class RelationshipProperty(StrategizedProperty):
for (x, y) in self.property.local_remote_pairs])
criterion = sql.and_(*[x == y for (x, y) in
- zip(
- self.property.mapper.primary_key,
- self.property.\
- mapper.\
- primary_key_from_instance(other))
- ])
+ zip(
+ self.property.mapper.primary_key,
+ self.property.
+ mapper.
+ primary_key_from_instance(other))
+ ])
return ~self._criterion_exists(criterion)
def __ne__(self, other):
@@ -1264,13 +1294,14 @@ class RelationshipProperty(StrategizedProperty):
if isinstance(other, (util.NoneType, expression.Null)):
if self.property.direction == MANYTOONE:
return sql.or_(*[x != None for x in
- self.property._calculated_foreign_keys])
+ self.property._calculated_foreign_keys])
else:
return self._criterion_exists()
elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection"
- " to an object or collection; use "
- "contains() to test for membership.")
+ raise sa_exc.InvalidRequestError(
+ "Can't compare a collection"
+ " to an object or collection; use "
+ "contains() to test for membership.")
else:
return self.__negated_contains_or_equals(other)
@@ -1281,50 +1312,53 @@ class RelationshipProperty(StrategizedProperty):
return self.prop
def compare(self, op, value,
- value_is_parent=False,
- alias_secondary=True):
+ value_is_parent=False,
+ alias_secondary=True):
if op == operators.eq:
if value is None:
if self.uselist:
return ~sql.exists([1], self.primaryjoin)
else:
- return self._optimized_compare(None,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
+ return self._optimized_compare(
+ None,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
else:
- return self._optimized_compare(value,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
+ return self._optimized_compare(
+ value,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
else:
return op(self.comparator, value)
def _optimized_compare(self, value, value_is_parent=False,
- adapt_source=None,
- alias_secondary=True):
+ adapt_source=None,
+ alias_secondary=True):
if value is not None:
value = attributes.instance_state(value)
- return self._lazy_strategy.lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary,
- adapt_source=adapt_source)
+ return self._lazy_strategy.lazy_clause(
+ value,
+ reverse_direction=not value_is_parent,
+ alias_secondary=alias_secondary,
+ adapt_source=adapt_source)
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
def merge(self,
- session,
- source_state,
- source_dict,
- dest_state,
- dest_dict,
- load, _recursive):
+ session,
+ source_state,
+ source_dict,
+ dest_state,
+ dest_dict,
+ load, _recursive):
if load:
for r in self._reverse_property:
if (source_state, r) in _recursive:
return
- if not "merge" in self._cascade:
+ if "merge" not in self._cascade:
return
if self.key not in source_dict:
@@ -1332,7 +1366,7 @@ class RelationshipProperty(StrategizedProperty):
if self.uselist:
instances = source_state.get_impl(self.key).\
- get(source_state, source_dict)
+ get(source_state, source_dict)
if hasattr(instances, '_sa_adapter'):
# convert collections to adapters to get a true iterator
instances = instances._sa_adapter
@@ -1351,18 +1385,18 @@ class RelationshipProperty(StrategizedProperty):
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
+ load=load, _recursive=_recursive)
if obj is not None:
dest_list.append(obj)
if not load:
coll = attributes.init_state_collection(dest_state,
- dest_dict, self.key)
+ dest_dict, self.key)
for c in dest_list:
coll.append_without_event(c)
else:
- dest_state.get_impl(self.key)._set_iterable(dest_state,
- dest_dict, dest_list)
+ dest_state.get_impl(self.key)._set_iterable(
+ dest_state, dest_dict, dest_list)
else:
current = source_dict[self.key]
if current is not None:
@@ -1370,7 +1404,7 @@ class RelationshipProperty(StrategizedProperty):
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
+ load=load, _recursive=_recursive)
else:
obj = None
@@ -1378,10 +1412,10 @@ class RelationshipProperty(StrategizedProperty):
dest_dict[self.key] = obj
else:
dest_state.get_impl(self.key).set(dest_state,
- dest_dict, obj, None)
+ dest_dict, obj, None)
def _value_as_iterable(self, state, dict_, key,
- passive=attributes.PASSIVE_OFF):
+ passive=attributes.PASSIVE_OFF):
"""Return a list of tuples (state, obj) for the given
key.
@@ -1402,7 +1436,7 @@ class RelationshipProperty(StrategizedProperty):
def cascade_iterator(self, type_, state, dict_,
visited_states, halt_on=None):
- #assert type_ in self._cascade
+ # assert type_ in self._cascade
# only actively lazy load on the 'delete' cascade
if type_ != 'delete' or self.passive_deletes:
@@ -1412,11 +1446,11 @@ class RelationshipProperty(StrategizedProperty):
if type_ == 'save-update':
tuples = state.manager[self.key].impl.\
- get_all_pending(state, dict_)
+ get_all_pending(state, dict_)
else:
tuples = self._value_as_iterable(state, dict_, self.key,
- passive=passive)
+ passive=passive)
skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
not in self._cascade
@@ -1444,12 +1478,12 @@ class RelationshipProperty(StrategizedProperty):
if not instance_mapper.isa(self.mapper.class_manager.mapper):
raise AssertionError("Attribute '%s' on class '%s' "
- "doesn't handle objects "
- "of type '%s'" % (
- self.key,
- self.parent.class_,
- c.__class__
- ))
+ "doesn't handle objects "
+ "of type '%s'" % (
+ self.key,
+ self.parent.class_,
+ c.__class__
+ ))
visited_states.add(instance_state)
@@ -1461,16 +1495,19 @@ class RelationshipProperty(StrategizedProperty):
other._reverse_property.add(self)
if not other.mapper.common_parent(self.parent):
- raise sa_exc.ArgumentError('reverse_property %r on '
- 'relationship %s references relationship %s, which '
- 'does not reference mapper %s' % (key, self, other,
- self.parent))
+ raise sa_exc.ArgumentError(
+ 'reverse_property %r on '
+ 'relationship %s references relationship %s, which '
+ 'does not reference mapper %s' %
+ (key, self, other, self.parent))
+
if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
- raise sa_exc.ArgumentError('%s and back-reference %s are '
- 'both of the same direction %r. Did you mean to '
- 'set remote_side on the many-to-one side ?'
- % (other, self, self.direction))
+ == other.direction:
+ raise sa_exc.ArgumentError(
+ '%s and back-reference %s are '
+ 'both of the same direction %r. Did you mean to '
+ 'set remote_side on the many-to-one side?' %
+ (other, self, self.direction))
@util.memoized_property
def mapper(self):
@@ -1481,20 +1518,21 @@ class RelationshipProperty(StrategizedProperty):
"""
if util.callable(self.argument) and \
- not isinstance(self.argument, (type, mapperlib.Mapper)):
+ not isinstance(self.argument, (type, mapperlib.Mapper)):
argument = self.argument()
else:
argument = self.argument
if isinstance(argument, type):
mapper_ = mapperlib.class_mapper(argument,
- configure=False)
+ configure=False)
elif isinstance(self.argument, mapperlib.Mapper):
mapper_ = argument
else:
- raise sa_exc.ArgumentError("relationship '%s' expects "
- "a class or a mapper argument (received: %s)"
- % (self.key, type(argument)))
+ raise sa_exc.ArgumentError(
+ "relationship '%s' expects "
+ "a class or a mapper argument (received: %s)"
+ % (self.key, type(argument)))
return mapper_
@util.memoized_property
@@ -1516,7 +1554,6 @@ class RelationshipProperty(StrategizedProperty):
super(RelationshipProperty, self).do_init()
self._lazy_strategy = self._get_strategy((("lazy", "select"),))
-
def _process_dependent_arguments(self):
"""Convert incoming configuration arguments to their
proper form.
@@ -1530,7 +1567,7 @@ class RelationshipProperty(StrategizedProperty):
for attr in (
'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side',
- ):
+ ):
attr_value = getattr(self, attr)
if util.callable(attr_value):
setattr(self, attr, attr_value())
@@ -1548,44 +1585,43 @@ class RelationshipProperty(StrategizedProperty):
# remote_side are all columns, not strings.
if self.order_by is not False and self.order_by is not None:
self.order_by = [
- expression._only_column_elements(x, "order_by")
- for x in
- util.to_list(self.order_by)]
+ expression._only_column_elements(x, "order_by")
+ for x in
+ util.to_list(self.order_by)]
self._user_defined_foreign_keys = \
util.column_set(
- expression._only_column_elements(x, "foreign_keys")
- for x in util.to_column_set(
- self._user_defined_foreign_keys
- ))
+ expression._only_column_elements(x, "foreign_keys")
+ for x in util.to_column_set(
+ self._user_defined_foreign_keys
+ ))
self.remote_side = \
util.column_set(
- expression._only_column_elements(x, "remote_side")
- for x in
- util.to_column_set(self.remote_side))
+ expression._only_column_elements(x, "remote_side")
+ for x in
+ util.to_column_set(self.remote_side))
self.target = self.mapper.mapped_table
-
def _setup_join_conditions(self):
self._join_condition = jc = JoinCondition(
- parent_selectable=self.parent.mapped_table,
- child_selectable=self.mapper.mapped_table,
- parent_local_selectable=self.parent.local_table,
- child_local_selectable=self.mapper.local_table,
- primaryjoin=self.primaryjoin,
- secondary=self.secondary,
- secondaryjoin=self.secondaryjoin,
- parent_equivalents=self.parent._equivalent_columns,
- child_equivalents=self.mapper._equivalent_columns,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- local_remote_pairs=self.local_remote_pairs,
- remote_side=self.remote_side,
- self_referential=self._is_self_referential,
- prop=self,
- support_sync=not self.viewonly,
- can_be_synced_fn=self._columns_are_mapped
+ parent_selectable=self.parent.mapped_table,
+ child_selectable=self.mapper.mapped_table,
+ parent_local_selectable=self.parent.local_table,
+ child_local_selectable=self.mapper.local_table,
+ primaryjoin=self.primaryjoin,
+ secondary=self.secondary,
+ secondaryjoin=self.secondaryjoin,
+ parent_equivalents=self.parent._equivalent_columns,
+ child_equivalents=self.mapper._equivalent_columns,
+ consider_as_foreign_keys=self._user_defined_foreign_keys,
+ local_remote_pairs=self.local_remote_pairs,
+ remote_side=self.remote_side,
+ self_referential=self._is_self_referential,
+ prop=self,
+ support_sync=not self.viewonly,
+ can_be_synced_fn=self._columns_are_mapped
)
self.primaryjoin = jc.deannotated_primaryjoin
self.secondaryjoin = jc.deannotated_secondaryjoin
@@ -1601,17 +1637,17 @@ class RelationshipProperty(StrategizedProperty):
"""Test that this relationship is legal, warn about
inheritance conflicts."""
- if not self.is_primary() \
- and not mapperlib.class_mapper(
- self.parent.class_,
- configure=False).has_property(self.key):
- raise sa_exc.ArgumentError("Attempting to assign a new "
- "relationship '%s' to a non-primary mapper on "
- "class '%s'. New relationships can only be added "
- "to the primary mapper, i.e. the very first mapper "
- "created for class '%s' " % (self.key,
- self.parent.class_.__name__,
- self.parent.class_.__name__))
+ if not self.is_primary() and not mapperlib.class_mapper(
+ self.parent.class_,
+ configure=False).has_property(self.key):
+ raise sa_exc.ArgumentError(
+ "Attempting to assign a new "
+ "relationship '%s' to a non-primary mapper on "
+ "class '%s'. New relationships can only be added "
+ "to the primary mapper, i.e. the very first mapper "
+ "created for class '%s' " %
+ (self.key, self.parent.class_.__name__,
+ self.parent.class_.__name__))
# check for conflicting relationship() on superclass
if not self.parent.concrete:
@@ -1646,28 +1682,28 @@ class RelationshipProperty(StrategizedProperty):
and (self.direction is MANYTOMANY or self.direction
is MANYTOONE):
raise sa_exc.ArgumentError(
- 'On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
+ 'On %s, delete-orphan cascade is not supported '
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
if self.direction is MANYTOONE and self.passive_deletes:
util.warn("On %s, 'passive_deletes' is normally configured "
"on one-to-many, one-to-one, many-to-many "
"relationships only."
- % self)
+ % self)
if self.passive_deletes == 'all' and \
- ("delete" in cascade or
- "delete-orphan" in cascade):
+ ("delete" in cascade or
+ "delete-orphan" in cascade):
raise sa_exc.ArgumentError(
- "On %s, can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade" % self)
+ "On %s, can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade" % self)
if cascade.delete_orphan:
self.mapper.primary_mapper()._delete_orphans.append(
- (self.key, self.parent.class_)
- )
+ (self.key, self.parent.class_)
+ )
def _columns_are_mapped(self, *cols):
"""Return True if all columns in the given collection are
@@ -1697,13 +1733,14 @@ class RelationshipProperty(StrategizedProperty):
mapper = self.mapper.primary_mapper()
check = set(mapper.iterate_to_root()).\
- union(mapper.self_and_descendants)
+ union(mapper.self_and_descendants)
for m in check:
if m.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, m))
+ raise sa_exc.ArgumentError(
+ "Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" %
+ (backref_key, self, m))
# determine primaryjoin/secondaryjoin for the
# backref. Use the one we had, so that
@@ -1713,13 +1750,16 @@ class RelationshipProperty(StrategizedProperty):
# for many to many, just switch primaryjoin/
# secondaryjoin. use the annotated
# pj/sj on the _join_condition.
- pj = kwargs.pop('primaryjoin',
- self._join_condition.secondaryjoin_minus_local)
- sj = kwargs.pop('secondaryjoin',
- self._join_condition.primaryjoin_minus_local)
+ pj = kwargs.pop(
+ 'primaryjoin',
+ self._join_condition.secondaryjoin_minus_local)
+ sj = kwargs.pop(
+ 'secondaryjoin',
+ self._join_condition.primaryjoin_minus_local)
else:
- pj = kwargs.pop('primaryjoin',
- self._join_condition.primaryjoin_reverse_remote)
+ pj = kwargs.pop(
+ 'primaryjoin',
+ self._join_condition.primaryjoin_reverse_remote)
sj = kwargs.pop('secondaryjoin', None)
if sj:
raise sa_exc.InvalidRequestError(
@@ -1728,7 +1768,7 @@ class RelationshipProperty(StrategizedProperty):
)
foreign_keys = kwargs.pop('foreign_keys',
- self._user_defined_foreign_keys)
+ self._user_defined_foreign_keys)
parent = self.parent.primary_mapper()
kwargs.setdefault('viewonly', self.viewonly)
kwargs.setdefault('post_update', self.post_update)
@@ -1765,8 +1805,8 @@ class RelationshipProperty(StrategizedProperty):
return self.mapper.common_parent(self.parent)
def _create_joins(self, source_polymorphic=False,
- source_selectable=None, dest_polymorphic=False,
- dest_selectable=None, of_type=None):
+ source_selectable=None, dest_polymorphic=False,
+ dest_selectable=None, of_type=None):
if source_selectable is None:
if source_polymorphic and self.parent.with_polymorphic:
source_selectable = self.parent._with_polymorphic_selectable
@@ -1799,7 +1839,8 @@ class RelationshipProperty(StrategizedProperty):
if dest_selectable is None:
dest_selectable = self.mapper.local_table
return (primaryjoin, secondaryjoin, source_selectable,
- dest_selectable, secondary, target_adapter)
+ dest_selectable, secondary, target_adapter)
+
def _annotate_columns(element, annotations):
def clone(elem):
@@ -1815,23 +1856,23 @@ def _annotate_columns(element, annotations):
class JoinCondition(object):
def __init__(self,
- parent_selectable,
- child_selectable,
- parent_local_selectable,
- child_local_selectable,
- primaryjoin=None,
- secondary=None,
- secondaryjoin=None,
- parent_equivalents=None,
- child_equivalents=None,
- consider_as_foreign_keys=None,
- local_remote_pairs=None,
- remote_side=None,
- self_referential=False,
- prop=None,
- support_sync=True,
- can_be_synced_fn=lambda *c: True
- ):
+ parent_selectable,
+ child_selectable,
+ parent_local_selectable,
+ child_local_selectable,
+ primaryjoin=None,
+ secondary=None,
+ secondaryjoin=None,
+ parent_equivalents=None,
+ child_equivalents=None,
+ consider_as_foreign_keys=None,
+ local_remote_pairs=None,
+ remote_side=None,
+ self_referential=False,
+ prop=None,
+ support_sync=True,
+ can_be_synced_fn=lambda *c: True
+ ):
self.parent_selectable = parent_selectable
self.parent_local_selectable = parent_local_selectable
self.child_selectable = child_selectable
@@ -1865,26 +1906,26 @@ class JoinCondition(object):
return
log = self.prop.logger
log.info('%s setup primary join %s', self.prop,
- self.primaryjoin)
+ self.primaryjoin)
log.info('%s setup secondary join %s', self.prop,
- self.secondaryjoin)
+ self.secondaryjoin)
log.info('%s synchronize pairs [%s]', self.prop,
- ','.join('(%s => %s)' % (l, r) for (l, r) in
- self.synchronize_pairs))
+ ','.join('(%s => %s)' % (l, r) for (l, r) in
+ self.synchronize_pairs))
log.info('%s secondary synchronize pairs [%s]', self.prop,
- ','.join('(%s => %s)' % (l, r) for (l, r) in
- self.secondary_synchronize_pairs or []))
+ ','.join('(%s => %s)' % (l, r) for (l, r) in
+ self.secondary_synchronize_pairs or []))
log.info('%s local/remote pairs [%s]', self.prop,
- ','.join('(%s / %s)' % (l, r) for (l, r) in
- self.local_remote_pairs))
+ ','.join('(%s / %s)' % (l, r) for (l, r) in
+ self.local_remote_pairs))
log.info('%s remote columns [%s]', self.prop,
- ','.join('%s' % col for col in self.remote_columns)
- )
+ ','.join('%s' % col for col in self.remote_columns)
+ )
log.info('%s local columns [%s]', self.prop,
- ','.join('%s' % col for col in self.local_columns)
- )
+ ','.join('%s' % col for col in self.local_columns)
+ )
log.info('%s relationship direction %s', self.prop,
- self.direction)
+ self.direction)
def _determine_joins(self):
"""Determine the 'primaryjoin' and 'secondaryjoin' attributes,
@@ -1896,9 +1937,9 @@ class JoinCondition(object):
"""
if self.secondaryjoin is not None and self.secondary is None:
raise sa_exc.ArgumentError(
- "Property %s specified with secondary "
- "join condition but "
- "no secondary argument" % self.prop)
+ "Property %s specified with secondary "
+ "join condition but "
+ "no secondary argument" % self.prop)
# find a join between the given mapper's mapped table and
# the given table. will try the mapper's local table first
@@ -1935,47 +1976,47 @@ class JoinCondition(object):
)
except sa_exc.NoForeignKeysError:
if self.secondary is not None:
- raise sa_exc.NoForeignKeysError("Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are no foreign keys "
- "linking these tables via secondary table '%s'. "
- "Ensure that referencing columns are associated "
- "with a ForeignKey or ForeignKeyConstraint, or "
- "specify 'primaryjoin' and 'secondaryjoin' "
- "expressions."
- % (self.prop, self.secondary))
+ raise sa_exc.NoForeignKeysError(
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are no foreign keys "
+ "linking these tables via secondary table '%s'. "
+ "Ensure that referencing columns are associated "
+ "with a ForeignKey or ForeignKeyConstraint, or "
+ "specify 'primaryjoin' and 'secondaryjoin' "
+ "expressions." % (self.prop, self.secondary))
else:
- raise sa_exc.NoForeignKeysError("Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are no foreign keys "
- "linking these tables. "
- "Ensure that referencing columns are associated "
- "with a ForeignKey or ForeignKeyConstraint, or "
- "specify a 'primaryjoin' expression."
- % self.prop)
+ raise sa_exc.NoForeignKeysError(
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are no foreign keys "
+ "linking these tables. "
+ "Ensure that referencing columns are associated "
+ "with a ForeignKey or ForeignKeyConstraint, or "
+ "specify a 'primaryjoin' expression." % self.prop)
except sa_exc.AmbiguousForeignKeysError:
if self.secondary is not None:
raise sa_exc.AmbiguousForeignKeysError(
- "Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are multiple foreign key "
- "paths linking the tables via secondary table '%s'. "
- "Specify the 'foreign_keys' "
- "argument, providing a list of those columns which "
- "should be counted as containing a foreign key "
- "reference from the secondary table to each of the "
- "parent and child tables."
- % (self.prop, self.secondary))
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are multiple foreign key "
+ "paths linking the tables via secondary table '%s'. "
+ "Specify the 'foreign_keys' "
+ "argument, providing a list of those columns which "
+ "should be counted as containing a foreign key "
+ "reference from the secondary table to each of the "
+ "parent and child tables."
+ % (self.prop, self.secondary))
else:
raise sa_exc.AmbiguousForeignKeysError(
- "Could not determine join "
- "condition between parent/child tables on "
- "relationship %s - there are multiple foreign key "
- "paths linking the tables. Specify the "
- "'foreign_keys' argument, providing a list of those "
- "columns which should be counted as containing a "
- "foreign key reference to the parent table."
- % self.prop)
+ "Could not determine join "
+ "condition between parent/child tables on "
+ "relationship %s - there are multiple foreign key "
+ "paths linking the tables. Specify the "
+ "'foreign_keys' argument, providing a list of those "
+ "columns which should be counted as containing a "
+ "foreign key reference to the parent table."
+ % self.prop)
@property
def primaryjoin_minus_local(self):
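The remedy these messages point to looks roughly like the following
sketch, with a hypothetical ``Address`` carrying two foreign keys into
``user``::

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        billing_user_id = Column(Integer, ForeignKey('user.id'))

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)

        # disambiguate the two candidate FK paths
        addresses = relationship("Address",
                                 foreign_keys="[Address.user_id]")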
@@ -1983,7 +2024,8 @@ class JoinCondition(object):
@property
def secondaryjoin_minus_local(self):
- return _deep_deannotate(self.secondaryjoin, values=("local", "remote"))
+ return _deep_deannotate(self.secondaryjoin,
+ values=("local", "remote"))
@util.memoized_property
def primaryjoin_reverse_remote(self):
@@ -2009,12 +2051,12 @@ class JoinCondition(object):
v['remote'] = True
return element._with_annotations(v)
return visitors.replacement_traverse(
- self.primaryjoin, {}, replace)
+ self.primaryjoin, {}, replace)
else:
if self._has_foreign_annotations:
# TODO: coverage
return _deep_deannotate(self.primaryjoin,
- values=("local", "remote"))
+ values=("local", "remote"))
else:
return _deep_deannotate(self.primaryjoin)
@@ -2071,7 +2113,7 @@ class JoinCondition(object):
def is_foreign(a, b):
if isinstance(a, schema.Column) and \
- isinstance(b, schema.Column):
+ isinstance(b, schema.Column):
if a.references(b):
return a
elif b.references(a):
@@ -2085,7 +2127,7 @@ class JoinCondition(object):
def visit_binary(binary):
if not isinstance(binary.left, sql.ColumnElement) or \
- not isinstance(binary.right, sql.ColumnElement):
+ not isinstance(binary.right, sql.ColumnElement):
return
if "foreign" not in binary.left._annotations and \
@@ -2094,10 +2136,10 @@ class JoinCondition(object):
if col is not None:
if col.compare(binary.left):
binary.left = binary.left._annotate(
- {"foreign": True})
+ {"foreign": True})
elif col.compare(binary.right):
binary.right = binary.right._annotate(
- {"foreign": True})
+ {"foreign": True})
self.primaryjoin = visitors.cloned_traverse(
self.primaryjoin,
@@ -2123,25 +2165,26 @@ class JoinCondition(object):
def visit_binary(binary):
c, f = binary.left, binary.right
if (
- isinstance(c, expression.ColumnClause) and \
- isinstance(f, expression.ColumnClause) and \
- pt.is_derived_from(c.table) and \
- pt.is_derived_from(f.table) and \
- mt.is_derived_from(c.table) and \
+ isinstance(c, expression.ColumnClause) and
+ isinstance(f, expression.ColumnClause) and
+ pt.is_derived_from(c.table) and
+ pt.is_derived_from(f.table) and
+ mt.is_derived_from(c.table) and
mt.is_derived_from(f.table)
):
result[0] = True
visitors.traverse(
- self.primaryjoin,
- {},
- {"binary": visit_binary}
- )
+ self.primaryjoin,
+ {},
+ {"binary": visit_binary}
+ )
return result[0]
def _tables_overlap(self):
"""Return True if parent/child tables have some overlap."""
- return selectables_overlap(self.parent_selectable, self.child_selectable)
+ return selectables_overlap(
+ self.parent_selectable, self.child_selectable)
def _annotate_remote(self):
"""Annotate the primaryjoin and secondaryjoin
@@ -2172,9 +2215,9 @@ class JoinCondition(object):
if self.secondary.c.contains_column(element):
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, repl)
+ self.primaryjoin, {}, repl)
self.secondaryjoin = visitors.replacement_traverse(
- self.secondaryjoin, {}, repl)
+ self.secondaryjoin, {}, repl)
def _annotate_selfref(self, fn):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2190,13 +2233,13 @@ class JoinCondition(object):
binary.left = binary.left._annotate({"remote": True})
if fn(binary.right) and not equated:
binary.right = binary.right._annotate(
- {"remote": True})
+ {"remote": True})
else:
self._warn_non_column_elements()
self.primaryjoin = visitors.cloned_traverse(
- self.primaryjoin, {},
- {"binary": visit_binary})
+ self.primaryjoin, {},
+ {"binary": visit_binary})
def _annotate_remote_from_args(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2207,9 +2250,9 @@ class JoinCondition(object):
if self._local_remote_pairs:
if self._remote_side:
raise sa_exc.ArgumentError(
- "remote_side argument is redundant "
- "against more detailed _local_remote_side "
- "argument.")
+ "remote_side argument is redundant "
+ "against more detailed _local_remote_side "
+ "argument.")
remote_side = [r for (l, r) in self._local_remote_pairs]
else:
@@ -2222,7 +2265,7 @@ class JoinCondition(object):
if element in remote_side:
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, repl)
+ self.primaryjoin, {}, repl)
def _annotate_remote_with_overlap(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2233,9 +2276,9 @@ class JoinCondition(object):
"""
def visit_binary(binary):
binary.left, binary.right = proc_left_right(binary.left,
- binary.right)
+ binary.right)
binary.right, binary.left = proc_left_right(binary.right,
- binary.left)
+ binary.left)
def proc_left_right(left, right):
if isinstance(left, expression.ColumnClause) and \
@@ -2249,8 +2292,8 @@ class JoinCondition(object):
return left, right
self.primaryjoin = visitors.cloned_traverse(
- self.primaryjoin, {},
- {"binary": visit_binary})
+ self.primaryjoin, {},
+ {"binary": visit_binary})
def _annotate_remote_distinct_selectables(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
@@ -2260,14 +2303,13 @@ class JoinCondition(object):
"""
def repl(element):
if self.child_selectable.c.contains_column(element) and \
- (
- not self.parent_local_selectable.c.\
- contains_column(element)
- or self.child_local_selectable.c.\
- contains_column(element)):
+ (not self.parent_local_selectable.c.
+ contains_column(element) or
+ self.child_local_selectable.c.
+ contains_column(element)):
return element._annotate({"remote": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, repl)
+ self.primaryjoin, {}, repl)
def _warn_non_column_elements(self):
util.warn(
@@ -2293,7 +2335,7 @@ class JoinCondition(object):
if self._local_remote_pairs:
local_side = util.column_set([l for (l, r)
- in self._local_remote_pairs])
+ in self._local_remote_pairs])
else:
local_side = util.column_set(self.parent_selectable.c)
@@ -2302,20 +2344,20 @@ class JoinCondition(object):
elem in local_side:
return elem._annotate({"local": True})
self.primaryjoin = visitors.replacement_traverse(
- self.primaryjoin, {}, locals_
- )
+ self.primaryjoin, {}, locals_
+ )
def _check_remote_side(self):
if not self.local_remote_pairs:
- raise sa_exc.ArgumentError('Relationship %s could '
- 'not determine any unambiguous local/remote column '
- 'pairs based on join condition and remote_side '
- 'arguments. '
- 'Consider using the remote() annotation to '
- 'accurately mark those elements of the join '
- 'condition that are on the remote side of '
- 'the relationship.'
- % (self.prop, ))
+ raise sa_exc.ArgumentError(
+ 'Relationship %s could '
+ 'not determine any unambiguous local/remote column '
+ 'pairs based on join condition and remote_side '
+ 'arguments. '
+ 'Consider using the remote() annotation to '
+ 'accurately mark those elements of the join '
+ 'condition that are on the remote side of '
+ 'the relationship.' % (self.prop, ))
def _check_foreign_cols(self, join_condition, primary):
"""Check the foreign key columns collected and emit error
@@ -2324,7 +2366,7 @@ class JoinCondition(object):
can_sync = False
foreign_cols = self._gather_columns_with_annotation(
- join_condition, "foreign")
+ join_condition, "foreign")
has_foreign = bool(foreign_cols)
@@ -2342,13 +2384,13 @@ class JoinCondition(object):
# (not just ==), perhaps they need to turn on "viewonly=True".
if self.support_sync and has_foreign and not can_sync:
err = "Could not locate any simple equality expressions "\
- "involving locally mapped foreign key columns for "\
- "%s join condition "\
- "'%s' on relationship %s." % (
- primary and 'primary' or 'secondary',
- join_condition,
- self.prop
- )
+ "involving locally mapped foreign key columns for "\
+ "%s join condition "\
+ "'%s' on relationship %s." % (
+ primary and 'primary' or 'secondary',
+ join_condition,
+ self.prop
+ )
err += \
" Ensure that referencing columns are associated "\
"with a ForeignKey or ForeignKeyConstraint, or are "\
@@ -2359,11 +2401,11 @@ class JoinCondition(object):
raise sa_exc.ArgumentError(err)
else:
err = "Could not locate any relevant foreign key columns "\
- "for %s join condition '%s' on relationship %s." % (
- primary and 'primary' or 'secondary',
- join_condition,
- self.prop
- )
+ "for %s join condition '%s' on relationship %s." % (
+ primary and 'primary' or 'secondary',
+ join_condition,
+ self.prop
+ )
err += \
' Ensure that referencing columns are associated '\
'with a ForeignKey or ForeignKeyConstraint, or are '\
@@ -2384,12 +2426,12 @@ class JoinCondition(object):
# fk collection which suggests ONETOMANY.
onetomany_fk = targetcols.intersection(
- self.foreign_key_columns)
+ self.foreign_key_columns)
# fk collection which suggests MANYTOONE.
manytoone_fk = parentcols.intersection(
- self.foreign_key_columns)
+ self.foreign_key_columns)
if onetomany_fk and manytoone_fk:
# fks on both sides. test for overlap of local/remote
@@ -2401,23 +2443,23 @@ class JoinCondition(object):
# 1. columns that are both remote and FK suggest
# onetomany.
onetomany_local = self._gather_columns_with_annotation(
- self.primaryjoin, "remote", "foreign")
+ self.primaryjoin, "remote", "foreign")
# 2. columns that are FK but are not remote (e.g. local)
# suggest manytoone.
manytoone_local = set([c for c in
- self._gather_columns_with_annotation(
- self.primaryjoin,
- "foreign")
- if "remote" not in c._annotations])
+ self._gather_columns_with_annotation(
+ self.primaryjoin,
+ "foreign")
+ if "remote" not in c._annotations])
# 3. if both collections are present, remove columns that
# refer to themselves. This is for the case of
# and_(Me.id == Me.remote_id, Me.version == Me.version)
if onetomany_local and manytoone_local:
self_equated = self.remote_columns.intersection(
- self.local_columns
- )
+ self.local_columns
+ )
onetomany_local = onetomany_local.difference(self_equated)
manytoone_local = manytoone_local.difference(self_equated)
@@ -2444,10 +2486,11 @@ class JoinCondition(object):
elif manytoone_fk:
self.direction = MANYTOONE
else:
- raise sa_exc.ArgumentError("Can't determine relationship "
- "direction for relationship '%s' - foreign "
- "key columns are present in neither the parent "
- "nor the child's mapped tables" % self.prop)
+ raise sa_exc.ArgumentError(
+ "Can't determine relationship "
+ "direction for relationship '%s' - foreign "
+ "key columns are present in neither the parent "
+ "nor the child's mapped tables" % self.prop)
def _deannotate_pairs(self, collection):
"""provide deannotation for the various lists of
@@ -2457,7 +2500,7 @@ class JoinCondition(object):
"""
return [(x._deannotate(), y._deannotate())
- for x, y in collection]
+ for x, y in collection]
def _setup_pairs(self):
sync_pairs = []
@@ -2521,12 +2564,12 @@ class JoinCondition(object):
def _gather_join_annotations(self, annotation):
s = set(
self._gather_columns_with_annotation(
- self.primaryjoin, annotation)
+ self.primaryjoin, annotation)
)
if self.secondaryjoin is not None:
s.update(
self._gather_columns_with_annotation(
- self.secondaryjoin, annotation)
+ self.secondaryjoin, annotation)
)
return set([x._deannotate() for x in s])
@@ -2538,9 +2581,9 @@ class JoinCondition(object):
])
def join_targets(self, source_selectable,
- dest_selectable,
- aliased,
- single_crit=None):
+ dest_selectable,
+ aliased,
+ single_crit=None):
"""Given a source and destination selectable, create a
join between them.
@@ -2556,8 +2599,8 @@ class JoinCondition(object):
# its internal structure remains fixed
# regardless of context.
dest_selectable = _shallow_annotate(
- dest_selectable,
- {'no_replacement_traverse': True})
+ dest_selectable,
+ {'no_replacement_traverse': True})
primaryjoin, secondaryjoin, secondary = self.primaryjoin, \
self.secondaryjoin, self.secondary
@@ -2579,24 +2622,26 @@ class JoinCondition(object):
primary_aliasizer = ClauseAdapter(secondary)
secondary_aliasizer = \
ClauseAdapter(dest_selectable,
- equivalents=self.child_equivalents).\
- chain(primary_aliasizer)
+ equivalents=self.child_equivalents).\
+ chain(primary_aliasizer)
if source_selectable is not None:
primary_aliasizer = \
ClauseAdapter(secondary).\
- chain(ClauseAdapter(source_selectable,
+ chain(ClauseAdapter(
+ source_selectable,
equivalents=self.parent_equivalents))
secondaryjoin = \
secondary_aliasizer.traverse(secondaryjoin)
else:
- primary_aliasizer = ClauseAdapter(dest_selectable,
- exclude_fn=_ColInAnnotations("local"),
- equivalents=self.child_equivalents)
+ primary_aliasizer = ClauseAdapter(
+ dest_selectable,
+ exclude_fn=_ColInAnnotations("local"),
+ equivalents=self.child_equivalents)
if source_selectable is not None:
primary_aliasizer.chain(
ClauseAdapter(source_selectable,
- exclude_fn=_ColInAnnotations("remote"),
- equivalents=self.parent_equivalents))
+ exclude_fn=_ColInAnnotations("remote"),
+ equivalents=self.parent_equivalents))
secondary_aliasizer = None
primaryjoin = primary_aliasizer.traverse(primaryjoin)
@@ -2605,7 +2650,7 @@ class JoinCondition(object):
else:
target_adapter = None
return primaryjoin, secondaryjoin, secondary, \
- target_adapter, dest_selectable
+ target_adapter, dest_selectable
def create_lazy_clause(self, reverse_direction=False):
binds = util.column_dict()
@@ -2625,7 +2670,7 @@ class JoinCondition(object):
def col_to_bind(col):
if (reverse_direction and col in lookup) or \
- (not reverse_direction and "local" in col._annotations):
+ (not reverse_direction and "local" in col._annotations):
if col in lookup:
for tobind, equated in lookup[col]:
if equated in binds:
@@ -2639,13 +2684,13 @@ class JoinCondition(object):
lazywhere = self.primaryjoin
if self.secondaryjoin is None or not reverse_direction:
lazywhere = visitors.replacement_traverse(
- lazywhere, {}, col_to_bind)
+ lazywhere, {}, col_to_bind)
if self.secondaryjoin is not None:
secondaryjoin = self.secondaryjoin
if reverse_direction:
secondaryjoin = visitors.replacement_traverse(
- secondaryjoin, {}, col_to_bind)
+ secondaryjoin, {}, col_to_bind)
lazywhere = sql.and_(lazywhere, secondaryjoin)
bind_to_col = dict((binds[col].key, col) for col in binds)
@@ -2655,11 +2700,13 @@ class JoinCondition(object):
return lazywhere, bind_to_col, equated_columns
+
class _ColInAnnotations(object):
"""Seralizable equivalent to:
lambda c: "name" in c._annotations
"""
+
def __init__(self, name):
self.name = name
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index e3be9ddae..71648d126 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -59,8 +59,8 @@ class scoped_session(object):
if scope is not None:
if self.registry.has():
raise sa_exc.InvalidRequestError(
- "Scoped session is already present; "
- "no new arguments may be specified.")
+ "Scoped session is already present; "
+ "no new arguments may be specified.")
else:
sess = self.session_factory(**kw)
self.registry.set(sess)
@@ -97,8 +97,8 @@ class scoped_session(object):
if self.registry.has():
warn('At least one scoped session is already present. '
- ' configure() can not affect sessions that have '
- 'already been created.')
+ ' configure() cannot affect sessions that have '
+ 'already been created.')
self.session_factory.configure(**kwargs)
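The ordering constraint behind that warning, sketched with hypothetical
engines::

    Session = scoped_session(sessionmaker())

    # fine: no session exists under this scope yet
    Session.configure(bind=engine)

    session = Session()

    # too late for this scope's session; it was already created
    Session.configure(bind=other_engine)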
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 9ce988a12..036045dba 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -7,30 +7,31 @@
"""Provides the Session class and related utilities."""
-
import weakref
from .. import util, sql, engine, exc as sa_exc
from ..sql import util as sql_util, expression
from . import (
SessionExtension, attributes, exc, query,
loading, identity
- )
+)
from ..inspection import inspect
from .base import (
object_mapper, class_mapper,
_class_to_mapper, _state_mapper, object_state,
_none_set, state_str, instance_str
- )
+)
from .unitofwork import UOWTransaction
from . import state as statelib
import sys
-__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker']
+__all__ = ['Session', 'SessionTransaction',
+ 'SessionExtension', 'sessionmaker']
_sessions = weakref.WeakValueDictionary()
"""Weak-referencing dictionary of :class:`.Session` objects.
"""
+
def _state_session(state):
"""Given an :class:`.InstanceState`, return the :class:`.Session`
associated, if any.
@@ -43,7 +44,6 @@ def _state_session(state):
return None
-
class _SessionClassMethods(object):
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@@ -81,6 +81,7 @@ COMMITTED = util.symbol('COMMITTED')
DEACTIVE = util.symbol('DEACTIVE')
CLOSED = util.symbol('CLOSED')
+
class SessionTransaction(object):
"""A :class:`.Session`-level transaction.
@@ -185,20 +186,20 @@ class SessionTransaction(object):
return self.session is not None and self._state is ACTIVE
def _assert_active(self, prepared_ok=False,
- rollback_ok=False,
- deactive_ok=False,
- closed_msg="This transaction is closed"):
+ rollback_ok=False,
+ deactive_ok=False,
+ closed_msg="This transaction is closed"):
if self._state is COMMITTED:
raise sa_exc.InvalidRequestError(
- "This session is in 'committed' state; no further "
- "SQL can be emitted within this transaction."
- )
+ "This session is in 'committed' state; no further "
+ "SQL can be emitted within this transaction."
+ )
elif self._state is PREPARED:
if not prepared_ok:
raise sa_exc.InvalidRequestError(
- "This session is in 'prepared' state; no further "
- "SQL can be emitted within this transaction."
- )
+ "This session is in 'prepared' state; no further "
+ "SQL can be emitted within this transaction."
+ )
elif self._state is DEACTIVE:
if not deactive_ok and not rollback_ok:
if self._rollback_exception:
@@ -215,7 +216,7 @@ class SessionTransaction(object):
"This Session's transaction has been rolled back "
"by a nested rollback() call. To begin a new "
"transaction, issue Session.rollback() first."
- )
+ )
elif self._state is CLOSED:
raise sa_exc.ResourceClosedError(closed_msg)
@@ -240,7 +241,7 @@ class SessionTransaction(object):
if self._parent is None:
raise sa_exc.InvalidRequestError(
"Transaction %s is not on the active transaction list" % (
- upto))
+ upto))
return (self,) + self._parent._iterate_parents(upto)
def _take_snapshot(self):
@@ -274,7 +275,7 @@ class SessionTransaction(object):
for s in set(self._deleted).union(self.session._deleted):
if s.deleted:
- #assert s in self._deleted
+ # assert s in self._deleted
del s.deleted
self.session._update_impl(s, discard_existing=True)
@@ -326,7 +327,7 @@ class SessionTransaction(object):
transaction = conn.begin()
self._connections[conn] = self._connections[conn.engine] = \
- (conn, transaction, conn is not bind)
+ (conn, transaction, conn is not bind)
self.session.dispatch.after_begin(self.session, self, conn)
return conn
@@ -354,9 +355,9 @@ class SessionTransaction(object):
self.session.flush()
else:
raise exc.FlushError(
- "Over 100 subsequent flushes have occurred within "
- "session.commit() - is an after_flush() hook "
- "creating new objects?")
+ "Over 100 subsequent flushes have occurred within "
+ "session.commit() - is an after_flush() hook "
+ "creating new objects?")
if self._parent is None and self.session.twophase:
try:
@@ -410,9 +411,9 @@ class SessionTransaction(object):
# if items were added, deleted, or mutated
# here, we need to re-restore the snapshot
util.warn(
- "Session's state has been changed on "
- "a non-active transaction - this state "
- "will be discarded.")
+ "Session's state has been changed on "
+ "a non-active transaction - this state "
+ "will be discarded.")
self._restore_snapshot(dirty_only=self.nested)
self.close()
@@ -486,7 +487,7 @@ class Session(_SessionClassMethods):
'scalar')
def __init__(self, bind=None, autoflush=True, expire_on_commit=True,
- _enable_transaction_accounting=True,
+ _enable_transaction_accounting=True,
autocommit=False, twophase=False,
weak_identity_map=True, binds=None, extension=None,
info=None,
@@ -501,16 +502,16 @@ class Session(_SessionClassMethods):
.. warning::
- The autocommit flag is **not for general use**, and if it is used,
- queries should only be invoked within the span of a
- :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing
+ The autocommit flag is **not for general use**, and if it is
+ used, queries should only be invoked within the span of a
+ :meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing
queries outside of a demarcated transaction is a legacy mode
of usage, and can in some cases lead to concurrent connection
checkouts.
Defaults to ``False``. When ``True``, the
- :class:`.Session` does not keep a persistent transaction running, and
- will acquire connections from the engine on an as-needed basis,
+ :class:`.Session` does not keep a persistent transaction running,
+ and will acquire connections from the engine on an as-needed basis,
returning them immediately after their use. Flushes will begin and
commit (or possibly rollback) their own transaction if no
transaction is present. When using this mode, the
@@ -525,8 +526,8 @@ class Session(_SessionClassMethods):
:meth:`~.Session.flush` call to this ``Session`` before proceeding.
This is a convenience feature so that :meth:`~.Session.flush` need
not be called repeatedly in order for database queries to retrieve
- results. It's typical that ``autoflush`` is used in conjunction with
- ``autocommit=False``. In this scenario, explicit calls to
+ results. It's typical that ``autoflush`` is used in conjunction
+ with ``autocommit=False``. In this scenario, explicit calls to
:meth:`~.Session.flush` are rarely needed; you usually only need to
call :meth:`~.Session.commit` (which flushes) to finalize changes.
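A short sketch of that interplay (``User`` and the engine are
hypothetical)::

    session = Session(bind=engine, autoflush=True, autocommit=False)
    session.add(User(name='ed'))

    # the pending INSERT is flushed before this SELECT is emitted
    ed = session.query(User).filter_by(name='ed').one()

    session.commit()   # flushes once more, then commits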
@@ -542,8 +543,8 @@ class Session(_SessionClassMethods):
:class:`.Engine` or :class:`.Connection` objects. Operations which
proceed relative to a particular :class:`.Mapper` will consult this
dictionary for the direct :class:`.Mapper` instance as
- well as the mapper's ``mapped_table`` attribute in order to locate a
- connectable to use. The full resolution is described in the
+ well as the mapper's ``mapped_table`` attribute in order to locate
+ a connectable to use. The full resolution is described in the
:meth:`.Session.get_bind`.
Usage looks like::
@@ -566,8 +567,8 @@ class Session(_SessionClassMethods):
legacy-only flag which when ``False`` disables *all* 0.5-style
object accounting on transaction boundaries, including auto-expiry
of instances on rollback and commit, maintenance of the "new" and
- "deleted" lists upon rollback, and autoflush of pending changes upon
- :meth:`~.Session.begin`, all of which are interdependent.
+ "deleted" lists upon rollback, and autoflush of pending changes
+ upon :meth:`~.Session.begin`, all of which are interdependent.
:param expire_on_commit: Defaults to ``True``. When ``True``, all
instances will be fully expired after each :meth:`~.commit`,
@@ -581,25 +582,26 @@ class Session(_SessionClassMethods):
Please see :class:`.SessionEvents`.
:param info: optional dictionary of arbitrary data to be associated
- with this :class:`.Session`. Is available via the :attr:`.Session.info`
- attribute. Note the dictionary is copied at construction time so
- that modifications to the per-:class:`.Session` dictionary will be local
- to that :class:`.Session`.
+ with this :class:`.Session`. Is available via the
+ :attr:`.Session.info` attribute. Note the dictionary is copied at
+ construction time so that modifications to the per-
+ :class:`.Session` dictionary will be local to that
+ :class:`.Session`.
.. versionadded:: 0.9.0
:param query_cls: Class which should be used to create new Query
- objects, as returned by the :meth:`~.Session.query` method. Defaults
- to :class:`.Query`.
+ objects, as returned by the :meth:`~.Session.query` method.
+ Defaults to :class:`.Query`.
:param twophase: When ``True``, all transactions will be started as
a "two phase" transaction, i.e. using the "two phase" semantics
of the database in use along with an XID. During a
:meth:`~.commit`, after :meth:`~.flush` has been issued for all
- attached databases, the :meth:`~.TwoPhaseTransaction.prepare` method
- on each database's :class:`.TwoPhaseTransaction` will be called.
- This allows each database to roll back the entire transaction,
- before each transaction is committed.
+ attached databases, the :meth:`~.TwoPhaseTransaction.prepare`
+ method on each database's :class:`.TwoPhaseTransaction` will be
+ called. This allows each database to roll back the entire
+ transaction, before each transaction is committed.
:param weak_identity_map: Defaults to ``True`` - when set to
``False``, objects placed in the :class:`.Session` will be
@@ -613,7 +615,7 @@ class Session(_SessionClassMethods):
self._identity_cls = identity.WeakInstanceDict
else:
util.warn_deprecated("weak_identity_map=False is deprecated. "
- "This feature is not needed.")
+ "This feature is not needed.")
self._identity_cls = identity.StrongInstanceDict
self.identity_map = self._identity_cls()
@@ -648,7 +650,6 @@ class Session(_SessionClassMethods):
else:
assert False
-
if not self.autocommit:
self.begin()
_sessions[self.hash_key] = self
@@ -693,7 +694,7 @@ class Session(_SessionClassMethods):
if self.transaction is not None:
if subtransactions or nested:
self.transaction = self.transaction._begin(
- nested=nested)
+ nested=nested)
else:
raise sa_exc.InvalidRequestError(
"A transaction is already begun. Use "
@@ -791,9 +792,9 @@ class Session(_SessionClassMethods):
self.transaction.prepare()
def connection(self, mapper=None, clause=None,
- bind=None,
- close_with_result=False,
- **kw):
+ bind=None,
+ close_with_result=False,
+ **kw):
"""Return a :class:`.Connection` object corresponding to this
:class:`.Session` object's transactional state.
@@ -830,12 +831,12 @@ class Session(_SessionClassMethods):
etc.) which will be used to locate a bind, if a bind
cannot otherwise be identified.
- :param close_with_result: Passed to :meth:`.Engine.connect`, indicating
- the :class:`.Connection` should be considered "single use",
- automatically closing when the first result set is closed. This
- flag only has an effect if this :class:`.Session` is configured with
- ``autocommit=True`` and does not already have a transaction
- in progress.
+ :param close_with_result: Passed to :meth:`.Engine.connect`,
+ indicating the :class:`.Connection` should be considered
+ "single use", automatically closing when the first result set is
+ closed. This flag only has an effect if this :class:`.Session` is
+ configured with ``autocommit=True`` and does not already have a
+ transaction in progress.
:param \**kw:
Additional keyword arguments are sent to :meth:`get_bind()`,
@@ -847,7 +848,7 @@ class Session(_SessionClassMethods):
bind = self.get_bind(mapper, clause=clause, **kw)
return self._connection_for_bind(bind,
- close_with_result=close_with_result)
+ close_with_result=close_with_result)
def _connection_for_bind(self, engine, **kwargs):
if self.transaction is not None:
@@ -870,8 +871,8 @@ class Session(_SessionClassMethods):
user_table.select().where(user_table.c.id == 5)
)
- :meth:`~.Session.execute` accepts any executable clause construct, such
- as :func:`~.sql.expression.select`,
+ :meth:`~.Session.execute` accepts any executable clause construct,
+ such as :func:`~.sql.expression.select`,
:func:`~.sql.expression.insert`,
:func:`~.sql.expression.update`,
:func:`~.sql.expression.delete`, and
@@ -900,7 +901,8 @@ class Session(_SessionClassMethods):
cursor's ``execute()`` or ``executemany()`` is used to execute the
statement. An INSERT construct may be invoked for a single row::
- result = session.execute(users.insert(), {"id": 7, "name": "somename"})
+ result = session.execute(
+ users.insert(), {"id": 7, "name": "somename"})
or for multiple rows::
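        (a sketch of the multi-row form; the parameter values are
        illustrative, and passing a list of dictionaries invokes the
        DBAPI's ``executemany()``)::

            result = session.execute(
                users.insert(),
                [{"id": 7, "name": "somename7"},
                 {"id": 8, "name": "somename8"}])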
@@ -918,8 +920,9 @@ class Session(_SessionClassMethods):
:class:`.Connection`, which in the average case is derived directly
from the "bind" of the :class:`.Session` itself, and in other cases
can be based on the :func:`.mapper`
- and :class:`.Table` objects passed to the method; see the documentation
- for :meth:`.Session.get_bind` for a full description of this scheme.
+ and :class:`.Table` objects passed to the method; see the
+ documentation for :meth:`.Session.get_bind` for a full description of
+ this scheme.
The :meth:`.Session.execute` method does *not* invoke autoflush.
@@ -981,8 +984,8 @@ class Session(_SessionClassMethods):
if bind is None:
bind = self.get_bind(mapper, clause=clause, **kw)
- return self._connection_for_bind(bind, close_with_result=True).execute(
- clause, params or {})
+ return self._connection_for_bind(
+ bind, close_with_result=True).execute(clause, params or {})
def scalar(self, clause, params=None, mapper=None, bind=None, **kw):
"""Like :meth:`~.Session.execute` but return a scalar result."""
@@ -1150,7 +1153,7 @@ class Session(_SessionClassMethods):
raise sa_exc.UnboundExecutionError(
"Could not locate a bind configured on %s or this Session" % (
- ', '.join(context)))
+ ', '.join(context)))
def query(self, *entities, **kwargs):
"""Return a new :class:`.Query` object corresponding to this
@@ -1196,9 +1199,9 @@ class Session(_SessionClassMethods):
# with code that catches StatementError, IntegrityError,
# etc.
e.add_detail(
- "raised as a result of Query-invoked autoflush; "
- "consider using a session.no_autoflush block if this "
- "flush is occurring prematurely")
+ "raised as a result of Query-invoked autoflush; "
+ "consider using a session.no_autoflush block if this "
+ "flush is occurring prematurely")
util.raise_from_cause(e)
def refresh(self, instance, attribute_names=None, lockmode=None):
@@ -1335,7 +1338,7 @@ class Session(_SessionClassMethods):
# pre-fetch the full cascade since the expire is going to
# remove associations
cascaded = list(state.manager.mapper.cascade_iterator(
- 'refresh-expire', state))
+ 'refresh-expire', state))
self._conditional_expire(state)
for o, m, st_, dct_ in cascaded:
self._conditional_expire(st_)
@@ -1350,7 +1353,7 @@ class Session(_SessionClassMethods):
state._detach()
@util.deprecated("0.7", "The non-weak-referencing identity map "
- "feature is no longer needed.")
+ "feature is no longer needed.")
def prune(self):
"""Remove unreferenced instances cached in the identity map.
@@ -1381,7 +1384,7 @@ class Session(_SessionClassMethods):
state_str(state))
cascaded = list(state.manager.mapper.cascade_iterator(
- 'expunge', state))
+ 'expunge', state))
self._expunge_state(state)
for o, m, st_, dct_ in cascaded:
self._expunge_state(st_)
@@ -1408,8 +1411,8 @@ class Session(_SessionClassMethods):
instance_key = mapper._identity_key_from_state(state)
if _none_set.intersection(instance_key[1]) and \
- not mapper.allow_partial_pks or \
- _none_set.issuperset(instance_key[1]):
+ not mapper.allow_partial_pks or \
+ _none_set.issuperset(instance_key[1]):
raise exc.FlushError(
"Instance %s has a NULL identity key. If this is an "
"auto-generated value, check that the database table "
@@ -1499,9 +1502,9 @@ class Session(_SessionClassMethods):
mapper = _state_mapper(state)
for o, m, st_, dct_ in mapper.cascade_iterator(
- 'save-update',
- state,
- halt_on=self._contains_state):
+ 'save-update',
+ state,
+ halt_on=self._contains_state):
self._save_or_update_impl(st_)
def delete(self, instance):
@@ -1535,7 +1538,7 @@ class Session(_SessionClassMethods):
# so that autoflush does not delete the item
# the strong reference to the instance itself is significant here
cascade_states = list(state.manager.mapper.cascade_iterator(
- 'delete', state))
+ 'delete', state))
self._deleted[state] = state.obj()
self.identity_map.add(state)
@@ -1552,10 +1555,10 @@ class Session(_SessionClassMethods):
same primary key in the session. If not found locally, it attempts
to load the object from the database based on primary key, and if
none can be located, creates a new instance. The state of each
- attribute on the source instance is then copied to the target instance.
- The resulting target instance is then returned by the method; the
- original source instance is left unmodified, and un-associated with the
- :class:`.Session` if not already.
+ attribute on the source instance is then copied to the target
+ instance. The resulting target instance is then returned by the
+ method; the original source instance is left unmodified, and
+ un-associated with the :class:`.Session` if not already.
This operation cascades to associated instances if the association is
mapped with ``cascade="merge"``.
@@ -1583,7 +1586,8 @@ class Session(_SessionClassMethods):
any existing related objects or collections that might not
be loaded. The resulting objects from ``load=False`` are always
produced as "clean", so it is only appropriate that the given objects
- should be "clean" as well, else this suggests a mis-use of the method.
+ should be "clean" as well, else this suggests a mis-use of the
+ method.
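        A usage sketch, assuming a detached ``obj`` previously loaded
        elsewhere::

            # returns the session-local copy; the original instance
            # is left unmodified and remains un-associated
            local_obj = session.merge(obj)
            assert local_obj in session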
"""
@@ -1601,9 +1605,9 @@ class Session(_SessionClassMethods):
try:
self.autoflush = False
return self._merge(
- attributes.instance_state(instance),
- attributes.instance_dict(instance),
- load=load, _recursive=_recursive)
+ attributes.instance_state(instance),
+ attributes.instance_dict(instance),
+ load=load, _recursive=_recursive)
finally:
self.autoflush = autoflush
@@ -1640,8 +1644,8 @@ class Session(_SessionClassMethods):
new_instance = True
elif not _none_set.intersection(key[1]) or \
- (mapper.allow_partial_pks and
- not _none_set.issuperset(key[1])):
+ (mapper.allow_partial_pks and
+ not _none_set.issuperset(key[1])):
merged = self.query(mapper.class_).get(key[1])
else:
merged = None
@@ -1664,38 +1668,38 @@ class Session(_SessionClassMethods):
# version check if applicable
if mapper.version_id_col is not None:
existing_version = mapper._get_state_attr_by_column(
- state,
- state_dict,
- mapper.version_id_col,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ state,
+ state_dict,
+ mapper.version_id_col,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
merged_version = mapper._get_state_attr_by_column(
- merged_state,
- merged_dict,
- mapper.version_id_col,
- passive=attributes.PASSIVE_NO_INITIALIZE)
+ merged_state,
+ merged_dict,
+ mapper.version_id_col,
+ passive=attributes.PASSIVE_NO_INITIALIZE)
if existing_version is not attributes.PASSIVE_NO_RESULT and \
- merged_version is not attributes.PASSIVE_NO_RESULT and \
- existing_version != merged_version:
+ merged_version is not attributes.PASSIVE_NO_RESULT and \
+ existing_version != merged_version:
raise exc.StaleDataError(
- "Version id '%s' on merged state %s "
- "does not match existing version '%s'. "
- "Leave the version attribute unset when "
- "merging to update the most recent version."
- % (
- existing_version,
- state_str(merged_state),
- merged_version
- ))
+ "Version id '%s' on merged state %s "
+ "does not match existing version '%s'. "
+ "Leave the version attribute unset when "
+ "merging to update the most recent version."
+ % (
+ existing_version,
+ state_str(merged_state),
+ merged_version
+ ))
merged_state.load_path = state.load_path
merged_state.load_options = state.load_options
for prop in mapper.iterate_properties:
prop.merge(self, state, state_dict,
- merged_state, merged_dict,
- load, _recursive)
+ merged_state, merged_dict,
+ load, _recursive)
if not load:
# remove any history
@@ -1714,8 +1718,8 @@ class Session(_SessionClassMethods):
def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
- "Object '%s' already has an identity - it can't be registered "
- "as pending" % state_str(state))
+ "Object '%s' already has an identity - "
+ "it can't be registered as pending" % state_str(state))
self._before_attach(state)
if state not in self._new:
@@ -1725,7 +1729,7 @@ class Session(_SessionClassMethods):
def _update_impl(self, state, discard_existing=False):
if (self.identity_map.contains_state(state) and
- state not in self._deleted):
+ state not in self._deleted):
return
if state.key is None:
@@ -1791,8 +1795,8 @@ class Session(_SessionClassMethods):
is what was already loaded from a foreign-key-holding value.
The :meth:`.Session.enable_relationship_loading` method is
- similar to the ``load_on_pending`` flag on :func:`.relationship`. Unlike
- that flag, :meth:`.Session.enable_relationship_loading` allows
+ similar to the ``load_on_pending`` flag on :func:`.relationship`.
+ Unlike that flag, :meth:`.Session.enable_relationship_loading` allows
an object to remain transient while still being able to load
related items.
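    A sketch under assumed mappings (``Order.customer_id`` holding a
    foreign key value consumed by the ``Order.customer``
    relationship)::

        order = Order(customer_id=5)        # transient; not add()-ed
        sess.enable_relationship_loading(order)
        print(order.customer)               # loads via customer_id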
@@ -1828,10 +1832,10 @@ class Session(_SessionClassMethods):
if state.key and \
state.key in self.identity_map and \
not self.identity_map.contains_state(state):
- raise sa_exc.InvalidRequestError("Can't attach instance "
- "%s; another instance with key %s is already "
- "present in this session."
- % (state_str(state), state.key))
+ raise sa_exc.InvalidRequestError(
+ "Can't attach instance "
+ "%s; another instance with key %s is already "
+ "present in this session." % (state_str(state), state.key))
if state.session_id and \
state.session_id is not self.hash_key and \
@@ -1869,7 +1873,8 @@ class Session(_SessionClassMethods):
Session.
"""
- return iter(list(self._new.values()) + list(self.identity_map.values()))
+ return iter(
+ list(self._new.values()) + list(self.identity_map.values()))
def _contains_state(self, state):
return state in self._new or self.identity_map.contains_state(state)
@@ -1922,8 +1927,8 @@ class Session(_SessionClassMethods):
def _is_clean(self):
return not self.identity_map.check_modified() and \
- not self._deleted and \
- not self._new
+ not self._deleted and \
+ not self._new
def _flush(self, objects=None):
@@ -2002,21 +2007,21 @@ class Session(_SessionClassMethods):
len_ = len(self.identity_map._modified)
statelib.InstanceState._commit_all_states(
- [(state, state.dict) for state in
- self.identity_map._modified],
- instance_dict=self.identity_map)
+ [(state, state.dict) for state in
+ self.identity_map._modified],
+ instance_dict=self.identity_map)
util.warn("Attribute history events accumulated on %d "
- "previously clean instances "
- "within inner-flush event handlers have been reset, "
- "and will not result in database updates. "
- "Consider using set_committed_value() within "
- "inner-flush event handlers to avoid this warning."
- % len_)
+ "previously clean instances "
+ "within inner-flush event handlers have been "
+ "reset, and will not result in database updates. "
+ "Consider using set_committed_value() within "
+ "inner-flush event handlers to avoid this warning."
+ % len_)
# useful assertions:
- #if not objects:
+ # if not objects:
# assert not self.identity_map._modified
- #else:
+ # else:
# assert self.identity_map._modified == \
# self.identity_map._modified.difference(objects)
@@ -2029,7 +2034,7 @@ class Session(_SessionClassMethods):
transaction.rollback(_capture_exception=True)
def is_modified(self, instance, include_collections=True,
- passive=True):
+ passive=True):
"""Return ``True`` if the given instance has locally
modified attributes.
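        E.g. (``someobject`` assumed to be a mapped instance already
        present in the session)::

            return session.is_modified(someobject)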
@@ -2057,12 +2062,12 @@ class Session(_SessionClassMethods):
A few caveats to this method apply:
- * Instances present in the :attr:`.Session.dirty` collection may report
- ``False`` when tested with this method. This is because
- the object may have received change events via attribute
- mutation, thus placing it in :attr:`.Session.dirty`,
- but ultimately the state is the same as that loaded from
- the database, resulting in no net change here.
+ * Instances present in the :attr:`.Session.dirty` collection may
+ report ``False`` when tested with this method. This is because
+ the object may have received change events via attribute mutation,
+ thus placing it in :attr:`.Session.dirty`, but ultimately the state
+ is the same as that loaded from the database, resulting in no net
+ change here.
* Scalar attributes may not have recorded the previously set
value when a new value was applied, if the attribute was not loaded,
or was expired, at the time the new value was received - in these
@@ -2103,15 +2108,15 @@ class Session(_SessionClassMethods):
for attr in state.manager.attributes:
if \
- (
- not include_collections and
- hasattr(attr.impl, 'get_collection')
- ) or not hasattr(attr.impl, 'get_history'):
+ (
+ not include_collections and
+ hasattr(attr.impl, 'get_collection')
+ ) or not hasattr(attr.impl, 'get_history'):
continue
(added, unchanged, deleted) = \
- attr.impl.get_history(state, dict_,
- passive=attributes.NO_CHANGE)
+ attr.impl.get_history(state, dict_,
+ passive=attributes.NO_CHANGE)
if added or deleted:
return True
@@ -2148,8 +2153,8 @@ class Session(_SessionClassMethods):
call :meth:`.Session.rollback`, in order to close out the
transaction stack. It is in this "partial rollback" period that the
:attr:`.is_active` flag returns False. After the call to
- :meth:`.Session.rollback`, the :class:`.SessionTransaction` is replaced
- with a new one and :attr:`.is_active` returns ``True`` again.
+ :meth:`.Session.rollback`, the :class:`.SessionTransaction` is
+ replaced with a new one and :attr:`.is_active` returns ``True`` again.
When a :class:`.Session` is used in ``autocommit=True`` mode, the
:class:`.SessionTransaction` is only instantiated within the scope
@@ -2289,9 +2294,9 @@ class sessionmaker(_SessionClassMethods):
"""
def __init__(self, bind=None, class_=Session, autoflush=True,
- autocommit=False,
- expire_on_commit=True,
- info=None, **kw):
+ autocommit=False,
+ expire_on_commit=True,
+ info=None, **kw):
"""Construct a new :class:`.sessionmaker`.
All arguments here except for ``class_`` correspond to arguments
@@ -2315,8 +2320,8 @@ class sessionmaker(_SessionClassMethods):
.. versionadded:: 0.9.0
- :param \**kw: all other keyword arguments are passed to the constructor
- of newly created :class:`.Session` objects.
+ :param \**kw: all other keyword arguments are passed to the
+ constructor of newly created :class:`.Session` objects.
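    Typical usage, sketched with an assumed ``engine``::

        Session = sessionmaker(bind=engine, expire_on_commit=False)
        session = Session()   # each call constructs a new Session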
"""
kw['bind'] = bind
@@ -2363,10 +2368,10 @@ class sessionmaker(_SessionClassMethods):
def __repr__(self):
return "%s(class_=%r,%s)" % (
- self.__class__.__name__,
- self.class_.__name__,
- ", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
- )
+ self.__class__.__name__,
+ self.class_.__name__,
+ ", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
+ )
def make_transient(instance):
@@ -2397,6 +2402,7 @@ def make_transient(instance):
if state.deleted:
del state.deleted
+
def make_transient_to_detached(instance):
"""Make the given transient instance 'detached'.
@@ -2424,7 +2430,7 @@ def make_transient_to_detached(instance):
state = attributes.instance_state(instance)
if state.session_id or state.key:
raise sa_exc.InvalidRequestError(
- "Given object must be transient")
+ "Given object must be transient")
state.key = state.mapper._identity_key_from_state(state)
if state.deleted:
del state.deleted
@@ -2432,7 +2438,6 @@ def make_transient_to_detached(instance):
state._expire_attributes(state.dict, state.unloaded)
-
def object_session(instance):
"""Return the ``Session`` to which instance belongs.
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 573e6515d..a9024b468 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -17,9 +17,10 @@ from .. import util
from . import exc as orm_exc, interfaces
from .path_registry import PathRegistry
from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
- NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
+ NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
from . import base
+
class InstanceState(interfaces._InspectionAttr):
"""tracks state information at the instance level.
@@ -276,8 +277,8 @@ class InstanceState(interfaces._InspectionAttr):
state_dict = {'instance': self.obj()}
state_dict.update(
(k, self.__dict__[k]) for k in (
- 'committed_state', '_pending_mutations', 'modified', 'expired',
- 'callables', 'key', 'parents', 'load_options',
+ 'committed_state', '_pending_mutations', 'modified',
+ 'expired', 'callables', 'key', 'parents', 'load_options',
'class_',
) if k in self.__dict__
)
@@ -315,7 +316,7 @@ class InstanceState(interfaces._InspectionAttr):
if 'load_path' in state_dict:
self.load_path = PathRegistry.\
- deserialize(state_dict['load_path'])
+ deserialize(state_dict['load_path'])
state_dict['manager'](self, inst, state_dict)
@@ -418,7 +419,7 @@ class InstanceState(interfaces._InspectionAttr):
return PASSIVE_NO_RESULT
toload = self.expired_attributes.\
- intersection(self.unmodified)
+ intersection(self.unmodified)
self.manager.deferred_scalar_loader(self, toload)
@@ -441,7 +442,7 @@ class InstanceState(interfaces._InspectionAttr):
"""Return self.unmodified.intersection(keys)."""
return set(keys).intersection(self.manager).\
- difference(self.committed_state)
+ difference(self.committed_state)
@property
def unloaded(self):
@@ -452,15 +453,15 @@ class InstanceState(interfaces._InspectionAttr):
"""
return set(self.manager).\
- difference(self.committed_state).\
- difference(self.dict)
+ difference(self.committed_state).\
+ difference(self.dict)
@property
def _unloaded_non_object(self):
return self.unloaded.intersection(
- attr for attr in self.manager
- if self.manager[attr].impl.accepts_scalar_loader
- )
+ attr for attr in self.manager
+ if self.manager[attr].impl.accepts_scalar_loader
+ )
@property
def expired_attributes(self):
@@ -477,7 +478,8 @@ class InstanceState(interfaces._InspectionAttr):
def _instance_dict(self):
return None
- def _modified_event(self, dict_, attr, previous, collection=False, force=False):
+ def _modified_event(
+ self, dict_, attr, previous, collection=False, force=False):
if not attr.send_modified_events:
return
if attr.key not in self.committed_state or force:
@@ -508,13 +510,13 @@ class InstanceState(interfaces._InspectionAttr):
if inst is None:
raise orm_exc.ObjectDereferencedError(
- "Can't emit change event for attribute '%s' - "
- "parent object of type %s has been garbage "
- "collected."
- % (
- self.manager[attr.key],
- base.state_class_str(self)
- ))
+ "Can't emit change event for attribute '%s' - "
+ "parent object of type %s has been garbage "
+ "collected."
+ % (
+ self.manager[attr.key],
+ base.state_class_str(self)
+ ))
self.modified = True
def _commit(self, dict_, keys):
@@ -533,8 +535,8 @@ class InstanceState(interfaces._InspectionAttr):
self.expired = False
for key in set(self.callables).\
- intersection(keys).\
- intersection(dict_):
+ intersection(keys).\
+ intersection(dict_):
del self.callables[key]
def _commit_all(self, dict_, instance_dict=None):
@@ -617,7 +619,7 @@ class AttributeState(object):
"""
return self.state.manager[self.key].__get__(
- self.state.obj(), self.state.class_)
+ self.state.obj(), self.state.class_)
@property
def history(self):
@@ -636,7 +638,7 @@ class AttributeState(object):
"""
return self.state.get_history(self.key,
- PASSIVE_NO_INITIALIZE)
+ PASSIVE_NO_INITIALIZE)
def load_history(self):
"""Return the current pre-flush change history for
@@ -655,8 +657,7 @@ class AttributeState(object):
"""
return self.state.get_history(self.key,
- PASSIVE_OFF ^ INIT_OK)
-
+ PASSIVE_OFF ^ INIT_OK)
class PendingCollection(object):
@@ -667,6 +668,7 @@ class PendingCollection(object):
PendingCollection are applied to it to produce the final result.
"""
+
def __init__(self):
self.deleted_items = util.IdentitySet()
self.added_items = util.OrderedIdentitySet()
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
index 28130dab5..392f7cec2 100644
--- a/lib/sqlalchemy/orm/strategy_options.py
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -16,17 +16,20 @@ from .. import exc as sa_exc, inspect
from .base import _is_aliased_class, _class_to_mapper
from . import util as orm_util
from .path_registry import PathRegistry, TokenRegistry, \
- _WILDCARD_TOKEN, _DEFAULT_TOKEN
+ _WILDCARD_TOKEN, _DEFAULT_TOKEN
+
class Load(Generative, MapperOption):
"""Represents loader options which modify the state of a
- :class:`.Query` in order to affect how various mapped attributes are loaded.
+ :class:`.Query` in order to affect how various mapped attributes are
+ loaded.
.. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for
the existing system of loader options, including options such as
- :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In particular,
- it introduces a new method-chained system that replaces the need for
- dot-separated paths as well as "_all()" options such as :func:`.orm.joinedload_all`.
+ :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In
+ particular, it introduces a new method-chained system that replaces the
+ need for dot-separated paths as well as "_all()" options such as
+ :func:`.orm.joinedload_all`.
A :class:`.Load` object can be used directly or indirectly. To use one
directly, instantiate given the parent class. This style of usage is
@@ -41,11 +44,12 @@ class Load(Generative, MapperOption):
session.query(MyClass).options(myopt)
The :class:`.Load` construct is invoked indirectly whenever one makes use
- of the various loader options that are present in ``sqlalchemy.orm``, including
- options such as :func:`.orm.joinedload`, :func:`.orm.defer`, :func:`.orm.subqueryload`,
- and all the rest. These constructs produce an "anonymous" form of the
- :class:`.Load` object which tracks attributes and options, but is not linked
- to a parent class until it is associated with a parent :class:`.Query`::
+ of the various loader options that are present in ``sqlalchemy.orm``,
+ including options such as :func:`.orm.joinedload`, :func:`.orm.defer`,
+ :func:`.orm.subqueryload`, and all the rest. These constructs produce an
+ "anonymous" form of the :class:`.Load` object which tracks attributes and
+ options, but is not linked to a parent class until it is associated with a
+ parent :class:`.Query`::
# produce "unbound" Load object
myopt = joinedload("widgets")
@@ -55,11 +59,12 @@ class Load(Generative, MapperOption):
session.query(MyClass).options(myopt)
Whether the direct or indirect style is used, the :class:`.Load` object
- returned now represents a specific "path" along the entities of a :class:`.Query`.
- This path can be traversed using a standard method-chaining approach.
- Supposing a class hierarchy such as ``User``, ``User.addresses -> Address``,
- ``User.orders -> Order`` and ``Order.items -> Item``, we can specify a variety
- of loader options along each element in the "path"::
+ returned now represents a specific "path" along the entities of a
+ :class:`.Query`. This path can be traversed using a standard
+ method-chaining approach. Supposing a class hierarchy such as ``User``,
+ ``User.addresses -> Address``, ``User.orders -> Order`` and
+ ``Order.items -> Item``, we can specify a variety of loader options along
+ each element in the "path"::
session.query(User).options(
joinedload("addresses"),
@@ -67,11 +72,12 @@ class Load(Generative, MapperOption):
)
Where above, the ``addresses`` collection will be joined-loaded, the
- ``orders`` collection will be subquery-loaded, and within that subquery load
- the ``items`` collection will be joined-loaded.
+ ``orders`` collection will be subquery-loaded, and within that subquery
+ load the ``items`` collection will be joined-loaded.
"""
+
def __init__(self, entity):
insp = inspect(entity)
self.path = insp._path_registry
@@ -106,7 +112,7 @@ class Load(Generative, MapperOption):
if raiseerr and not path.has_entity:
if isinstance(path, TokenRegistry):
raise sa_exc.ArgumentError(
- "Wildcard token cannot be followed by another entity")
+ "Wildcard token cannot be followed by another entity")
else:
raise sa_exc.ArgumentError(
"Attribute '%s' of entity '%s' does not "
@@ -145,8 +151,9 @@ class Load(Generative, MapperOption):
if not prop.parent.common_parent(path.mapper):
if raiseerr:
- raise sa_exc.ArgumentError("Attribute '%s' does not "
- "link from element '%s'" % (attr, path.entity))
+ raise sa_exc.ArgumentError(
+ "Attribute '%s' does not "
+ "link from element '%s'" % (attr, path.entity))
else:
return None
@@ -157,11 +164,11 @@ class Load(Generative, MapperOption):
path_element = ext_info.mapper
if not ext_info.is_aliased_class:
ac = orm_util.with_polymorphic(
- ext_info.mapper.base_mapper,
- ext_info.mapper, aliased=True,
- _use_mapper_path=True)
- path.entity_path[prop].set(self.context,
- "path_with_polymorphic", inspect(ac))
+ ext_info.mapper.base_mapper,
+ ext_info.mapper, aliased=True,
+ _use_mapper_path=True)
+ path.entity_path[prop].set(
+ self.context, "path_with_polymorphic", inspect(ac))
path = path[prop][path_element]
else:
path = path[prop]
@@ -176,7 +183,8 @@ class Load(Generative, MapperOption):
return strategy
@_generative
- def set_relationship_strategy(self, attr, strategy, propagate_to_loaders=True):
+ def set_relationship_strategy(
+ self, attr, strategy, propagate_to_loaders=True):
strategy = self._coerce_strat(strategy)
self.propagate_to_loaders = propagate_to_loaders
@@ -225,14 +233,15 @@ class Load(Generative, MapperOption):
if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
return to_chop
- elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_token.key:
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and \
+ c_token != p_token.key:
return None
if c_token is p_token:
continue
else:
return None
- return to_chop[i+1:]
+ return to_chop[i + 1:]
class _UnboundLoad(Load):
@@ -245,6 +254,7 @@ class _UnboundLoad(Load):
of freestanding options, e.g. ``joinedload('x.y.z')``.
"""
+
def __init__(self):
self.path = ()
self._to_bind = set()
@@ -318,14 +328,15 @@ class _UnboundLoad(Load):
return opt
-
def _chop_path(self, to_chop, path):
i = -1
- for i, (c_token, (p_mapper, p_prop)) in enumerate(zip(to_chop, path.pairs())):
+ for i, (c_token, (p_mapper, p_prop)) in enumerate(
+ zip(to_chop, path.pairs())):
if isinstance(c_token, util.string_types):
if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
return to_chop
- elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_prop.key:
+ elif c_token != 'relationship:%s' % (
+ _WILDCARD_TOKEN,) and c_token != p_prop.key:
return None
elif isinstance(c_token, PropComparator):
if c_token.property is not p_prop:
@@ -335,7 +346,6 @@ class _UnboundLoad(Load):
return to_chop[i:]
-
def _bind_loader(self, query, context, raiseerr):
start_path = self.path
# _current_path implies we're in a
@@ -354,15 +364,15 @@ class _UnboundLoad(Load):
elif isinstance(token, PropComparator):
prop = token.property
entity = self._find_entity_prop_comparator(
- query,
- prop.key,
- token._parententity,
- raiseerr)
+ query,
+ prop.key,
+ token._parententity,
+ raiseerr)
else:
raise sa_exc.ArgumentError(
- "mapper option expects "
- "string key or list of attributes")
+ "mapper option expects "
+ "string key or list of attributes")
if not entity:
return
@@ -378,7 +388,7 @@ class _UnboundLoad(Load):
path = loader.path
for token in start_path:
loader.path = path = loader._generate_path(
- loader.path, token, None, raiseerr)
+ loader.path, token, None, raiseerr)
if path is None:
return
@@ -390,8 +400,8 @@ class _UnboundLoad(Load):
effective_path = loader.path
# prioritize "first class" options over those
- # that were "links in the chain", e.g. "x" and "y" in someload("x.y.z")
- # versus someload("x") / someload("x.y")
+ # that were "links in the chain", e.g. "x" and "y" in
+ # someload("x.y.z") versus someload("x") / someload("x.y")
if self._is_chain_link:
effective_path.setdefault(context, "loader", loader)
else:
@@ -411,7 +421,7 @@ class _UnboundLoad(Load):
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
- % (token, )
+ % (token, )
)
else:
raise sa_exc.ArgumentError(
@@ -419,7 +429,7 @@ class _UnboundLoad(Load):
"specified in this Query. Note the full path "
"from root (%s) to target entity must be specified."
% (token, ",".join(str(x) for
- x in query._mapper_entities))
+ x in query._mapper_entities))
)
else:
return None
@@ -429,9 +439,9 @@ class _UnboundLoad(Load):
if len(list(query._mapper_entities)) != 1:
if raiseerr:
raise sa_exc.ArgumentError(
- "Wildcard loader can only be used with exactly "
- "one entity. Use Load(ent) to specify "
- "specific entities.")
+ "Wildcard loader can only be used with exactly "
+ "one entity. Use Load(ent) to specify "
+ "specific entities.")
elif token.endswith(_DEFAULT_TOKEN):
raiseerr = False
@@ -445,13 +455,12 @@ class _UnboundLoad(Load):
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
- % (token, )
+ % (token, )
)
else:
return None
-
class loader_option(object):
def __init__(self):
pass
@@ -493,6 +502,7 @@ See :func:`.orm.%(name)s` for usage examples.
""" % {"name": self.name}
return self
+
@loader_option()
def contains_eager(loadopt, attr, alias=None):
"""Indicate that the given attribute should be eagerly loaded from
@@ -533,16 +543,19 @@ def contains_eager(loadopt, attr, alias=None):
alias = info.selectable
cloned = loadopt.set_relationship_strategy(
- attr,
- {"lazy": "joined"},
- propagate_to_loaders=False
- )
+ attr,
+ {"lazy": "joined"},
+ propagate_to_loaders=False
+ )
cloned.local_opts['eager_from_alias'] = alias
return cloned
+
@contains_eager._add_unbound_fn
def contains_eager(*keys, **kw):
- return _UnboundLoad()._from_keys(_UnboundLoad.contains_eager, keys, True, kw)
+ return _UnboundLoad()._from_keys(
+ _UnboundLoad.contains_eager, keys, True, kw)
+
@loader_option()
def load_only(loadopt, *attrs):
@@ -559,8 +572,8 @@ def load_only(loadopt, *attrs):
session.query(User).options(load_only("name", "fullname"))
Example - given a relationship ``User.addresses -> Address``, specify
- subquery loading for the ``User.addresses`` collection, but on each ``Address``
- object load only the ``email_address`` attribute::
+ subquery loading for the ``User.addresses`` collection, but on each
+ ``Address`` object load only the ``email_address`` attribute::
session.query(User).options(
subqueryload("addreses").load_only("email_address")
@@ -579,18 +592,20 @@ def load_only(loadopt, *attrs):
"""
cloned = loadopt.set_column_strategy(
- attrs,
- {"deferred": False, "instrument": True}
- )
+ attrs,
+ {"deferred": False, "instrument": True}
+ )
cloned.set_column_strategy("*",
- {"deferred": True, "instrument": True},
- {"undefer_pks": True})
+ {"deferred": True, "instrument": True},
+ {"undefer_pks": True})
return cloned
+
@load_only._add_unbound_fn
def load_only(*attrs):
return _UnboundLoad().load_only(*attrs)
+
@loader_option()
def joinedload(loadopt, attr, innerjoin=None):
"""Indicate that the given attribute should be loaded using joined
@@ -618,22 +633,25 @@ def joinedload(loadopt, attr, innerjoin=None):
If the joined-eager load is chained onto an existing LEFT OUTER JOIN,
``innerjoin=True`` will be bypassed and the join will continue to
- chain as LEFT OUTER JOIN so that the results don't change. As an alternative,
- specify the value ``"nested"``. This will instead nest the join
- on the right side, e.g. using the form "a LEFT OUTER JOIN (b JOIN c)".
+ chain as LEFT OUTER JOIN so that the results don't change. As an
+ alternative, specify the value ``"nested"``. This will instead nest the
+ join on the right side, e.g. using the form "a LEFT OUTER JOIN
+ (b JOIN c)".
.. versionadded:: 0.9.4 Added ``innerjoin="nested"`` option to support
nesting of eager "inner" joins.
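    (illustrative sketch; ``A.bs`` and ``B.cs`` are assumed
    relationships, with ``A.bs`` joined as an OUTER JOIN)::

        session.query(A).options(
            joinedload(A.bs).joinedload(B.cs, innerjoin="nested"))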
.. note::
- The joins produced by :func:`.orm.joinedload` are **anonymously aliased**.
- The criteria by which the join proceeds cannot be modified, nor can the
- :class:`.Query` refer to these joins in any way, including ordering.
+ The joins produced by :func:`.orm.joinedload` are **anonymously
+ aliased**. The criteria by which the join proceeds cannot be
+ modified, nor can the :class:`.Query` refer to these joins in any way,
+ including ordering.
To produce a specific SQL JOIN which is explicitly available, use
:meth:`.Query.join`. To combine explicit JOINs with eager loading
- of collections, use :func:`.orm.contains_eager`; see :ref:`contains_eager`.
+ of collections, use :func:`.orm.contains_eager`; see
+ :ref:`contains_eager`.
.. seealso::
@@ -647,8 +665,8 @@ def joinedload(loadopt, attr, innerjoin=None):
:paramref:`.relationship.lazy`
- :paramref:`.relationship.innerjoin` - :func:`.relationship`-level version
- of the :paramref:`.joinedload.innerjoin` option.
+ :paramref:`.relationship.innerjoin` - :func:`.relationship`-level
+ version of the :paramref:`.joinedload.innerjoin` option.
"""
loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
@@ -656,15 +674,17 @@ def joinedload(loadopt, attr, innerjoin=None):
loader.local_opts['innerjoin'] = innerjoin
return loader
+
@joinedload._add_unbound_fn
def joinedload(*keys, **kw):
return _UnboundLoad._from_keys(
- _UnboundLoad.joinedload, keys, False, kw)
+ _UnboundLoad.joinedload, keys, False, kw)
+
@joinedload._add_unbound_all_fn
def joinedload_all(*keys, **kw):
return _UnboundLoad._from_keys(
- _UnboundLoad.joinedload, keys, True, kw)
+ _UnboundLoad.joinedload, keys, True, kw)
@loader_option()
@@ -701,14 +721,17 @@ def subqueryload(loadopt, attr):
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"})
+
@subqueryload._add_unbound_fn
def subqueryload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {})
+
@subqueryload._add_unbound_all_fn
def subqueryload_all(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
+
@loader_option()
def lazyload(loadopt, attr):
"""Indicate that the given attribute should be loaded using "lazy"
@@ -724,14 +747,17 @@ def lazyload(loadopt, attr):
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "select"})
+
@lazyload._add_unbound_fn
def lazyload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {})
+
@lazyload._add_unbound_all_fn
def lazyload_all(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {})
+
@loader_option()
def immediateload(loadopt, attr):
"""Indicate that the given attribute should be loaded using
@@ -754,9 +780,11 @@ def immediateload(loadopt, attr):
loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
return loader
+
@immediateload._add_unbound_fn
def immediateload(*keys):
- return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, False, {})
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.immediateload, keys, False, {})
@loader_option()
@@ -773,10 +801,12 @@ def noload(loadopt, attr):
return loadopt.set_relationship_strategy(attr, {"lazy": "noload"})
+
@noload._add_unbound_fn
def noload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {})
+
@loader_option()
def defaultload(loadopt, attr):
"""Indicate an attribute should load using its default loader style.
@@ -797,14 +827,16 @@ def defaultload(loadopt, attr):
"""
return loadopt.set_relationship_strategy(
- attr,
- None
- )
+ attr,
+ None
+ )
+
@defaultload._add_unbound_fn
def defaultload(*keys):
return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {})
+
@loader_option()
def defer(loadopt, key):
"""Indicate that the given column-oriented attribute should be deferred, e.g.
@@ -858,19 +890,21 @@ def defer(loadopt, key):
"""
return loadopt.set_column_strategy(
- (key, ),
- {"deferred": True, "instrument": True}
- )
+ (key, ),
+ {"deferred": True, "instrument": True}
+ )
@defer._add_unbound_fn
def defer(key, *addl_attrs):
- return _UnboundLoad._from_keys(_UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+
@loader_option()
def undefer(loadopt, key):
- """Indicate that the given column-oriented attribute should be undeferred, e.g.
- specified within the SELECT statement of the entity as a whole.
+ """Indicate that the given column-oriented attribute should be undeferred,
+ e.g. specified within the SELECT statement of the entity as a whole.
The column being undeferred is typically set up on the mapping as a
:func:`.deferred` attribute.
@@ -884,7 +918,8 @@ def undefer(loadopt, key):
session.query(MyClass).options(undefer("col1"), undefer("col2"))
# undefer all columns specific to a single class using Load + *
- session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))
+ session.query(MyClass, MyOtherClass).options(
+ Load(MyClass).undefer("*"))
:param key: Attribute to be undeferred.
@@ -902,17 +937,21 @@ def undefer(loadopt, key):
"""
return loadopt.set_column_strategy(
- (key, ),
- {"deferred": False, "instrument": True}
- )
+ (key, ),
+ {"deferred": False, "instrument": True}
+ )
+
@undefer._add_unbound_fn
def undefer(key, *addl_attrs):
- return _UnboundLoad._from_keys(_UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+
@loader_option()
def undefer_group(loadopt, name):
- """Indicate that columns within the given deferred group name should be undeferred.
+ """Indicate that columns within the given deferred group name should be
+ undeferred.
The columns being undeferred are set up on the mapping as
:func:`.deferred` attributes and include a "group" name.
@@ -922,9 +961,11 @@ def undefer_group(loadopt, name):
session.query(MyClass).options(undefer_group("large_attrs"))
To undefer a group of attributes on a related entity, the path can be
- spelled out using relationship loader options, such as :func:`.orm.defaultload`::
+ spelled out using relationship loader options, such as
+ :func:`.orm.defaultload`::
- session.query(MyClass).options(defaultload("someattr").undefer_group("large_attrs"))
+ session.query(MyClass).options(
+ defaultload("someattr").undefer_group("large_attrs"))
.. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a
        particular entity load path.
@@ -939,12 +980,12 @@ def undefer_group(loadopt, name):
"""
return loadopt.set_column_strategy(
- "*",
- None,
- {"undefer_group": name}
- )
+ "*",
+ None,
+ {"undefer_group": name}
+ )
+
@undefer_group._add_unbound_fn
def undefer_group(name):
return _UnboundLoad().undefer_group(name)
-
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index dc59bb27b..e1ef85c1d 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -14,7 +14,7 @@ from . import exc, util as orm_util, attributes
def populate(source, source_mapper, dest, dest_mapper,
- synchronize_pairs, uowcommit, flag_cascaded_pks):
+ synchronize_pairs, uowcommit, flag_cascaded_pks):
source_dict = source.dict
dest_dict = dest.dict
@@ -23,7 +23,7 @@ def populate(source, source_mapper, dest, dest_mapper,
# inline of source_mapper._get_state_attr_by_column
prop = source_mapper._columntoproperty[l]
value = source.manager[prop.key].impl.get(source, source_dict,
- attributes.PASSIVE_OFF)
+ attributes.PASSIVE_OFF)
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, dest_mapper, r)
@@ -40,8 +40,8 @@ def populate(source, source_mapper, dest, dest_mapper,
# reasons, since we only need this info for a primary key
# destination.
if flag_cascaded_pks and l.primary_key and \
- r.primary_key and \
- r.references(l):
+ r.primary_key and \
+ r.references(l):
uowcommit.attributes[("pk_cascaded", dest, r)] = True
@@ -49,7 +49,7 @@ def clear(dest, dest_mapper, synchronize_pairs):
for l, r in synchronize_pairs:
if r.primary_key and \
dest_mapper._get_state_attr_by_column(
- dest, dest.dict, r) not in orm_util._none_set:
+ dest, dest.dict, r) not in orm_util._none_set:
raise AssertionError(
"Dependency rule tried to blank-out primary key "
@@ -96,8 +96,8 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
prop = source_mapper._columntoproperty[l]
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, None, r)
- history = uowcommit.get_attribute_history(source, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
+ history = uowcommit.get_attribute_history(
+ source, prop.key, attributes.PASSIVE_NO_INITIALIZE)
if bool(history.deleted):
return True
else:
@@ -107,16 +107,17 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
def _raise_col_to_prop(isdest, source_mapper, source_column,
dest_mapper, dest_column):
if isdest:
- raise exc.UnmappedColumnError("Can't execute sync rule for "
- "destination column '%s'; mapper '%s' does not map "
- "this column. Try using an explicit `foreign_keys` "
- "collection which does not include this column (or use "
- "a viewonly=True relation)." % (dest_column,
- dest_mapper))
+ raise exc.UnmappedColumnError(
+ "Can't execute sync rule for "
+ "destination column '%s'; mapper '%s' does not map "
+ "this column. Try using an explicit `foreign_keys` "
+ "collection which does not include this column (or use "
+ "a viewonly=True relation)." % (dest_column, dest_mapper))
else:
- raise exc.UnmappedColumnError("Can't execute sync rule for "
- "source column '%s'; mapper '%s' does not map this "
- "column. Try using an explicit `foreign_keys` "
- "collection which does not include destination column "
- "'%s' (or use a viewonly=True relation)."
- % (source_column, source_mapper, dest_column))
+ raise exc.UnmappedColumnError(
+ "Can't execute sync rule for "
+ "source column '%s'; mapper '%s' does not map this "
+ "column. Try using an explicit `foreign_keys` "
+ "collection which does not include destination column "
+ "'%s' (or use a viewonly=True relation)." %
+ (source_column, source_mapper, dest_column))
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index b7f739ec5..71e61827b 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -56,16 +56,16 @@ def track_cascade_events(descriptor, prop):
if sess._warn_on_events:
sess._flush_warning(
- "collection remove"
- if prop.uselist
- else "related attribute delete")
+ "collection remove"
+ if prop.uselist
+ else "related attribute delete")
# expunge pending orphans
item_state = attributes.instance_state(item)
if prop._cascade.delete_orphan and \
item_state in sess._new and \
prop.mapper._is_orphan(item_state):
- sess.expunge(item)
+ sess.expunge(item)
def set_(state, newvalue, oldvalue, initiator):
# process "save_update" cascade rules for when an instance
@@ -83,8 +83,8 @@ def track_cascade_events(descriptor, prop):
if newvalue is not None:
newvalue_state = attributes.instance_state(newvalue)
if prop._cascade.save_update and \
- (prop.cascade_backrefs or key == initiator.key) and \
- not sess._contains_state(newvalue_state):
+ (prop.cascade_backrefs or key == initiator.key) and \
+ not sess._contains_state(newvalue_state):
sess._save_or_update_state(newvalue_state)
if oldvalue is not None and \
@@ -95,7 +95,7 @@ def track_cascade_events(descriptor, prop):
oldvalue_state = attributes.instance_state(oldvalue)
if oldvalue_state in sess._new and \
- prop.mapper._is_orphan(oldvalue_state):
+ prop.mapper._is_orphan(oldvalue_state):
sess.expunge(oldvalue)
return newvalue
@@ -175,7 +175,7 @@ class UOWTransaction(object):
self.states[state] = (isdelete, True)
def get_attribute_history(self, state, key,
- passive=attributes.PASSIVE_NO_INITIALIZE):
+ passive=attributes.PASSIVE_NO_INITIALIZE):
"""facade to attributes.get_state_history(), including
caching of results."""
@@ -191,11 +191,11 @@ class UOWTransaction(object):
# we want non-passive, do a non-passive lookup and re-cache
if not cached_passive & attributes.SQL_OK \
- and passive & attributes.SQL_OK:
+ and passive & attributes.SQL_OK:
impl = state.manager[key].impl
history = impl.get_history(state, state.dict,
- attributes.PASSIVE_OFF |
- attributes.LOAD_AGAINST_COMMITTED)
+ attributes.PASSIVE_OFF |
+ attributes.LOAD_AGAINST_COMMITTED)
if history and impl.uses_objects:
state_history = history.as_state()
else:
@@ -206,13 +206,13 @@ class UOWTransaction(object):
# TODO: store the history as (state, object) tuples
# so we don't have to keep converting here
history = impl.get_history(state, state.dict, passive |
- attributes.LOAD_AGAINST_COMMITTED)
+ attributes.LOAD_AGAINST_COMMITTED)
if history and impl.uses_objects:
state_history = history.as_state()
else:
state_history = history
self.attributes[hashkey] = (history, state_history,
- passive)
+ passive)
return state_history
@@ -225,13 +225,13 @@ class UOWTransaction(object):
self.presort_actions[key] = Preprocess(processor, fromparent)
def register_object(self, state, isdelete=False,
- listonly=False, cancel_delete=False,
- operation=None, prop=None):
+ listonly=False, cancel_delete=False,
+ operation=None, prop=None):
if not self.session._contains_state(state):
if not state.deleted and operation is not None:
util.warn("Object of type %s not in session, %s operation "
- "along '%s' will not proceed" %
- (orm_util.state_class_str(state), operation, prop))
+ "along '%s' will not proceed" %
+ (orm_util.state_class_str(state), operation, prop))
return False
if state not in self.states:
@@ -278,8 +278,8 @@ class UOWTransaction(object):
"""
return util.PopulateDict(
- lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
- )
+ lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
+ )
def filter_states_for_dep(self, dep, states):
"""Filter the given list of InstanceStates to those relevant to the
@@ -314,8 +314,8 @@ class UOWTransaction(object):
# see if the graph of mapper dependencies has cycles.
self.cycles = cycles = topological.find_cycles(
- self.dependencies,
- list(self.postsort_actions.values()))
+ self.dependencies,
+ list(self.postsort_actions.values()))
if cycles:
# if yes, break the per-mapper actions into
@@ -330,8 +330,8 @@ class UOWTransaction(object):
# that were broken up.
for edge in list(self.dependencies):
if None in edge or \
- edge[0].disabled or edge[1].disabled or \
- cycles.issuperset(edge):
+ edge[0].disabled or edge[1].disabled or \
+ cycles.issuperset(edge):
self.dependencies.remove(edge)
elif edge[0] in cycles:
self.dependencies.remove(edge)
@@ -345,30 +345,30 @@ class UOWTransaction(object):
return set([a for a in self.postsort_actions.values()
if not a.disabled
]
- ).difference(cycles)
+ ).difference(cycles)
def execute(self):
postsort_actions = self._generate_actions()
- #sort = topological.sort(self.dependencies, postsort_actions)
- #print "--------------"
- #print "\ndependencies:", self.dependencies
- #print "\ncycles:", self.cycles
- #print "\nsort:", list(sort)
- #print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
+ # sort = topological.sort(self.dependencies, postsort_actions)
+ # print "--------------"
+ # print "\ndependencies:", self.dependencies
+ # print "\ncycles:", self.cycles
+ # print "\nsort:", list(sort)
+ # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
# execute
if self.cycles:
for set_ in topological.sort_as_subsets(
- self.dependencies,
- postsort_actions):
+ self.dependencies,
+ postsort_actions):
while set_:
n = set_.pop()
n.execute_aggregate(self, set_)
else:
for rec in topological.sort(
- self.dependencies,
- postsort_actions):
+ self.dependencies,
+ postsort_actions):
rec.execute(self)
def finalize_flush_changes(self):
@@ -430,11 +430,11 @@ class Preprocess(IterateMappersMixin):
if (delete_states or save_states):
if not self.setup_flush_actions and (
- self.dependency_processor.\
- prop_has_changes(uow, delete_states, True) or
- self.dependency_processor.\
- prop_has_changes(uow, save_states, False)
- ):
+ self.dependency_processor.
+ prop_has_changes(uow, delete_states, True) or
+ self.dependency_processor.
+ prop_has_changes(uow, save_states, False)
+ ):
self.dependency_processor.per_property_flush_actions(uow)
self.setup_flush_actions = True
return True
@@ -451,8 +451,8 @@ class PostSortRec(object):
return uow.postsort_actions[key]
else:
uow.postsort_actions[key] = \
- ret = \
- object.__new__(cls)
+ ret = \
+ object.__new__(cls)
return ret
def execute_aggregate(self, uow, recs):
@@ -471,7 +471,7 @@ class ProcessAll(IterateMappersMixin, PostSortRec):
self.delete = delete
self.fromparent = fromparent
uow.deps[dependency_processor.parent.base_mapper].\
- add(dependency_processor)
+ add(dependency_processor)
def execute(self, uow):
states = self._elements(uow)
@@ -521,13 +521,14 @@ class SaveUpdateAll(PostSortRec):
def execute(self, uow):
persistence.save_obj(self.mapper,
- uow.states_for_mapper_hierarchy(self.mapper, False, False),
- uow
- )
+ uow.states_for_mapper_hierarchy(
+ self.mapper, False, False),
+ uow
+ )
def per_state_flush_actions(self, uow):
states = list(uow.states_for_mapper_hierarchy(
- self.mapper, False, False))
+ self.mapper, False, False))
base_mapper = self.mapper.base_mapper
delete_all = DeleteAll(uow, base_mapper)
for state in states:
@@ -549,13 +550,14 @@ class DeleteAll(PostSortRec):
def execute(self, uow):
persistence.delete_obj(self.mapper,
- uow.states_for_mapper_hierarchy(self.mapper, True, False),
- uow
- )
+ uow.states_for_mapper_hierarchy(
+ self.mapper, True, False),
+ uow
+ )
def per_state_flush_actions(self, uow):
states = list(uow.states_for_mapper_hierarchy(
- self.mapper, True, False))
+ self.mapper, True, False))
base_mapper = self.mapper.base_mapper
save_all = SaveUpdateAll(uow, base_mapper)
for state in states:
@@ -581,9 +583,9 @@ class ProcessState(PostSortRec):
dependency_processor = self.dependency_processor
delete = self.delete
our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.dependency_processor is dependency_processor and
- r.delete is delete]
+ if r.__class__ is cls_ and
+ r.dependency_processor is dependency_processor and
+ r.delete is delete]
recs.difference_update(our_recs)
states = [self.state] + [r.state for r in our_recs]
if delete:
@@ -609,13 +611,13 @@ class SaveUpdateState(PostSortRec):
cls_ = self.__class__
mapper = self.mapper
our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.mapper is mapper]
+ if r.__class__ is cls_ and
+ r.mapper is mapper]
recs.difference_update(our_recs)
persistence.save_obj(mapper,
- [self.state] +
- [r.state for r in our_recs],
- uow)
+ [self.state] +
+ [r.state for r in our_recs],
+ uow)
def __repr__(self):
return "%s(%s)" % (
@@ -633,13 +635,13 @@ class DeleteState(PostSortRec):
cls_ = self.__class__
mapper = self.mapper
our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.mapper is mapper]
+ if r.__class__ is cls_ and
+ r.mapper is mapper]
recs.difference_update(our_recs)
states = [self.state] + [r.state for r in our_recs]
persistence.delete_obj(mapper,
- [s for s in states if uow.states[s][0]],
- uow)
+ [s for s in states if uow.states[s][0]],
+ uow)
def __repr__(self):
return "%s(%s)" % (
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index 3626a8a03..215de5f4b 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -13,7 +13,7 @@ from . import attributes
import re
from .base import instance_str, state_str, state_class_str, attribute_str, \
- state_attribute_str, object_mapper, object_state, _none_set
+ state_attribute_str, object_mapper, object_state, _none_set
from .base import class_mapper, _class_to_mapper
from .base import _InspectionAttr
from .path_registry import PathRegistry
@@ -27,23 +27,23 @@ class CascadeOptions(frozenset):
"""Keeps track of the options sent to relationship().cascade"""
_add_w_all_cascades = all_cascades.difference([
- 'all', 'none', 'delete-orphan'])
+ 'all', 'none', 'delete-orphan'])
_allowed_cascades = all_cascades
def __new__(cls, arg):
values = set([
- c for c
- in re.split('\s*,\s*', arg or "")
- if c
- ])
+ c for c
+ in re.split('\s*,\s*', arg or "")
+ if c
+ ])
if values.difference(cls._allowed_cascades):
raise sa_exc.ArgumentError(
- "Invalid cascade option(s): %s" %
- ", ".join([repr(x) for x in
- sorted(
- values.difference(cls._allowed_cascades)
- )])
+ "Invalid cascade option(s): %s" %
+ ", ".join([repr(x) for x in
+ sorted(
+ values.difference(cls._allowed_cascades)
+ )])
)
if "all" in values:
@@ -62,7 +62,7 @@ class CascadeOptions(frozenset):
if self.delete_orphan and not self.delete:
util.warn("The 'delete-orphan' cascade "
- "option requires 'delete'.")
+ "option requires 'delete'.")
return self
def __repr__(self):
@@ -71,8 +71,11 @@ class CascadeOptions(frozenset):
)
-def _validator_events(desc, key, validator, include_removes, include_backrefs):
- """Runs a validation method on an attribute value to be set or appended."""
+def _validator_events(
+ desc, key, validator, include_removes, include_backrefs):
+ """Runs a validation method on an attribute value to be set or
+ appended.
+ """
if not include_backrefs:
def detect_is_backref(state, initiator):
@@ -116,7 +119,7 @@ def _validator_events(desc, key, validator, include_removes, include_backrefs):
def polymorphic_union(table_map, typecolname,
- aliasname='p_union', cast_nulls=True):
+ aliasname='p_union', cast_nulls=True):
"""Create a ``UNION`` statement used by a polymorphic mapper.
See :ref:`concrete_inheritance` for an example of how
@@ -168,10 +171,11 @@ def polymorphic_union(table_map, typecolname,
for type, table in table_map.items():
if typecolname is not None:
result.append(
- sql.select([col(name, table) for name in colnames] +
- [sql.literal_column(sql_util._quote_ddl_expr(type)).
- label(typecolname)],
- from_obj=[table]))
+ sql.select([col(name, table) for name in colnames] +
+ [sql.literal_column(
+ sql_util._quote_ddl_expr(type)).
+ label(typecolname)],
+ from_obj=[table]))
else:
result.append(sql.select([col(name, table) for name in colnames],
from_obj=[table]))
@@ -225,7 +229,8 @@ def identity_key(*args, **kwargs):
E.g.::
- >>> row = engine.execute("select * from table where a=1 and b=2").first()
+ >>> row = engine.execute("select * from table where a=1 and b=2").\
+first()
>>> identity_key(MyClass, row=row)
(<class '__main__.MyClass'>, (1, 2))
@@ -246,11 +251,12 @@ def identity_key(*args, **kwargs):
elif len(args) == 3:
class_, ident = args
else:
- raise sa_exc.ArgumentError("expected up to three "
- "positional arguments, got %s" % len(args))
+ raise sa_exc.ArgumentError(
+ "expected up to three positional arguments, "
+ "got %s" % len(args))
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs))
+ % ", ".join(kwargs))
mapper = class_mapper(class_)
if "ident" in locals():
return mapper.identity_key_from_primary_key(util.to_list(ident))
@@ -258,7 +264,7 @@ def identity_key(*args, **kwargs):
instance = kwargs.pop("instance")
if kwargs:
raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs.keys))
+ % ", ".join(kwargs.keys))
mapper = object_mapper(instance)
return mapper.identity_key_from_instance(instance)
@@ -270,8 +276,9 @@ class ORMAdapter(sql_util.ColumnAdapter):
and the AliasedClass if any is referenced.
"""
+
def __init__(self, entity, equivalents=None, adapt_required=False,
- chain_to=None):
+ chain_to=None):
info = inspection.inspect(entity)
self.mapper = info.mapper
@@ -292,6 +299,7 @@ class ORMAdapter(sql_util.ColumnAdapter):
else:
return None
+
class AliasedClass(object):
"""Represents an "aliased" form of a mapped class for usage with Query.
@@ -332,30 +340,31 @@ class AliasedClass(object):
argument descriptions.
"""
+
def __init__(self, cls, alias=None,
- name=None,
- flat=False,
- adapt_on_names=False,
- # TODO: None for default here?
- with_polymorphic_mappers=(),
- with_polymorphic_discriminator=None,
- base_alias=None,
- use_mapper_path=False):
+ name=None,
+ flat=False,
+ adapt_on_names=False,
+ # TODO: None for default here?
+ with_polymorphic_mappers=(),
+ with_polymorphic_discriminator=None,
+ base_alias=None,
+ use_mapper_path=False):
mapper = _class_to_mapper(cls)
if alias is None:
alias = mapper._with_polymorphic_selectable.alias(
- name=name, flat=flat)
+ name=name, flat=flat)
self._aliased_insp = AliasedInsp(
self,
mapper,
alias,
name,
with_polymorphic_mappers
- if with_polymorphic_mappers
- else mapper.with_polymorphic_mappers,
+ if with_polymorphic_mappers
+ else mapper.with_polymorphic_mappers,
with_polymorphic_discriminator
- if with_polymorphic_discriminator is not None
- else mapper.polymorphic_on,
+ if with_polymorphic_discriminator is not None
+ else mapper.polymorphic_on,
base_alias,
use_mapper_path,
adapt_on_names
@@ -440,8 +449,8 @@ class AliasedInsp(_InspectionAttr):
"""
def __init__(self, entity, mapper, selectable, name,
- with_polymorphic_mappers, polymorphic_on,
- _base_alias, _use_mapper_path, adapt_on_names):
+ with_polymorphic_mappers, polymorphic_on,
+ _base_alias, _use_mapper_path, adapt_on_names):
self.entity = entity
self.mapper = mapper
self.selectable = selectable
@@ -451,9 +460,9 @@ class AliasedInsp(_InspectionAttr):
self._base_alias = _base_alias or self
self._use_mapper_path = _use_mapper_path
- self._adapter = sql_util.ClauseAdapter(selectable,
- equivalents=mapper._equivalent_columns,
- adapt_on_names=adapt_on_names)
+ self._adapter = sql_util.ClauseAdapter(
+ selectable, equivalents=mapper._equivalent_columns,
+ adapt_on_names=adapt_on_names)
self._adapt_on_names = adapt_on_names
self._target = mapper.class_
@@ -461,9 +470,9 @@ class AliasedInsp(_InspectionAttr):
for poly in self.with_polymorphic_mappers:
if poly is not mapper:
setattr(self.entity, poly.class_.__name__,
- AliasedClass(poly.class_, selectable, base_alias=self,
- adapt_on_names=adapt_on_names,
- use_mapper_path=_use_mapper_path))
+ AliasedClass(poly.class_, selectable, base_alias=self,
+ adapt_on_names=adapt_on_names,
+ use_mapper_path=_use_mapper_path))
is_aliased_class = True
"always returns True"
@@ -511,10 +520,10 @@ class AliasedInsp(_InspectionAttr):
def _adapt_element(self, elem):
return self._adapter.traverse(elem).\
- _annotate({
- 'parententity': self.entity,
- 'parentmapper': self.mapper}
- )
+ _annotate({
+ 'parententity': self.entity,
+ 'parentmapper': self.mapper}
+ )
def _entity_for_mapper(self, mapper):
self_poly = self.with_polymorphic_mappers
@@ -523,7 +532,8 @@ class AliasedInsp(_InspectionAttr):
elif mapper.isa(self.mapper):
return self
else:
- assert False, "mapper %s doesn't correspond to %s" % (mapper, self)
+ assert False, "mapper %s doesn't correspond to %s" % (
+ mapper, self)
def __repr__(self):
return '<AliasedInsp at 0x%x; %s>' % (
@@ -574,11 +584,12 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
attribute name that will be accessible via tuples returned by a
:class:`.Query` object.
- :param flat: Boolean, will be passed through to the :meth:`.FromClause.alias`
- call so that aliases of :class:`.Join` objects don't include an enclosing
- SELECT. This can lead to more efficient queries in many circumstances.
- A JOIN against a nested JOIN will be rewritten as a JOIN against an aliased
- SELECT subquery on backends that don't support this syntax.
+ :param flat: Boolean, will be passed through to the
+ :meth:`.FromClause.alias` call so that aliases of :class:`.Join` objects
+ don't include an enclosing SELECT. This can lead to more efficient
+ queries in many circumstances. A JOIN against a nested JOIN will be
+ rewritten as a JOIN against an aliased SELECT subquery on backends that
+ don't support this syntax.
.. versionadded:: 0.9.0
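A short runnable sketch of aliased(); User and Address are stand-in mappings, and printing the query compiles it without a database:

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, aliased

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))

    a1 = aliased(Address, name="a1")
    # "a1" appears as the alias name in the rendered SQL
    print(Session().query(User.id, a1.id).join(a1, User.id == a1.user_id))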
@@ -624,13 +635,13 @@ def aliased(element, alias=None, name=None, flat=False, adapt_on_names=False):
return element.alias(name, flat=flat)
else:
return AliasedClass(element, alias=alias, flat=flat,
- name=name, adapt_on_names=adapt_on_names)
+ name=name, adapt_on_names=adapt_on_names)
def with_polymorphic(base, classes, selectable=False,
- flat=False,
- polymorphic_on=None, aliased=False,
- innerjoin=False, _use_mapper_path=False):
+ flat=False,
+ polymorphic_on=None, aliased=False,
+ innerjoin=False, _use_mapper_path=False):
"""Produce an :class:`.AliasedClass` construct which specifies
columns for descendant mappers of the given base.
@@ -661,11 +672,12 @@ def with_polymorphic(base, classes, selectable=False,
support parenthesized joins, such as SQLite and older
versions of MySQL.
- :param flat: Boolean, will be passed through to the :meth:`.FromClause.alias`
- call so that aliases of :class:`.Join` objects don't include an enclosing
- SELECT. This can lead to more efficient queries in many circumstances.
- A JOIN against a nested JOIN will be rewritten as a JOIN against an aliased
- SELECT subquery on backends that don't support this syntax.
+ :param flat: Boolean, will be passed through to the
+ :meth:`.FromClause.alias` call so that aliases of :class:`.Join`
+ objects don't include an enclosing SELECT. This can lead to more
+ efficient queries in many circumstances. A JOIN against a nested JOIN
+ will be rewritten as a JOIN against an aliased SELECT subquery on
+ backends that don't support this syntax.
Setting ``flat`` to ``True`` implies the ``aliased`` flag is
also ``True``.
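A minimal with_polymorphic() sketch, using a single-table inheritance hierarchy invented for the example:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session, with_polymorphic

    Base = declarative_base()

    class Employee(Base):
        __tablename__ = 'employee'
        id = Column(Integer, primary_key=True)
        type = Column(String(20))
        __mapper_args__ = {'polymorphic_on': type,
                           'polymorphic_identity': 'employee'}

    class Engineer(Employee):
        engineer_info = Column(String(50))
        __mapper_args__ = {'polymorphic_identity': 'engineer'}

    # one entity exposing Engineer columns alongside Employee ones
    emp_poly = with_polymorphic(Employee, [Engineer])
    print(Session().query(emp_poly))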
@@ -695,15 +707,15 @@ def with_polymorphic(base, classes, selectable=False,
"""
primary_mapper = _class_to_mapper(base)
mappers, selectable = primary_mapper.\
- _with_polymorphic_args(classes, selectable,
- innerjoin=innerjoin)
+ _with_polymorphic_args(classes, selectable,
+ innerjoin=innerjoin)
if aliased or flat:
selectable = selectable.alias(flat=flat)
return AliasedClass(base,
- selectable,
- with_polymorphic_mappers=mappers,
- with_polymorphic_discriminator=polymorphic_on,
- use_mapper_path=_use_mapper_path)
+ selectable,
+ with_polymorphic_mappers=mappers,
+ with_polymorphic_discriminator=polymorphic_on,
+ use_mapper_path=_use_mapper_path)
def _orm_annotate(element, exclude=None):
@@ -726,8 +738,8 @@ def _orm_deannotate(element):
"""
return sql_util._deep_deannotate(element,
- values=("_orm_adapt", "parententity")
- )
+ values=("_orm_adapt", "parententity")
+ )
def _orm_full_deannotate(element):
@@ -762,18 +774,19 @@ class _ORMJoin(expression.Join):
prop = None
if prop:
- if sql_util.clause_is_present(on_selectable, left_info.selectable):
+ if sql_util.clause_is_present(
+ on_selectable, left_info.selectable):
adapt_from = on_selectable
else:
adapt_from = left_info.selectable
pj, sj, source, dest, \
secondary, target_adapter = prop._create_joins(
- source_selectable=adapt_from,
- dest_selectable=adapt_to,
- source_polymorphic=True,
- dest_polymorphic=True,
- of_type=right_info.mapper)
+ source_selectable=adapt_from,
+ dest_selectable=adapt_to,
+ source_polymorphic=True,
+ dest_polymorphic=True,
+ of_type=right_info.mapper)
if sj is not None:
if isouter:
@@ -886,7 +899,6 @@ def with_parent(instance, prop):
value_is_parent=True)
-
def has_identity(object):
"""Return True if the given object has a database
identity.
@@ -902,6 +914,7 @@ def has_identity(object):
state = attributes.instance_state(object)
return state.has_identity
+
def was_deleted(object):
"""Return True if the given object was deleted
within a session flush.
@@ -914,8 +927,6 @@ def was_deleted(object):
return state.deleted
-
-
def randomize_unitofwork():
"""Use random-ordering sets within the unit of work in order
to detect unit of work sorting issues.
@@ -935,9 +946,9 @@ def randomize_unitofwork():
By calling ``randomize_unitofwork()`` when a script first runs, the
ordering of a key series of sets within the unit of work implementation
- are randomized, so that the script can be minimized down to the fundamental
- mapping and operation that's failing, while still reproducing the issue
- on at least some runs.
+ are randomized, so that the script can be minimized down to the
+ fundamental mapping and operation that's failing, while still reproducing
+ the issue on at least some runs.
This utility is also available when running the test suite via the
``--reversetop`` flag.
@@ -950,5 +961,4 @@ def randomize_unitofwork():
from sqlalchemy.util import topological
from sqlalchemy.testing.util import RandomSet
topological.set = unitofwork.set = session.set = mapper.set = \
- dependency.set = RandomSet
-
+ dependency.set = RandomSet
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index a24f0cffa..4d013859c 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -63,10 +63,11 @@ from .expression import (
union,
union_all,
update,
- )
+)
from .visitors import ClauseVisitor
+
def __go(lcls):
global __all__
from .. import util as _sa_util
@@ -74,7 +75,7 @@ def __go(lcls):
import inspect as _inspect
__all__ = sorted(name for name, obj in lcls.items()
- if not (name.startswith('_') or _inspect.ismodule(obj)))
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
from .annotation import _prepare_annotations, Annotated
from .elements import AnnotatedColumnElement, ClauseList
@@ -88,4 +89,3 @@ def __go(lcls):
from . import naming
__go(locals())
-
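Stripped of the module machinery, the __all__ computation in __go() amounts to this standalone sketch:

    import inspect

    def public_names(namespace):
        # export names that are neither private nor imported modules
        return sorted(name for name, obj in namespace.items()
                      if not (name.startswith('_') or
                              inspect.ismodule(obj)))

    # e.g. in a package __init__:  __all__ = public_names(locals())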
diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py
index 380624a9b..02f5c3c1c 100644
--- a/lib/sqlalchemy/sql/annotation.py
+++ b/lib/sqlalchemy/sql/annotation.py
@@ -6,13 +6,15 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""The :class:`.Annotated` class and related routines; creates hash-equivalent
-copies of SQL constructs which contain context-specific markers and associations.
+copies of SQL constructs which contain context-specific markers and
+associations.
"""
from .. import util
from . import operators
+
class Annotated(object):
"""clones a ClauseElement and applies an 'annotations' dictionary.
@@ -66,7 +68,8 @@ class Annotated(object):
return self._with_annotations(_values)
def _compiler_dispatch(self, visitor, **kw):
- return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
+ return self.__element.__class__._compiler_dispatch(
+ self, visitor, **kw)
@property
def _constructor(self):
@@ -93,14 +96,12 @@ class Annotated(object):
return hash(other) == hash(self)
-
# hard-generate Annotated subclasses. this technique
# is used instead of on-the-fly types (i.e. type.__new__())
# so that the resulting objects are pickleable.
annotated_classes = {}
-
def _deep_annotate(element, annotations, exclude=None):
"""Deep copy the given ClauseElement, annotating each element
with the given annotations dictionary.
@@ -110,8 +111,8 @@ def _deep_annotate(element, annotations, exclude=None):
"""
def clone(elem):
if exclude and \
- hasattr(elem, 'proxy_set') and \
- elem.proxy_set.intersection(exclude):
+ hasattr(elem, 'proxy_set') and \
+ elem.proxy_set.intersection(exclude):
newelem = elem._clone()
elif annotations != elem._annotations:
newelem = elem._annotate(annotations)
@@ -163,6 +164,7 @@ def _shallow_annotate(element, annotations):
element._copy_internals()
return element
+
def _new_annotation_type(cls, base_cls):
if issubclass(cls, Annotated):
return cls
@@ -178,11 +180,12 @@ def _new_annotation_type(cls, base_cls):
break
annotated_classes[cls] = anno_cls = type(
- "Annotated%s" % cls.__name__,
- (base_cls, cls), {})
+ "Annotated%s" % cls.__name__,
+ (base_cls, cls), {})
globals()["Annotated%s" % cls.__name__] = anno_cls
return anno_cls
+
def _prepare_annotations(target_hierarchy, base_cls):
stack = [target_hierarchy]
while stack:
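The pickleability point made in the "hard-generate" comment above can be shown in isolation; Annotated and ClauseThing are stand-ins for the real classes:

    import pickle

    class Annotated(object):
        pass

    class ClauseThing(object):
        pass

    # generated once at import time and published under a stable
    # module-level name, so pickle can locate it by module and name
    anno_cls = type("AnnotatedClauseThing", (Annotated, ClauseThing), {})
    globals()["AnnotatedClauseThing"] = anno_cls

    pickle.loads(pickle.dumps(anno_cls()))  # round-trips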
diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py
index 1e02b3fb0..5358d95b5 100644
--- a/lib/sqlalchemy/sql/base.py
+++ b/lib/sqlalchemy/sql/base.py
@@ -19,6 +19,7 @@ import collections
PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
NO_ARG = util.symbol('NO_ARG')
+
class Immutable(object):
"""mark a ClauseElement as 'immutable' when expressions are cloned."""
@@ -32,10 +33,10 @@ class Immutable(object):
return self
-
def _from_objects(*elements):
return itertools.chain(*[element._from_objects for element in elements])
+
@util.decorator
def _generative(fn, *args, **kw):
"""Mark a method as generative."""
@@ -50,6 +51,7 @@ class _DialectArgView(collections.MutableMapping):
<dialectname>_<argument_name>.
"""
+
def __init__(self, obj):
self.obj = obj
@@ -76,7 +78,7 @@ class _DialectArgView(collections.MutableMapping):
dialect, value_key = self._key(key)
except KeyError:
raise exc.ArgumentError(
- "Keys must be of the form <dialectname>_<argname>")
+ "Keys must be of the form <dialectname>_<argname>")
else:
self.obj.dialect_options[dialect][value_key] = value
@@ -86,15 +88,17 @@ class _DialectArgView(collections.MutableMapping):
def __len__(self):
return sum(len(args._non_defaults) for args in
- self.obj.dialect_options.values())
+ self.obj.dialect_options.values())
def __iter__(self):
return (
util.safe_kwarg("%s_%s" % (dialect_name, value_name))
for dialect_name in self.obj.dialect_options
- for value_name in self.obj.dialect_options[dialect_name]._non_defaults
+ for value_name in
+ self.obj.dialect_options[dialect_name]._non_defaults
)
+
class _DialectArgDict(collections.MutableMapping):
"""A dictionary view of dialect-level arguments for a specific
dialect.
@@ -103,6 +107,7 @@ class _DialectArgDict(collections.MutableMapping):
and dialect-specified default arguments.
"""
+
def __init__(self):
self._non_defaults = {}
self._defaults = {}
@@ -150,24 +155,26 @@ class DialectKWArgs(object):
some_index = Index('a', 'b', mydialect_length=5)
The :meth:`.DialectKWArgs.argument_for` method is a per-argument
- way adding extra arguments to the :attr:`.DefaultDialect.construct_arguments`
- dictionary. This dictionary provides a list of argument names accepted by
- various schema-level constructs on behalf of a dialect.
+ way of adding extra arguments to the
+ :attr:`.DefaultDialect.construct_arguments` dictionary. This
+ dictionary provides a list of argument names accepted by various
+ schema-level constructs on behalf of a dialect.
- New dialects should typically specify this dictionary all at once as a data
- member of the dialect class. The use case for ad-hoc addition of
+ New dialects should typically specify this dictionary all at once as a
+ data member of the dialect class. The use case for ad-hoc addition of
argument names is typically for end-user code that is also using
a custom compilation scheme which consumes the additional arguments.
- :param dialect_name: name of a dialect. The dialect must be locatable,
- else a :class:`.NoSuchModuleError` is raised. The dialect must
- also include an existing :attr:`.DefaultDialect.construct_arguments` collection,
- indicating that it participates in the keyword-argument validation and
- default system, else :class:`.ArgumentError` is raised.
- If the dialect does not include this collection, then any keyword argument
- can be specified on behalf of this dialect already. All dialects
- packaged within SQLAlchemy include this collection, however for third
- party dialects, support may vary.
+ :param dialect_name: name of a dialect. The dialect must be
+ locatable, else a :class:`.NoSuchModuleError` is raised. The
+ dialect must also include an existing
+ :attr:`.DefaultDialect.construct_arguments` collection, indicating
+ that it participates in the keyword-argument validation and default
+ system, else :class:`.ArgumentError` is raised. If the dialect does
+ not include this collection, then any keyword argument can
+ already be specified on behalf of this dialect. All dialects packaged
+ within SQLAlchemy include this collection, however for third party
+ dialects, support may vary.
:param argument_name: name of the parameter.
@@ -179,9 +186,10 @@ class DialectKWArgs(object):
construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
if construct_arg_dictionary is None:
- raise exc.ArgumentError("Dialect '%s' does have keyword-argument "
- "validation and defaults enabled configured" %
- dialect_name)
+ raise exc.ArgumentError(
+ "Dialect '%s' does not have keyword-argument "
+ "validation and defaults enabled" %
+ dialect_name)
if cls not in construct_arg_dictionary:
construct_arg_dictionary[cls] = {}
construct_arg_dictionary[cls][argument_name] = default
@@ -243,8 +251,8 @@ class DialectKWArgs(object):
options to this construct.
This is a two-level nested registry, keyed to ``<dialect_name>``
- and ``<argument_name>``. For example, the ``postgresql_where`` argument
- would be locatable as::
+ and ``<argument_name>``. For example, the ``postgresql_where``
+ argument would be locatable as::
arg = my_object.dialect_options['postgresql']['where']
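A runnable illustration of that two-level lookup, using the built-in postgresql_where argument of Index:

    from sqlalchemy import Column, Index, Integer, MetaData, Table

    t = Table('t', MetaData(), Column('x', Integer))
    idx = Index('ix_t_x', t.c.x, postgresql_where=t.c.x > 5)

    print(idx.dialect_options['postgresql']['where'])  # the criterion
    print(dict(idx.dialect_kwargs))  # {'postgresql_where': <expression>}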
@@ -257,8 +265,8 @@ class DialectKWArgs(object):
"""
return util.PopulateDict(
- util.portable_instancemethod(self._kw_reg_for_dialect_cls)
- )
+ util.portable_instancemethod(self._kw_reg_for_dialect_cls)
+ )
def _validate_dialect_kwargs(self, kwargs):
# validate remaining kwargs that they all specify DB prefixes
@@ -269,29 +277,30 @@ class DialectKWArgs(object):
for k in kwargs:
m = re.match('^(.+?)_(.+)$', k)
if not m:
- raise TypeError("Additional arguments should be "
- "named <dialectname>_<argument>, got '%s'" % k)
+ raise TypeError(
+ "Additional arguments should be "
+ "named <dialectname>_<argument>, got '%s'" % k)
dialect_name, arg_name = m.group(1, 2)
try:
construct_arg_dictionary = self.dialect_options[dialect_name]
except exc.NoSuchModuleError:
util.warn(
- "Can't validate argument %r; can't "
- "locate any SQLAlchemy dialect named %r" %
- (k, dialect_name))
+ "Can't validate argument %r; can't "
+ "locate any SQLAlchemy dialect named %r" %
+ (k, dialect_name))
self.dialect_options[dialect_name] = d = _DialectArgDict()
d._defaults.update({"*": None})
d._non_defaults[arg_name] = kwargs[k]
else:
if "*" not in construct_arg_dictionary and \
- arg_name not in construct_arg_dictionary:
+ arg_name not in construct_arg_dictionary:
raise exc.ArgumentError(
- "Argument %r is not accepted by "
- "dialect %r on behalf of %r" % (
- k,
- dialect_name, self.__class__
- ))
+ "Argument %r is not accepted by "
+ "dialect %r on behalf of %r" % (
+ k,
+ dialect_name, self.__class__
+ ))
else:
construct_arg_dictionary[arg_name] = kwargs[k]
@@ -424,11 +433,13 @@ class SchemaEventTarget(object):
self._set_parent(parent)
self.dispatch.after_parent_attach(self, parent)
+
class SchemaVisitor(ClauseVisitor):
"""Define the visiting for ``SchemaItem`` objects."""
__traverse_options__ = {'schema_visitor': True}
+
class ColumnCollection(util.OrderedProperties):
"""An ordered dictionary that stores a list of ColumnElement
instances.
@@ -478,11 +489,10 @@ class ColumnCollection(util.OrderedProperties):
self._data[column.key] = column
if remove_col is not None:
self._all_columns[:] = [column if c is remove_col
- else c for c in self._all_columns]
+ else c for c in self._all_columns]
else:
self._all_columns.append(column)
-
def add(self, column):
"""Add a column to this collection.
@@ -492,7 +502,7 @@ class ColumnCollection(util.OrderedProperties):
"""
if not column.key:
raise exc.ArgumentError(
- "Can't add unnamed column to column collection")
+ "Can't add unnamed column to column collection")
self[column.key] = column
def __delitem__(self, key):
@@ -512,8 +522,8 @@ class ColumnCollection(util.OrderedProperties):
if not existing.shares_lineage(value):
util.warn('Column %r on table %r being replaced by '
'%r, which has the same key. Consider '
- 'use_labels for select() statements.' % (key,
- getattr(existing, 'table', None), value))
+ 'use_labels for select() statements.' %
+ (key, getattr(existing, 'table', None), value))
# pop out memoized proxy_set as this
# operation may very well be occurring
@@ -530,17 +540,20 @@ class ColumnCollection(util.OrderedProperties):
def remove(self, column):
del self._data[column.key]
self._all_col_set.remove(column)
- self._all_columns[:] = [c for c in self._all_columns if c is not column]
+ self._all_columns[:] = [
+ c for c in self._all_columns if c is not column]
def update(self, iter):
cols = list(iter)
- self._all_columns.extend(c for label, c in cols if c not in self._all_col_set)
+ self._all_columns.extend(
+ c for label, c in cols if c not in self._all_col_set)
self._all_col_set.update(c for label, c in cols)
self._data.update((label, c) for label, c in cols)
def extend(self, iter):
cols = list(iter)
- self._all_columns.extend(c for c in cols if c not in self._all_col_set)
+ self._all_columns.extend(c for c in cols if c not in
+ self._all_col_set)
self._all_col_set.update(cols)
self._data.update((c.key, c) for c in cols)
@@ -574,7 +587,8 @@ class ColumnCollection(util.OrderedProperties):
return col in self._all_col_set
def as_immutable(self):
- return ImmutableColumnCollection(self._data, self._all_col_set, self._all_columns)
+ return ImmutableColumnCollection(
+ self._data, self._all_col_set, self._all_columns)
class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
@@ -609,6 +623,7 @@ class ColumnSet(util.ordered_column_set):
def __hash__(self):
return hash(tuple(x for x in self))
+
def _bind_or_error(schemaitem, msg=None):
bind = schemaitem.bind
if not bind:
@@ -621,7 +636,7 @@ def _bind_or_error(schemaitem, msg=None):
item = '%s object' % name
if msg is None:
msg = "%s is not bound to an Engine or Connection. "\
- "Execution can not proceed without a database to execute "\
- "against." % item
+ "Execution can not proceed without a database to execute "\
+ "against." % item
raise exc.UnboundExecutionError(msg)
return bind
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 384cf27c2..ac45054ae 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -25,7 +25,7 @@ To generate user-defined SQL strings, see
import re
from . import schema, sqltypes, operators, functions, \
- util as sql_util, visitors, elements, selectable, base
+ util as sql_util, visitors, elements, selectable, base
from .. import util, exc
import decimal
import itertools
@@ -158,7 +158,9 @@ COMPOUND_KEYWORDS = {
selectable.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL'
}
+
class Compiled(object):
+
"""Represent a compiled SQL or DDL expression.
The ``__str__`` method of the ``Compiled`` object should produce
@@ -174,7 +176,7 @@ class Compiled(object):
_cached_metadata = None
def __init__(self, dialect, statement, bind=None,
- compile_kwargs=util.immutabledict()):
+ compile_kwargs=util.immutabledict()):
"""Construct a new ``Compiled`` object.
:param dialect: ``Dialect`` to compile against.
@@ -199,7 +201,7 @@ class Compiled(object):
self.string = self.process(self.statement, **compile_kwargs)
@util.deprecated("0.7", ":class:`.Compiled` objects now compile "
- "within the constructor.")
+ "within the constructor.")
def compile(self):
"""Produce the internal string representation of this element.
"""
@@ -247,8 +249,8 @@ class Compiled(object):
e = self.bind
if e is None:
raise exc.UnboundExecutionError(
- "This Compiled object is not bound to any Engine "
- "or Connection.")
+ "This Compiled object is not bound to any Engine "
+ "or Connection.")
return e._execute_compiled(self, multiparams, params)
def scalar(self, *multiparams, **params):
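For context, str() on any statement routes through this compilation machinery; a minimal example using ad-hoc table()/column() objects:

    from sqlalchemy.sql import column, select, table

    t = table('t', column('x'))
    stmt = select([t.c.x]).where(t.c.x == 5)
    # compiles against the default dialect; no engine required
    print(str(stmt))  # SELECT t.x FROM t WHERE t.x = :x_1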
@@ -259,6 +261,7 @@ class Compiled(object):
class TypeCompiler(object):
+
"""Produces DDL specification for TypeEngine objects."""
def __init__(self, dialect):
@@ -268,8 +271,8 @@ class TypeCompiler(object):
return type_._compiler_dispatch(self)
-
class _CompileLabel(visitors.Visitable):
+
"""lightweight label object which acts as an expression.Label."""
__visit_name__ = 'label'
@@ -290,6 +293,7 @@ class _CompileLabel(visitors.Visitable):
class SQLCompiler(Compiled):
+
"""Default implementation of Compiled.
Compiles ClauseElements into SQL strings. Uses a similar visit
@@ -333,7 +337,7 @@ class SQLCompiler(Compiled):
"""
def __init__(self, dialect, statement, column_keys=None,
- inline=False, **kwargs):
+ inline=False, **kwargs):
"""Construct a new ``DefaultCompiler`` object.
dialect
@@ -412,19 +416,19 @@ class SQLCompiler(Compiled):
def _apply_numbered_params(self):
poscount = itertools.count(1)
self.string = re.sub(
- r'\[_POSITION\]',
- lambda m: str(util.next(poscount)),
- self.string)
+ r'\[_POSITION\]',
+ lambda m: str(util.next(poscount)),
+ self.string)
@util.memoized_property
def _bind_processors(self):
return dict(
- (key, value) for key, value in
- ((self.bind_names[bindparam],
- bindparam.type._cached_bind_processor(self.dialect))
- for bindparam in self.bind_names)
- if value is not None
- )
+ (key, value) for key, value in
+ ((self.bind_names[bindparam],
+ bindparam.type._cached_bind_processor(self.dialect))
+ for bindparam in self.bind_names)
+ if value is not None
+ )
def is_subquery(self):
return len(self.stack) > 1
@@ -491,15 +495,16 @@ class SQLCompiler(Compiled):
return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
def visit_label(self, label,
- add_to_result_map=None,
- within_label_clause=False,
- within_columns_clause=False,
- render_label_as_label=None,
- **kw):
+ add_to_result_map=None,
+ within_label_clause=False,
+ within_columns_clause=False,
+ render_label_as_label=None,
+ **kw):
# only render labels within the columns clause
# or ORDER BY clause of a select. dialect-specific compilers
# can modify this behavior.
- render_label_with_as = within_columns_clause and not within_label_clause
+ render_label_with_as = (within_columns_clause and not
+ within_label_clause)
render_label_only = render_label_as_label is label
if render_label_only or render_label_with_as:
@@ -511,27 +516,25 @@ class SQLCompiler(Compiled):
if render_label_with_as:
if add_to_result_map is not None:
add_to_result_map(
- labelname,
- label.name,
- (label, labelname, ) + label._alt_names,
- label.type
+ labelname,
+ label.name,
+ (label, labelname, ) + label._alt_names,
+ label.type
)
- return label.element._compiler_dispatch(self,
- within_columns_clause=True,
- within_label_clause=True,
- **kw) + \
- OPERATORS[operators.as_] + \
- self.preparer.format_label(label, labelname)
+ return label.element._compiler_dispatch(
+ self, within_columns_clause=True,
+ within_label_clause=True, **kw) + \
+ OPERATORS[operators.as_] + \
+ self.preparer.format_label(label, labelname)
elif render_label_only:
return self.preparer.format_label(label, labelname)
else:
- return label.element._compiler_dispatch(self,
- within_columns_clause=False,
- **kw)
+ return label.element._compiler_dispatch(
+ self, within_columns_clause=False, **kw)
def visit_column(self, column, add_to_result_map=None,
- include_table=True, **kwargs):
+ include_table=True, **kwargs):
name = orig_name = column.name
if name is None:
raise exc.CompileError("Cannot compile Column object until "
@@ -567,8 +570,8 @@ class SQLCompiler(Compiled):
tablename = self._truncated_identifier("alias", tablename)
return schema_prefix + \
- self.preparer.quote(tablename) + \
- "." + name
+ self.preparer.quote(tablename) + \
+ "." + name
def escape_literal_column(self, text):
"""provide escaping for the literal_column() construct."""
@@ -597,37 +600,38 @@ class SQLCompiler(Compiled):
return self.bindparam_string(name, **kw)
# un-escape any \:params
- return BIND_PARAMS_ESC.sub(lambda m: m.group(1),
- BIND_PARAMS.sub(do_bindparam,
- self.post_process_text(textclause.text))
+ return BIND_PARAMS_ESC.sub(
+ lambda m: m.group(1),
+ BIND_PARAMS.sub(
+ do_bindparam,
+ self.post_process_text(textclause.text))
)
def visit_text_as_from(self, taf, iswrapper=False,
- compound_index=0, force_result_map=False,
- asfrom=False,
- parens=True, **kw):
+ compound_index=0, force_result_map=False,
+ asfrom=False,
+ parens=True, **kw):
toplevel = not self.stack
entry = self._default_stack_entry if toplevel else self.stack[-1]
populate_result_map = force_result_map or (
- compound_index == 0 and (
- toplevel or \
- entry['iswrapper']
- )
- )
+ compound_index == 0 and (
+ toplevel or
+ entry['iswrapper']
+ )
+ )
if populate_result_map:
for c in taf.column_args:
self.process(c, within_columns_clause=True,
- add_to_result_map=self._add_to_result_map)
+ add_to_result_map=self._add_to_result_map)
text = self.process(taf.element, **kw)
if asfrom and parens:
text = "(%s)" % text
return text
-
def visit_null(self, expr, **kw):
return 'NULL'
@@ -646,7 +650,7 @@ class SQLCompiler(Compiled):
def visit_clauselist(self, clauselist, order_by_select=None, **kw):
if order_by_select is not None:
return self._order_by_clauselist(
- clauselist, order_by_select, **kw)
+ clauselist, order_by_select, **kw)
sep = clauselist.operator
if sep is None:
@@ -654,11 +658,11 @@ class SQLCompiler(Compiled):
else:
sep = OPERATORS[clauselist.operator]
return sep.join(
- s for s in
- (
- c._compiler_dispatch(self, **kw)
- for c in clauselist.clauses)
- if s)
+ s for s in
+ (
+ c._compiler_dispatch(self, **kw)
+ for c in clauselist.clauses)
+ if s)
def _order_by_clauselist(self, clauselist, order_by_select, **kw):
# look through raw columns collection for labels.
@@ -667,21 +671,21 @@ class SQLCompiler(Compiled):
# label expression in the columns clause.
raw_col = set(l._order_by_label_element.name
- for l in order_by_select._raw_columns
- if l._order_by_label_element is not None)
+ for l in order_by_select._raw_columns
+ if l._order_by_label_element is not None)
return ", ".join(
- s for s in
- (
- c._compiler_dispatch(self,
- render_label_as_label=
- c._order_by_label_element if
- c._order_by_label_element is not None and
- c._order_by_label_element.name in raw_col
- else None,
- **kw)
- for c in clauselist.clauses)
- if s)
+ s for s in
+ (
+ c._compiler_dispatch(
+ self,
+ render_label_as_label=c._order_by_label_element if
+ c._order_by_label_element is not None and
+ c._order_by_label_element.name in raw_col
+ else None,
+ **kw)
+ for c in clauselist.clauses)
+ if s)
def visit_case(self, clause, **kwargs):
x = "CASE "
@@ -689,38 +693,38 @@ class SQLCompiler(Compiled):
x += clause.value._compiler_dispatch(self, **kwargs) + " "
for cond, result in clause.whens:
x += "WHEN " + cond._compiler_dispatch(
- self, **kwargs
- ) + " THEN " + result._compiler_dispatch(
- self, **kwargs) + " "
+ self, **kwargs
+ ) + " THEN " + result._compiler_dispatch(
+ self, **kwargs) + " "
if clause.else_ is not None:
x += "ELSE " + clause.else_._compiler_dispatch(
- self, **kwargs
- ) + " "
+ self, **kwargs
+ ) + " "
x += "END"
return x
def visit_cast(self, cast, **kwargs):
return "CAST(%s AS %s)" % \
- (cast.clause._compiler_dispatch(self, **kwargs),
- cast.typeclause._compiler_dispatch(self, **kwargs))
+ (cast.clause._compiler_dispatch(self, **kwargs),
+ cast.typeclause._compiler_dispatch(self, **kwargs))
def visit_over(self, over, **kwargs):
return "%s OVER (%s)" % (
over.func._compiler_dispatch(self, **kwargs),
' '.join(
- '%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs))
- for word, clause in (
- ('PARTITION', over.partition_by),
- ('ORDER', over.order_by)
- )
- if clause is not None and len(clause)
+ '%s BY %s' % (word, clause._compiler_dispatch(self, **kwargs))
+ for word, clause in (
+ ('PARTITION', over.partition_by),
+ ('ORDER', over.order_by)
+ )
+ if clause is not None and len(clause)
)
)
def visit_extract(self, extract, **kwargs):
field = self.extract_map.get(extract.field, extract.field)
- return "EXTRACT(%s FROM %s)" % (field,
- extract.expr._compiler_dispatch(self, **kwargs))
+ return "EXTRACT(%s FROM %s)" % (
+ field, extract.expr._compiler_dispatch(self, **kwargs))
def visit_function(self, func, add_to_result_map=None, **kwargs):
if add_to_result_map is not None:
@@ -734,7 +738,7 @@ class SQLCompiler(Compiled):
else:
name = FUNCTIONS.get(func.__class__, func.name + "%(expr)s")
return ".".join(list(func.packagenames) + [name]) % \
- {'expr': self.function_argspec(func, **kwargs)}
+ {'expr': self.function_argspec(func, **kwargs)}
def visit_next_value_func(self, next_value, **kw):
return self.visit_sequence(next_value.sequence)
@@ -748,39 +752,38 @@ class SQLCompiler(Compiled):
def function_argspec(self, func, **kwargs):
return func.clause_expr._compiler_dispatch(self, **kwargs)
-
def visit_compound_select(self, cs, asfrom=False,
- parens=True, compound_index=0, **kwargs):
+ parens=True, compound_index=0, **kwargs):
toplevel = not self.stack
entry = self._default_stack_entry if toplevel else self.stack[-1]
self.stack.append(
- {
- 'correlate_froms': entry['correlate_froms'],
- 'iswrapper': toplevel,
- 'asfrom_froms': entry['asfrom_froms']
- })
+ {
+ 'correlate_froms': entry['correlate_froms'],
+ 'iswrapper': toplevel,
+ 'asfrom_froms': entry['asfrom_froms']
+ })
keyword = self.compound_keywords.get(cs.keyword)
text = (" " + keyword + " ").join(
- (c._compiler_dispatch(self,
- asfrom=asfrom, parens=False,
- compound_index=i, **kwargs)
- for i, c in enumerate(cs.selects))
- )
+ (c._compiler_dispatch(self,
+ asfrom=asfrom, parens=False,
+ compound_index=i, **kwargs)
+ for i, c in enumerate(cs.selects))
+ )
group_by = cs._group_by_clause._compiler_dispatch(
- self, asfrom=asfrom, **kwargs)
+ self, asfrom=asfrom, **kwargs)
if group_by:
text += " GROUP BY " + group_by
text += self.order_by_clause(cs, **kwargs)
text += (cs._limit_clause is not None or cs._offset_clause is not None) and \
- self.limit_clause(cs) or ""
+ self.limit_clause(cs) or ""
if self.ctes and \
- compound_index == 0 and toplevel:
+ compound_index == 0 and toplevel:
text = self._render_cte_clause() + text
self.stack.pop(-1)
@@ -793,26 +796,26 @@ class SQLCompiler(Compiled):
if unary.operator:
if unary.modifier:
raise exc.CompileError(
- "Unary expression does not support operator "
- "and modifier simultaneously")
+ "Unary expression does not support operator "
+ "and modifier simultaneously")
disp = getattr(self, "visit_%s_unary_operator" %
- unary.operator.__name__, None)
+ unary.operator.__name__, None)
if disp:
return disp(unary, unary.operator, **kw)
else:
- return self._generate_generic_unary_operator(unary,
- OPERATORS[unary.operator], **kw)
+ return self._generate_generic_unary_operator(
+ unary, OPERATORS[unary.operator], **kw)
elif unary.modifier:
disp = getattr(self, "visit_%s_unary_modifier" %
- unary.modifier.__name__, None)
+ unary.modifier.__name__, None)
if disp:
return disp(unary, unary.modifier, **kw)
else:
- return self._generate_generic_unary_modifier(unary,
- OPERATORS[unary.modifier], **kw)
+ return self._generate_generic_unary_modifier(
+ unary, OPERATORS[unary.modifier], **kw)
else:
raise exc.CompileError(
- "Unary expression has no operator or modifier")
+ "Unary expression has no operator or modifier")
def visit_istrue_unary_operator(self, element, operator, **kw):
if self.dialect.supports_native_boolean:
@@ -829,8 +832,8 @@ class SQLCompiler(Compiled):
def visit_binary(self, binary, **kw):
# don't allow "? = ?" to render
if self.ansi_bind_rules and \
- isinstance(binary.left, elements.BindParameter) and \
- isinstance(binary.right, elements.BindParameter):
+ isinstance(binary.left, elements.BindParameter) and \
+ isinstance(binary.right, elements.BindParameter):
kw['literal_binds'] = True
operator = binary.operator
@@ -846,21 +849,21 @@ class SQLCompiler(Compiled):
return self._generate_generic_binary(binary, opstring, **kw)
def visit_custom_op_binary(self, element, operator, **kw):
- return self._generate_generic_binary(element,
- " " + operator.opstring + " ", **kw)
+ return self._generate_generic_binary(
+ element, " " + operator.opstring + " ", **kw)
def visit_custom_op_unary_operator(self, element, operator, **kw):
- return self._generate_generic_unary_operator(element,
- operator.opstring + " ", **kw)
+ return self._generate_generic_unary_operator(
+ element, operator.opstring + " ", **kw)
def visit_custom_op_unary_modifier(self, element, operator, **kw):
- return self._generate_generic_unary_modifier(element,
- " " + operator.opstring, **kw)
+ return self._generate_generic_unary_modifier(
+ element, " " + operator.opstring, **kw)
def _generate_generic_binary(self, binary, opstring, **kw):
return binary.left._compiler_dispatch(self, **kw) + \
- opstring + \
- binary.right._compiler_dispatch(self, **kw)
+ opstring + \
+ binary.right._compiler_dispatch(self, **kw)
def _generate_generic_unary_operator(self, unary, opstring, **kw):
return opstring + unary.element._compiler_dispatch(self, **kw)
@@ -888,16 +891,16 @@ class SQLCompiler(Compiled):
binary = binary._clone()
percent = self._like_percent_literal
binary.right = percent.__radd__(
- binary.right
- )
+ binary.right
+ )
return self.visit_like_op_binary(binary, operator, **kw)
def visit_notstartswith_op_binary(self, binary, operator, **kw):
binary = binary._clone()
percent = self._like_percent_literal
binary.right = percent.__radd__(
- binary.right
- )
+ binary.right
+ )
return self.visit_notlike_op_binary(binary, operator, **kw)
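The percent-concatenation above is what makes startswith() render as LIKE with a trailing literal '%'; roughly:

    from sqlalchemy.sql import column, select

    c = column('name')
    # the '%' is appended to the bound value, not inlined into the string
    print(select([c]).where(c.startswith('ab')))
    # SELECT name WHERE name LIKE :name_1 || '%'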
def visit_endswith_op_binary(self, binary, operator, **kw):
@@ -917,77 +920,77 @@ class SQLCompiler(Compiled):
# TODO: use ternary here, not "and"/ "or"
return '%s LIKE %s' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
+ binary.left._compiler_dispatch(self, **kw),
+ binary.right._compiler_dispatch(self, **kw)) \
+ (
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_notlike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT LIKE %s' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
+ binary.left._compiler_dispatch(self, **kw),
+ binary.right._compiler_dispatch(self, **kw)) \
+ (
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) LIKE lower(%s)' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
+ binary.left._compiler_dispatch(self, **kw),
+ binary.right._compiler_dispatch(self, **kw)) \
+ (
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) NOT LIKE lower(%s)' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
+ binary.left._compiler_dispatch(self, **kw),
+ binary.right._compiler_dispatch(self, **kw)) \
+ (
' ESCAPE ' +
self.render_literal_value(escape, sqltypes.STRINGTYPE)
if escape else ''
- )
+ )
def visit_between_op_binary(self, binary, operator, **kw):
symmetric = binary.modifiers.get("symmetric", False)
return self._generate_generic_binary(
- binary, " BETWEEN SYMMETRIC "
- if symmetric else " BETWEEN ", **kw)
+ binary, " BETWEEN SYMMETRIC "
+ if symmetric else " BETWEEN ", **kw)
def visit_notbetween_op_binary(self, binary, operator, **kw):
symmetric = binary.modifiers.get("symmetric", False)
return self._generate_generic_binary(
- binary, " NOT BETWEEN SYMMETRIC "
- if symmetric else " NOT BETWEEN ", **kw)
+ binary, " NOT BETWEEN SYMMETRIC "
+ if symmetric else " NOT BETWEEN ", **kw)
def visit_bindparam(self, bindparam, within_columns_clause=False,
- literal_binds=False,
- skip_bind_expression=False,
- **kwargs):
+ literal_binds=False,
+ skip_bind_expression=False,
+ **kwargs):
if not skip_bind_expression and bindparam.type._has_bind_expression:
bind_expression = bindparam.type.bind_expression(bindparam)
return self.process(bind_expression,
skip_bind_expression=True)
if literal_binds or \
- (within_columns_clause and \
+ (within_columns_clause and
self.ansi_bind_rules):
if bindparam.value is None and bindparam.callable is None:
raise exc.CompileError("Bind parameter '%s' without a "
- "renderable value not allowed here."
- % bindparam.key)
- return self.render_literal_bindparam(bindparam,
- within_columns_clause=True, **kwargs)
+ "renderable value not allowed here."
+ % bindparam.key)
+ return self.render_literal_bindparam(
+ bindparam, within_columns_clause=True, **kwargs)
name = self._truncate_bindparam(bindparam)
@@ -995,13 +998,13 @@ class SQLCompiler(Compiled):
existing = self.binds[name]
if existing is not bindparam:
if (existing.unique or bindparam.unique) and \
- not existing.proxy_set.intersection(
- bindparam.proxy_set):
+ not existing.proxy_set.intersection(
+ bindparam.proxy_set):
raise exc.CompileError(
- "Bind parameter '%s' conflicts with "
- "unique bind parameter of the same name" %
- bindparam.key
- )
+ "Bind parameter '%s' conflicts with "
+ "unique bind parameter of the same name" %
+ bindparam.key
+ )
elif existing._is_crud or bindparam._is_crud:
raise exc.CompileError(
"bindparam() name '%s' is reserved "
@@ -1009,8 +1012,8 @@ class SQLCompiler(Compiled):
"clause of this "
"insert/update statement. Please use a "
"name other than column name when using bindparam() "
- "with insert() or update() (for example, 'b_%s')."
- % (bindparam.key, bindparam.key)
+ "with insert() or update() (for example, 'b_%s')." %
+ (bindparam.key, bindparam.key)
)
self.binds[bindparam.key] = self.binds[name] = bindparam
@@ -1037,7 +1040,7 @@ class SQLCompiler(Compiled):
return processor(value)
else:
raise NotImplementedError(
- "Don't know how to literal-quote value %r" % value)
+ "Don't know how to literal-quote value %r" % value)
def _truncate_bindparam(self, bindparam):
if bindparam in self.bind_names:
@@ -1061,7 +1064,7 @@ class SQLCompiler(Compiled):
if len(anonname) > self.label_length:
counter = self.truncated_names.get(ident_class, 1)
truncname = anonname[0:max(self.label_length - 6, 0)] + \
- "_" + hex(counter)[2:]
+ "_" + hex(counter)[2:]
self.truncated_names[ident_class] = counter + 1
else:
truncname = anonname
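The truncation scheme above, extracted into a standalone sketch:

    def truncated(anonname, label_length, counter):
        # keep a prefix and append "_<hex serial>" so shortened
        # names remain unique within the statement
        if len(anonname) > label_length:
            return anonname[0:max(label_length - 6, 0)] + \
                "_" + hex(counter)[2:]
        return anonname

    print(truncated("some_very_long_anonymous_label", 20, 26))
    # some_very_long_1a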
@@ -1086,8 +1089,8 @@ class SQLCompiler(Compiled):
return self.bindtemplate % {'name': name}
def visit_cte(self, cte, asfrom=False, ashint=False,
- fromhints=None,
- **kwargs):
+ fromhints=None,
+ **kwargs):
self._init_cte_state()
if isinstance(cte.name, elements._truncated_label):
@@ -1108,9 +1111,9 @@ class SQLCompiler(Compiled):
del self.ctes[existing_cte]
else:
raise exc.CompileError(
- "Multiple, unrelated CTEs found with "
- "the same name: %r" %
- cte_name)
+ "Multiple, unrelated CTEs found with "
+ "the same name: %r" %
+ cte_name)
self.ctes_by_name[cte_name] = cte
@@ -1120,7 +1123,8 @@ class SQLCompiler(Compiled):
self.visit_cte(orig_cte)
cte_alias_name = cte._cte_alias.name
if isinstance(cte_alias_name, elements._truncated_label):
- cte_alias_name = self._truncated_identifier("alias", cte_alias_name)
+ cte_alias_name = self._truncated_identifier(
+ "alias", cte_alias_name)
else:
orig_cte = cte
cte_alias_name = None
@@ -1136,20 +1140,20 @@ class SQLCompiler(Compiled):
else:
assert False
recur_cols = [c for c in
- util.unique_list(col_source.inner_columns)
- if c is not None]
+ util.unique_list(col_source.inner_columns)
+ if c is not None]
text += "(%s)" % (", ".join(
- self.preparer.format_column(ident)
- for ident in recur_cols))
+ self.preparer.format_column(ident)
+ for ident in recur_cols))
if self.positional:
kwargs['positional_names'] = self.cte_positional[cte] = []
text += " AS \n" + \
- cte.original._compiler_dispatch(
- self, asfrom=True, **kwargs
- )
+ cte.original._compiler_dispatch(
+ self, asfrom=True, **kwargs
+ )
self.ctes[cte] = text
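A small example of the rendering this method drives; referencing a CTE pulls a WITH clause to the front of the enclosing statement:

    from sqlalchemy.sql import column, select, table

    orders = table('orders', column('region'), column('amount'))
    big = select([orders.c.region]).\
        where(orders.c.amount > 100).cte(name="big")
    print(select([big.c.region]))
    # WITH big AS (SELECT ...) SELECT big.region FROM big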
@@ -1162,8 +1166,8 @@ class SQLCompiler(Compiled):
return text
def visit_alias(self, alias, asfrom=False, ashint=False,
- iscrud=False,
- fromhints=None, **kwargs):
+ iscrud=False,
+ fromhints=None, **kwargs):
if asfrom or ashint:
if isinstance(alias.name, elements._truncated_label):
alias_name = self._truncated_identifier("alias", alias.name)
@@ -1174,13 +1178,13 @@ class SQLCompiler(Compiled):
return self.preparer.format_alias(alias, alias_name)
elif asfrom:
ret = alias.original._compiler_dispatch(self,
- asfrom=True, **kwargs) + \
- " AS " + \
- self.preparer.format_alias(alias, alias_name)
+ asfrom=True, **kwargs) + \
+ " AS " + \
+ self.preparer.format_alias(alias, alias_name)
if fromhints and alias in fromhints:
ret = self.format_from_hint_text(ret, alias,
- fromhints[alias], iscrud)
+ fromhints[alias], iscrud)
return ret
else:
@@ -1201,19 +1205,19 @@ class SQLCompiler(Compiled):
self.result_map[keyname] = name, objects, type_
def _label_select_column(self, select, column,
- populate_result_map,
- asfrom, column_clause_args,
- name=None,
- within_columns_clause=True):
+ populate_result_map,
+ asfrom, column_clause_args,
+ name=None,
+ within_columns_clause=True):
"""produce labeled columns present in a select()."""
if column.type._has_column_expression and \
populate_result_map:
col_expr = column.type.column_expression(column)
add_to_result_map = lambda keyname, name, objects, type_: \
- self._add_to_result_map(
- keyname, name,
- objects + (column,), type_)
+ self._add_to_result_map(
+ keyname, name,
+ objects + (column,), type_)
else:
col_expr = column
if populate_result_map:
@@ -1226,19 +1230,19 @@ class SQLCompiler(Compiled):
elif isinstance(column, elements.Label):
if col_expr is not column:
result_expr = _CompileLabel(
- col_expr,
- column.name,
- alt_names=(column.element,)
- )
+ col_expr,
+ column.name,
+ alt_names=(column.element,)
+ )
else:
result_expr = col_expr
elif select is not None and name:
result_expr = _CompileLabel(
- col_expr,
- name,
- alt_names=(column._key_label,)
- )
+ col_expr,
+ name,
+ alt_names=(column._key_label,)
+ )
elif \
asfrom and \
@@ -1247,30 +1251,30 @@ class SQLCompiler(Compiled):
column.table is not None and \
not isinstance(column.table, selectable.Select):
result_expr = _CompileLabel(col_expr,
- elements._as_truncated(column.name),
- alt_names=(column.key,))
+ elements._as_truncated(column.name),
+ alt_names=(column.key,))
elif not isinstance(column,
- (elements.UnaryExpression, elements.TextClause)) \
- and (not hasattr(column, 'name') or \
- isinstance(column, functions.Function)):
+ (elements.UnaryExpression, elements.TextClause)) \
+ and (not hasattr(column, 'name') or
+ isinstance(column, functions.Function)):
result_expr = _CompileLabel(col_expr, column.anon_label)
elif col_expr is not column:
# TODO: are we sure "column" has a .name and .key here ?
# assert isinstance(column, elements.ColumnClause)
result_expr = _CompileLabel(col_expr,
- elements._as_truncated(column.name),
- alt_names=(column.key,))
+ elements._as_truncated(column.name),
+ alt_names=(column.key,))
else:
result_expr = col_expr
column_clause_args.update(
- within_columns_clause=within_columns_clause,
- add_to_result_map=add_to_result_map
- )
+ within_columns_clause=within_columns_clause,
+ add_to_result_map=add_to_result_map
+ )
return result_expr._compiler_dispatch(
- self,
- **column_clause_args
- )
+ self,
+ **column_clause_args
+ )
def format_from_hint_text(self, sqltext, table, hint, iscrud):
hinttext = self.get_from_hint_text(table, hint)
@@ -1307,7 +1311,7 @@ class SQLCompiler(Compiled):
newelem = cloned[element] = element._clone()
if newelem.is_selectable and newelem._is_join and \
- isinstance(newelem.right, selectable.FromGrouping):
+ isinstance(newelem.right, selectable.FromGrouping):
newelem._reset_exported()
newelem.left = visit(newelem.left, **kw)
@@ -1376,24 +1380,24 @@ class SQLCompiler(Compiled):
return visit(select)
- def _transform_result_map_for_nested_joins(self, select, transformed_select):
+ def _transform_result_map_for_nested_joins(
+ self, select, transformed_select):
inner_col = dict((c._key_label, c) for
- c in transformed_select.inner_columns)
+ c in transformed_select.inner_columns)
d = dict(
- (inner_col[c._key_label], c)
- for c in select.inner_columns
- )
+ (inner_col[c._key_label], c)
+ for c in select.inner_columns
+ )
for key, (name, objs, typ) in list(self.result_map.items()):
objs = tuple([d.get(col, col) for col in objs])
self.result_map[key] = (name, objs, typ)
-
_default_stack_entry = util.immutabledict([
- ('iswrapper', False),
- ('correlate_froms', frozenset()),
- ('asfrom_froms', frozenset())
- ])
+ ('iswrapper', False),
+ ('correlate_froms', frozenset()),
+ ('asfrom_froms', frozenset())
+ ])
def _display_froms_for_select(self, select, asfrom):
# utility method to help external dialects
@@ -1408,53 +1412,53 @@ class SQLCompiler(Compiled):
if asfrom:
froms = select._get_display_froms(
- explicit_correlate_froms=\
- correlate_froms.difference(asfrom_froms),
- implicit_correlate_froms=())
+ explicit_correlate_froms=correlate_froms.difference(
+ asfrom_froms),
+ implicit_correlate_froms=())
else:
froms = select._get_display_froms(
- explicit_correlate_froms=correlate_froms,
- implicit_correlate_froms=asfrom_froms)
+ explicit_correlate_froms=correlate_froms,
+ implicit_correlate_froms=asfrom_froms)
return froms
def visit_select(self, select, asfrom=False, parens=True,
- iswrapper=False, fromhints=None,
- compound_index=0,
- force_result_map=False,
- nested_join_translation=False,
- **kwargs):
+ iswrapper=False, fromhints=None,
+ compound_index=0,
+ force_result_map=False,
+ nested_join_translation=False,
+ **kwargs):
needs_nested_translation = \
- select.use_labels and \
- not nested_join_translation and \
- not self.stack and \
- not self.dialect.supports_right_nested_joins
+ select.use_labels and \
+ not nested_join_translation and \
+ not self.stack and \
+ not self.dialect.supports_right_nested_joins
if needs_nested_translation:
- transformed_select = self._transform_select_for_nested_joins(select)
+ transformed_select = self._transform_select_for_nested_joins(
+ select)
text = self.visit_select(
- transformed_select, asfrom=asfrom, parens=parens,
- iswrapper=iswrapper, fromhints=fromhints,
- compound_index=compound_index,
- force_result_map=force_result_map,
- nested_join_translation=True, **kwargs
- )
+ transformed_select, asfrom=asfrom, parens=parens,
+ iswrapper=iswrapper, fromhints=fromhints,
+ compound_index=compound_index,
+ force_result_map=force_result_map,
+ nested_join_translation=True, **kwargs
+ )
toplevel = not self.stack
entry = self._default_stack_entry if toplevel else self.stack[-1]
-
populate_result_map = force_result_map or (
- compound_index == 0 and (
- toplevel or \
- entry['iswrapper']
- )
- )
+ compound_index == 0 and (
+ toplevel or
+ entry['iswrapper']
+ )
+ )
if needs_nested_translation:
if populate_result_map:
self._transform_result_map_for_nested_joins(
- select, transformed_select)
+ select, transformed_select)
return text
correlate_froms = entry['correlate_froms']
@@ -1462,48 +1466,49 @@ class SQLCompiler(Compiled):
if asfrom:
froms = select._get_display_froms(
- explicit_correlate_froms=
- correlate_froms.difference(asfrom_froms),
- implicit_correlate_froms=())
+ explicit_correlate_froms=correlate_froms.difference(
+ asfrom_froms),
+ implicit_correlate_froms=())
else:
froms = select._get_display_froms(
- explicit_correlate_froms=correlate_froms,
- implicit_correlate_froms=asfrom_froms)
+ explicit_correlate_froms=correlate_froms,
+ implicit_correlate_froms=asfrom_froms)
new_correlate_froms = set(selectable._from_objects(*froms))
all_correlate_froms = new_correlate_froms.union(correlate_froms)
new_entry = {
- 'asfrom_froms': new_correlate_froms,
- 'iswrapper': iswrapper,
- 'correlate_froms': all_correlate_froms
- }
+ 'asfrom_froms': new_correlate_froms,
+ 'iswrapper': iswrapper,
+ 'correlate_froms': all_correlate_froms
+ }
self.stack.append(new_entry)
column_clause_args = kwargs.copy()
column_clause_args.update({
- 'within_label_clause': False,
- 'within_columns_clause': False
- })
+ 'within_label_clause': False,
+ 'within_columns_clause': False
+ })
text = "SELECT " # we're off to a good start !
if select._hints:
byfrom = dict([
- (from_, hinttext % {
- 'name':from_._compiler_dispatch(
- self, ashint=True)
- })
- for (from_, dialect), hinttext in
- select._hints.items()
- if dialect in ('*', self.dialect.name)
- ])
+ (from_, hinttext % {
+ 'name': from_._compiler_dispatch(
+ self, ashint=True)
+ })
+ for (from_, dialect), hinttext in
+ select._hints.items()
+ if dialect in ('*', self.dialect.name)
+ ])
hint_text = self.get_select_hint_text(byfrom)
if hint_text:
text += hint_text + " "
if select._prefixes:
- text += self._generate_prefixes(select, select._prefixes, **kwargs)
+ text += self._generate_prefixes(
+ select, select._prefixes, **kwargs)
text += self.get_select_precolumns(select)
@@ -1511,12 +1516,12 @@ class SQLCompiler(Compiled):
inner_columns = [
c for c in [
self._label_select_column(select,
- column,
- populate_result_map, asfrom,
- column_clause_args,
- name=name)
+ column,
+ populate_result_map, asfrom,
+ column_clause_args,
+ name=name)
for name, column in select._columns_plus_names
- ]
+ ]
if c is not None
]
@@ -1526,14 +1531,14 @@ class SQLCompiler(Compiled):
text += " \nFROM "
if select._hints:
- text += ', '.join([f._compiler_dispatch(self,
- asfrom=True, fromhints=byfrom,
- **kwargs)
- for f in froms])
+ text += ', '.join(
+ [f._compiler_dispatch(self, asfrom=True,
+ fromhints=byfrom, **kwargs)
+ for f in froms])
else:
- text += ', '.join([f._compiler_dispatch(self,
- asfrom=True, **kwargs)
- for f in froms])
+ text += ', '.join(
+ [f._compiler_dispatch(self, asfrom=True, **kwargs)
+ for f in froms])
else:
text += self.default_from()
@@ -1544,7 +1549,7 @@ class SQLCompiler(Compiled):
if select._group_by_clause.clauses:
group_by = select._group_by_clause._compiler_dispatch(
- self, **kwargs)
+ self, **kwargs)
if group_by:
text += " GROUP BY " + group_by
@@ -1559,17 +1564,18 @@ class SQLCompiler(Compiled):
else:
order_by_select = None
- text += self.order_by_clause(select,
- order_by_select=order_by_select, **kwargs)
+ text += self.order_by_clause(
+ select, order_by_select=order_by_select, **kwargs)
- if select._limit_clause is not None or select._offset_clause is not None:
+ if (select._limit_clause is not None or
+ select._offset_clause is not None):
text += self.limit_clause(select)
if select._for_update_arg is not None:
text += self.for_update_clause(select)
if self.ctes and \
- compound_index == 0 and toplevel:
+ compound_index == 0 and toplevel:
text = self._render_cte_clause() + text
self.stack.pop(-1)
@@ -1581,11 +1587,11 @@ class SQLCompiler(Compiled):
def _generate_prefixes(self, stmt, prefixes, **kw):
clause = " ".join(
- prefix._compiler_dispatch(self, **kw)
- for prefix, dialect_name in prefixes
- if dialect_name is None or
- dialect_name == self.dialect.name
- )
+ prefix._compiler_dispatch(self, **kw)
+ for prefix, dialect_name in prefixes
+ if dialect_name is None or
+ dialect_name == self.dialect.name
+ )
if clause:
clause += " "
return clause
@@ -1593,9 +1599,9 @@ class SQLCompiler(Compiled):
def _render_cte_clause(self):
if self.positional:
self.positiontup = sum([
- self.cte_positional[cte]
- for cte in self.ctes], []) + \
- self.positiontup
+ self.cte_positional[cte]
+ for cte in self.ctes], []) + \
+ self.positiontup
cte_text = self.get_cte_preamble(self.ctes_recursive) + " "
cte_text += ", \n".join(
[txt for txt in self.ctes.values()]
@@ -1628,8 +1634,8 @@ class SQLCompiler(Compiled):
def returning_clause(self, stmt, returning_cols):
raise exc.CompileError(
- "RETURNING is not supported by this "
- "dialect's statement compiler.")
+ "RETURNING is not supported by this "
+ "dialect's statement compiler.")
def limit_clause(self, select):
text = ""
@@ -1642,16 +1648,16 @@ class SQLCompiler(Compiled):
return text
def visit_table(self, table, asfrom=False, iscrud=False, ashint=False,
- fromhints=None, **kwargs):
+ fromhints=None, **kwargs):
if asfrom or ashint:
if getattr(table, "schema", None):
ret = self.preparer.quote_schema(table.schema) + \
- "." + self.preparer.quote(table.name)
+ "." + self.preparer.quote(table.name)
else:
ret = self.preparer.quote(table.name)
if fromhints and table in fromhints:
ret = self.format_from_hint_text(ret, table,
- fromhints[table], iscrud)
+ fromhints[table], iscrud)
return ret
else:
return ""
@@ -1673,21 +1679,21 @@ class SQLCompiler(Compiled):
not self.dialect.supports_default_values and \
not self.dialect.supports_empty_insert:
raise exc.CompileError("The '%s' dialect with current database "
- "version settings does not support empty "
- "inserts." %
- self.dialect.name)
+ "version settings does not support empty "
+ "inserts." %
+ self.dialect.name)
if insert_stmt._has_multi_parameters:
if not self.dialect.supports_multivalues_insert:
- raise exc.CompileError("The '%s' dialect with current database "
- "version settings does not support "
- "in-place multirow inserts." %
- self.dialect.name)
+ raise exc.CompileError(
+ "The '%s' dialect with current database "
+ "version settings does not support "
+ "in-place multirow inserts." %
+ self.dialect.name)
colparams_single = colparams[0]
else:
colparams_single = colparams
-
preparer = self.preparer
supports_default_values = self.dialect.supports_default_values
@@ -1695,7 +1701,7 @@ class SQLCompiler(Compiled):
if insert_stmt._prefixes:
text += self._generate_prefixes(insert_stmt,
- insert_stmt._prefixes, **kw)
+ insert_stmt._prefixes, **kw)
text += "INTO "
table_text = preparer.format_table(insert_stmt.table)
@@ -1709,22 +1715,22 @@ class SQLCompiler(Compiled):
])
if insert_stmt.table in dialect_hints:
table_text = self.format_from_hint_text(
- table_text,
- insert_stmt.table,
- dialect_hints[insert_stmt.table],
- True
- )
+ table_text,
+ insert_stmt.table,
+ dialect_hints[insert_stmt.table],
+ True
+ )
text += table_text
if colparams_single or not supports_default_values:
text += " (%s)" % ', '.join([preparer.format_column(c[0])
- for c in colparams_single])
+ for c in colparams_single])
if self.returning or insert_stmt._returning:
self.returning = self.returning or insert_stmt._returning
returning_clause = self.returning_clause(
- insert_stmt, self.returning)
+ insert_stmt, self.returning)
if self.returning_precedes_values:
text += " " + returning_clause
@@ -1735,16 +1741,16 @@ class SQLCompiler(Compiled):
text += " DEFAULT VALUES"
elif insert_stmt._has_multi_parameters:
text += " VALUES %s" % (
- ", ".join(
- "(%s)" % (
- ', '.join(c[1] for c in colparam_set)
- )
- for colparam_set in colparams
- )
- )
+ ", ".join(
+ "(%s)" % (
+ ', '.join(c[1] for c in colparam_set)
+ )
+ for colparam_set in colparams
+ )
+ )
else:
text += " VALUES (%s)" % \
- ', '.join([c[1] for c in colparams])
+ ', '.join([c[1] for c in colparams])
if self.returning and not self.returning_precedes_values:
text += " " + returning_clause
@@ -1756,7 +1762,7 @@ class SQLCompiler(Compiled):
return None
def update_tables_clause(self, update_stmt, from_table,
- extra_froms, **kw):
+ extra_froms, **kw):
"""Provide a hook to override the initial table clause
in an UPDATE statement.
@@ -1764,12 +1770,12 @@ class SQLCompiler(Compiled):
"""
return from_table._compiler_dispatch(self, asfrom=True,
- iscrud=True, **kw)
+ iscrud=True, **kw)
def update_from_clause(self, update_stmt,
- from_table, extra_froms,
- from_hints,
- **kw):
+ from_table, extra_froms,
+ from_hints,
+ **kw):
"""Provide a hook to override the generation of an
UPDATE..FROM clause.
@@ -1777,15 +1783,15 @@ class SQLCompiler(Compiled):
"""
return "FROM " + ', '.join(
- t._compiler_dispatch(self, asfrom=True,
- fromhints=from_hints, **kw)
- for t in extra_froms)
+ t._compiler_dispatch(self, asfrom=True,
+ fromhints=from_hints, **kw)
+ for t in extra_froms)
def visit_update(self, update_stmt, **kw):
self.stack.append(
- {'correlate_froms': set([update_stmt.table]),
- "iswrapper": False,
- "asfrom_froms": set([update_stmt.table])})
+ {'correlate_froms': set([update_stmt.table]),
+ "iswrapper": False,
+ "asfrom_froms": set([update_stmt.table])})
self.isupdate = True
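When the SET or WHERE clause references tables other than the target, those become ``extra_froms`` and flow into ``update_from_clause``. A sketch with two hypothetical tables, compiled for Postgresql::

    from sqlalchemy import Table, Column, Integer, String, MetaData
    from sqlalchemy.dialects import postgresql

    m = MetaData()
    users = Table('users', m, Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    addresses = Table('addresses', m, Column('user_id', Integer),
                      Column('email', String(50)))

    stmt = users.update().\
        values(name=addresses.c.email).\
        where(users.c.id == addresses.c.user_id)
    # UPDATE users SET name=addresses.email
    # FROM addresses WHERE users.id = addresses.user_id
    print(stmt.compile(dialect=postgresql.dialect()))
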
@@ -1795,7 +1801,7 @@ class SQLCompiler(Compiled):
if update_stmt._prefixes:
text += self._generate_prefixes(update_stmt,
- update_stmt._prefixes, **kw)
+ update_stmt._prefixes, **kw)
table_text = self.update_tables_clause(update_stmt, update_stmt.table,
extra_froms, **kw)
@@ -1811,11 +1817,11 @@ class SQLCompiler(Compiled):
])
if update_stmt.table in dialect_hints:
table_text = self.format_from_hint_text(
- table_text,
- update_stmt.table,
- dialect_hints[update_stmt.table],
- True
- )
+ table_text,
+ update_stmt.table,
+ dialect_hints[update_stmt.table],
+ True
+ )
else:
dialect_hints = None
@@ -1823,26 +1829,26 @@ class SQLCompiler(Compiled):
text += ' SET '
include_table = extra_froms and \
- self.render_table_with_column_in_update_from
+ self.render_table_with_column_in_update_from
text += ', '.join(
- c[0]._compiler_dispatch(self,
- include_table=include_table) +
- '=' + c[1] for c in colparams
- )
+ c[0]._compiler_dispatch(self,
+ include_table=include_table) +
+ '=' + c[1] for c in colparams
+ )
if self.returning or update_stmt._returning:
if not self.returning:
self.returning = update_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, self.returning)
+ update_stmt, self.returning)
if extra_froms:
extra_from_text = self.update_from_clause(
- update_stmt,
- update_stmt.table,
- extra_froms,
- dialect_hints, **kw)
+ update_stmt,
+ update_stmt.table,
+ extra_froms,
+ dialect_hints, **kw)
if extra_from_text:
text += " " + extra_from_text
@@ -1857,7 +1863,7 @@ class SQLCompiler(Compiled):
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, self.returning)
+ update_stmt, self.returning)
self.stack.pop(-1)
@@ -1867,7 +1873,7 @@ class SQLCompiler(Compiled):
if name is None:
name = col.key
bindparam = elements.BindParameter(name, value,
- type_=col.type, required=required)
+ type_=col.type, required=required)
bindparam._is_crud = True
return bindparam._compiler_dispatch(self)
@@ -1881,17 +1887,20 @@ class SQLCompiler(Compiled):
# allowing the most compatibility with a non-multi-table
# statement.
_et = set(self.statement._extra_froms)
+
def _column_as_key(key):
str_key = elements._column_as_key(key)
if hasattr(key, 'table') and key.table in _et:
return (key.table.name, str_key)
else:
return str_key
+
def _getattr_col_key(col):
if col.table in _et:
return (col.table.name, col.key)
else:
return col.key
+
def _col_bind_name(col):
if col.table in _et:
return "%s_%s" % (col.table.name, col.key)
@@ -1923,10 +1932,10 @@ class SQLCompiler(Compiled):
# compiled params - return binds for all columns
if self.column_keys is None and stmt.parameters is None:
return [
- (c, self._create_crud_bind_param(c,
- None, required=True))
- for c in stmt.table.columns
- ]
+ (c, self._create_crud_bind_param(c,
+ None, required=True))
+ for c in stmt.table.columns
+ ]
if stmt._has_multi_parameters:
stmt_parameters = stmt.parameters[0]
@@ -1937,7 +1946,7 @@ class SQLCompiler(Compiled):
# but in the case of mysql multi-table update, the rules for
# .key must conditionally take tablename into account
_column_as_key, _getattr_col_key, _col_bind_name = \
- self._key_getters_for_crud_column
+ self._key_getters_for_crud_column
# if we have statement parameters - set defaults in the
# compiled params
@@ -1963,26 +1972,27 @@ class SQLCompiler(Compiled):
# coercing right side to bound param
if elements._is_literal(v):
v = self.process(
- elements.BindParameter(None, v, type_=k.type),
- **kw)
+ elements.BindParameter(None, v, type_=k.type),
+ **kw)
else:
v = self.process(v.self_group(), **kw)
values.append((k, v))
need_pks = self.isinsert and \
- not self.inline and \
- not stmt._returning
+ not self.inline and \
+ not stmt._returning
implicit_returning = need_pks and \
- self.dialect.implicit_returning and \
- stmt.table.implicit_returning
+ self.dialect.implicit_returning and \
+ stmt.table.implicit_returning
if self.isinsert:
- implicit_return_defaults = implicit_returning and stmt._return_defaults
+ implicit_return_defaults = (implicit_returning and
+ stmt._return_defaults)
elif self.isupdate:
- implicit_return_defaults = self.dialect.implicit_returning and \
- stmt.table.implicit_returning and \
- stmt._return_defaults
+ implicit_return_defaults = (self.dialect.implicit_returning and
+ stmt.table.implicit_returning and
+ stmt._return_defaults)
else:
implicit_return_defaults = False
@@ -2025,20 +2035,21 @@ class SQLCompiler(Compiled):
for c in t.c:
if c in normalized_params:
continue
- elif c.onupdate is not None and not c.onupdate.is_sequence:
+ elif (c.onupdate is not None and not
+ c.onupdate.is_sequence):
if c.onupdate.is_clause_element:
values.append(
(c, self.process(
- c.onupdate.arg.self_group(),
- **kw)
- )
+ c.onupdate.arg.self_group(),
+ **kw)
+ )
)
self.postfetch.append(c)
else:
values.append(
(c, self._create_crud_bind_param(
- c, None, name=_col_bind_name(c)
- )
+ c, None, name=_col_bind_name(c)
+ )
)
)
self.prefetch.append(c)
@@ -2049,7 +2060,7 @@ class SQLCompiler(Compiled):
# for an insert from select, we can only use names that
# are given, so only select for those names.
cols = (stmt.table.c[_column_as_key(name)]
- for name in stmt.select_names)
+ for name in stmt.select_names)
else:
# iterate through all table columns to maintain
# ordering, even for those cols that aren't included
@@ -2061,14 +2072,14 @@ class SQLCompiler(Compiled):
value = parameters.pop(col_key)
if elements._is_literal(value):
value = self._create_crud_bind_param(
- c, value, required=value is REQUIRED,
- name=_col_bind_name(c)
- if not stmt._has_multi_parameters
- else "%s_0" % _col_bind_name(c)
- )
+ c, value, required=value is REQUIRED,
+ name=_col_bind_name(c)
+ if not stmt._has_multi_parameters
+ else "%s_0" % _col_bind_name(c)
+ )
else:
if isinstance(value, elements.BindParameter) and \
- value.type._isnull:
+ value.type._isnull:
value = value._clone()
value.type = c.type
@@ -2076,7 +2087,7 @@ class SQLCompiler(Compiled):
self.returning.append(c)
value = self.process(value.self_group(), **kw)
elif implicit_return_defaults and \
- c in implicit_return_defaults:
+ c in implicit_return_defaults:
self.returning.append(c)
value = self.process(value.self_group(), **kw)
else:
@@ -2086,26 +2097,26 @@ class SQLCompiler(Compiled):
elif self.isinsert:
if c.primary_key and \
- need_pks and \
- (
- implicit_returning or
- not postfetch_lastrowid or
- c is not stmt.table._autoincrement_column
- ):
+ need_pks and \
+ (
+ implicit_returning or
+ not postfetch_lastrowid or
+ c is not stmt.table._autoincrement_column
+ ):
if implicit_returning:
if c.default is not None:
if c.default.is_sequence:
if self.dialect.supports_sequences and \
- (not c.default.optional or \
- not self.dialect.sequences_optional):
+ (not c.default.optional or
+ not self.dialect.sequences_optional):
proc = self.process(c.default, **kw)
values.append((c, proc))
self.returning.append(c)
elif c.default.is_clause_element:
values.append(
- (c,
- self.process(c.default.arg.self_group(), **kw))
+ (c, self.process(
+ c.default.arg.self_group(), **kw))
)
self.returning.append(c)
else:
@@ -2117,16 +2128,14 @@ class SQLCompiler(Compiled):
self.returning.append(c)
else:
if (
- c.default is not None and
- (
- not c.default.is_sequence or
- self.dialect.supports_sequences
- )
- ) or \
- c is stmt.table._autoincrement_column and (
- self.dialect.supports_sequences or
- self.dialect.preexecute_autoincrement_sequences
- ):
+ (c.default is not None and
+ (not c.default.is_sequence or
+ self.dialect.supports_sequences)) or
+ c is stmt.table._autoincrement_column and
+ (self.dialect.supports_sequences or
+ self.dialect.
+ preexecute_autoincrement_sequences)
+ ):
values.append(
(c, self._create_crud_bind_param(c, None))
@@ -2137,22 +2146,23 @@ class SQLCompiler(Compiled):
elif c.default is not None:
if c.default.is_sequence:
if self.dialect.supports_sequences and \
- (not c.default.optional or \
- not self.dialect.sequences_optional):
+ (not c.default.optional or
+ not self.dialect.sequences_optional):
proc = self.process(c.default, **kw)
values.append((c, proc))
if implicit_return_defaults and \
- c in implicit_return_defaults:
+ c in implicit_return_defaults:
self.returning.append(c)
elif not c.primary_key:
self.postfetch.append(c)
elif c.default.is_clause_element:
values.append(
- (c, self.process(c.default.arg.self_group(), **kw))
+ (c, self.process(
+ c.default.arg.self_group(), **kw))
)
if implicit_return_defaults and \
- c in implicit_return_defaults:
+ c in implicit_return_defaults:
self.returning.append(c)
elif not c.primary_key:
# don't add primary key column to postfetch
@@ -2164,22 +2174,23 @@ class SQLCompiler(Compiled):
self.prefetch.append(c)
elif c.server_default is not None:
if implicit_return_defaults and \
- c in implicit_return_defaults:
+ c in implicit_return_defaults:
self.returning.append(c)
elif not c.primary_key:
self.postfetch.append(c)
elif implicit_return_defaults and \
c in implicit_return_defaults:
- self.returning.append(c)
+ self.returning.append(c)
elif self.isupdate:
if c.onupdate is not None and not c.onupdate.is_sequence:
if c.onupdate.is_clause_element:
values.append(
- (c, self.process(c.onupdate.arg.self_group(), **kw))
+ (c, self.process(
+ c.onupdate.arg.self_group(), **kw))
)
if implicit_return_defaults and \
- c in implicit_return_defaults:
+ c in implicit_return_defaults:
self.returning.append(c)
else:
self.postfetch.append(c)
@@ -2190,13 +2201,13 @@ class SQLCompiler(Compiled):
self.prefetch.append(c)
elif c.server_onupdate is not None:
if implicit_return_defaults and \
- c in implicit_return_defaults:
+ c in implicit_return_defaults:
self.returning.append(c)
else:
self.postfetch.append(c)
elif implicit_return_defaults and \
c in implicit_return_defaults:
- self.returning.append(c)
+ self.returning.append(c)
if parameters and stmt_parameters:
check = set(parameters).intersection(
@@ -2216,13 +2227,13 @@ class SQLCompiler(Compiled):
[
(
c,
- (self._create_crud_bind_param(
- c, row[c.key],
- name="%s_%d" % (c.key, i + 1)
- ) if elements._is_literal(row[c.key])
- else self.process(
- row[c.key].self_group(), **kw))
- if c.key in row else param
+ (self._create_crud_bind_param(
+ c, row[c.key],
+ name="%s_%d" % (c.key, i + 1)
+ ) if elements._is_literal(row[c.key])
+ else self.process(
+ row[c.key].self_group(), **kw))
+ if c.key in row else param
)
for (c, param) in values_0
]
@@ -2233,19 +2244,19 @@ class SQLCompiler(Compiled):
def visit_delete(self, delete_stmt, **kw):
self.stack.append({'correlate_froms': set([delete_stmt.table]),
- "iswrapper": False,
- "asfrom_froms": set([delete_stmt.table])})
+ "iswrapper": False,
+ "asfrom_froms": set([delete_stmt.table])})
self.isdelete = True
text = "DELETE "
if delete_stmt._prefixes:
text += self._generate_prefixes(delete_stmt,
- delete_stmt._prefixes, **kw)
+ delete_stmt._prefixes, **kw)
text += "FROM "
- table_text = delete_stmt.table._compiler_dispatch(self,
- asfrom=True, iscrud=True)
+ table_text = delete_stmt.table._compiler_dispatch(
+ self, asfrom=True, iscrud=True)
if delete_stmt._hints:
dialect_hints = dict([
@@ -2256,11 +2267,11 @@ class SQLCompiler(Compiled):
])
if delete_stmt.table in dialect_hints:
table_text = self.format_from_hint_text(
- table_text,
- delete_stmt.table,
- dialect_hints[delete_stmt.table],
- True
- )
+ table_text,
+ delete_stmt.table,
+ dialect_hints[delete_stmt.table],
+ True
+ )
else:
dialect_hints = None
@@ -2271,7 +2282,7 @@ class SQLCompiler(Compiled):
self.returning = delete_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
- delete_stmt, delete_stmt._returning)
+ delete_stmt, delete_stmt._returning)
if delete_stmt._whereclause is not None:
t = delete_stmt._whereclause._compiler_dispatch(self)
@@ -2280,7 +2291,7 @@ class SQLCompiler(Compiled):
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
- delete_stmt, delete_stmt._returning)
+ delete_stmt, delete_stmt._returning)
self.stack.pop(-1)
@@ -2291,11 +2302,11 @@ class SQLCompiler(Compiled):
def visit_rollback_to_savepoint(self, savepoint_stmt):
return "ROLLBACK TO SAVEPOINT %s" % \
- self.preparer.format_savepoint(savepoint_stmt)
+ self.preparer.format_savepoint(savepoint_stmt)
def visit_release_savepoint(self, savepoint_stmt):
return "RELEASE SAVEPOINT %s" % \
- self.preparer.format_savepoint(savepoint_stmt)
+ self.preparer.format_savepoint(savepoint_stmt)
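These visit methods back nested transactions; a sketch of the round trip, assuming an existing ``engine`` and ``users`` table (names hypothetical)::

    conn = engine.connect()
    trans = conn.begin()
    nested = conn.begin_nested()   # emits e.g. SAVEPOINT sa_savepoint_1
    try:
        conn.execute(users.insert().values(name='x'))
        nested.commit()            # emits RELEASE SAVEPOINT sa_savepoint_1
    except Exception:
        nested.rollback()          # emits ROLLBACK TO SAVEPOINT sa_savepoint_1
    trans.commit()
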
class DDLCompiler(Compiled):
@@ -2349,11 +2360,11 @@ class DDLCompiler(Compiled):
table = create.element
preparer = self.dialect.identifier_preparer
- text = "\n" + " ".join(['CREATE'] + \
- table._prefixes + \
- ['TABLE',
- preparer.format_table(table),
- "("])
+ text = "\n" + " ".join(['CREATE'] +
+ table._prefixes +
+ ['TABLE',
+ preparer.format_table(table),
+ "("])
separator = "\n"
# if only one primary key, specify it along with the column
@@ -2362,8 +2373,8 @@ class DDLCompiler(Compiled):
column = create_column.element
try:
processed = self.process(create_column,
- first_pk=column.primary_key
- and not first_pk)
+ first_pk=column.primary_key
+ and not first_pk)
if processed is not None:
text += separator
separator = ", \n"
@@ -2372,11 +2383,10 @@ class DDLCompiler(Compiled):
first_pk = True
except exc.CompileError as ce:
util.raise_from_cause(
- exc.CompileError(util.u("(in table '%s', column '%s'): %s") % (
- table.description,
- column.name,
- ce.args[0]
- )))
+ exc.CompileError(
+ util.u("(in table '%s', column '%s'): %s") %
+ (table.description, column.name, ce.args[0])
+ ))
const = self.create_table_constraints(table)
if const:
@@ -2392,11 +2402,11 @@ class DDLCompiler(Compiled):
return None
text = self.get_column_specification(
- column,
- first_pk=first_pk
- )
- const = " ".join(self.process(constraint) \
- for constraint in column.constraints)
+ column,
+ first_pk=first_pk
+ )
+ const = " ".join(self.process(constraint)
+ for constraint in column.constraints)
if const:
text += " " + const
@@ -2411,19 +2421,19 @@ class DDLCompiler(Compiled):
constraints.append(table.primary_key)
constraints.extend([c for c in table._sorted_constraints
- if c is not table.primary_key])
+ if c is not table.primary_key])
return ", \n\t".join(p for p in
- (self.process(constraint)
- for constraint in constraints
- if (
- constraint._create_rule is None or
- constraint._create_rule(self))
- and (
- not self.dialect.supports_alter or
- not getattr(constraint, 'use_alter', False)
- )) if p is not None
- )
+ (self.process(constraint)
+ for constraint in constraints
+ if (
+ constraint._create_rule is None or
+ constraint._create_rule(self))
+ and (
+ not self.dialect.supports_alter or
+ not getattr(constraint, 'use_alter', False)
+ )) if p is not None
+ )
def visit_drop_table(self, drop):
return "\nDROP TABLE " + self.preparer.format_table(drop.element)
@@ -2431,15 +2441,13 @@ class DDLCompiler(Compiled):
def visit_drop_view(self, drop):
return "\nDROP VIEW " + self.preparer.format_table(drop.element)
-
def _verify_index_table(self, index):
if index.table is None:
raise exc.CompileError("Index '%s' is not associated "
- "with any table." % index.name)
-
+ "with any table." % index.name)
def visit_create_index(self, create, include_schema=False,
- include_table_schema=True):
+ include_table_schema=True):
index = create.element
self._verify_index_table(index)
preparer = self.preparer
@@ -2447,22 +2455,22 @@ class DDLCompiler(Compiled):
if index.unique:
text += "UNIQUE "
text += "INDEX %s ON %s (%s)" \
- % (
- self._prepared_index_name(index,
- include_schema=include_schema),
- preparer.format_table(index.table,
- use_schema=include_table_schema),
- ', '.join(
- self.sql_compiler.process(expr,
- include_table=False, literal_binds=True) for
- expr in index.expressions)
- )
+ % (
+ self._prepared_index_name(index,
+ include_schema=include_schema),
+ preparer.format_table(index.table,
+ use_schema=include_table_schema),
+ ', '.join(
+ self.sql_compiler.process(
+ expr, include_table=False, literal_binds=True) for
+ expr in index.expressions)
+ )
return text
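A quick check of the assembled string, using the generic dialect and a hypothetical table::

    from sqlalchemy import Table, Column, String, MetaData, Index
    from sqlalchemy.schema import CreateIndex

    users = Table('users', MetaData(), Column('name', String(50)))
    idx = Index('ix_users_name', users.c.name, unique=True)
    # CREATE UNIQUE INDEX ix_users_name ON users (name)
    print(CreateIndex(idx))
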
def visit_drop_index(self, drop):
index = drop.element
- return "\nDROP INDEX " + self._prepared_index_name(index,
- include_schema=True)
+ return "\nDROP INDEX " + self._prepared_index_name(
+ index, include_schema=True)
def _prepared_index_name(self, index, include_schema=False):
if include_schema and index.table is not None and index.table.schema:
@@ -2474,10 +2482,10 @@ class DDLCompiler(Compiled):
ident = index.name
if isinstance(ident, elements._truncated_label):
max_ = self.dialect.max_index_name_length or \
- self.dialect.max_identifier_length
+ self.dialect.max_identifier_length
if len(ident) > max_:
ident = ident[0:max_ - 8] + \
- "_" + util.md5_hex(ident)[-4:]
+ "_" + util.md5_hex(ident)[-4:]
else:
self.dialect.validate_identifier(ident)
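Only auto-generated ``_truncated_label`` names are shortened; explicit names are validated as-is. The rule keeps the leading ``max_ - 8`` characters and appends an md5 suffix; a standalone sketch of the same rule::

    from sqlalchemy import util

    def truncated(ident, max_):
        # keep max_ - 8 leading chars, then "_" plus the last
        # four hex digits of the md5 of the full name
        if len(ident) > max_:
            return ident[0:max_ - 8] + "_" + util.md5_hex(ident)[-4:]
        return ident

    print(truncated('a_very_long_autogenerated_index_name', 30))
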
@@ -2495,7 +2503,7 @@ class DDLCompiler(Compiled):
def visit_create_sequence(self, create):
text = "CREATE SEQUENCE %s" % \
- self.preparer.format_sequence(create.element)
+ self.preparer.format_sequence(create.element)
if create.element.increment is not None:
text += " INCREMENT BY %d" % create.element.increment
if create.element.start is not None:
@@ -2504,7 +2512,7 @@ class DDLCompiler(Compiled):
def visit_drop_sequence(self, drop):
return "DROP SEQUENCE %s" % \
- self.preparer.format_sequence(drop.element)
+ self.preparer.format_sequence(drop.element)
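The sequence DDL strings can be previewed without a bind::

    from sqlalchemy import Sequence
    from sqlalchemy.schema import CreateSequence, DropSequence

    seq = Sequence('user_id_seq', start=1, increment=1)
    # CREATE SEQUENCE user_id_seq INCREMENT BY 1 START WITH 1
    print(CreateSequence(seq))
    # DROP SEQUENCE user_id_seq
    print(DropSequence(seq))
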
def visit_drop_constraint(self, drop):
return "ALTER TABLE %s DROP CONSTRAINT %s%s" % (
@@ -2515,7 +2523,7 @@ class DDLCompiler(Compiled):
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column) + " " + \
- self.dialect.type_compiler.process(column.type)
+ self.dialect.type_compiler.process(column.type)
default = self.get_column_default_string(column)
if default is not None:
colspec += " DEFAULT " + default
@@ -2543,8 +2551,8 @@ class DDLCompiler(Compiled):
if formatted_name is not None:
text += "CONSTRAINT %s " % formatted_name
text += "CHECK (%s)" % self.sql_compiler.process(constraint.sqltext,
- include_table=False,
- literal_binds=True)
+ include_table=False,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
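For a table-level constraint the same template appears inside CREATE TABLE; a sketch with a hypothetical table::

    from sqlalchemy import Table, Column, Integer, MetaData, CheckConstraint
    from sqlalchemy.schema import CreateTable

    t = Table('t', MetaData(),
              Column('n', Integer),
              CheckConstraint('n > 0', name='ck_positive'))
    # ..., CONSTRAINT ck_positive CHECK (n > 0), ...
    print(CreateTable(t))
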
@@ -2568,7 +2576,7 @@ class DDLCompiler(Compiled):
text += "CONSTRAINT %s " % formatted_name
text += "PRIMARY KEY "
text += "(%s)" % ', '.join(self.preparer.quote(c.name)
- for c in constraint)
+ for c in constraint)
text += self.define_constraint_deferrability(constraint)
return text
@@ -2607,7 +2615,7 @@ class DDLCompiler(Compiled):
text += "CONSTRAINT %s " % formatted_name
text += "UNIQUE (%s)" % (
', '.join(self.preparer.quote(c.name)
- for c in constraint))
+ for c in constraint))
text += self.define_constraint_deferrability(constraint)
return text
@@ -2650,22 +2658,22 @@ class GenericTypeCompiler(TypeCompiler):
return "NUMERIC"
elif type_.scale is None:
return "NUMERIC(%(precision)s)" % \
- {'precision': type_.precision}
+ {'precision': type_.precision}
else:
return "NUMERIC(%(precision)s, %(scale)s)" % \
- {'precision': type_.precision,
- 'scale': type_.scale}
+ {'precision': type_.precision,
+ 'scale': type_.scale}
def visit_DECIMAL(self, type_):
if type_.precision is None:
return "DECIMAL"
elif type_.scale is None:
return "DECIMAL(%(precision)s)" % \
- {'precision': type_.precision}
+ {'precision': type_.precision}
else:
return "DECIMAL(%(precision)s, %(scale)s)" % \
- {'precision': type_.precision,
- 'scale': type_.scale}
+ {'precision': type_.precision,
+ 'scale': type_.scale}
def visit_INTEGER(self, type_):
return "INTEGER"
@@ -2780,8 +2788,8 @@ class GenericTypeCompiler(TypeCompiler):
def visit_null(self, type_):
raise exc.CompileError("Can't generate DDL for %r; "
- "did you forget to specify a "
- "type on this Column?" % type_)
+ "did you forget to specify a "
+ "type on this Column?" % type_)
def visit_type_decorator(self, type_):
return self.process(type_.type_engine(self.dialect))
@@ -2791,6 +2799,7 @@ class GenericTypeCompiler(TypeCompiler):
class IdentifierPreparer(object):
+
"""Handle quoting and case-folding of identifiers based on options."""
reserved_words = RESERVED_WORDS
@@ -2800,7 +2809,7 @@ class IdentifierPreparer(object):
illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS
def __init__(self, dialect, initial_quote='"',
- final_quote=None, escape_quote='"', omit_schema=False):
+ final_quote=None, escape_quote='"', omit_schema=False):
"""Construct a new ``IdentifierPreparer`` object.
initial_quote
@@ -2849,8 +2858,8 @@ class IdentifierPreparer(object):
"""
return self.initial_quote + \
- self._escape_identifier(value) + \
- self.final_quote
+ self._escape_identifier(value) + \
+ self.final_quote
def _requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
@@ -2895,7 +2904,8 @@ class IdentifierPreparer(object):
def format_sequence(self, sequence, use_schema=True):
name = self.quote(sequence.name)
- if not self.omit_schema and use_schema and sequence.schema is not None:
+ if (not self.omit_schema and use_schema and
+ sequence.schema is not None):
name = self.quote_schema(sequence.schema) + "." + name
return name
@@ -2912,7 +2922,7 @@ class IdentifierPreparer(object):
def format_constraint(self, naming, constraint):
if isinstance(constraint.name, elements._defer_name):
name = naming._constraint_name_for_table(
- constraint, constraint.table)
+ constraint, constraint.table)
if name:
return self.quote(name)
elif isinstance(constraint.name, elements._defer_none_name):
@@ -2926,7 +2936,7 @@ class IdentifierPreparer(object):
name = table.name
result = self.quote(name)
if not self.omit_schema and use_schema \
- and getattr(table, "schema", None):
+ and getattr(table, "schema", None):
result = self.quote_schema(table.schema) + "." + result
return result
@@ -2936,7 +2946,7 @@ class IdentifierPreparer(object):
return self.quote(name, quote)
def format_column(self, column, use_table=False,
- name=None, table_name=None):
+ name=None, table_name=None):
"""Prepare a quoted column name."""
if name is None:
@@ -2944,8 +2954,8 @@ class IdentifierPreparer(object):
if not getattr(column, 'is_literal', False):
if use_table:
return self.format_table(
- column.table, use_schema=False,
- name=table_name) + "." + self.quote(name)
+ column.table, use_schema=False,
+ name=table_name) + "." + self.quote(name)
else:
return self.quote(name)
else:
@@ -2953,8 +2963,9 @@ class IdentifierPreparer(object):
# which shouldn't get quoted
if use_table:
- return self.format_table(column.table,
- use_schema=False, name=table_name) + '.' + name
+ return self.format_table(
+ column.table, use_schema=False,
+ name=table_name) + '.' + name
else:
return name
@@ -2975,9 +2986,9 @@ class IdentifierPreparer(object):
@util.memoized_property
def _r_identifiers(self):
initial, final, escaped_final = \
- [re.escape(s) for s in
- (self.initial_quote, self.final_quote,
- self._escape_identifier(self.final_quote))]
+ [re.escape(s) for s in
+ (self.initial_quote, self.final_quote,
+ self._escape_identifier(self.final_quote))]
r = re.compile(
r'(?:'
r'(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s'
diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py
index d8627a08d..1f2c448ea 100644
--- a/lib/sqlalchemy/sql/ddl.py
+++ b/lib/sqlalchemy/sql/ddl.py
@@ -18,6 +18,7 @@ from ..util import topological
from .. import event
from .. import exc
+
class _DDLCompiles(ClauseElement):
def _compiler(self, dialect, **kw):
"""Return a compiler appropriate for this ClauseElement, given a
@@ -57,7 +58,7 @@ class DDLElement(Executable, _DDLCompiles):
"""
_execution_options = Executable.\
- _execution_options.union({'autocommit': True})
+ _execution_options.union({'autocommit': True})
target = None
on = None
@@ -96,10 +97,10 @@ class DDLElement(Executable, _DDLCompiles):
return bind.execute(self.against(target))
else:
bind.engine.logger.info(
- "DDL execution skipped, criteria not met.")
+ "DDL execution skipped, criteria not met.")
@util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
- ":meth:`.DDLElement.execute_if`.")
+ ":meth:`.DDLElement.execute_if`.")
def execute_at(self, event_name, target):
"""Link execution of this DDL to the DDL lifecycle of a SchemaItem.
@@ -130,7 +131,7 @@ class DDLElement(Executable, _DDLCompiles):
def call_event(target, connection, **kw):
if self._should_execute_deprecated(event_name,
- target, connection, **kw):
+ target, connection, **kw):
return connection.execute(self.against(target))
event.listen(target, "" + event_name.replace('-', '_'), call_event)
@@ -212,7 +213,7 @@ class DDLElement(Executable, _DDLCompiles):
def _should_execute(self, target, bind, **kw):
if self.on is not None and \
- not self._should_execute_deprecated(None, target, bind, **kw):
+ not self._should_execute_deprecated(None, target, bind, **kw):
return False
if isinstance(self.dialect, util.string_types):
@@ -221,8 +222,9 @@ class DDLElement(Executable, _DDLCompiles):
elif isinstance(self.dialect, (tuple, list, set)):
if bind.engine.name not in self.dialect:
return False
- if self.callable_ is not None and \
- not self.callable_(self, target, bind, state=self.state, **kw):
+ if (self.callable_ is not None and
+ not self.callable_(self, target, bind,
+ state=self.state, **kw)):
return False
return True
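These dialect and callable checks are what :meth:`.DDLElement.execute_if` configures; a sketch, with hypothetical table and DDL text::

    from sqlalchemy import DDL, event

    ddl = DDL("ALTER TABLE users ADD COLUMN note VARCHAR(100)")
    # fires only when the connection's dialect is postgresql
    event.listen(users, 'after_create', ddl.execute_if(dialect='postgresql'))
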
@@ -246,7 +248,7 @@ class DDLElement(Executable, _DDLCompiles):
def _check_ddl_on(self, on):
if (on is not None and
(not isinstance(on, util.string_types + (tuple, list, set)) and
- not util.callable(on))):
+ not util.callable(on))):
raise exc.ArgumentError(
"Expected the name of a database dialect, a tuple "
"of names, or a callable for "
@@ -393,7 +395,6 @@ class DDL(DDLElement):
if getattr(self, key)]))
-
class _CreateDropBase(DDLElement):
"""Base class for DDL constructs that represent CREATE and DROP or
equivalents.
@@ -474,8 +475,8 @@ class CreateTable(_CreateDropBase):
"""
super(CreateTable, self).__init__(element, on=on, bind=bind)
self.columns = [CreateColumn(column)
- for column in element.columns
- ]
+ for column in element.columns
+ ]
class _DropView(_CreateDropBase):
@@ -561,9 +562,10 @@ class CreateColumn(_DDLCompiles):
as an implicitly-present "system" column.
For example, suppose we wish to produce a :class:`.Table` which skips
- rendering of the Postgresql ``xmin`` column against the Postgresql backend,
- but on other backends does render it, in anticipation of a triggered rule.
- A conditional compilation rule could skip this name only on Postgresql::
+ rendering of the Postgresql ``xmin`` column against the Postgresql
+ backend, but on other backends does render it, in anticipation of a
+ triggered rule. A conditional compilation rule could skip this name only
+ on Postgresql::
from sqlalchemy.schema import CreateColumn
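A sketch of such a rule, using the ``@compiles`` extension::

    from sqlalchemy.schema import CreateColumn
    from sqlalchemy.ext.compiler import compiles

    @compiles(CreateColumn, 'postgresql')
    def skip_xmin(element, compiler, **kw):
        # returning None omits this column from CREATE TABLE on Postgresql
        if element.element.name == 'xmin':
            return None
        return compiler.visit_create_column(element, **kw)
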
@@ -585,7 +587,8 @@ class CreateColumn(_DDLCompiles):
will be omitted, but only against the Postgresql backend.
.. versionadded:: 0.8.3 The :class:`.CreateColumn` construct supports
- skipping of columns by returning ``None`` from a custom compilation rule.
+ skipping of columns by returning ``None`` from a custom compilation
+ rule.
.. versionadded:: 0.8 The :class:`.CreateColumn` construct was added
to support custom column creation styles.
@@ -635,7 +638,7 @@ class AddConstraint(_CreateDropBase):
def __init__(self, element, *args, **kw):
super(AddConstraint, self).__init__(element, *args, **kw)
element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
+ self._create_rule_disable)
class DropConstraint(_CreateDropBase):
@@ -647,7 +650,7 @@ class DropConstraint(_CreateDropBase):
self.cascade = cascade
super(DropConstraint, self).__init__(element, **kw)
element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
+ self._create_rule_disable)
class DDLBase(SchemaVisitor):
@@ -671,21 +674,21 @@ class SchemaGenerator(DDLBase):
if table.schema:
self.dialect.validate_identifier(table.schema)
return not self.checkfirst or \
- not self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
+ not self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
def _can_create_sequence(self, sequence):
return self.dialect.supports_sequences and \
(
(not self.dialect.sequences_optional or
not sequence.optional) and
- (
- not self.checkfirst or
- not self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema)
- )
+ (
+ not self.checkfirst or
+ not self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema)
+ )
)
def visit_metadata(self, metadata):
@@ -694,14 +697,14 @@ class SchemaGenerator(DDLBase):
else:
tables = list(metadata.tables.values())
collection = [t for t in sort_tables(tables)
- if self._can_create_table(t)]
+ if self._can_create_table(t)]
seq_coll = [s for s in metadata._sequences.values()
- if s.column is None and self._can_create_sequence(s)]
+ if s.column is None and self._can_create_sequence(s)]
metadata.dispatch.before_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
+ tables=collection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
for seq in seq_coll:
self.traverse_single(seq, create_ok=True)
@@ -710,17 +713,17 @@ class SchemaGenerator(DDLBase):
self.traverse_single(table, create_ok=True)
metadata.dispatch.after_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
+ tables=collection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
def visit_table(self, table, create_ok=False):
if not create_ok and not self._can_create_table(table):
return
table.dispatch.before_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
for column in table.columns:
if column.default is not None:
@@ -733,8 +736,8 @@ class SchemaGenerator(DDLBase):
self.traverse_single(index)
table.dispatch.after_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
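The ``before_create``/``after_create`` dispatch points shown here are the hook sites for :func:`.event.listen`; for example, attaching a DDL string to fire after a (hypothetical) table is created::

    from sqlalchemy import DDL, event

    event.listen(
        users, 'after_create',
        DDL("CREATE INDEX ix_users_name ON users (name)")
    )
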
def visit_sequence(self, sequence, create_ok=False):
if not create_ok and not self._can_create_sequence(sequence):
@@ -792,19 +795,19 @@ class SchemaDropper(DDLBase):
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
+ return not self.checkfirst or self.dialect.has_table(
+ self.connection, table.name, schema=table.schema)
def _can_drop_sequence(self, sequence):
return self.dialect.supports_sequences and \
((not self.dialect.sequences_optional or
- not sequence.optional) and
+ not sequence.optional) and
(not self.checkfirst or
- self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema))
- )
+ self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema))
+ )
def visit_index(self, index):
self.connection.execute(DropIndex(index))
@@ -814,8 +817,8 @@ class SchemaDropper(DDLBase):
return
table.dispatch.before_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
for column in table.columns:
if column.default is not None:
@@ -824,14 +827,15 @@ class SchemaDropper(DDLBase):
self.connection.execute(DropTable(table))
table.dispatch.after_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
def visit_sequence(self, sequence, drop_ok=False):
if not drop_ok and not self._can_drop_sequence(sequence):
return
self.connection.execute(DropSequence(sequence))
+
def sort_tables(tables, skip_fn=None, extra_dependencies=None):
"""sort a collection of Table objects in order of
their foreign-key dependency."""
@@ -854,12 +858,11 @@ def sort_tables(tables, skip_fn=None, extra_dependencies=None):
for table in tables:
traverse(table,
- {'schema_visitor': True},
- {'foreign_key': visit_foreign_key})
+ {'schema_visitor': True},
+ {'foreign_key': visit_foreign_key})
tuples.extend(
[parent, table] for parent in table._extra_dependencies
)
return list(topological.sort(tuples, tables))
-
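A minimal sketch of the resulting ordering::

    from sqlalchemy import Table, Column, Integer, ForeignKey, MetaData
    from sqlalchemy.sql.ddl import sort_tables

    m = MetaData()
    parent = Table('parent', m, Column('id', Integer, primary_key=True))
    child = Table('child', m,
                  Column('id', Integer, primary_key=True),
                  Column('parent_id', Integer, ForeignKey('parent.id')))

    # the foreign key dependency puts parent ahead of child
    print([t.name for t in sort_tables([child, parent])])  # ['parent', 'child']
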
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
index 977ed25c2..4f53e2979 100644
--- a/lib/sqlalchemy/sql/default_comparator.py
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -12,11 +12,12 @@ from .. import exc, util
from . import operators
from . import type_api
from .elements import BindParameter, True_, False_, BinaryExpression, \
- Null, _const_expr, _clause_element_as_expr, \
- ClauseList, ColumnElement, TextClause, UnaryExpression, \
- collate, _is_literal, _literal_as_text, ClauseElement, and_, or_
+ Null, _const_expr, _clause_element_as_expr, \
+ ClauseList, ColumnElement, TextClause, UnaryExpression, \
+ collate, _is_literal, _literal_as_text, ClauseElement, and_, or_
from .selectable import SelectBase, Alias, Selectable, ScalarSelect
+
class _DefaultColumnComparator(operators.ColumnOperators):
"""Defines comparison and math operations.
@@ -35,7 +36,8 @@ class _DefaultColumnComparator(operators.ColumnOperators):
def reverse_operate(self, op, other, **kwargs):
o = self.operators[op.__name__]
- return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs)
+ return o[0](self, self.expr, op, other,
+ reverse=True, *o[1:], **kwargs)
def _adapt_expression(self, op, other_comparator):
"""evaluate the return type of <self> <op> <othertype>,
@@ -65,8 +67,8 @@ class _DefaultColumnComparator(operators.ColumnOperators):
return op, other_comparator.type
def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
- _python_is_types=(util.NoneType, bool),
- **kwargs):
+ _python_is_types=(util.NoneType, bool),
+ **kwargs):
if isinstance(obj, _python_is_types + (Null, True_, False_)):
@@ -76,20 +78,20 @@ class _DefaultColumnComparator(operators.ColumnOperators):
if op in (operators.eq, operators.ne) and \
isinstance(obj, (bool, True_, False_)):
return BinaryExpression(expr,
- _literal_as_text(obj),
- op,
- type_=type_api.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
+ _literal_as_text(obj),
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
else:
# all other None/True/False uses IS, IS NOT
if op in (operators.eq, operators.is_):
return BinaryExpression(expr, _const_expr(obj),
- operators.is_,
- negate=operators.isnot)
+ operators.is_,
+ negate=operators.isnot)
elif op in (operators.ne, operators.isnot):
return BinaryExpression(expr, _const_expr(obj),
- operators.isnot,
- negate=operators.is_)
+ operators.isnot,
+ negate=operators.is_)
else:
raise exc.ArgumentError(
"Only '=', '!=', 'is_()', 'isnot()' operators can "
@@ -99,19 +101,19 @@ class _DefaultColumnComparator(operators.ColumnOperators):
if reverse:
return BinaryExpression(obj,
- expr,
- op,
- type_=type_api.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
+ expr,
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
else:
return BinaryExpression(expr,
- obj,
- op,
- type_=type_api.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
+ obj,
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
- **kw):
+ **kw):
obj = self._check_literal(expr, op, obj)
if reverse:
@@ -121,7 +123,7 @@ class _DefaultColumnComparator(operators.ColumnOperators):
if result_type is None:
op, result_type = left.comparator._adapt_expression(
- op, right.comparator)
+ op, right.comparator)
return BinaryExpression(left, right, op, type_=result_type)
@@ -141,7 +143,7 @@ class _DefaultColumnComparator(operators.ColumnOperators):
if isinstance(seq_or_selectable, ScalarSelect):
return self._boolean_compare(expr, op, seq_or_selectable,
- negate=negate_op)
+ negate=negate_op)
elif isinstance(seq_or_selectable, SelectBase):
# TODO: if we ever want to support (x, y, z) IN (select x,
@@ -154,20 +156,22 @@ class _DefaultColumnComparator(operators.ColumnOperators):
negate=negate_op, **kw)
elif isinstance(seq_or_selectable, (Selectable, TextClause)):
return self._boolean_compare(expr, op, seq_or_selectable,
- negate=negate_op, **kw)
+ negate=negate_op, **kw)
elif isinstance(seq_or_selectable, ClauseElement):
- raise exc.InvalidRequestError('in_() accepts'
- ' either a list of expressions '
- 'or a selectable: %r' % seq_or_selectable)
+ raise exc.InvalidRequestError(
+ 'in_() accepts'
+ ' either a list of expressions '
+ 'or a selectable: %r' % seq_or_selectable)
# Handle non selectable arguments as sequences
args = []
for o in seq_or_selectable:
if not _is_literal(o):
if not isinstance(o, operators.ColumnOperators):
- raise exc.InvalidRequestError('in_() accepts'
- ' either a list of expressions '
- 'or a selectable: %r' % o)
+ raise exc.InvalidRequestError(
+ 'in_() accepts'
+ ' either a list of expressions '
+ 'or a selectable: %r' % o)
elif o is None:
o = Null()
else:
@@ -192,12 +196,12 @@ class _DefaultColumnComparator(operators.ColumnOperators):
return expr == expr
return self._boolean_compare(expr, op,
- ClauseList(*args).self_group(against=op),
- negate=negate_op)
+ ClauseList(*args).self_group(against=op),
+ negate=negate_op)
def _unsupported_impl(self, expr, op, *arg, **kw):
raise NotImplementedError("Operator '%s' is not supported on "
- "this expression" % op.__name__)
+ "this expression" % op.__name__)
def _inv_impl(self, expr, op, **kw):
"""See :meth:`.ColumnOperators.__inv__`."""
@@ -212,29 +216,31 @@ class _DefaultColumnComparator(operators.ColumnOperators):
def _match_impl(self, expr, op, other, **kw):
"""See :meth:`.ColumnOperators.match`."""
- return self._boolean_compare(expr, operators.match_op,
- self._check_literal(expr, operators.match_op,
- other), **kw)
+ return self._boolean_compare(
+ expr, operators.match_op,
+ self._check_literal(
+ expr, operators.match_op, other),
+ **kw)
def _distinct_impl(self, expr, op, **kw):
"""See :meth:`.ColumnOperators.distinct`."""
return UnaryExpression(expr, operator=operators.distinct_op,
- type_=expr.type)
+ type_=expr.type)
def _between_impl(self, expr, op, cleft, cright, **kw):
"""See :meth:`.ColumnOperators.between`."""
return BinaryExpression(
- expr,
- ClauseList(
- self._check_literal(expr, operators.and_, cleft),
- self._check_literal(expr, operators.and_, cright),
- operator=operators.and_,
- group=False, group_contents=False),
- op,
- negate=operators.notbetween_op
- if op is operators.between_op
- else operators.between_op,
- modifiers=kw)
+ expr,
+ ClauseList(
+ self._check_literal(expr, operators.and_, cleft),
+ self._check_literal(expr, operators.and_, cright),
+ operator=operators.and_,
+ group=False, group_contents=False),
+ op,
+ negate=operators.notbetween_op
+ if op is operators.between_op
+ else operators.between_op,
+ modifiers=kw)
def _collate_impl(self, expr, op, other, **kw):
return collate(expr, other)
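Pairing ``between_op`` with ``notbetween_op`` as its negate means the inverted expression renders as NOT BETWEEN::

    from sqlalchemy.sql import column

    expr = column('x').between(5, 10)
    print(expr)    # x BETWEEN :x_1 AND :x_2
    print(~expr)   # x NOT BETWEEN :x_1 AND :x_2
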
@@ -303,4 +309,3 @@ class _DefaultColumnComparator(operators.ColumnOperators):
return expr._bind_param(operator, other)
else:
return other
-
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index 2368c3eec..f7e033d85 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -15,6 +15,7 @@ from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes
from .. import util
from .. import exc
+
class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
"""Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
@@ -37,9 +38,8 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
else:
return p
- if isinstance(parameters, (list, tuple)) and \
- parameters and \
- isinstance(parameters[0], (list, tuple, dict)):
+ if (isinstance(parameters, (list, tuple)) and parameters and
+ isinstance(parameters[0], (list, tuple, dict))):
if not self._supports_multi_parameters:
raise exc.InvalidRequestError(
@@ -83,7 +83,8 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
stmt = table.update().\\
where(table.c.data == 'value').\\
values(status='X').\\
- returning(table.c.server_flag, table.c.updated_timestamp)
+ returning(table.c.server_flag,
+ table.c.updated_timestamp)
for server_flag, updated_timestamp in connection.execute(stmt):
print(server_flag, updated_timestamp)
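A runnable sketch of the docstring's example, compiled for Postgresql (table and column names hypothetical)::

    from sqlalchemy.sql import table, column
    from sqlalchemy.dialects import postgresql

    t = table('t', column('data'), column('status'),
              column('server_flag'), column('updated_timestamp'))
    stmt = t.update().\
        where(t.c.data == 'value').\
        values(status='X').\
        returning(t.c.server_flag, t.c.updated_timestamp)
    # UPDATE t SET status=%(status)s WHERE t.data = %(data_1)s
    # RETURNING t.server_flag, t.updated_timestamp
    print(stmt.compile(dialect=postgresql.dialect()))
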
@@ -94,21 +95,20 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
objects are typical, the elements can also be expressions::
stmt = table.insert().returning(
- (table.c.first_name + " " + table.c.last_name).label('fullname')
- )
+ (table.c.first_name + " " + table.c.last_name).
+ label('fullname'))
Upon compilation, a RETURNING clause, or database equivalent,
will be rendered within the statement. For INSERT and UPDATE,
the values are the newly inserted/updated values. For DELETE,
the values are those of the rows which were deleted.
- Upon execution, the values of the columns to be returned
- are made available via the result set and can be iterated
- using :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not
- natively support returning values (i.e. cx_oracle),
- SQLAlchemy will approximate this behavior at the result level
- so that a reasonable amount of behavioral neutrality is
- provided.
+ Upon execution, the values of the columns to be returned are made
+ available via the result set and can be iterated using
+ :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not
+ natively support returning values (i.e. cx_oracle), SQLAlchemy will
+ approximate this behavior at the result level so that a reasonable
+ amount of behavioral neutrality is provided.
Note that not all databases/DBAPIs
support RETURNING. For those backends with no support,
@@ -129,7 +129,6 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
"""
self._returning = cols
-
@_generative
def with_hint(self, text, selectable=None, dialect_name="*"):
"""Add a table hint for a single table to this
@@ -167,7 +166,7 @@ class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
selectable = self.table
self._hints = self._hints.union(
- {(selectable, dialect_name): text})
+ {(selectable, dialect_name): text})
class ValuesBase(UpdateBase):
@@ -183,7 +182,7 @@ class ValuesBase(UpdateBase):
def __init__(self, table, values, prefixes):
self.table = _interpret_as_from(table)
self.parameters, self._has_multi_parameters = \
- self._process_colparams(values)
+ self._process_colparams(values)
if prefixes:
self._setup_prefixes(prefixes)
@@ -194,9 +193,9 @@ class ValuesBase(UpdateBase):
Note that the :class:`.Insert` and :class:`.Update` constructs support
per-execution time formatting of the VALUES and/or SET clauses,
- based on the arguments passed to :meth:`.Connection.execute`. However,
- the :meth:`.ValuesBase.values` method can be used to "fix" a particular
- set of parameters into the statement.
+ based on the arguments passed to :meth:`.Connection.execute`.
+ However, the :meth:`.ValuesBase.values` method can be used to "fix" a
+ particular set of parameters into the statement.
Multiple calls to :meth:`.ValuesBase.values` will produce a new
construct, each one with the parameter list modified to include
@@ -229,8 +228,8 @@ class ValuesBase(UpdateBase):
The :class:`.Insert` construct also supports multiply-rendered VALUES
construct, for those backends which support this SQL syntax
- (SQLite, Postgresql, MySQL). This mode is indicated by passing a list
- of one or more dictionaries/tuples::
+ (SQLite, Postgresql, MySQL). This mode is indicated by passing a
+ list of one or more dictionaries/tuples::
users.insert().values([
{"name": "some name"},
@@ -248,9 +247,10 @@ class ValuesBase(UpdateBase):
.. note::
Passing a multiple values list is *not* the same
- as passing a multiple values list to the :meth:`.Connection.execute`
- method. Passing a list of parameter sets to :meth:`.ValuesBase.values`
- produces a construct of this form::
+ as passing a multiple values list to the
+ :meth:`.Connection.execute` method. Passing a list of parameter
+ sets to :meth:`.ValuesBase.values` produces a construct of this
+ form::
INSERT INTO table (col1, col2, col3) VALUES
(col1_0, col2_0, col3_0),
@@ -282,23 +282,23 @@ class ValuesBase(UpdateBase):
"""
if self.select is not None:
raise exc.InvalidRequestError(
- "This construct already inserts from a SELECT")
+ "This construct already inserts from a SELECT")
if self._has_multi_parameters and kwargs:
raise exc.InvalidRequestError(
- "This construct already has multiple parameter sets.")
+ "This construct already has multiple parameter sets.")
if args:
if len(args) > 1:
raise exc.ArgumentError(
- "Only a single dictionary/tuple or list of "
- "dictionaries/tuples is accepted positionally.")
+ "Only a single dictionary/tuple or list of "
+ "dictionaries/tuples is accepted positionally.")
v = args[0]
else:
v = {}
if self.parameters is None:
self.parameters, self._has_multi_parameters = \
- self._process_colparams(v)
+ self._process_colparams(v)
else:
if self._has_multi_parameters:
self.parameters = list(self.parameters)
@@ -321,8 +321,8 @@ class ValuesBase(UpdateBase):
if kwargs:
if self._has_multi_parameters:
raise exc.ArgumentError(
- "Can't pass kwargs and multiple parameter sets "
- "simultaenously")
+ "Can't pass kwargs and multiple parameter sets "
+ "simultaenously")
else:
self.parameters.update(kwargs)
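Successive :meth:`.ValuesBase.values` calls merge into a single parameter set, as a sketch with a hypothetical table shows::

    from sqlalchemy.sql import table, column

    users = table('users', column('id'), column('name'))
    stmt = users.insert().values(name='spam')
    stmt = stmt.values(id=7)   # merges with the earlier parameters
    # INSERT INTO users (id, name) VALUES (:id, :name)
    print(stmt)
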
@@ -340,40 +340,40 @@ class ValuesBase(UpdateBase):
server_created_at = result.returned_defaults['created_at']
When used against a backend that supports RETURNING, all column
- values generated by SQL expression or server-side-default will be added
- to any existing RETURNING clause, provided that
- :meth:`.UpdateBase.returning` is not used simultaneously. The column values
- will then be available on the result using the
- :attr:`.ResultProxy.returned_defaults` accessor as a
- dictionary, referring to values keyed to the :class:`.Column` object
- as well as its ``.key``.
+ values generated by SQL expression or server-side-default will be
+ added to any existing RETURNING clause, provided that
+ :meth:`.UpdateBase.returning` is not used simultaneously. The column
+ values will then be available on the result using the
+ :attr:`.ResultProxy.returned_defaults` accessor as a dictionary,
+ referring to values keyed to the :class:`.Column` object as well as
+ its ``.key``.
This method differs from :meth:`.UpdateBase.returning` in these ways:
1. :meth:`.ValuesBase.return_defaults` is only intended for use with
an INSERT or an UPDATE statement that matches exactly one row.
- While the RETURNING construct in the general sense supports multiple
- rows for a multi-row UPDATE or DELETE statement, or for special
- cases of INSERT that return multiple rows (e.g. INSERT from SELECT,
- multi-valued VALUES clause), :meth:`.ValuesBase.return_defaults`
- is intended only
- for an "ORM-style" single-row INSERT/UPDATE statement. The row
- returned by the statement is also consumed implcitly when
+ While the RETURNING construct in the general sense supports
+ multiple rows for a multi-row UPDATE or DELETE statement, or for
+ special cases of INSERT that return multiple rows (e.g. INSERT from
+ SELECT, multi-valued VALUES clause),
+ :meth:`.ValuesBase.return_defaults` is intended only for an
+ "ORM-style" single-row INSERT/UPDATE statement. The row returned
+ by the statement is also consumed implicitly when
:meth:`.ValuesBase.return_defaults` is used. By contrast,
- :meth:`.UpdateBase.returning` leaves the RETURNING result-set intact
- with a collection of any number of rows.
+ :meth:`.UpdateBase.returning` leaves the RETURNING result-set
+ intact with a collection of any number of rows.
2. It is compatible with the existing logic to fetch auto-generated
- primary key values, also known as "implicit returning". Backends that
- support RETURNING will automatically make use of RETURNING in order
- to fetch the value of newly generated primary keys; while the
+ primary key values, also known as "implicit returning". Backends
+ that support RETURNING will automatically make use of RETURNING in
+ order to fetch the value of newly generated primary keys; while the
:meth:`.UpdateBase.returning` method circumvents this behavior,
:meth:`.ValuesBase.return_defaults` leaves it intact.
3. It can be called against any backend. Backends that don't support
RETURNING will skip the usage of the feature, rather than raising
- an exception. The return value of :attr:`.ResultProxy.returned_defaults`
- will be ``None``
+ an exception. The return value of
+ :attr:`.ResultProxy.returned_defaults` will be ``None``
:meth:`.ValuesBase.return_defaults` is used by the ORM to provide
an efficient implementation for the ``eager_defaults`` feature of
@@ -411,21 +411,22 @@ class Insert(ValuesBase):
_supports_multi_parameters = True
def __init__(self,
- table,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- return_defaults=False,
- **dialect_kw):
+ table,
+ values=None,
+ inline=False,
+ bind=None,
+ prefixes=None,
+ returning=None,
+ return_defaults=False,
+ **dialect_kw):
"""Construct an :class:`.Insert` object.
Similar functionality is available via the
:meth:`~.TableClause.insert` method on
:class:`~.schema.Table`.
- :param table: :class:`.TableClause` which is the subject of the insert.
+ :param table: :class:`.TableClause` which is the subject of the
+ insert.
:param values: collection of values to be inserted; see
:meth:`.Insert.values` for a description of allowed formats here.
@@ -433,15 +434,16 @@ class Insert(ValuesBase):
dynamically render the VALUES clause at execution time based on
the parameters passed to :meth:`.Connection.execute`.
- :param inline: if True, SQL defaults will be compiled 'inline' into the
- statement and not pre-executed.
+ :param inline: if True, SQL defaults will be compiled 'inline' into
+ the statement and not pre-executed.
If both `values` and compile-time bind parameters are present, the
compile-time bind parameters override the information specified
within `values` on a per-key basis.
- The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
- objects or their string identifiers. Each key may reference one of:
+ The keys within `values` can be either
+ :class:`~sqlalchemy.schema.Column` objects or their string
+ identifiers. Each key may reference one of:
* a literal data value (i.e. string, number, etc.);
* a Column object;
@@ -498,8 +500,9 @@ class Insert(ValuesBase):
Depending on backend, it may be necessary for the :class:`.Insert`
statement to be constructed using the ``inline=True`` flag; this
flag will prevent the implicit usage of ``RETURNING`` when the
- ``INSERT`` statement is rendered, which isn't supported on a backend
- such as Oracle in conjunction with an ``INSERT..SELECT`` combination::
+ ``INSERT`` statement is rendered, which isn't supported on a
+ backend such as Oracle in conjunction with an ``INSERT..SELECT``
+ combination::
sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
ins = table2.insert(inline=True).from_select(['a', 'b'], sel)
@@ -516,10 +519,10 @@ class Insert(ValuesBase):
"""
if self.parameters:
raise exc.InvalidRequestError(
- "This construct already inserts value expressions")
+ "This construct already inserts value expressions")
self.parameters, self._has_multi_parameters = \
- self._process_colparams(dict((n, Null()) for n in names))
+ self._process_colparams(dict((n, Null()) for n in names))
self.select_names = names
self.select = _interpret_as_select(select)
@@ -534,21 +537,22 @@ class Insert(ValuesBase):
class Update(ValuesBase):
"""Represent an Update construct.
- The :class:`.Update` object is created using the :func:`update()` function.
+ The :class:`.Update` object is created using the :func:`update()`
+ function.
"""
__visit_name__ = 'update'
def __init__(self,
- table,
- whereclause=None,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- return_defaults=False,
- **dialect_kw):
+ table,
+ whereclause=None,
+ values=None,
+ inline=False,
+ bind=None,
+ prefixes=None,
+ returning=None,
+ return_defaults=False,
+ **dialect_kw):
"""Construct an :class:`.Update` object.
E.g.::
@@ -662,7 +666,6 @@ class Update(ValuesBase):
self._validate_dialect_kwargs(dialect_kw)
self._return_defaults = return_defaults
-
def get_children(self, **kwargs):
if self._whereclause is not None:
return self._whereclause,
@@ -682,7 +685,7 @@ class Update(ValuesBase):
"""
if self._whereclause is not None:
self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
+ _literal_as_text(whereclause))
else:
self._whereclause = _literal_as_text(whereclause)
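The ``and_()`` chaining above means repeated ``where()`` calls conjoin::

    from sqlalchemy.sql import table, column

    t = table('t', column('a'), column('b'))
    stmt = t.update().values(b=5).where(t.c.a > 1).where(t.c.a < 10)
    # UPDATE t SET b=:b WHERE t.a > :a_1 AND t.a < :a_2
    print(stmt)
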
@@ -705,19 +708,20 @@ class Update(ValuesBase):
class Delete(UpdateBase):
"""Represent a DELETE construct.
- The :class:`.Delete` object is created using the :func:`delete()` function.
+ The :class:`.Delete` object is created using the :func:`delete()`
+ function.
"""
__visit_name__ = 'delete'
def __init__(self,
- table,
- whereclause=None,
- bind=None,
- returning=None,
- prefixes=None,
- **dialect_kw):
+ table,
+ whereclause=None,
+ bind=None,
+ returning=None,
+ prefixes=None,
+ **dialect_kw):
"""Construct :class:`.Delete` object.
Similar functionality is available via the
@@ -761,11 +765,10 @@ class Delete(UpdateBase):
if self._whereclause is not None:
self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
+ _literal_as_text(whereclause))
else:
self._whereclause = _literal_as_text(whereclause)
def _copy_internals(self, clone=_clone, **kw):
# TODO: coverage
self._whereclause = clone(self._whereclause, **kw)
-
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index ab07efee3..6114460dc 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -24,9 +24,11 @@ from .base import _generative, Generative
import re
import operator
+
def _clone(element, **kw):
return element._clone()
+
def collate(expression, collation):
"""Return the clause ``expression COLLATE collation``.
@@ -46,6 +48,7 @@ def collate(expression, collation):
_literal_as_text(collation),
operators.collate, type_=expr.type)
+
def between(expr, lower_bound, upper_bound, symmetric=False):
"""Produce a ``BETWEEN`` predicate clause.
@@ -80,8 +83,8 @@ def between(expr, lower_bound, upper_bound, symmetric=False):
into a column expression, serving as the left side of the ``BETWEEN``
expression.
- :param lower_bound: a column or Python scalar expression serving as the lower
- bound of the right side of the ``BETWEEN`` expression.
+ :param lower_bound: a column or Python scalar expression serving as the
+ lower bound of the right side of the ``BETWEEN`` expression.
:param upper_bound: a column or Python scalar expression serving as the
upper bound of the right side of the ``BETWEEN`` expression.
@@ -99,15 +102,15 @@ def between(expr, lower_bound, upper_bound, symmetric=False):
expr = _literal_as_binds(expr)
return expr.between(lower_bound, upper_bound, symmetric=symmetric)
+
def literal(value, type_=None):
"""Return a literal clause, bound to a bind parameter.
- Literal clauses are created automatically when non- :class:`.ClauseElement`
- objects (such as strings, ints, dates, etc.) are used in a comparison
- operation with a :class:`.ColumnElement`
- subclass, such as a :class:`~sqlalchemy.schema.Column` object.
- Use this function to force the
- generation of a literal clause, which will be created as a
+ Literal clauses are created automatically when non-
+ :class:`.ClauseElement` objects (such as strings, ints, dates, etc.) are
+ used in a comparison operation with a :class:`.ColumnElement` subclass,
+ such as a :class:`~sqlalchemy.schema.Column` object. Use this function
+ to force the generation of a literal clause, which will be created as a
:class:`BindParameter` with a bound value.
:param value: the value to be bound. Can be any Python object supported by
@@ -120,7 +123,6 @@ def literal(value, type_=None):
return BindParameter(None, value, type_=type_, unique=True)
-
def type_coerce(expression, type_):
"""Associate a SQL expression with a particular type, without rendering
``CAST``.
@@ -155,8 +157,9 @@ def type_coerce(expression, type_):
except that it does not render the ``CAST`` expression in the resulting
statement.
- :param expression: A SQL expression, such as a :class:`.ColumnElement` expression
- or a Python string which will be coerced into a bound literal value.
+ :param expression: A SQL expression, such as a :class:`.ColumnElement`
+ expression or a Python string which will be coerced into a bound literal
+ value.
:param type_: A :class:`.TypeEngine` class or instance indicating
the type to which the expression is coerced.
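
[editor's note] As a hedged illustration of the contrast drawn above (the literal value is arbitrary), ``type_coerce`` associates a type without emitting ``CAST``, while ``cast`` renders one:

    from sqlalchemy import cast, type_coerce, literal, select, String

    coerced = select([type_coerce(literal(123), String)])
    casted = select([cast(literal(123), String)])

    print(coerced)  # SELECT :param_1 AS anon_1  -- no CAST rendered
    print(casted)   # SELECT CAST(:param_1 AS VARCHAR) AS anon_1
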
@@ -183,9 +186,6 @@ def type_coerce(expression, type_):
return Label(None, expression, type_=type_)
-
-
-
def outparam(key, type_=None):
"""Create an 'OUT' parameter for usage in functions (stored procedures),
for databases which support them.
@@ -197,9 +197,7 @@ def outparam(key, type_=None):
"""
return BindParameter(
- key, None, type_=type_, unique=False, isoutparam=True)
-
-
+ key, None, type_=type_, unique=False, isoutparam=True)
def not_(clause):
@@ -213,7 +211,6 @@ def not_(clause):
return operators.inv(_literal_as_binds(clause))
-
@inspection._self_inspects
class ClauseElement(Visitable):
"""Base class for elements of a programmatically constructed SQL
@@ -451,8 +448,8 @@ class ClauseElement(Visitable):
:param dialect: A ``Dialect`` instance from which a ``Compiled``
will be acquired. This argument takes precedence over the `bind`
- argument as well as this :class:`.ClauseElement`'s bound engine, if
- any.
+ argument as well as this :class:`.ClauseElement`'s bound engine,
+ if any.
:param inline: Used for INSERT statements, for a dialect which does
not support inline retrieval of newly generated primary key
@@ -532,16 +529,15 @@ class ClauseElement(Visitable):
def _negate(self):
return UnaryExpression(
- self.self_group(against=operators.inv),
- operator=operators.inv,
- negate=None)
+ self.self_group(against=operators.inv),
+ operator=operators.inv,
+ negate=None)
def __bool__(self):
raise TypeError("Boolean value of this clause is not defined")
__nonzero__ = __bool__
-
def __repr__(self):
friendly = getattr(self, 'description', None)
if friendly is None:
@@ -562,40 +558,42 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
literal expressions, keywords such as ``NULL``, etc.
:class:`.ColumnElement` is the ultimate base class for all such elements.
- A wide variety of SQLAlchemy Core functions work at the SQL expression level,
- and are intended to accept instances of :class:`.ColumnElement` as arguments.
- These functions will typically document that they accept a "SQL expression"
- as an argument. What this means in terms of SQLAlchemy usually refers
- to an input which is either already in the form of a :class:`.ColumnElement`
- object, or a value which can be **coerced** into one. The coercion
- rules followed by most, but not all, SQLAlchemy Core functions with regards
- to SQL expressions are as follows:
+ A wide variety of SQLAlchemy Core functions work at the SQL expression
+ level, and are intended to accept instances of :class:`.ColumnElement` as
+ arguments. These functions will typically document that they accept a
+ "SQL expression" as an argument. What this means in terms of SQLAlchemy
+ usually refers to an input which is either already in the form of a
+ :class:`.ColumnElement` object, or a value which can be **coerced** into
+ one. The coercion rules followed by most, but not all, SQLAlchemy Core
+ functions with regards to SQL expressions are as follows:
* a literal Python value, such as a string, integer or floating
point value, boolean, datetime, ``Decimal`` object, or virtually
- any other Python object, will be coerced into a "literal bound value".
- This generally means that a :func:`.bindparam` will be produced
- featuring the given value embedded into the construct; the resulting
- :class:`.BindParameter` object is an instance of :class:`.ColumnElement`.
- The Python value will ultimately be sent to the DBAPI at execution time as a
- paramterized argument to the ``execute()`` or ``executemany()`` methods,
- after SQLAlchemy type-specific converters (e.g. those provided by
- any associated :class:`.TypeEngine` objects) are applied to the value.
-
- * any special object value, typically ORM-level constructs, which feature
- a method called ``__clause_element__()``. The Core expression system
- looks for this method when an object of otherwise unknown type is passed
- to a function that is looking to coerce the argument into a :class:`.ColumnElement`
- expression. The ``__clause_element__()`` method, if present, should
- return a :class:`.ColumnElement` instance. The primary use of
- ``__clause_element__()`` within SQLAlchemy is that of class-bound attributes
- on ORM-mapped classes; a ``User`` class which contains a mapped attribute
- named ``.name`` will have a method ``User.name.__clause_element__()``
- which when invoked returns the :class:`.Column` called ``name`` associated
- with the mapped table.
-
- * The Python ``None`` value is typically interpreted as ``NULL``, which
- in SQLAlchemy Core produces an instance of :func:`.null`.
+ any other Python object, will be coerced into a "literal bound
+ value". This generally means that a :func:`.bindparam` will be
+ produced featuring the given value embedded into the construct; the
+ resulting :class:`.BindParameter` object is an instance of
+ :class:`.ColumnElement`. The Python value will ultimately be sent
+ to the DBAPI at execution time as a parameterized argument to the
+ ``execute()`` or ``executemany()`` methods, after SQLAlchemy
+ type-specific converters (e.g. those provided by any associated
+ :class:`.TypeEngine` objects) are applied to the value.
+
+ * any special object value, typically ORM-level constructs, which
+ feature a method called ``__clause_element__()``. The Core
+ expression system looks for this method when an object of otherwise
+ unknown type is passed to a function that is looking to coerce the
+ argument into a :class:`.ColumnElement` expression. The
+ ``__clause_element__()`` method, if present, should return a
+ :class:`.ColumnElement` instance. The primary use of
+ ``__clause_element__()`` within SQLAlchemy is that of class-bound
+ attributes on ORM-mapped classes; a ``User`` class which contains a
+ mapped attribute named ``.name`` will have a method
+ ``User.name.__clause_element__()`` which when invoked returns the
+ :class:`.Column` called ``name`` associated with the mapped table.
+
+ * The Python ``None`` value is typically interpreted as ``NULL``,
+ which in SQLAlchemy Core produces an instance of :func:`.null`.
A :class:`.ColumnElement` provides the ability to generate new
:class:`.ColumnElement`
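
[editor's note] A short sketch of the first and third coercion rules listed above; the ``__clause_element__()`` rule requires an ORM mapping and is omitted here, and the ``users`` table is hypothetical:

    from sqlalchemy import Table, Column, Integer, String, MetaData

    users = Table('users', MetaData(),
                  Column('id', Integer), Column('name', String))

    expr = users.c.name == 'Wendy'     # literal -> BindParameter
    print(type(expr.right).__name__)   # BindParameter

    print(users.c.name == None)        # None -> IS NULL
    # users.name IS NULL
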
@@ -631,8 +629,9 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
_alt_names = ()
def self_group(self, against=None):
- if against in (operators.and_, operators.or_, operators._asbool) and \
- self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+ if (against in (operators.and_, operators.or_, operators._asbool) and
+ self.type._type_affinity
+ is type_api.BOOLEANTYPE._type_affinity):
return AsBoolean(self, operators.istrue, operators.isfalse)
else:
return self
@@ -656,7 +655,7 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
return getattr(self.comparator, key)
except AttributeError:
raise AttributeError(
- 'Neither %r object nor %r object has an attribute %r' % (
+ 'Neither %r object nor %r object has an attribute %r' % (
type(self).__name__,
type(self.comparator).__name__,
key)
@@ -670,8 +669,8 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
def _bind_param(self, operator, obj):
return BindParameter(None, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
+ _compared_to_operator=operator,
+ _compared_to_type=self.type, unique=True)
@property
def expression(self):
@@ -689,7 +688,7 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
@util.memoized_property
def base_columns(self):
return util.column_set(c for c in self.proxy_set
- if not hasattr(c, '_proxies'))
+ if not hasattr(c, '_proxies'))
@util.memoized_property
def proxy_set(self):
@@ -710,9 +709,10 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
when targeting within a result row."""
return hasattr(other, 'name') and hasattr(self, 'name') and \
- other.name == self.name
+ other.name == self.name
- def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw):
+ def _make_proxy(
+ self, selectable, name=None, name_is_truncatable=False, **kw):
"""Create a new :class:`.ColumnElement` representing this
:class:`.ColumnElement` as it appears in the select list of a
descending selectable.
@@ -731,10 +731,10 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
else:
key = name
co = ColumnClause(
- _as_truncated(name) if name_is_truncatable else name,
- type_=getattr(self, 'type', None),
- _selectable=selectable
- )
+ _as_truncated(name) if name_is_truncatable else name,
+ type_=getattr(self, 'type', None),
+ _selectable=selectable
+ )
co._proxies = [self]
if selectable._is_clone_of is not None:
co._is_clone_of = \
@@ -752,8 +752,8 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
:param equivalents: a dictionary of columns as keys mapped to sets
of columns. If the given "other" column is present in this
- dictionary, if any of the columns in the corresponding set() pass the
- comparison test, the result is True. This is used to expand the
+ dictionary, if any of the columns in the corresponding set() pass
+ the comparison test, the result is True. This is used to expand the
comparison to other columns that may be known to be equivalent to
this one via foreign key or other criterion.
@@ -794,8 +794,9 @@ class ColumnElement(operators.ColumnOperators, ClauseElement):
expressions and function calls.
"""
- return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self,
- 'name', 'anon')))
+ return _anonymous_label(
+ '%%(%d %s)s' % (id(self), getattr(self, 'name', 'anon'))
+ )
class BindParameter(ColumnElement):
@@ -823,18 +824,18 @@ class BindParameter(ColumnElement):
_is_crud = False
def __init__(self, key, value=NO_ARG, type_=None,
- unique=False, required=NO_ARG,
- quote=None, callable_=None,
- isoutparam=False,
- _compared_to_operator=None,
- _compared_to_type=None):
+ unique=False, required=NO_ARG,
+ quote=None, callable_=None,
+ isoutparam=False,
+ _compared_to_operator=None,
+ _compared_to_type=None):
"""Produce a "bound expression".
The return value is an instance of :class:`.BindParameter`; this
is a :class:`.ColumnElement` subclass which represents a so-called
- "placeholder" value in a SQL expression, the value of which is supplied
- at the point at which the statement in executed against a database
- connection.
+ "placeholder" value in a SQL expression, the value of which is
+ supplied at the point at which the statement is executed against a
+ database connection.
In SQLAlchemy, the :func:`.bindparam` construct has
the ability to carry along the actual value that will be ultimately
@@ -870,27 +871,29 @@ class BindParameter(ColumnElement):
where the WHERE criterion of the statement is to change on each
invocation, such as::
- stmt = users_table.update().\\
- where(user_table.c.name == bindparam('username')).\\
- values(fullname=bindparam('fullname'))
+ stmt = (users_table.update().
+ where(users_table.c.name == bindparam('username')).
+ values(fullname=bindparam('fullname'))
+ )
- connection.execute(stmt, [
- {"username": "wendy", "fullname": "Wendy Smith"},
- {"username": "jack", "fullname": "Jack Jones"},
- ])
+ connection.execute(
+ stmt, [{"username": "wendy", "fullname": "Wendy Smith"},
+ {"username": "jack", "fullname": "Jack Jones"},
+ ]
+ )
- SQLAlchemy's Core expression system makes wide use of :func:`.bindparam`
- in an implicit sense. It is typical that Python literal values passed to
- virtually all SQL expression functions are coerced into fixed
- :func:`.bindparam` constructs. For example, given a comparison operation
- such as::
+ SQLAlchemy's Core expression system makes wide use of
+ :func:`.bindparam` in an implicit sense. It is typical that Python
+ literal values passed to virtually all SQL expression functions are
+ coerced into fixed :func:`.bindparam` constructs. For example, given
+ a comparison operation such as::
expr = users_table.c.name == 'Wendy'
The above expression will produce a :class:`.BinaryExpression`
construct, where the left side is the :class:`.Column` object
- representing the ``name`` column, and the right side is a :class:`.BindParameter`
- representing the literal value::
+ representing the ``name`` column, and the right side is a
+ :class:`.BindParameter` representing the literal value::
print(repr(expr.right))
BindParameter('%(4327771088 name)s', 'Wendy', type_=String())
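
[editor's note] A runnable, hedged sketch tying the implicit and explicit ``bindparam()`` forms above together (table and data are hypothetical; assumes an in-memory SQLite database):

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, String, bindparam)

    engine = create_engine('sqlite://')
    metadata = MetaData()
    users_table = Table('users', metadata,
                        Column('id', Integer, primary_key=True),
                        Column('name', String),
                        Column('fullname', String))
    metadata.create_all(engine)
    conn = engine.connect()

    # implicit bindparam(): literal values become bound placeholders
    conn.execute(users_table.insert(), [
        {'name': 'wendy', 'fullname': 'Wendy W.'},
        {'name': 'jack', 'fullname': 'Jack J.'}])

    # explicit bindparam() in an "executemany"-style UPDATE
    stmt = (users_table.update().
            where(users_table.c.name == bindparam('username')).
            values(fullname=bindparam('fullname')))
    conn.execute(stmt, [
        {'username': 'wendy', 'fullname': 'Wendy Smith'},
        {'username': 'jack', 'fullname': 'Jack Jones'}])
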
@@ -921,8 +924,8 @@ class BindParameter(ColumnElement):
Similarly, :func:`.bindparam` is invoked automatically
when working with :term:`CRUD` statements as far as the "VALUES"
portion is concerned. The :func:`.insert` construct produces an
- ``INSERT`` expression which will, at statement execution time, generate
- bound placeholders based on the arguments passed, as in::
+ ``INSERT`` expression which will, at statement execution time,
+ generate bound placeholders based on the arguments passed, as in::
stmt = users_table.insert()
result = connection.execute(stmt, name='Wendy')
@@ -989,8 +992,8 @@ class BindParameter(ColumnElement):
If ``True``, a value is required at execution time. If not passed,
it defaults to ``True`` if neither :paramref:`.bindparam.value`
or :paramref:`.bindparam.callable` were passed. If either of these
- parameters are present, then :paramref:`.bindparam.required` defaults
- to ``False``.
+ parameters are present, then :paramref:`.bindparam.required`
+ defaults to ``False``.
.. versionchanged:: 0.8 If the ``required`` flag is not specified,
it will be set automatically to ``True`` or ``False`` depending
@@ -1030,10 +1033,10 @@ class BindParameter(ColumnElement):
if unique:
self.key = _anonymous_label('%%(%d %s)s' % (id(self), key
- or 'param'))
+ or 'param'))
else:
self.key = key or _anonymous_label('%%(%d param)s'
- % id(self))
+ % id(self))
# identifying key that won't change across
# clones, used to identify the bind's logical
@@ -1056,21 +1059,23 @@ class BindParameter(ColumnElement):
_compared_to_operator, value)
else:
self.type = type_api._type_map.get(type(value),
- type_api.NULLTYPE)
+ type_api.NULLTYPE)
elif isinstance(type_, type):
self.type = type_()
else:
self.type = type_
def _with_value(self, value):
- """Return a copy of this :class:`.BindParameter` with the given value set."""
+ """Return a copy of this :class:`.BindParameter` with the given value
+ set.
+ """
cloned = self._clone()
cloned.value = value
cloned.callable = None
cloned.required = False
if cloned.type is type_api.NULLTYPE:
cloned.type = type_api._type_map.get(type(value),
- type_api.NULLTYPE)
+ type_api.NULLTYPE)
return cloned
@property
@@ -1092,14 +1097,14 @@ class BindParameter(ColumnElement):
c = ClauseElement._clone(self)
if self.unique:
c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key
- or 'param'))
+ or 'param'))
return c
def _convert_to_unique(self):
if not self.unique:
self.unique = True
- self.key = _anonymous_label('%%(%d %s)s' % (id(self),
- self._orig_key or 'param'))
+ self.key = _anonymous_label(
+ '%%(%d %s)s' % (id(self), self._orig_key or 'param'))
def compare(self, other, **kw):
"""Compare this :class:`BindParameter` to the given
@@ -1122,7 +1127,7 @@ class BindParameter(ColumnElement):
def __repr__(self):
return 'BindParameter(%r, %r, type_=%r)' % (self.key,
- self.value, self.type)
+ self.value, self.type)
class TypeClause(ClauseElement):
@@ -1176,9 +1181,9 @@ class TextClause(Executable, ClauseElement):
_hide_froms = []
def __init__(
- self,
- text,
- bind=None):
+ self,
+ text,
+ bind=None):
self._bind = bind
self._bindparams = {}
@@ -1192,7 +1197,7 @@ class TextClause(Executable, ClauseElement):
@classmethod
def _create_text(self, text, bind=None, bindparams=None,
- typemap=None, autocommit=None):
+ typemap=None, autocommit=None):
"""Construct a new :class:`.TextClause` clause, representing
a textual SQL string directly.
@@ -1226,10 +1231,10 @@ class TextClause(Executable, ClauseElement):
The :class:`.TextClause` construct includes methods which can
provide information about the bound parameters as well as the column
values which would be returned from the textual statement, assuming
- it's an executable SELECT type of statement. The :meth:`.TextClause.bindparams`
- method is used to provide bound parameter detail, and
- :meth:`.TextClause.columns` method allows specification of
- return columns including names and types::
+ it's an executable SELECT type of statement. The
+ :meth:`.TextClause.bindparams` method is used to provide bound
+ parameter detail, and the :meth:`.TextClause.columns` method allows
+ specification of return columns including names and types::
t = text("SELECT * FROM users WHERE id=:user_id").\\
bindparams(user_id=7).\\
@@ -1252,8 +1257,8 @@ class TextClause(Executable, ClauseElement):
to it as an :class:`.Executable` object, and it supports
the :meth:`Executable.execution_options` method. For example,
a :func:`.text` construct that should be subject to "autocommit"
- can be set explicitly so using the :paramref:`.Connection.execution_options.autocommit`
- option::
+ can be set explicitly to autocommit using the
+ :paramref:`.Connection.execution_options.autocommit` option::
t = text("EXEC my_procedural_thing()").\\
execution_options(autocommit=True)
@@ -1298,9 +1303,10 @@ class TextClause(Executable, ClauseElement):
represented in the columns clause of a ``SELECT`` statement
to type objects,
which will be used to perform post-processing on columns within
- the result set. This parameter now invokes the :meth:`.TextClause.columns`
- method, which returns a :class:`.TextAsFrom` construct that gains
- a ``.c`` collection and can be embedded in other expressions. E.g.::
+ the result set. This parameter now invokes the
+ :meth:`.TextClause.columns` method, which returns a
+ :class:`.TextAsFrom` construct that gains a ``.c`` collection and
+ can be embedded in other expressions. E.g.::
stmt = text("SELECT * FROM table",
typemap={'id': Integer, 'name': String},
@@ -1308,7 +1314,8 @@ class TextClause(Executable, ClauseElement):
Is equivalent to::
- stmt = text("SELECT * FROM table").columns(id=Integer, name=String)
+ stmt = text("SELECT * FROM table").columns(id=Integer,
+ name=String)
Or alternatively::
@@ -1361,8 +1368,8 @@ class TextClause(Executable, ClauseElement):
When specific typing behavior is needed, the positional ``*binds``
argument can be used in which to specify :func:`.bindparam` constructs
- directly. These constructs must include at least the ``key`` argument,
- then an optional value and type::
+ directly. These constructs must include at least the ``key``
+ argument, then an optional value and type::
from sqlalchemy import bindparam
stmt = stmt.bindparams(
@@ -1370,9 +1377,10 @@ class TextClause(Executable, ClauseElement):
bindparam('timestamp', type_=DateTime)
)
- Above, we specified the type of :class:`.DateTime` for the ``timestamp``
- bind, and the type of :class:`.String` for the ``name`` bind. In
- the case of ``name`` we also set the default value of ``"jack"``.
+ Above, we specified the type of :class:`.DateTime` for the
+ ``timestamp`` bind, and the type of :class:`.String` for the ``name``
+ bind. In the case of ``name`` we also set the default value of
+ ``"jack"``.
Additional bound parameters can be supplied at statement execution
time, e.g.::
@@ -1380,26 +1388,27 @@ class TextClause(Executable, ClauseElement):
result = connection.execute(stmt,
timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))
- The :meth:`.TextClause.bindparams` method can be called repeatedly, where
- it will re-use existing :class:`.BindParameter` objects to add new information.
- For example, we can call :meth:`.TextClause.bindparams` first with
- typing information, and a second time with value information, and it
- will be combined::
+ The :meth:`.TextClause.bindparams` method can be called repeatedly,
+ where it will re-use existing :class:`.BindParameter` objects to add
+ new information. For example, we can call
+ :meth:`.TextClause.bindparams` first with typing information, and a
+ second time with value information, and it will be combined::
stmt = text("SELECT id, name FROM user WHERE name=:name "
"AND timestamp=:timestamp")
stmt = stmt.bindparams(
- bindparam('name', type_=String),
- bindparam('timestamp', type_=DateTime)
- )
+ bindparam('name', type_=String),
+ bindparam('timestamp', type_=DateTime)
+ )
stmt = stmt.bindparams(
- name='jack',
- timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
- )
+ name='jack',
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
+ )
- .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method supersedes
- the argument ``bindparams`` passed to :func:`~.expression.text`.
+ .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method
+ supersedes the argument ``bindparams`` passed to
+ :func:`~.expression.text`.
"""
@@ -1410,8 +1419,8 @@ class TextClause(Executable, ClauseElement):
existing = new_params[bind.key]
except KeyError:
raise exc.ArgumentError(
- "This text() construct doesn't define a "
- "bound parameter named %r" % bind.key)
+ "This text() construct doesn't define a "
+ "bound parameter named %r" % bind.key)
else:
new_params[existing.key] = bind
@@ -1420,13 +1429,11 @@ class TextClause(Executable, ClauseElement):
existing = new_params[key]
except KeyError:
raise exc.ArgumentError(
- "This text() construct doesn't define a "
- "bound parameter named %r" % key)
+ "This text() construct doesn't define a "
+ "bound parameter named %r" % key)
else:
new_params[key] = existing._with_value(value)
-
-
@util.dependencies('sqlalchemy.sql.selectable')
def columns(self, selectable, *cols, **types):
"""Turn this :class:`.TextClause` object into a :class:`.TextAsFrom`
@@ -1447,8 +1454,8 @@ class TextClause(Executable, ClauseElement):
).where(stmt.c.id > 5)
Above, we used untyped :func:`.column` elements. These can also have
- types specified, which will impact how the column behaves in expressions
- as well as determining result set behavior::
+ types specified, which will impact how the column behaves in
+ expressions as well as determining result set behavior::
stmt = text("SELECT id, name, timestamp FROM some_table")
stmt = stmt.columns(
@@ -1460,8 +1467,9 @@ class TextClause(Executable, ClauseElement):
for id, name, timestamp in connection.execute(stmt):
print(id, name, timestamp)
- Keyword arguments allow just the names and types of columns to be specified,
- where the :func:`.column` elements will be generated automatically::
+ Keyword arguments allow just the names and types of columns to be
+ specified, where the :func:`.column` elements will be generated
+ automatically::
stmt = text("SELECT id, name, timestamp FROM some_table")
stmt = stmt.columns(
@@ -1474,24 +1482,24 @@ class TextClause(Executable, ClauseElement):
print(id, name, timestamp)
The :meth:`.TextClause.columns` method provides a direct
- route to calling :meth:`.FromClause.alias` as well as :meth:`.SelectBase.cte`
- against a textual SELECT statement::
+ route to calling :meth:`.FromClause.alias` as well as
+ :meth:`.SelectBase.cte` against a textual SELECT statement::
stmt = stmt.columns(id=Integer, name=String).cte('st')
stmt = select([sometable]).where(sometable.c.id == stmt.c.id)
- .. versionadded:: 0.9.0 :func:`.text` can now be converted into a fully
- featured "selectable" construct using the :meth:`.TextClause.columns`
- method. This method supersedes the ``typemap`` argument to
- :func:`.text`.
+ .. versionadded:: 0.9.0 :func:`.text` can now be converted into a
+ fully featured "selectable" construct using the
+ :meth:`.TextClause.columns` method. This method supersedes the
+ ``typemap`` argument to :func:`.text`.
"""
input_cols = [
ColumnClause(col.key, types.pop(col.key))
- if col.key in types
- else col
+ if col.key in types
+ else col
for col in cols
] + [ColumnClause(key, type_) for key, type_ in types.items()]
return selectable.TextAsFrom(self, input_cols)
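
[editor's note] A hedged end-to-end sketch of the keyword form of ``columns()`` described above (table and column names are hypothetical; the alias step follows the docstring's own pattern):

    from sqlalchemy import text, select, Integer, String, DateTime

    stmt = text("SELECT id, name, timestamp FROM some_table")
    stmt = stmt.columns(id=Integer, name=String,
                        timestamp=DateTime).alias('st')

    # the construct now has a .c collection and composes like a selectable
    query = select([stmt.c.id, stmt.c.name]).where(stmt.c.id > 5)
    print(query)
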
@@ -1512,7 +1520,7 @@ class TextClause(Executable, ClauseElement):
def _copy_internals(self, clone=_clone, **kw):
self._bindparams = dict((b.key, clone(b, **kw))
- for b in self._bindparams.values())
+ for b in self._bindparams.values())
def get_children(self, **kwargs):
return list(self._bindparams.values())
@@ -1520,6 +1528,7 @@ class TextClause(Executable, ClauseElement):
def compare(self, other):
return isinstance(other, TextClause) and other.text == self.text
+
class Null(ColumnElement):
"""Represent the NULL keyword in a SQL statement.
@@ -1602,6 +1611,7 @@ class False_(ColumnElement):
def compare(self, other):
return isinstance(other, False_)
+
class True_(ColumnElement):
"""Represent the ``true`` keyword, or equivalent, in a SQL statement.
@@ -1671,6 +1681,7 @@ NULL = Null()
FALSE = False_()
TRUE = True_()
+
class ClauseList(ClauseElement):
"""Describe a list of clauses, separated by an operator.
@@ -1704,7 +1715,7 @@ class ClauseList(ClauseElement):
def append(self, clause):
if self.group_contents:
- self.clauses.append(_literal_as_text(clause).\
+ self.clauses.append(_literal_as_text(clause).
self_group(against=self.operator))
else:
self.clauses.append(_literal_as_text(clause))
@@ -1743,13 +1754,12 @@ class ClauseList(ClauseElement):
return False
-
class BooleanClauseList(ClauseList, ColumnElement):
__visit_name__ = 'clauselist'
def __init__(self, *arg, **kw):
raise NotImplementedError(
- "BooleanClauseList has a private constructor")
+ "BooleanClauseList has a private constructor")
@classmethod
def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
@@ -1772,7 +1782,7 @@ class BooleanClauseList(ClauseList, ColumnElement):
return clauses[0].self_group(against=operators._asbool)
convert_clauses = [c.self_group(against=operator)
- for c in convert_clauses]
+ for c in convert_clauses]
self = cls.__new__(cls)
self.clauses = convert_clauses
@@ -1872,6 +1882,7 @@ class BooleanClauseList(ClauseList, ColumnElement):
and_ = BooleanClauseList.and_
or_ = BooleanClauseList.or_
+
class Tuple(ClauseList, ColumnElement):
"""Represent a SQL tuple."""
@@ -1899,7 +1910,7 @@ class Tuple(ClauseList, ColumnElement):
clauses = [_literal_as_binds(c) for c in clauses]
self._type_tuple = [arg.type for arg in clauses]
self.type = kw.pop('type_', self._type_tuple[0]
- if self._type_tuple else type_api.NULLTYPE)
+ if self._type_tuple else type_api.NULLTYPE)
super(Tuple, self).__init__(*clauses, **kw)
@@ -1910,7 +1921,7 @@ class Tuple(ClauseList, ColumnElement):
def _bind_param(self, operator, obj):
return Tuple(*[
BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=type_, unique=True)
+ _compared_to_type=type_, unique=True)
for o, type_ in zip(obj, self._type_tuple)
]).self_group()
@@ -1981,9 +1992,9 @@ class Case(ColumnElement):
used via the
:paramref:`.case.value` parameter, which is passed a column
expression to be compared. In this form, the :paramref:`.case.whens`
- parameter is passed as a dictionary containing expressions to be compared
- against keyed to result expressions. The statement below is equivalent
- to the preceding statement::
+ parameter is passed as a dictionary containing expressions to be
+ compared against keyed to result expressions. The statement below is
+ equivalent to the preceding statement::
stmt = select([users_table]).\\
where(
@@ -2028,23 +2039,24 @@ class Case(ColumnElement):
ELSE 'lessthan10'
END
- :param whens: The criteria to be compared against, :paramref:`.case.whens`
- accepts two different forms, based on whether or not :paramref:`.case.value`
- is used.
+ :param whens: The criteria to be compared against,
+ :paramref:`.case.whens` accepts two different forms, based on
+ whether or not :paramref:`.case.value` is used.
- In the first form, it accepts a list of 2-tuples; each 2-tuple consists
- of ``(<sql expression>, <value>)``, where the SQL expression is a
- boolean expression and "value" is a resulting value, e.g.::
+ In the first form, it accepts a list of 2-tuples; each 2-tuple
+ consists of ``(<sql expression>, <value>)``, where the SQL
+ expression is a boolean expression and "value" is a resulting value,
+ e.g.::
case([
(users_table.c.name == 'wendy', 'W'),
(users_table.c.name == 'jack', 'J')
])
- In the second form, it accepts a Python dictionary of comparison values
- mapped to a resulting value; this form requires :paramref:`.case.value`
- to be present, and values will be compared using the ``==`` operator,
- e.g.::
+ In the second form, it accepts a Python dictionary of comparison
+ values mapped to a resulting value; this form requires
+ :paramref:`.case.value` to be present, and values will be compared
+ using the ``==`` operator, e.g.::
case(
{"wendy": "W", "jack": "J"},
@@ -2072,12 +2084,12 @@ class Case(ColumnElement):
if value is not None:
whenlist = [
(_literal_as_binds(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
+ _literal_as_binds(r)) for (c, r) in whens
]
else:
whenlist = [
(_no_literals(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
+ _literal_as_binds(r)) for (c, r) in whens
]
if whenlist:
@@ -2101,7 +2113,7 @@ class Case(ColumnElement):
if self.value is not None:
self.value = clone(self.value, **kw)
self.whens = [(clone(x, **kw), clone(y, **kw))
- for x, y in self.whens]
+ for x, y in self.whens]
if self.else_ is not None:
self.else_ = clone(self.else_, **kw)
@@ -2117,7 +2129,7 @@ class Case(ColumnElement):
@property
def _from_objects(self):
return list(itertools.chain(*[x._from_objects for x in
- self.get_children()]))
+ self.get_children()]))
def literal_column(text, type_=None):
@@ -2144,7 +2156,6 @@ def literal_column(text, type_=None):
return ColumnClause(text, type_=type_, is_literal=True)
-
class Cast(ColumnElement):
"""Represent a ``CAST`` expression.
@@ -2276,10 +2287,11 @@ class UnaryExpression(ColumnElement):
__visit_name__ = 'unary'
def __init__(self, element, operator=None, modifier=None,
- type_=None, negate=None):
+ type_=None, negate=None):
self.operator = operator
self.modifier = modifier
- self.element = element.self_group(against=self.operator or self.modifier)
+ self.element = element.self_group(
+ against=self.operator or self.modifier)
self.type = type_api.to_instance(type_)
self.negate = negate
@@ -2302,11 +2314,13 @@ class UnaryExpression(ColumnElement):
SELECT id, name FROM user ORDER BY name DESC NULLS FIRST
Like :func:`.asc` and :func:`.desc`, :func:`.nullsfirst` is typically
- invoked from the column expression itself using :meth:`.ColumnElement.nullsfirst`,
- rather than as its standalone function version, as in::
+ invoked from the column expression itself using
+ :meth:`.ColumnElement.nullsfirst`, rather than as its standalone
+ function version, as in::
- stmt = select([users_table]).\\
- order_by(users_table.c.name.desc().nullsfirst())
+ stmt = (select([users_table]).
+ order_by(users_table.c.name.desc().nullsfirst())
+ )
.. seealso::
@@ -2320,8 +2334,7 @@ class UnaryExpression(ColumnElement):
"""
return UnaryExpression(
- _literal_as_text(column), modifier=operators.nullsfirst_op)
-
+ _literal_as_text(column), modifier=operators.nullsfirst_op)
@classmethod
def _create_nullslast(cls, column):
@@ -2342,8 +2355,9 @@ class UnaryExpression(ColumnElement):
SELECT id, name FROM user ORDER BY name DESC NULLS LAST
Like :func:`.asc` and :func:`.desc`, :func:`.nullslast` is typically
- invoked from the column expression itself using :meth:`.ColumnElement.nullslast`,
- rather than as its standalone function version, as in::
+ invoked from the column expression itself using
+ :meth:`.ColumnElement.nullslast`, rather than as its standalone
+ function version, as in::
stmt = select([users_table]).\\
order_by(users_table.c.name.desc().nullslast())
@@ -2362,7 +2376,6 @@ class UnaryExpression(ColumnElement):
return UnaryExpression(
_literal_as_text(column), modifier=operators.nullslast_op)
-
@classmethod
def _create_desc(cls, column):
"""Produce a descending ``ORDER BY`` clause element.
@@ -2475,8 +2488,8 @@ class UnaryExpression(ColumnElement):
"""
expr = _literal_as_binds(expr)
- return UnaryExpression(expr,
- operator=operators.distinct_op, type_=expr.type)
+ return UnaryExpression(
+ expr, operator=operators.distinct_op, type_=expr.type)
@util.memoized_property
def _order_by_label_element(self):
@@ -2557,7 +2570,7 @@ class BinaryExpression(ColumnElement):
__visit_name__ = 'binary'
def __init__(self, left, right, operator, type_=None,
- negate=None, modifiers=None):
+ negate=None, modifiers=None):
# allow compatibility with libraries that
# refer to BinaryExpression directly and pass strings
if isinstance(operator, util.string_types):
@@ -2634,8 +2647,6 @@ class BinaryExpression(ColumnElement):
return super(BinaryExpression, self)._negate()
-
-
class Grouping(ColumnElement):
"""Represent a grouping within a column expression"""
@@ -2747,7 +2758,7 @@ class Over(ColumnElement):
return list(itertools.chain(
*[c._from_objects for c in
(self.func, self.partition_by, self.order_by)
- if c is not None]
+ if c is not None]
))
@@ -2781,8 +2792,9 @@ class Label(ColumnElement):
if name:
self.name = name
else:
- self.name = _anonymous_label('%%(%d %s)s' % (id(self),
- getattr(element, 'name', 'anon')))
+ self.name = _anonymous_label(
+ '%%(%d %s)s' % (id(self), getattr(element, 'name', 'anon'))
+ )
self.key = self._label = self._key_label = self.name
self._element = element
self._type = type_
@@ -2798,8 +2810,8 @@ class Label(ColumnElement):
@util.memoized_property
def type(self):
return type_api.to_instance(
- self._type or getattr(self._element, 'type', None)
- )
+ self._type or getattr(self._element, 'type', None)
+ )
@util.memoized_property
def element(self):
@@ -2809,8 +2821,8 @@ class Label(ColumnElement):
sub_element = self._element.self_group(against=against)
if sub_element is not self._element:
return Label(self.name,
- sub_element,
- type_=self._type)
+ sub_element,
+ type_=self._type)
else:
return self
@@ -2834,7 +2846,7 @@ class Label(ColumnElement):
def _make_proxy(self, selectable, name=None, **kw):
e = self.element._make_proxy(selectable,
- name=name if name else self.name)
+ name=name if name else self.name)
e._proxies.append(self)
if self._type is not None:
e.type = self._type
@@ -2861,10 +2873,10 @@ class ColumnClause(Immutable, ColumnElement):
:class:`.Column` object. While the :class:`.Column` class has all the
same capabilities as :class:`.ColumnClause`, the :class:`.ColumnClause`
class is usable by itself in those cases where behavioral requirements
- are limited to simple SQL expression generation. The object has none of the
- associations with schema-level metadata or with execution-time behavior
- that :class:`.Column` does, so in that sense is a "lightweight" version
- of :class:`.Column`.
+ are limited to simple SQL expression generation. The object has none of
+ the associations with schema-level metadata or with execution-time
+ behavior that :class:`.Column` does, so in that sense is a "lightweight"
+ version of :class:`.Column`.
Full details on :class:`.ColumnClause` usage are at :func:`.column`.
@@ -2897,8 +2909,8 @@ class ColumnClause(Immutable, ColumnElement):
SELECT id, name FROM user
- Once constructed, :func:`.column` may be used like any other SQL expression
- element such as within :func:`.select` constructs::
+ Once constructed, :func:`.column` may be used like any other SQL
+ expression element such as within :func:`.select` constructs::
from sqlalchemy.sql import column
@@ -2932,8 +2944,9 @@ class ColumnClause(Immutable, ColumnElement):
A :func:`.column` / :func:`.table` construct like that illustrated
above can be created in an
- ad-hoc fashion and is not associated with any :class:`.schema.MetaData`,
- DDL, or events, unlike its :class:`.Table` counterpart.
+ ad-hoc fashion and is not associated with any
+ :class:`.schema.MetaData`, DDL, or events, unlike its
+ :class:`.Table` counterpart.
:param text: the text of the element.
@@ -2943,8 +2956,8 @@ class ColumnClause(Immutable, ColumnElement):
:param is_literal: if True, the :class:`.ColumnClause` is assumed to
be an exact expression that will be delivered to the output with no
quoting rules applied regardless of case-sensitive settings. The
- :func:`.literal_column()` function essentially invokes :func:`.column`
- while passing ``is_literal=True``.
+ :func:`.literal_column()` function essentially invokes
+ :func:`.column` while passing ``is_literal=True``.
.. seealso::
@@ -2965,13 +2978,13 @@ class ColumnClause(Immutable, ColumnElement):
def _compare_name_for_result(self, other):
if self.is_literal or \
- self.table is None or self.table._textual or \
- not hasattr(other, 'proxy_set') or (
- isinstance(other, ColumnClause) and
- (other.is_literal or
- other.table is None or
- other.table._textual)
- ):
+ self.table is None or self.table._textual or \
+ not hasattr(other, 'proxy_set') or (
+ isinstance(other, ColumnClause) and
+ (other.is_literal or
+ other.table is None or
+ other.table._textual)
+ ):
return (hasattr(other, 'name') and self.name == other.name) or \
(hasattr(other, '_label') and self._label == other._label)
else:
@@ -3020,7 +3033,7 @@ class ColumnClause(Immutable, ColumnElement):
elif t is not None and t.named_with_column:
if getattr(t, 'schema', None):
label = t.schema.replace('.', '_') + "_" + \
- t.name + "_" + name
+ t.name + "_" + name
else:
label = t.name + "_" + name
@@ -3053,23 +3066,23 @@ class ColumnClause(Immutable, ColumnElement):
def _bind_param(self, operator, obj):
return BindParameter(self.name, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type,
- unique=True)
+ _compared_to_operator=operator,
+ _compared_to_type=self.type,
+ unique=True)
def _make_proxy(self, selectable, name=None, attach=True,
- name_is_truncatable=False, **kw):
+ name_is_truncatable=False, **kw):
# propagate the "is_literal" flag only if we are keeping our name,
# otherwise it's considered to be a label
is_literal = self.is_literal and (name is None or name == self.name)
c = self._constructor(
- _as_truncated(name or self.name) if \
- name_is_truncatable else \
- (name or self.name),
- type_=self.type,
- _selectable=selectable,
- is_literal=is_literal
- )
+ _as_truncated(name or self.name) if
+ name_is_truncatable else
+ (name or self.name),
+ type_=self.type,
+ _selectable=selectable,
+ is_literal=is_literal
+ )
if name is None:
c.key = self.key
c._proxies = [self]
@@ -3128,11 +3141,11 @@ class quoted_name(util.text_type):
such a backend.
The :class:`.quoted_name` object is normally created automatically
- when specifying the name for key schema constructs such as :class:`.Table`,
- :class:`.Column`, and others. The class can also be passed explicitly
- as the name to any function that receives a name which can be quoted.
- Such as to use the :meth:`.Engine.has_table` method with an unconditionally
- quoted name::
+ when specifying the name for key schema constructs such as
+ :class:`.Table`, :class:`.Column`, and others. The class can also be
+ passed explicitly as the name to any function that receives a name which
+ can be quoted. Such as to use the :meth:`.Engine.has_table` method with
+ an unconditionally quoted name::
from sqlalchemy import create_engine
from sqlalchemy.sql.elements import quoted_name
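
[editor's note] A completed, hedged version of the example above, which the diff context truncates, run against an in-memory SQLite database:

    from sqlalchemy import create_engine
    from sqlalchemy.sql.elements import quoted_name

    engine = create_engine('sqlite://')

    # quote=True forces quoting even where the dialect would not require it
    name = quoted_name('some_table', quote=True)
    print(engine.has_table(name))  # False; the name is emitted quoted
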
@@ -3157,8 +3170,8 @@ class quoted_name(util.text_type):
# elif not sprcls and quote is None:
# return value
elif isinstance(value, cls) and (
- quote is None or value.quote == quote
- ):
+ quote is None or value.quote == quote
+ ):
return value
self = super(quoted_name, cls).__new__(cls, value)
self.quote = quote
@@ -3187,13 +3200,14 @@ class quoted_name(util.text_type):
backslashed = backslashed.decode('ascii')
return "'%s'" % backslashed
+
class _truncated_label(quoted_name):
"""A unicode subclass used to identify symbolic "
"names that may require truncation."""
def __new__(cls, value, quote=None):
quote = getattr(value, "quote", quote)
- #return super(_truncated_label, cls).__new__(cls, value, quote, True)
+ # return super(_truncated_label, cls).__new__(cls, value, quote, True)
return super(_truncated_label, cls).__new__(cls, value, quote)
def __reduce__(self):
@@ -3281,17 +3295,17 @@ class _anonymous_label(_truncated_label):
def __add__(self, other):
return _anonymous_label(
- quoted_name(
- util.text_type.__add__(self, util.text_type(other)),
- self.quote)
- )
+ quoted_name(
+ util.text_type.__add__(self, util.text_type(other)),
+ self.quote)
+ )
def __radd__(self, other):
return _anonymous_label(
- quoted_name(
- util.text_type.__add__(util.text_type(other), self),
- self.quote)
- )
+ quoted_name(
+ util.text_type.__add__(util.text_type(other), self),
+ self.quote)
+ )
def apply_map(self, map_):
if self.quote is not None:
@@ -3353,10 +3367,11 @@ def _cloned_intersection(a, b):
return set(elem for elem in a
if all_overlap.intersection(elem._cloned_set))
+
def _cloned_difference(a, b):
all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
return set(elem for elem in a
- if not all_overlap.intersection(elem._cloned_set))
+ if not all_overlap.intersection(elem._cloned_set))
def _labeled(element):
@@ -3438,7 +3453,7 @@ def _no_literals(element):
def _is_literal(element):
return not isinstance(element, Visitable) and \
- not hasattr(element, '__clause_element__')
+ not hasattr(element, '__clause_element__')
def _only_column_elements_or_none(element, name):
@@ -3453,10 +3468,11 @@ def _only_column_elements(element, name):
element = element.__clause_element__()
if not isinstance(element, ColumnElement):
raise exc.ArgumentError(
- "Column-based expression object expected for argument "
- "'%s'; got: '%s', type %s" % (name, element, type(element)))
+ "Column-based expression object expected for argument "
+ "'%s'; got: '%s', type %s" % (name, element, type(element)))
return element
+
def _literal_as_binds(element, name=None, type_=None):
if hasattr(element, '__clause_element__'):
return element.__clause_element__()
@@ -3509,18 +3525,18 @@ def _type_from_args(args):
def _corresponding_column_or_error(fromclause, column,
- require_embedded=False):
+ require_embedded=False):
c = fromclause.corresponding_column(column,
- require_embedded=require_embedded)
+ require_embedded=require_embedded)
if c is None:
raise exc.InvalidRequestError(
- "Given column '%s', attached to table '%s', "
- "failed to locate a corresponding column from table '%s'"
- %
- (column,
- getattr(column, 'table', None),
- fromclause.description)
- )
+ "Given column '%s', attached to table '%s', "
+ "failed to locate a corresponding column from table '%s'"
+ %
+ (column,
+ getattr(column, 'table', None),
+ fromclause.description)
+ )
return c
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index c2fd0907d..fd57f9be8 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -31,23 +31,23 @@ from .visitors import Visitable
from .functions import func, modifier, FunctionElement, Function
from ..util.langhelpers import public_factory
from .elements import ClauseElement, ColumnElement,\
- BindParameter, UnaryExpression, BooleanClauseList, \
- Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
- True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
- Grouping, not_, \
- collate, literal_column, between,\
- literal, outparam, type_coerce, ClauseList
+ BindParameter, UnaryExpression, BooleanClauseList, \
+ Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
+ True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
+ Grouping, not_, \
+ collate, literal_column, between,\
+ literal, outparam, type_coerce, ClauseList
from .elements import SavepointClause, RollbackToSavepointClause, \
- ReleaseSavepointClause
+ ReleaseSavepointClause
from .base import ColumnCollection, Generative, Executable, \
- PARSE_AUTOCOMMIT
+ PARSE_AUTOCOMMIT
from .selectable import Alias, Join, Select, Selectable, TableClause, \
- CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
- alias, GenerativeSelect, \
- subquery, HasPrefixes, Exists, ScalarSelect, TextAsFrom
+ CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
+ alias, GenerativeSelect, \
+ subquery, HasPrefixes, Exists, ScalarSelect, TextAsFrom
from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
@@ -71,17 +71,24 @@ cast = public_factory(Cast, ".expression.cast")
extract = public_factory(Extract, ".expression.extract")
tuple_ = public_factory(Tuple, ".expression.tuple_")
except_ = public_factory(CompoundSelect._create_except, ".expression.except_")
-except_all = public_factory(CompoundSelect._create_except_all, ".expression.except_all")
-intersect = public_factory(CompoundSelect._create_intersect, ".expression.intersect")
-intersect_all = public_factory(CompoundSelect._create_intersect_all, ".expression.intersect_all")
+except_all = public_factory(
+ CompoundSelect._create_except_all, ".expression.except_all")
+intersect = public_factory(
+ CompoundSelect._create_intersect, ".expression.intersect")
+intersect_all = public_factory(
+ CompoundSelect._create_intersect_all, ".expression.intersect_all")
union = public_factory(CompoundSelect._create_union, ".expression.union")
-union_all = public_factory(CompoundSelect._create_union_all, ".expression.union_all")
+union_all = public_factory(
+ CompoundSelect._create_union_all, ".expression.union_all")
exists = public_factory(Exists, ".expression.exists")
-nullsfirst = public_factory(UnaryExpression._create_nullsfirst, ".expression.nullsfirst")
-nullslast = public_factory(UnaryExpression._create_nullslast, ".expression.nullslast")
+nullsfirst = public_factory(
+ UnaryExpression._create_nullsfirst, ".expression.nullsfirst")
+nullslast = public_factory(
+ UnaryExpression._create_nullslast, ".expression.nullslast")
asc = public_factory(UnaryExpression._create_asc, ".expression.asc")
desc = public_factory(UnaryExpression._create_desc, ".expression.desc")
-distinct = public_factory(UnaryExpression._create_distinct, ".expression.distinct")
+distinct = public_factory(
+ UnaryExpression._create_distinct, ".expression.distinct")
true = public_factory(True_._singleton, ".expression.true")
false = public_factory(False_._singleton, ".expression.false")
null = public_factory(Null._singleton, ".expression.null")
@@ -96,14 +103,13 @@ delete = public_factory(Delete, ".expression.delete")
# these might be better off in some other namespace
from .base import _from_objects
from .elements import _literal_as_text, _clause_element_as_expr,\
- _is_column, _labeled, _only_column_elements, _string_or_unprintable, \
+ _is_column, _labeled, _only_column_elements, _string_or_unprintable, \
_truncated_label, _clone, _cloned_difference, _cloned_intersection,\
_column_as_key, _literal_as_binds, _select_iterables, \
_corresponding_column_or_error
from .selectable import _interpret_as_from
-
# old names for compatibility
_Executable = Executable
_BindParamClause = BindParameter
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index a18ebf7e2..11e758364 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -11,8 +11,8 @@
from . import sqltypes, schema
from .base import Executable
from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
- literal_column, _type_from_args, ColumnElement, _clone,\
- Over, BindParameter
+ literal_column, _type_from_args, ColumnElement, _clone,\
+ Over, BindParameter
from .selectable import FromClause, Select
from . import operators
@@ -58,9 +58,9 @@ class FunctionElement(Executable, ColumnElement, FromClause):
"""
args = [_literal_as_binds(c, self.name) for c in clauses]
self.clause_expr = ClauseList(
- operator=operators.comma_op,
- group_contents=True, *args).\
- self_group()
+ operator=operators.comma_op,
+ group_contents=True, *args).\
+ self_group()
def _execute_on_connection(self, connection, multiparams, params):
return connection._execute_function(self, multiparams, params)
@@ -160,7 +160,7 @@ class FunctionElement(Executable, ColumnElement, FromClause):
def _bind_param(self, operator, obj):
return BindParameter(None, obj, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
+ _compared_to_type=self.type, unique=True)
class _FunctionGenerator(object):
@@ -271,16 +271,18 @@ func = _FunctionGenerator()
.. note::
The :data:`.func` construct has only limited support for calling
- standalone "stored procedures", especially those with special parameterization
- concerns.
+ standalone "stored procedures", especially those with special
+ parameterization concerns.
See the section :ref:`stored_procedures` for details on how to use
- the DBAPI-level ``callproc()`` method for fully traditional stored procedures.
+ the DBAPI-level ``callproc()`` method for fully traditional stored
+ procedures.
"""
modifier = _FunctionGenerator(group=False)
+
class Function(FunctionElement):
"""Describe a named SQL function.
@@ -315,9 +317,10 @@ class Function(FunctionElement):
def _bind_param(self, operator, obj):
return BindParameter(self.name, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type,
- unique=True)
+ _compared_to_operator=operator,
+ _compared_to_type=self.type,
+ unique=True)
+
class _GenericMeta(VisitableType):
def __init__(cls, clsname, bases, clsdict):
@@ -413,8 +416,8 @@ class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
self.packagenames = []
self._bind = kwargs.get('bind', None)
self.clause_expr = ClauseList(
- operator=operators.comma_op,
- group_contents=True, *parsed_args).self_group()
+ operator=operators.comma_op,
+ group_contents=True, *parsed_args).self_group()
self.type = sqltypes.to_instance(
kwargs.pop("type_", None) or getattr(self, 'type', None))
@@ -436,7 +439,7 @@ class next_value(GenericFunction):
def __init__(self, seq, **kw):
assert isinstance(seq, schema.Sequence), \
- "next_value() accepts a Sequence object as input."
+ "next_value() accepts a Sequence object as input."
self._bind = kw.get('bind', None)
self.sequence = seq
diff --git a/lib/sqlalchemy/sql/naming.py b/lib/sqlalchemy/sql/naming.py
index 053db3e34..9e57418b0 100644
--- a/lib/sqlalchemy/sql/naming.py
+++ b/lib/sqlalchemy/sql/naming.py
@@ -11,7 +11,7 @@
"""
from .schema import Constraint, ForeignKeyConstraint, PrimaryKeyConstraint, \
- UniqueConstraint, CheckConstraint, Index, Table, Column
+ UniqueConstraint, CheckConstraint, Index, Table, Column
from .. import event, events
from .. import exc
from .elements import _truncated_label, _defer_name, _defer_none_name, conv
@@ -19,6 +19,7 @@ import re
class ConventionDict(object):
+
def __init__(self, const, table, convention):
self.const = const
self._is_fk = isinstance(const, ForeignKeyConstraint)
@@ -94,6 +95,7 @@ _prefix_dict = {
ForeignKeyConstraint: "fk"
}
+
def _get_convention(dict_, key):
for super_ in key.__mro__:
@@ -104,6 +106,7 @@ def _get_convention(dict_, key):
else:
return None
+
def _constraint_name_for_table(const, table):
metadata = table.metadata
convention = _get_convention(metadata.naming_convention, type(const))
@@ -111,16 +114,17 @@ def _constraint_name_for_table(const, table):
if isinstance(const.name, conv):
return const.name
elif convention is not None and (
- const.name is None or not isinstance(const.name, conv) and
- "constraint_name" in convention
- ):
+ const.name is None or not isinstance(const.name, conv) and
+ "constraint_name" in convention
+ ):
return conv(
- convention % ConventionDict(const, table,
- metadata.naming_convention)
- )
+ convention % ConventionDict(const, table,
+ metadata.naming_convention)
+ )
elif isinstance(convention, _defer_none_name):
return None
+
@event.listens_for(Constraint, "after_parent_attach")
@event.listens_for(Index, "after_parent_attach")
def _constraint_name(const, table):
@@ -129,8 +133,8 @@ def _constraint_name(const, table):
# to link the column attached to the table as this constraint
# associated with the table.
event.listen(table, "after_parent_attach",
- lambda col, table: _constraint_name(const, table)
- )
+ lambda col, table: _constraint_name(const, table)
+ )
elif isinstance(table, Table):
if isinstance(const.name, (conv, _defer_name)):
return
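
[editor's note] For context, a hedged sketch of how the convention lookups in this module surface at the user level (the convention string and names are hypothetical; requires the 0.9.2+ ``naming_convention`` parameter):

    from sqlalchemy import (MetaData, Table, Column, Integer,
                            UniqueConstraint)

    metadata = MetaData(naming_convention={
        "uq": "uq_%(table_name)s_%(column_0_name)s"})
    t = Table('user', metadata,
              Column('id', Integer, primary_key=True),
              Column('email', Integer),
              UniqueConstraint('email'))

    for const in t.constraints:
        print(type(const).__name__, const.name)
    # e.g. UniqueConstraint uq_user_email, among the table's constraints
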
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 50293d239..945356328 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -16,7 +16,7 @@ from .. import util
from operator import (
and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg,
getitem, lshift, rshift
- )
+)
if util.py2k:
from operator import div
@@ -24,11 +24,11 @@ else:
div = truediv
-
class Operators(object):
"""Base of comparison and logical operators.
- Implements base methods :meth:`~sqlalchemy.sql.operators.Operators.operate` and
+ Implements base methods
+ :meth:`~sqlalchemy.sql.operators.Operators.operate` and
:meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as
:meth:`~sqlalchemy.sql.operators.Operators.__and__`,
:meth:`~sqlalchemy.sql.operators.Operators.__or__`,
@@ -38,6 +38,7 @@ class Operators(object):
:class:`.ColumnOperators`.
"""
+
def __and__(self, other):
"""Implement the ``&`` operator.
@@ -136,13 +137,13 @@ class Operators(object):
.. versionadded:: 0.8 - added the 'precedence' argument.
:param is_comparison: if True, the operator will be considered as a
- "comparison" operator, that is which evaulates to a boolean true/false
- value, like ``==``, ``>``, etc. This flag should be set so that
- ORM relationships can establish that the operator is a comparison
- operator when used in a custom join condition.
+ "comparison" operator, that is which evaulates to a boolean
+ true/false value, like ``==``, ``>``, etc. This flag should be set
+ so that ORM relationships can establish that the operator is a
+ comparison operator when used in a custom join condition.
- .. versionadded:: 0.9.2 - added the :paramref:`.Operators.op.is_comparison`
- flag.
+ .. versionadded:: 0.9.2 - added the
+ :paramref:`.Operators.op.is_comparison` flag.
.. seealso::
@@ -422,8 +423,8 @@ class ColumnOperators(Operators):
def notin_(self, other):
"""implement the ``NOT IN`` operator.
- This is equivalent to using negation with :meth:`.ColumnOperators.in_`,
- i.e. ``~x.in_(y)``.
+ This is equivalent to using negation with
+ :meth:`.ColumnOperators.in_`, i.e. ``~x.in_(y)``.
.. versionadded:: 0.8
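
The equivalence reads as follows in the 0.9-era ``select()`` calling style
(table and values are illustrative)::

    from sqlalchemy import Column, Integer, MetaData, Table, select

    meta = MetaData()
    t = Table("t", meta, Column("x", Integer))

    # both compile to the same NOT IN expression
    stmt1 = select([t]).where(t.c.x.notin_([1, 2, 3]))
    stmt2 = select([t]).where(~t.c.x.in_([1, 2, 3]))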
@@ -681,9 +682,11 @@ def exists():
def istrue(a):
raise NotImplementedError()
+
def isfalse(a):
raise NotImplementedError()
+
def is_(a, b):
return a.is_(b)
@@ -719,6 +722,7 @@ def notilike_op(a, b, escape=None):
def between_op(a, b, c, symmetric=False):
return a.between(b, c, symmetric=symmetric)
+
def notbetween_op(a, b, c, symmetric=False):
return a.notbetween(b, c, symmetric=symmetric)
@@ -803,7 +807,7 @@ def is_commutative(op):
def is_ordering_modifier(op):
return op in (asc_op, desc_op,
- nullsfirst_op, nullslast_op)
+ nullsfirst_op, nullslast_op)
_associative = _commutative.union([concat_op, and_, or_])
@@ -875,6 +879,6 @@ def is_precedent(operator, against):
return False
else:
return (_PRECEDENCE.get(operator,
- getattr(operator, 'precedence', _smallest)) <=
- _PRECEDENCE.get(against,
- getattr(against, 'precedence', _largest)))
+ getattr(operator, 'precedence', _smallest)) <=
+ _PRECEDENCE.get(against,
+ getattr(against, 'precedence', _largest)))
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index 3e8b72ec5..f3af46c40 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -37,8 +37,8 @@ from . import visitors
from . import type_api
from .base import _bind_or_error, ColumnCollection
from .elements import ClauseElement, ColumnClause, _truncated_label, \
- _as_truncated, TextClause, _literal_as_text,\
- ColumnElement, _find_columns, quoted_name
+ _as_truncated, TextClause, _literal_as_text,\
+ ColumnElement, _find_columns, quoted_name
from .selectable import TableClause
import collections
import sqlalchemy
@@ -55,7 +55,6 @@ def _get_table_key(name, schema):
return schema + "." + name
-
@inspection._self_inspects
class SchemaItem(SchemaEventTarget, visitors.Visitable):
"""Base class for items that define a database schema."""
@@ -189,7 +188,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
If ``extend_existing`` or ``keep_existing`` are not set, an error is
raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
+ the given :class:`.Table` is already present in the
+ :class:`.MetaData`.
.. versionchanged:: 0.7.4
``extend_existing`` will work in conjunction
@@ -241,7 +241,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
If extend_existing or keep_existing are not set, an error is
raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
+ the given :class:`.Table` is already present in the
+ :class:`.MetaData`.
:param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
which will be passed to :func:`.event.listen` upon construction.
@@ -287,8 +288,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
``name`` parameter, in that quoting is applied for reserved words or
case-sensitive names; to enable unconditional quoting for the
schema name, specify the flag
- ``quote_schema=True`` to the constructor, or use the :class:`.quoted_name`
- construct to specify the name.
+ ``quote_schema=True`` to the constructor, or use the
+ :class:`.quoted_name` construct to specify the name.
:param useexisting: Deprecated. Use extend_existing.
@@ -357,7 +358,6 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
metadata._remove_table(name, schema)
raise
-
@property
@util.deprecated('0.9', 'Use ``table.schema.quote``')
def quote_schema(self):
@@ -379,7 +379,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
# calling the superclass constructor.
def _init(self, name, metadata, *args, **kwargs):
- super(Table, self).__init__(quoted_name(name, kwargs.pop('quote', None)))
+ super(Table, self).__init__(
+ quoted_name(name, kwargs.pop('quote', None)))
self.metadata = metadata
self.schema = kwargs.pop('schema', None)
@@ -438,16 +439,17 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
self, include_columns, exclude_columns
)
else:
- bind = _bind_or_error(metadata,
- msg="No engine is bound to this Table's MetaData. "
- "Pass an engine to the Table via "
- "autoload_with=<someengine>, "
- "or associate the MetaData with an engine via "
- "metadata.bind=<someengine>")
+ bind = _bind_or_error(
+ metadata,
+ msg="No engine is bound to this Table's MetaData. "
+ "Pass an engine to the Table via "
+ "autoload_with=<someengine>, "
+ "or associate the MetaData with an engine via "
+ "metadata.bind=<someengine>")
bind.run_callable(
- bind.dialect.reflecttable,
- self, include_columns, exclude_columns
- )
+ bind.dialect.reflecttable,
+ self, include_columns, exclude_columns
+ )
@property
def _sorted_constraints(self):
@@ -488,7 +490,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
else:
exclude_columns = ()
self._autoload(
- self.metadata, autoload_with, include_columns, exclude_columns)
+ self.metadata, autoload_with,
+ include_columns, exclude_columns)
self._extra_kwargs(**kwargs)
self._init_items(*args)
@@ -502,12 +505,14 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
@util.memoized_property
def _autoincrement_column(self):
for col in self.primary_key:
- if col.autoincrement and \
- col.type._type_affinity is not None and \
- issubclass(col.type._type_affinity, type_api.INTEGERTYPE._type_affinity) and \
- (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \
- isinstance(col.default, (type(None), Sequence)) and \
- (col.server_default is None or col.server_default.reflected):
+ if (col.autoincrement and col.type._type_affinity is not None and
+ issubclass(col.type._type_affinity,
+ type_api.INTEGERTYPE._type_affinity) and
+ (not col.foreign_keys or
+ col.autoincrement == 'ignore_fk') and
+ isinstance(col.default, (type(None), Sequence)) and
+ (col.server_default is None or
+ col.server_default.reflected)):
return col
@property
@@ -516,8 +521,9 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
This value is used as the dictionary key within the
:attr:`.MetaData.tables` collection. It is typically the same
- as that of :attr:`.Table.name` for a table with no :attr:`.Table.schema`
- set; otherwise it is typically of the form ``schemaname.tablename``.
+ as that of :attr:`.Table.name` for a table with no
+ :attr:`.Table.schema` set; otherwise it is typically of the form
+ ``schemaname.tablename``.
"""
return _get_table_key(self.name, self.schema)
@@ -612,7 +618,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
self.metadata = metadata
def get_children(self, column_collections=True,
- schema_visitor=False, **kw):
+ schema_visitor=False, **kw):
if not schema_visitor:
return TableClause.get_children(
self, column_collections=column_collections, **kw)
@@ -629,7 +635,7 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
bind = _bind_or_error(self)
return bind.run_callable(bind.dialect.has_table,
- self.name, schema=self.schema)
+ self.name, schema=self.schema)
def create(self, bind=None, checkfirst=False):
"""Issue a ``CREATE`` statement for this
@@ -645,8 +651,8 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
if bind is None:
bind = _bind_or_error(self)
bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst)
+ self,
+ checkfirst=checkfirst)
def drop(self, bind=None, checkfirst=False):
"""Issue a ``DROP`` statement for this
@@ -661,10 +667,11 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
if bind is None:
bind = _bind_or_error(self)
bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst)
+ self,
+ checkfirst=checkfirst)
- def tometadata(self, metadata, schema=RETAIN_SCHEMA, referred_schema_fn=None):
+ def tometadata(self, metadata, schema=RETAIN_SCHEMA,
+ referred_schema_fn=None):
"""Return a copy of this :class:`.Table` associated with a different
:class:`.MetaData`.
@@ -703,9 +710,10 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
in order to provide for the schema name that should be assigned
to the referenced table of a :class:`.ForeignKeyConstraint`.
The callable accepts this parent :class:`.Table`, the
- target schema that we are changing to, the :class:`.ForeignKeyConstraint`
- object, and the existing "target schema" of that constraint. The
- function should return the string schema name that should be applied.
+ target schema that we are changing to, the
+ :class:`.ForeignKeyConstraint` object, and the existing
+ "target schema" of that constraint. The function should return the
+ string schema name that should be applied.
E.g.::
def referred_schema_fn(table, to_schema,
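
The hunk cuts the example short; a hedged completion using the call
signature given above, where ``some_table``, ``new_metadata`` and the
``'base'`` schema test are illustrative stand-ins::

    from sqlalchemy import MetaData, Table, Column, Integer

    meta = MetaData()
    some_table = Table("some_table", meta,
                       Column("id", Integer, primary_key=True))
    new_metadata = MetaData()

    def referred_schema_fn(table, to_schema, constraint, referred_schema):
        # keep constraints pointing at a fixed schema as-is; otherwise
        # follow the table into its new schema
        if referred_schema == 'base':
            return referred_schema
        return to_schema

    new_table = some_table.tometadata(new_metadata, schema="new_schema",
                                      referred_schema_fn=referred_schema_fn)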
@@ -738,18 +746,22 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
table = Table(
self.name, metadata, schema=schema,
*args, **self.kwargs
- )
+ )
for c in self.constraints:
if isinstance(c, ForeignKeyConstraint):
referred_schema = c._referred_schema
if referred_schema_fn:
- fk_constraint_schema = referred_schema_fn(self, schema, c, referred_schema)
+ fk_constraint_schema = referred_schema_fn(
+ self, schema, c, referred_schema)
else:
- fk_constraint_schema = schema if referred_schema == self.schema else None
- table.append_constraint(c.copy(schema=fk_constraint_schema, target_table=table))
+ fk_constraint_schema = (
+ schema if referred_schema == self.schema else None)
+ table.append_constraint(
+ c.copy(schema=fk_constraint_schema, target_table=table))
else:
- table.append_constraint(c.copy(schema=schema, target_table=table))
+ table.append_constraint(
+ c.copy(schema=schema, target_table=table))
for index in self.indexes:
# skip indexes that would be generated
# by the 'index' flag on Column
@@ -802,13 +814,13 @@ class Column(SchemaItem, ColumnClause):
The ``type`` argument may be the second positional argument
or specified by keyword.
- If the ``type`` is ``None`` or is omitted, it will first default to the special
- type :class:`.NullType`. If and when this :class:`.Column` is
- made to refer to another column using :class:`.ForeignKey`
- and/or :class:`.ForeignKeyConstraint`, the type of the remote-referenced
- column will be copied to this column as well, at the moment that
- the foreign key is resolved against that remote :class:`.Column`
- object.
+ If the ``type`` is ``None`` or is omitted, it will first default to
+ the special type :class:`.NullType`. If and when this
+ :class:`.Column` is made to refer to another column using
+ :class:`.ForeignKey` and/or :class:`.ForeignKeyConstraint`, the type
+ of the remote-referenced column will be copied to this column as
+ well, at the moment that the foreign key is resolved against that
+ remote :class:`.Column` object.
.. versionchanged:: 0.9.0
Support for propagation of type to a :class:`.Column` from its
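
A short sketch of the propagation just described (0.9.0+ behavior per the
changelog note)::

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    meta = MetaData()
    parent = Table("parent", meta,
                   Column("id", Integer, primary_key=True))

    # no type given: starts as NullType, then receives Integer once the
    # ForeignKey resolves against parent.id
    child = Table("child", meta,
                  Column("pid", ForeignKey("parent.id")))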
@@ -952,8 +964,8 @@ class Column(SchemaItem, ColumnClause):
y DATETIME DEFAULT NOW()
- Strings and text() will be converted into a :class:`.DefaultClause`
- object upon initialization.
+ Strings and text() will be converted into a
+ :class:`.DefaultClause` object upon initialization.
Use :class:`.FetchedValue` to indicate that an already-existing
column will generate a default value on the database side which
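
For instance, both of the following spellings end up as a
:class:`.DefaultClause`::

    from sqlalchemy import Column, DateTime, text

    col1 = Column("y", DateTime, server_default=text("NOW()"))
    col2 = Column("y", DateTime, server_default="NOW()")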
@@ -988,9 +1000,9 @@ class Column(SchemaItem, ColumnClause):
database, and should not be included in the columns list for a
``CREATE TABLE`` statement.
- For more elaborate scenarios where columns should be conditionally
- rendered differently on different backends, consider custom
- compilation rules for :class:`.CreateColumn`.
+ For more elaborate scenarios where columns should be
+ conditionally rendered differently on different backends,
+ consider custom compilation rules for :class:`.CreateColumn`.
            .. versionadded:: 0.8.3 Added the ``system=True`` parameter to
:class:`.Column`.
@@ -1019,7 +1031,7 @@ class Column(SchemaItem, ColumnClause):
name = quoted_name(name, kwargs.pop('quote', None))
elif "quote" in kwargs:
raise exc.ArgumentError("Explicit 'name' is required when "
- "sending 'quote' argument")
+ "sending 'quote' argument")
super(Column, self).__init__(name, type_)
self.key = kwargs.pop('key', name)
@@ -1076,7 +1088,7 @@ class Column(SchemaItem, ColumnClause):
args.append(self.server_onupdate._as_for_update(True))
else:
args.append(DefaultClause(self.server_onupdate,
- for_update=True))
+ for_update=True))
self._init_items(*args)
util.set_creation_order(self)
@@ -1135,7 +1147,7 @@ class Column(SchemaItem, ColumnClause):
[repr(x) for x in self.foreign_keys if x is not None] +
[repr(x) for x in self.constraints] +
[(self.table is not None and "table=<%s>" %
- self.table.description or "table=None")] +
+ self.table.description or "table=None")] +
["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
def _set_parent(self, table):
@@ -1149,8 +1161,8 @@ class Column(SchemaItem, ColumnClause):
existing = getattr(self, 'table', None)
if existing is not None and existing is not table:
raise exc.ArgumentError(
- "Column object already assigned to Table '%s'" %
- existing.description)
+ "Column object already assigned to Table '%s'" %
+ existing.description)
if self.key in table._columns:
col = table._columns.get(self.key)
@@ -1172,7 +1184,7 @@ class Column(SchemaItem, ColumnClause):
raise exc.ArgumentError(
"Trying to redefine primary-key column '%s' as a "
"non-primary-key column on table '%s'" % (
- self.key, table.fullname))
+ self.key, table.fullname))
self.table = table
if self.index:
@@ -1219,27 +1231,27 @@ class Column(SchemaItem, ColumnClause):
type_ = type_.copy(**kw)
c = self._constructor(
- name=self.name,
- type_=type_,
- key=self.key,
- primary_key=self.primary_key,
- nullable=self.nullable,
- unique=self.unique,
- system=self.system,
- #quote=self.quote,
- index=self.index,
- autoincrement=self.autoincrement,
- default=self.default,
- server_default=self.server_default,
- onupdate=self.onupdate,
- server_onupdate=self.server_onupdate,
- doc=self.doc,
- *args
- )
+ name=self.name,
+ type_=type_,
+ key=self.key,
+ primary_key=self.primary_key,
+ nullable=self.nullable,
+ unique=self.unique,
+ system=self.system,
+ # quote=self.quote,
+ index=self.index,
+ autoincrement=self.autoincrement,
+ default=self.default,
+ server_default=self.server_default,
+ onupdate=self.onupdate,
+ server_onupdate=self.server_onupdate,
+ doc=self.doc,
+ *args
+ )
return self._schema_item_copy(c)
def _make_proxy(self, selectable, name=None, key=None,
- name_is_truncatable=False, **kw):
+ name_is_truncatable=False, **kw):
"""Create a *proxy* for this column.
This is a copy of this ``Column`` referenced by a different parent
@@ -1249,15 +1261,16 @@ class Column(SchemaItem, ColumnClause):
"""
fk = [ForeignKey(f.column, _constraint=f.constraint)
- for f in self.foreign_keys]
+ for f in self.foreign_keys]
if name is None and self.name is None:
- raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
- " with this Column object until its 'name' has "
- "been assigned.")
+ raise exc.InvalidRequestError(
+ "Cannot initialize a sub-selectable"
+ " with this Column object until its 'name' has "
+ "been assigned.")
try:
c = self._constructor(
- _as_truncated(name or self.name) if \
- name_is_truncatable else (name or self.name),
+ _as_truncated(name or self.name) if
+ name_is_truncatable else (name or self.name),
self.type,
key=key if key else name if name else self.key,
primary_key=self.primary_key,
@@ -1271,7 +1284,7 @@ class Column(SchemaItem, ColumnClause):
"attribute or method which accepts the "
"standard Column constructor arguments, or "
"references the Column class itself." % self.__class__)
- )
+ )
c.table = selectable
selectable._columns.add(c)
@@ -1331,9 +1344,9 @@ class ForeignKey(DialectKWArgs, SchemaItem):
__visit_name__ = 'foreign_key'
def __init__(self, column, _constraint=None, use_alter=False, name=None,
- onupdate=None, ondelete=None, deferrable=None,
- initially=None, link_to_name=False, match=None,
- **dialect_kw):
+ onupdate=None, ondelete=None, deferrable=None,
+ initially=None, link_to_name=False, match=None,
+ **dialect_kw):
"""
Construct a column-level FOREIGN KEY.
@@ -1375,19 +1388,20 @@ class ForeignKey(DialectKWArgs, SchemaItem):
assigned ``key``.
:param use_alter: passed to the underlying
- :class:`.ForeignKeyConstraint` to indicate the constraint should be
- generated/dropped externally from the CREATE TABLE/ DROP TABLE
+ :class:`.ForeignKeyConstraint` to indicate the constraint should
+ be generated/dropped externally from the CREATE TABLE/ DROP TABLE
          statement. See that class's constructor for details.
:param match: Optional string. If set, emit MATCH <value> when issuing
DDL for this constraint. Typical values include SIMPLE, PARTIAL
and FULL.
- :param \**dialect_kw: Additional keyword arguments are dialect specific,
- and passed in the form ``<dialectname>_<argname>``. The arguments
- are ultimately handled by a corresponding :class:`.ForeignKeyConstraint`.
- See the documentation regarding an individual dialect at
- :ref:`dialect_toplevel` for detail on documented arguments.
+ :param \**dialect_kw: Additional keyword arguments are dialect
+ specific, and passed in the form ``<dialectname>_<argname>``. The
+ arguments are ultimately handled by a corresponding
+ :class:`.ForeignKeyConstraint`. See the documentation regarding
+ an individual dialect at :ref:`dialect_toplevel` for detail on
+ documented arguments.
.. versionadded:: 0.9.2
@@ -1404,13 +1418,14 @@ class ForeignKey(DialectKWArgs, SchemaItem):
if not isinstance(self._table_column, ColumnClause):
raise exc.ArgumentError(
- "String, Column, or Column-bound argument "
- "expected, got %r" % self._table_column)
- elif not isinstance(self._table_column.table, (util.NoneType, TableClause)):
+ "String, Column, or Column-bound argument "
+ "expected, got %r" % self._table_column)
+ elif not isinstance(
+ self._table_column.table, (util.NoneType, TableClause)):
raise exc.ArgumentError(
- "ForeignKey received Column not bound "
- "to a Table, got: %r" % self._table_column.table
- )
+ "ForeignKey received Column not bound "
+ "to a Table, got: %r" % self._table_column.table
+ )
# the linked ForeignKeyConstraint.
# ForeignKey will create this when parent Column
@@ -1449,20 +1464,19 @@ class ForeignKey(DialectKWArgs, SchemaItem):
"""
fk = ForeignKey(
- self._get_colspec(schema=schema),
- use_alter=self.use_alter,
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- deferrable=self.deferrable,
- initially=self.initially,
- link_to_name=self.link_to_name,
- match=self.match,
- **self._unvalidated_dialect_kw
- )
+ self._get_colspec(schema=schema),
+ use_alter=self.use_alter,
+ name=self.name,
+ onupdate=self.onupdate,
+ ondelete=self.ondelete,
+ deferrable=self.deferrable,
+ initially=self.initially,
+ link_to_name=self.link_to_name,
+ match=self.match,
+ **self._unvalidated_dialect_kw
+ )
return self._schema_item_copy(fk)
-
def _get_colspec(self, schema=None):
"""Return a string based 'column specification' for this
:class:`.ForeignKey`.
@@ -1476,7 +1490,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
return "%s.%s.%s" % (schema, tname, colname)
elif self._table_column is not None:
return "%s.%s" % (
- self._table_column.table.fullname, self._table_column.key)
+ self._table_column.table.fullname, self._table_column.key)
else:
return self._colspec
@@ -1484,7 +1498,6 @@ class ForeignKey(DialectKWArgs, SchemaItem):
def _referred_schema(self):
return self._column_tokens[0]
-
def _table_key(self):
if self._table_column is not None:
if self._table_column.table is None:
@@ -1495,8 +1508,6 @@ class ForeignKey(DialectKWArgs, SchemaItem):
schema, tname, colname = self._column_tokens
return _get_table_key(tname, schema)
-
-
target_fullname = property(_get_colspec)
def references(self, table):
@@ -1550,13 +1561,13 @@ class ForeignKey(DialectKWArgs, SchemaItem):
def _resolve_col_tokens(self):
if self.parent is None:
raise exc.InvalidRequestError(
- "this ForeignKey object does not yet have a "
- "parent Column associated with it.")
+ "this ForeignKey object does not yet have a "
+ "parent Column associated with it.")
elif self.parent.table is None:
raise exc.InvalidRequestError(
- "this ForeignKey's parent column is not yet associated "
- "with a Table.")
+ "this ForeignKey's parent column is not yet associated "
+ "with a Table.")
parenttable = self.parent.table
@@ -1580,7 +1591,6 @@ class ForeignKey(DialectKWArgs, SchemaItem):
tablekey = _get_table_key(tname, schema)
return parenttable, tablekey, colname
-
def _link_to_col_by_colstring(self, parenttable, table, colname):
if not hasattr(self.constraint, '_referred_table'):
self.constraint._referred_table = table
@@ -1606,9 +1616,10 @@ class ForeignKey(DialectKWArgs, SchemaItem):
if _column is None:
raise exc.NoReferencedColumnError(
- "Could not initialize target column for ForeignKey '%s' on table '%s': "
- "table '%s' has no column named '%s'" % (
- self._colspec, parenttable.name, table.name, key),
+ "Could not initialize target column "
+ "for ForeignKey '%s' on table '%s': "
+ "table '%s' has no column named '%s'" %
+ (self._colspec, parenttable.name, table.name, key),
table.name, key)
self._set_target_column(_column)
@@ -1664,7 +1675,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
"Could not initialize target column for "
"ForeignKey '%s' on table '%s': "
"table '%s' has no column named '%s'" % (
- self._colspec, parenttable.name, tablekey, colname),
+ self._colspec, parenttable.name, tablekey, colname),
tablekey, colname)
elif hasattr(self._colspec, '__clause_element__'):
_column = self._colspec.__clause_element__()
@@ -1676,7 +1687,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
def _set_parent(self, column):
if self.parent is not None and self.parent is not column:
raise exc.InvalidRequestError(
- "This ForeignKey already has a parent !")
+                "This ForeignKey already has a parent!")
self.parent = column
self.parent.foreign_keys.add(self)
self.parent._on_table_attach(self._set_table)
@@ -1704,7 +1715,7 @@ class ForeignKey(DialectKWArgs, SchemaItem):
deferrable=self.deferrable, initially=self.initially,
match=self.match,
**self._unvalidated_dialect_kw
- )
+ )
self.constraint._elements[self.parent] = self
self.constraint._set_parent_with_dispatch(table)
table.foreign_keys.add(self)
@@ -1717,7 +1728,8 @@ class ForeignKey(DialectKWArgs, SchemaItem):
if table_key in parenttable.metadata.tables:
table = parenttable.metadata.tables[table_key]
try:
- self._link_to_col_by_colstring(parenttable, table, colname)
+ self._link_to_col_by_colstring(
+ parenttable, table, colname)
except exc.NoReferencedColumnError:
# this is OK, we'll try later
pass
@@ -1730,12 +1742,11 @@ class ForeignKey(DialectKWArgs, SchemaItem):
self._set_target_column(_column)
-
class _NotAColumnExpr(object):
def _not_a_column_expr(self):
raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression." % self.__class__.__name__)
+ "This %s cannot be used directly "
+ "as a column expression." % self.__class__.__name__)
__clause_element__ = self_group = lambda self: self._not_a_column_expr()
_from_objects = property(lambda self: self._not_a_column_expr())
@@ -1841,8 +1852,8 @@ class ColumnDefault(DefaultGenerator):
@util.memoized_property
def is_scalar(self):
return not self.is_callable and \
- not self.is_clause_element and \
- not self.is_sequence
+ not self.is_clause_element and \
+ not self.is_sequence
def _maybe_wrap_callable(self, fn):
"""Wrap callables that don't accept a context.
@@ -1890,8 +1901,10 @@ class Sequence(DefaultGenerator):
The :class:`.Sequence` is typically associated with a primary key column::
- some_table = Table('some_table', metadata,
- Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
+ some_table = Table(
+ 'some_table', metadata,
+ Column('id', Integer, Sequence('some_table_seq'),
+ primary_key=True)
)
When CREATE TABLE is emitted for the above :class:`.Table`, if the
@@ -1938,8 +1951,8 @@ class Sequence(DefaultGenerator):
creates a sequence for us automatically".
:param quote: boolean value, when ``True`` or ``False``, explicitly
forces quoting of the schema name on or off. When left at its
- default of ``None``, normal quoting rules based on casing and reserved
- words take place.
+ default of ``None``, normal quoting rules based on casing and
+ reserved words take place.
:param quote_schema: set the quoting preferences for the ``schema``
name.
:param metadata: optional :class:`.MetaData` object which will be
@@ -2023,8 +2036,8 @@ class Sequence(DefaultGenerator):
if bind is None:
bind = _bind_or_error(self)
bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst)
+ self,
+ checkfirst=checkfirst)
def drop(self, bind=None, checkfirst=True):
"""Drops this sequence from the database."""
@@ -2032,16 +2045,16 @@ class Sequence(DefaultGenerator):
if bind is None:
bind = _bind_or_error(self)
bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst)
+ self,
+ checkfirst=checkfirst)
def _not_a_column_expr(self):
raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression. Use func.next_value(sequence) "
- "to produce a 'next value' function that's usable "
- "as a column element."
- % self.__class__.__name__)
+ "This %s cannot be used directly "
+ "as a column expression. Use func.next_value(sequence) "
+ "to produce a 'next value' function that's usable "
+ "as a column element."
+ % self.__class__.__name__)
@inspection._self_inspects
@@ -2129,7 +2142,7 @@ class DefaultClause(FetchedValue):
def __repr__(self):
return "DefaultClause(%r, for_update=%r)" % \
- (self.arg, self.for_update)
+ (self.arg, self.for_update)
class PassiveDefault(DefaultClause):
@@ -2140,9 +2153,9 @@ class PassiveDefault(DefaultClause):
Use :class:`.DefaultClause`.
"""
@util.deprecated("0.6",
- ":class:`.PassiveDefault` is deprecated. "
- "Use :class:`.DefaultClause`.",
- False)
+ ":class:`.PassiveDefault` is deprecated. "
+ "Use :class:`.DefaultClause`.",
+ False)
def __init__(self, *arg, **kw):
DefaultClause.__init__(self, *arg, **kw)
@@ -2153,8 +2166,8 @@ class Constraint(DialectKWArgs, SchemaItem):
__visit_name__ = 'constraint'
def __init__(self, name=None, deferrable=None, initially=None,
- _create_rule=None,
- **dialect_kw):
+ _create_rule=None,
+ **dialect_kw):
"""Create a SQL constraint.
:param name:
@@ -2185,10 +2198,10 @@ class Constraint(DialectKWArgs, SchemaItem):
_create_rule is used by some types to create constraints.
Currently, its call signature is subject to change at any time.
- :param \**dialect_kw: Additional keyword arguments are dialect specific,
- and passed in the form ``<dialectname>_<argname>``. See the
- documentation regarding an individual dialect at :ref:`dialect_toplevel`
- for detail on documented arguments.
+ :param \**dialect_kw: Additional keyword arguments are dialect
+ specific, and passed in the form ``<dialectname>_<argname>``. See
+ the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
"""
@@ -2207,8 +2220,8 @@ class Constraint(DialectKWArgs, SchemaItem):
except AttributeError:
pass
raise exc.InvalidRequestError(
- "This constraint is not bound to a table. Did you "
- "mean to call table.append_constraint(constraint) ?")
+ "This constraint is not bound to a table. Did you "
+ "mean to call table.append_constraint(constraint) ?")
def _set_parent(self, parent):
self.parent = parent
@@ -2239,7 +2252,7 @@ class ColumnCollectionMixin(object):
def __init__(self, *columns):
self.columns = ColumnCollection()
self._pending_colargs = [_to_schema_column_or_string(c)
- for c in columns]
+ for c in columns]
if self._pending_colargs and \
isinstance(self._pending_colargs[0], Column) and \
isinstance(self._pending_colargs[0].table, Table):
@@ -2287,7 +2300,7 @@ class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
def copy(self, **kw):
c = self.__class__(name=self.name, deferrable=self.deferrable,
- initially=self.initially, *self.columns.keys())
+ initially=self.initially, *self.columns.keys())
return self._schema_item_copy(c)
def contains_column(self, col):
@@ -2311,8 +2324,8 @@ class CheckConstraint(Constraint):
"""
def __init__(self, sqltext, name=None, deferrable=None,
- initially=None, table=None, _create_rule=None,
- _autoattach=True):
+ initially=None, table=None, _create_rule=None,
+ _autoattach=True):
"""Construct a CHECK constraint.
:param sqltext:
@@ -2337,17 +2350,17 @@ class CheckConstraint(Constraint):
"""
super(CheckConstraint, self).\
- __init__(name, deferrable, initially, _create_rule)
+ __init__(name, deferrable, initially, _create_rule)
self.sqltext = _literal_as_text(sqltext)
if table is not None:
self._set_parent_with_dispatch(table)
elif _autoattach:
cols = _find_columns(self.sqltext)
tables = set([c.table for c in cols
- if isinstance(c.table, Table)])
+ if isinstance(c.table, Table)])
if len(tables) == 1:
self._set_parent_with_dispatch(
- tables.pop())
+ tables.pop())
def __visit_name__(self):
if isinstance(self.parent, Table):
@@ -2367,12 +2380,12 @@ class CheckConstraint(Constraint):
else:
sqltext = self.sqltext
c = CheckConstraint(sqltext,
- name=self.name,
- initially=self.initially,
- deferrable=self.deferrable,
- _create_rule=self._create_rule,
- table=target_table,
- _autoattach=False)
+ name=self.name,
+ initially=self.initially,
+ deferrable=self.deferrable,
+ _create_rule=self._create_rule,
+ table=target_table,
+ _autoattach=False)
return self._schema_item_copy(c)
@@ -2381,8 +2394,9 @@ class ForeignKeyConstraint(Constraint):
Defines a single column or composite FOREIGN KEY ... REFERENCES
constraint. For a no-frills, single column foreign key, adding a
- :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand
- equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`.
+ :class:`.ForeignKey` to the definition of a :class:`.Column` is a
+ shorthand equivalent for an unnamed, single column
+ :class:`.ForeignKeyConstraint`.
Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.
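
A minimal composite-key sketch of the table-level form described above
(table and column names are illustrative)::

    from sqlalchemy import (MetaData, Table, Column, Integer,
                            ForeignKeyConstraint)

    meta = MetaData()
    node = Table("node", meta,
                 Column("node_id", Integer, primary_key=True),
                 Column("version", Integer, primary_key=True))

    # a composite FK must be declared at the table level; per-column
    # ForeignKey objects cannot span multiple columns
    element = Table("element", meta,
                    Column("node_id", Integer),
                    Column("version", Integer),
                    ForeignKeyConstraint(
                        ["node_id", "version"],
                        ["node.node_id", "node.version"]))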
@@ -2390,8 +2404,9 @@ class ForeignKeyConstraint(Constraint):
__visit_name__ = 'foreign_key_constraint'
def __init__(self, columns, refcolumns, name=None, onupdate=None,
- ondelete=None, deferrable=None, initially=None, use_alter=False,
- link_to_name=False, match=None, table=None, **dialect_kw):
+ ondelete=None, deferrable=None, initially=None,
+ use_alter=False, link_to_name=False, match=None,
+ table=None, **dialect_kw):
"""Construct a composite-capable FOREIGN KEY.
:param columns: A sequence of local column names. The named columns
@@ -2427,25 +2442,25 @@ class ForeignKeyConstraint(Constraint):
ALTER TABLE statement issued after the full collection of tables
have been created, and drop it via an ALTER TABLE statement before
          the full collection of tables is dropped. This is shorthand for the
- usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied
- as "after-create" and "before-drop" events on the MetaData object.
- This is normally used to generate/drop constraints on objects that
- are mutually dependent on each other.
+ usage of :class:`.AddConstraint` and :class:`.DropConstraint`
+ applied as "after-create" and "before-drop" events on the MetaData
+ object. This is normally used to generate/drop constraints on
+ objects that are mutually dependent on each other.
:param match: Optional string. If set, emit MATCH <value> when issuing
- DDL for this constraint. Typical values include SIMPLE, PARTIAL
- and FULL.
+ DDL for this constraint. Typical values include SIMPLE, PARTIAL
+ and FULL.
- :param \**dialect_kw: Additional keyword arguments are dialect specific,
- and passed in the form ``<dialectname>_<argname>``. See the
- documentation regarding an individual dialect at :ref:`dialect_toplevel`
- for detail on documented arguments.
+ :param \**dialect_kw: Additional keyword arguments are dialect
+ specific, and passed in the form ``<dialectname>_<argname>``. See
+ the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
.. versionadded:: 0.9.2
"""
super(ForeignKeyConstraint, self).\
- __init__(name, deferrable, initially, **dialect_kw)
+ __init__(name, deferrable, initially, **dialect_kw)
self.onupdate = onupdate
self.ondelete = ondelete
@@ -2463,18 +2478,18 @@ class ForeignKeyConstraint(Constraint):
# to the Table for string-specified names
for col, refcol in zip(columns, refcolumns):
self._elements[col] = ForeignKey(
- refcol,
- _constraint=self,
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- use_alter=self.use_alter,
- link_to_name=self.link_to_name,
- match=self.match,
- deferrable=self.deferrable,
- initially=self.initially,
- **self.dialect_kwargs
- )
+ refcol,
+ _constraint=self,
+ name=self.name,
+ onupdate=self.onupdate,
+ ondelete=self.ondelete,
+ use_alter=self.use_alter,
+ link_to_name=self.link_to_name,
+ match=self.match,
+ deferrable=self.deferrable,
+ initially=self.initially,
+ **self.dialect_kwargs
+ )
if table is not None:
self._set_parent_with_dispatch(table)
@@ -2491,17 +2506,18 @@ class ForeignKeyConstraint(Constraint):
return None
def _validate_dest_table(self, table):
- table_keys = set([elem._table_key() for elem in self._elements.values()])
+ table_keys = set([elem._table_key()
+ for elem in self._elements.values()])
if None not in table_keys and len(table_keys) > 1:
elem0, elem1 = sorted(table_keys)[0:2]
raise exc.ArgumentError(
'ForeignKeyConstraint on %s(%s) refers to '
'multiple remote tables: %s and %s' % (
- table.fullname,
- self._col_description,
- elem0,
- elem1
- ))
+ table.fullname,
+ self._col_description,
+ elem0,
+ elem1
+ ))
@property
def _col_description(self):
@@ -2539,7 +2555,7 @@ class ForeignKeyConstraint(Constraint):
if self.use_alter:
def supports_alter(ddl, event, schema_item, bind, **kw):
return table in set(kw['tables']) and \
- bind.dialect.supports_alter
+ bind.dialect.supports_alter
event.listen(table.metadata, "after_create",
ddl.AddConstraint(self, on=supports_alter))
@@ -2548,20 +2564,21 @@ class ForeignKeyConstraint(Constraint):
def copy(self, schema=None, **kw):
fkc = ForeignKeyConstraint(
- [x.parent.key for x in self._elements.values()],
- [x._get_colspec(schema=schema) for x in self._elements.values()],
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- use_alter=self.use_alter,
- deferrable=self.deferrable,
- initially=self.initially,
- link_to_name=self.link_to_name,
- match=self.match
- )
+ [x.parent.key for x in self._elements.values()],
+ [x._get_colspec(schema=schema)
+ for x in self._elements.values()],
+ name=self.name,
+ onupdate=self.onupdate,
+ ondelete=self.ondelete,
+ use_alter=self.use_alter,
+ deferrable=self.deferrable,
+ initially=self.initially,
+ link_to_name=self.link_to_name,
+ match=self.match
+ )
for self_fk, other_fk in zip(
- self._elements.values(),
- fkc._elements.values()):
+ self._elements.values(),
+ fkc._elements.values()):
self_fk._schema_item_copy(other_fk)
return self._schema_item_copy(fkc)
@@ -2581,8 +2598,10 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
... )
>>> my_table.primary_key
PrimaryKeyConstraint(
- Column('id', Integer(), table=<mytable>, primary_key=True, nullable=False),
- Column('version_id', Integer(), table=<mytable>, primary_key=True, nullable=False)
+ Column('id', Integer(), table=<mytable>,
+ primary_key=True, nullable=False),
+ Column('version_id', Integer(), table=<mytable>,
+ primary_key=True, nullable=False)
)
The primary key of a :class:`.Table` can also be specified by using
@@ -2594,7 +2613,8 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
Column('id', Integer),
Column('version_id', Integer),
Column('data', String(50)),
- PrimaryKeyConstraint('id', 'version_id', name='mytable_pk')
+ PrimaryKeyConstraint('id', 'version_id',
+ name='mytable_pk')
)
The two styles of column-specification should generally not be mixed.
@@ -2602,9 +2622,9 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
:class:`.PrimaryKeyConstraint`
don't match the columns that were marked as ``primary_key=True``, if both
are present; in this case, the columns are taken strictly from the
- :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise marked
- as ``primary_key=True`` are ignored. This behavior is intended to be
- backwards compatible with previous behavior.
+ :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise
+ marked as ``primary_key=True`` are ignored. This behavior is intended to
+ be backwards compatible with previous behavior.
.. versionchanged:: 0.9.2 Using a mixture of columns within a
:class:`.PrimaryKeyConstraint` in addition to columns marked as
@@ -2614,23 +2634,26 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
may raise an exception in a future release.
For the use case where specific options are to be specified on the
- :class:`.PrimaryKeyConstraint`, but the usual style of using ``primary_key=True``
- flags is still desirable, an empty :class:`.PrimaryKeyConstraint` may be
- specified, which will take on the primary key column collection from
- the :class:`.Table` based on the flags::
+ :class:`.PrimaryKeyConstraint`, but the usual style of using
+ ``primary_key=True`` flags is still desirable, an empty
+ :class:`.PrimaryKeyConstraint` may be specified, which will take on the
+ primary key column collection from the :class:`.Table` based on the
+ flags::
my_table = Table('mytable', metadata,
Column('id', Integer, primary_key=True),
Column('version_id', Integer, primary_key=True),
Column('data', String(50)),
- PrimaryKeyConstraint(name='mytable_pk', mssql_clustered=True)
+ PrimaryKeyConstraint(name='mytable_pk',
+ mssql_clustered=True)
)
.. versionadded:: 0.9.2 an empty :class:`.PrimaryKeyConstraint` may now
- be specified for the purposes of establishing keyword arguments with the
- constraint, independently of the specification of "primary key" columns
- within the :class:`.Table` itself; columns marked as ``primary_key=True``
- will be gathered into the empty constraint's column collection.
+ be specified for the purposes of establishing keyword arguments with
+ the constraint, independently of the specification of "primary key"
+ columns within the :class:`.Table` itself; columns marked as
+ ``primary_key=True`` will be gathered into the empty constraint's
+ column collection.
"""
@@ -2646,19 +2669,19 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint):
table_pks = [c for c in table.c if c.primary_key]
if self.columns and table_pks and \
- set(table_pks) != set(self.columns.values()):
+ set(table_pks) != set(self.columns.values()):
util.warn(
- "Table '%s' specifies columns %s as primary_key=True, "
- "not matching locally specified columns %s; setting the "
- "current primary key columns to %s. This warning "
- "may become an exception in a future release" %
- (
- table.name,
- ", ".join("'%s'" % c.name for c in table_pks),
- ", ".join("'%s'" % c.name for c in self.columns),
- ", ".join("'%s'" % c.name for c in self.columns)
- )
+ "Table '%s' specifies columns %s as primary_key=True, "
+ "not matching locally specified columns %s; setting the "
+ "current primary key columns to %s. This warning "
+ "may become an exception in a future release" %
+ (
+ table.name,
+ ", ".join("'%s'" % c.name for c in table_pks),
+ ", ".join("'%s'" % c.name for c in self.columns),
+ ", ".join("'%s'" % c.name for c in self.columns)
)
+ )
table_pks[:] = []
for c in self.columns:
@@ -2745,9 +2768,9 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
.. versionadded:: 0.8 support for functional and expression-based indexes.
An :class:`.Index` can also be manually associated with a :class:`.Table`,
- either through inline declaration or using :meth:`.Table.append_constraint`.
- When this approach is used, the names of the indexed columns can be specified
- as strings::
+ either through inline declaration or using
+ :meth:`.Table.append_constraint`. When this approach is used, the names
+ of the indexed columns can be specified as strings::
Table("sometable", metadata,
Column("name", String(50)),
@@ -2775,8 +2798,8 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
:ref:`schema_indexes` - General information on :class:`.Index`.
- :ref:`postgresql_indexes` - PostgreSQL-specific options available for the
- :class:`.Index` construct.
+ :ref:`postgresql_indexes` - PostgreSQL-specific options available for
+ the :class:`.Index` construct.
:ref:`mysql_indexes` - MySQL-specific options available for the
:class:`.Index` construct.
@@ -2809,9 +2832,10 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
:paramref:`.Column.quote`.
:param \**kw: Additional keyword arguments not mentioned above are
- dialect specific, and passed in the form ``<dialectname>_<argname>``.
- See the documentation regarding an individual dialect at
- :ref:`dialect_toplevel` for detail on documented arguments.
+ dialect specific, and passed in the form
+ ``<dialectname>_<argname>``. See the documentation regarding an
+ individual dialect at :ref:`dialect_toplevel` for detail on
+ documented arguments.
"""
self.table = None
@@ -2835,7 +2859,6 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
# objects are present
ColumnCollectionMixin.__init__(self, *columns)
-
def _set_parent(self, table):
ColumnCollectionMixin._set_parent(self, table)
@@ -2860,7 +2883,8 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
self.expressions = [
expr if isinstance(expr, ClauseElement)
else colexpr
- for expr, colexpr in util.zip_longest(self.expressions, self.columns)
+ for expr, colexpr in util.zip_longest(self.expressions,
+ self.columns)
]
@property
@@ -2900,11 +2924,11 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
def __repr__(self):
return 'Index(%s)' % (
- ", ".join(
- [repr(self.name)] +
- [repr(e) for e in self.expressions] +
- (self.unique and ["unique=True"] or [])
- ))
+ ", ".join(
+ [repr(self.name)] +
+ [repr(e) for e in self.expressions] +
+ (self.unique and ["unique=True"] or [])
+ ))
DEFAULT_NAMING_CONVENTION = util.immutabledict({
@@ -2925,9 +2949,9 @@ class MetaData(SchemaItem):
The :class:`.Table` objects themselves are stored in the
:attr:`.MetaData.tables` dictionary.
- :class:`.MetaData` is a thread-safe object for read operations. Construction
- of new tables within a single :class:`.MetaData` object, either explicitly
- or via reflection, may not be completely thread-safe.
+ :class:`.MetaData` is a thread-safe object for read operations.
+ Construction of new tables within a single :class:`.MetaData` object,
+ either explicitly or via reflection, may not be completely thread-safe.
.. seealso::
@@ -2940,7 +2964,7 @@ class MetaData(SchemaItem):
def __init__(self, bind=None, reflect=False, schema=None,
quote_schema=None,
naming_convention=DEFAULT_NAMING_CONVENTION
- ):
+ ):
"""Create a new MetaData object.
:param bind:
@@ -2985,9 +3009,9 @@ class MetaData(SchemaItem):
The values associated with each "constraint class" or "constraint
mnemonic" key are string naming templates, such as
``"uq_%(table_name)s_%(column_0_name)s"``,
- which describe how the name should be composed. The values associated
- with user-defined "token" keys should be callables of the form
- ``fn(constraint, table)``, which accepts the constraint/index
+ which describe how the name should be composed. The values
+ associated with user-defined "token" keys should be callables of the
+ form ``fn(constraint, table)``, which accepts the constraint/index
object and :class:`.Table` as arguments, returning a string
result.
@@ -3011,14 +3035,15 @@ class MetaData(SchemaItem):
index position "0", e.g. :attr:`.Column.key`
* ``%(referred_column_0_name)s`` - the name of a :class:`.Column`
- at index position "0" referenced by a :class:`.ForeignKeyConstraint`.
+ at index position "0" referenced by a
+ :class:`.ForeignKeyConstraint`.
- * ``%(constraint_name)s`` - a special key that refers to the existing
- name given to the constraint. When this key is present, the
- :class:`.Constraint` object's existing name will be replaced with
- one that is composed from template string that uses this token.
- When this token is present, it is required that the :class:`.Constraint`
- is given an expicit name ahead of time.
+ * ``%(constraint_name)s`` - a special key that refers to the
+ existing name given to the constraint. When this key is
+ present, the :class:`.Constraint` object's existing name will be
+           replaced with one that is composed from a template string that
+           uses this token. When this token is present, it is required that
+           the :class:`.Constraint` is given an explicit name ahead of time.
* user-defined: any additional token may be implemented by passing
it along with a ``fn(constraint, table)`` callable to the
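
A hedged sketch of such a user-defined token; the ``fk_guid`` key and its
implementation are illustrative::

    import uuid

    from sqlalchemy import MetaData

    def fk_guid(constraint, table):
        # fn(constraint, table): receives the constraint/index object
        # and the Table, returns the string to substitute
        return "fk_%s_%s" % (table.name, uuid.uuid4().hex[:8])

    metadata = MetaData(naming_convention={
        "fk_guid": fk_guid,
        "fk": "%(fk_guid)s",
    })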
@@ -3042,7 +3067,7 @@ class MetaData(SchemaItem):
self.bind = bind
if reflect:
util.warn_deprecated("reflect=True is deprecate; please "
- "use the reflect() method.")
+ "use the reflect() method.")
if not bind:
raise exc.ArgumentError(
"A bind must be supplied in conjunction "
@@ -3077,8 +3102,6 @@ class MetaData(SchemaItem):
if schema:
self._schemas.add(schema)
-
-
def _remove_table(self, name, schema):
key = _get_table_key(name, schema)
removed = dict.pop(self.tables, key, None)
@@ -3087,9 +3110,8 @@ class MetaData(SchemaItem):
fk._remove_from_metadata(self)
if self._schemas:
self._schemas = set([t.schema
- for t in self.tables.values()
- if t.schema is not None])
-
+ for t in self.tables.values()
+ if t.schema is not None])
def __getstate__(self):
return {'tables': self.tables,
@@ -3172,9 +3194,9 @@ class MetaData(SchemaItem):
return ddl.sort_tables(self.tables.values())
def reflect(self, bind=None, schema=None, views=False, only=None,
- extend_existing=False,
- autoload_replace=True,
- **dialect_kwargs):
+ extend_existing=False,
+ autoload_replace=True,
+ **dialect_kwargs):
"""Load all available table definitions from the database.
Automatically creates ``Table`` entries in this ``MetaData`` for any
@@ -3219,13 +3241,15 @@ class MetaData(SchemaItem):
.. versionadded:: 0.9.1
- :param \**dialect_kwargs: Additional keyword arguments not mentioned above are
- dialect specific, and passed in the form ``<dialectname>_<argname>``.
- See the documentation regarding an individual dialect at
- :ref:`dialect_toplevel` for detail on documented arguments.
+ :param \**dialect_kwargs: Additional keyword arguments not mentioned
+ above are dialect specific, and passed in the form
+ ``<dialectname>_<argname>``. See the documentation regarding an
+ individual dialect at :ref:`dialect_toplevel` for detail on
+ documented arguments.
- .. versionadded:: 0.9.2 - Added :paramref:`.MetaData.reflect.**dialect_kwargs`
- to support dialect-level reflection options for all :class:`.Table`
+ .. versionadded:: 0.9.2 - Added
+ :paramref:`.MetaData.reflect.**dialect_kwargs` to support
+ dialect-level reflection options for all :class:`.Table`
objects reflected.
"""
@@ -3249,8 +3273,8 @@ class MetaData(SchemaItem):
if schema is not None:
reflect_opts['schema'] = schema
- available = util.OrderedSet(bind.engine.table_names(schema,
- connection=conn))
+ available = util.OrderedSet(
+ bind.engine.table_names(schema, connection=conn))
if views:
available.update(
bind.dialect.get_view_names(conn, schema)
@@ -3258,7 +3282,7 @@ class MetaData(SchemaItem):
if schema is not None:
available_w_schema = util.OrderedSet(["%s.%s" % (schema, name)
- for name in available])
+ for name in available])
else:
available_w_schema = available
@@ -3270,9 +3294,9 @@ class MetaData(SchemaItem):
if extend_existing or schname not in current]
elif util.callable(only):
load = [name for name, schname in
- zip(available, available_w_schema)
- if (extend_existing or schname not in current)
- and only(name, self)]
+ zip(available, available_w_schema)
+ if (extend_existing or schname not in current)
+ and only(name, self)]
else:
missing = [name for name in only if name not in available]
if missing:
@@ -3282,7 +3306,7 @@ class MetaData(SchemaItem):
'in %s%s: (%s)' %
(bind.engine.url, s, ', '.join(missing)))
load = [name for name in only if extend_existing or
- name not in current]
+ name not in current]
for name in load:
Table(name, self, **reflect_opts)
@@ -3323,9 +3347,9 @@ class MetaData(SchemaItem):
if bind is None:
bind = _bind_or_error(self)
bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst,
- tables=tables)
+ self,
+ checkfirst=checkfirst,
+ tables=tables)
def drop_all(self, bind=None, tables=None, checkfirst=True):
"""Drop all tables stored in this metadata.
@@ -3350,9 +3374,9 @@ class MetaData(SchemaItem):
if bind is None:
bind = _bind_or_error(self)
bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst,
- tables=tables)
+ self,
+ checkfirst=checkfirst,
+ tables=tables)
class ThreadLocalMetaData(MetaData):
@@ -3418,6 +3442,3 @@ class ThreadLocalMetaData(MetaData):
for e in self.__engines.values():
if hasattr(e, 'dispose'):
e.dispose()
-
-
-
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index a57f1ecc3..4808a3935 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -11,14 +11,14 @@ SQL tables and derived rowsets.
"""
from .elements import ClauseElement, TextClause, ClauseList, \
- and_, Grouping, UnaryExpression, literal_column, BindParameter
+ and_, Grouping, UnaryExpression, literal_column, BindParameter
from .elements import _clone, \
- _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\
- _select_iterables, _anonymous_label, _clause_element_as_expr,\
- _cloned_intersection, _cloned_difference, True_, _only_column_elements,\
- TRUE
+ _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\
+ _select_iterables, _anonymous_label, _clause_element_as_expr,\
+ _cloned_intersection, _cloned_difference, True_, _only_column_elements,\
+ TRUE
from .base import Immutable, Executable, _generative, \
- ColumnCollection, ColumnSet, _from_objects, Generative
+ ColumnCollection, ColumnSet, _from_objects, Generative
from . import type_api
from .. import inspection
from .. import util
@@ -41,6 +41,7 @@ def _interpret_as_from(element):
return insp.selectable
raise exc.ArgumentError("FROM expression expected")
+
def _interpret_as_select(element):
element = _interpret_as_from(element)
if isinstance(element, Alias):
@@ -49,11 +50,13 @@ def _interpret_as_select(element):
element = element.select()
return element
+
class _OffsetLimitParam(BindParameter):
@property
def _limit_offset_value(self):
return self.effective_value
+
def _offset_or_limit_clause(element, name=None, type_=None):
"""Convert the given value to an "offset or limit" clause.
@@ -71,6 +74,7 @@ def _offset_or_limit_clause(element, name=None, type_=None):
value = util.asint(element)
return _OffsetLimitParam(name, value, type_=type_, unique=True)
+
def _offset_or_limit_clause_asint(clause, attrname):
"""Convert the "offset or limit" clause of a select construct to an
integer.
@@ -85,11 +89,12 @@ def _offset_or_limit_clause_asint(clause, attrname):
value = clause._limit_offset_value
except AttributeError:
raise exc.CompileError(
- "This SELECT structure does not use a simple "
- "integer value for %s" % attrname)
+ "This SELECT structure does not use a simple "
+ "integer value for %s" % attrname)
else:
return util.asint(value)
+
def subquery(alias, *args, **kwargs):
"""Return an :class:`.Alias` object derived
from a :class:`.Select`.
@@ -106,7 +111,6 @@ def subquery(alias, *args, **kwargs):
return Select(*args, **kwargs).alias(alias)
-
def alias(selectable, name=None, flat=False):
"""Return an :class:`.Alias` object.
@@ -194,8 +198,9 @@ class FromClause(Selectable):
schema = None
"""Define the 'schema' attribute for this :class:`.FromClause`.
- This is typically ``None`` for most objects except that of :class:`.Table`,
- where it is taken as the value of the :paramref:`.Table.schema` argument.
+ This is typically ``None`` for most objects except that of
+ :class:`.Table`, where it is taken as the value of the
+ :paramref:`.Table.schema` argument.
"""
@@ -211,10 +216,10 @@ class FromClause(Selectable):
else:
col = list(self.columns)[0]
return Select(
- [functions.func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
+ [functions.func.count(col).label('tbl_row_count')],
+ whereclause,
+ from_obj=[self],
+ **params)
def select(self, whereclause=None, **params):
"""return a SELECT of this :class:`.FromClause`.
@@ -245,9 +250,10 @@ class FromClause(Selectable):
SELECT user.id, user.name FROM user
JOIN address ON user.id = address.user_id
- :param right: the right side of the join; this is any :class:`.FromClause`
- object such as a :class:`.Table` object, and may also be a selectable-compatible
- object such as an ORM-mapped class.
+ :param right: the right side of the join; this is any
+ :class:`.FromClause` object such as a :class:`.Table` object, and
+ may also be a selectable-compatible object such as an ORM-mapped
+ class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
@@ -279,12 +285,15 @@ class FromClause(Selectable):
The above is equivalent to::
- j = user_table.join(address_table,
- user_table.c.id == address_table.c.user_id, isouter=True)
+ j = user_table.join(
+ address_table,
+ user_table.c.id == address_table.c.user_id,
+ isouter=True)
- :param right: the right side of the join; this is any :class:`.FromClause`
- object such as a :class:`.Table` object, and may also be a selectable-compatible
- object such as an ORM-mapped class.
+ :param right: the right side of the join; this is any
+ :class:`.FromClause` object such as a :class:`.Table` object, and
+ may also be a selectable-compatible object such as an ORM-mapped
+ class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
@@ -368,8 +377,8 @@ class FromClause(Selectable):
:param column: the target :class:`.ColumnElement` to be matched
:param require_embedded: only return corresponding columns for
- the given :class:`.ColumnElement`, if the given :class:`.ColumnElement`
- is actually present within a sub-element
+ the given :class:`.ColumnElement`, if the given
+ :class:`.ColumnElement` is actually present within a sub-element
of this :class:`.FromClause`. Normally the column will match if
it merely shares a common ancestor with one of the exported
columns of this :class:`.FromClause`.
@@ -379,7 +388,7 @@ class FromClause(Selectable):
def embedded(expanded_proxy_set, target_set):
for t in target_set.difference(expanded_proxy_set):
if not set(_expand_cloned([t])
- ).intersection(expanded_proxy_set):
+ ).intersection(expanded_proxy_set):
return False
return True
@@ -419,12 +428,14 @@ class FromClause(Selectable):
# columns that have no reference to the target
# column (also occurs with CompoundSelect)
- col_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- col.proxy_set if sc.shares_lineage(column)])
- c_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- c.proxy_set if sc.shares_lineage(column)])
+ col_distance = util.reduce(
+ operator.add,
+ [sc._annotations.get('weight', 1) for sc in
+ col.proxy_set if sc.shares_lineage(column)])
+ c_distance = util.reduce(
+ operator.add,
+ [sc._annotations.get('weight', 1) for sc in
+ c.proxy_set if sc.shares_lineage(column)])
if c_distance < col_distance:
col, intersect = c, i
return col
@@ -480,7 +491,7 @@ class FromClause(Selectable):
return self.foreign_keys
c = property(attrgetter('columns'),
- doc="An alias for the :attr:`.columns` attribute.")
+ doc="An alias for the :attr:`.columns` attribute.")
_select_iterable = property(attrgetter('columns'))
def _init_collections(self):
@@ -531,7 +542,8 @@ class FromClause(Selectable):
"""
if not self._cols_populated:
return None
- elif column.key in self.columns and self.columns[column.key] is column:
+ elif (column.key in self.columns and
+ self.columns[column.key] is column):
return column
else:
return None
@@ -599,7 +611,6 @@ class Join(FromClause):
"""
return cls(left, right, onclause, isouter=True)
-
@classmethod
def _create_join(cls, left, right, onclause=None, isouter=False):
"""Produce a :class:`.Join` object, given two :class:`.FromClause`
@@ -607,7 +618,8 @@ class Join(FromClause):
E.g.::
- j = join(user_table, address_table, user_table.c.id == address_table.c.user_id)
+ j = join(user_table, address_table,
+ user_table.c.id == address_table.c.user_id)
stmt = select([user_table]).select_from(j)
would emit SQL along the lines of::
@@ -615,15 +627,16 @@ class Join(FromClause):
SELECT user.id, user.name FROM user
JOIN address ON user.id = address.user_id
- Similar functionality is available given any :class:`.FromClause` object
- (e.g. such as a :class:`.Table`) using the :meth:`.FromClause.join`
- method.
+ Similar functionality is available given any
+ :class:`.FromClause` object (e.g. such as a :class:`.Table`) using
+ the :meth:`.FromClause.join` method.
:param left: The left side of the join.
- :param right: the right side of the join; this is any :class:`.FromClause`
- object such as a :class:`.Table` object, and may also be a selectable-compatible
- object such as an ORM-mapped class.
+ :param right: the right side of the join; this is any
+ :class:`.FromClause` object such as a :class:`.Table` object, and
+ may also be a selectable-compatible object such as an ORM-mapped
+ class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
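A hedged sketch of the equivalence between the function and method forms
described above, assuming ``user_table`` and ``address_table`` related by a
single foreign key so the ON clause can be inferred::

    from sqlalchemy import join, select

    j1 = join(user_table, address_table,
              user_table.c.id == address_table.c.user_id)
    j2 = user_table.join(address_table)  # ON clause inferred from the FK

    stmt = select([user_table]).select_from(j2)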
@@ -641,7 +654,6 @@ class Join(FromClause):
return cls(left, right, onclause, isouter)
-
@property
def description(self):
return "Join object on %s(%d) and %s(%d)" % (
@@ -652,8 +664,8 @@ class Join(FromClause):
def is_derived_from(self, fromclause):
return fromclause is self or \
- self.left.is_derived_from(fromclause) or \
- self.right.is_derived_from(fromclause)
+ self.left.is_derived_from(fromclause) or \
+ self.right.is_derived_from(fromclause)
def self_group(self, against=None):
return FromGrouping(self)
@@ -661,13 +673,13 @@ class Join(FromClause):
@util.dependencies("sqlalchemy.sql.util")
def _populate_column_collection(self, sqlutil):
columns = [c for c in self.left.columns] + \
- [c for c in self.right.columns]
+ [c for c in self.right.columns]
self.primary_key.extend(sqlutil.reduce_columns(
- (c for c in columns if c.primary_key), self.onclause))
+ (c for c in columns if c.primary_key), self.onclause))
self._columns.update((col._label, col) for col in columns)
self.foreign_keys.update(itertools.chain(
- *[col.foreign_keys for col in columns]))
+ *[col.foreign_keys for col in columns]))
def _refresh_for_new_column(self, column):
col = self.left._refresh_for_new_column(column)
@@ -734,10 +746,10 @@ class Join(FromClause):
if left is None:
continue
for fk in sorted(
- b.foreign_keys,
- key=lambda fk: fk.parent._creation_order):
+ b.foreign_keys,
+ key=lambda fk: fk.parent._creation_order):
if consider_as_foreign_keys is not None and \
- fk.parent not in consider_as_foreign_keys:
+ fk.parent not in consider_as_foreign_keys:
continue
try:
col = fk.get_referent(left)
@@ -751,10 +763,10 @@ class Join(FromClause):
constraints[fk.constraint].append((col, fk.parent))
if left is not b:
for fk in sorted(
- left.foreign_keys,
- key=lambda fk: fk.parent._creation_order):
+ left.foreign_keys,
+ key=lambda fk: fk.parent._creation_order):
if consider_as_foreign_keys is not None and \
- fk.parent not in consider_as_foreign_keys:
+ fk.parent not in consider_as_foreign_keys:
continue
try:
col = fk.get_referent(b)
@@ -777,7 +789,8 @@ class Join(FromClause):
# "consider_as_foreign_keys".
if consider_as_foreign_keys:
for const in list(constraints):
- if set(f.parent for f in const.elements) != set(consider_as_foreign_keys):
+ if set(f.parent for f in const.elements) != set(
+ consider_as_foreign_keys):
del constraints[const]
# if still multiple constraints, but
@@ -799,12 +812,13 @@ class Join(FromClause):
if len(constraints) == 0:
if isinstance(b, FromGrouping):
hint = " Perhaps you meant to convert the right side to a "\
- "subquery using alias()?"
+ "subquery using alias()?"
else:
hint = ""
raise exc.NoForeignKeysError(
"Can't find any foreign key relationships "
- "between '%s' and '%s'.%s" % (a.description, b.description, hint))
+ "between '%s' and '%s'.%s" %
+ (a.description, b.description, hint))
crit = [(x == y) for x, y in list(constraints.values())[0]]
if len(crit) == 1:
@@ -812,7 +826,6 @@ class Join(FromClause):
else:
return and_(*crit)
-
def select(self, whereclause=None, **kwargs):
"""Create a :class:`.Select` from this :class:`.Join`.
@@ -877,8 +890,8 @@ class Join(FromClause):
columns as that of the two individual selectables presented under
a single name - the individual columns are "auto-labeled", meaning
the ``.c.`` collection of the resulting :class:`.Alias` represents
- the names of the individual columns using a ``<tablename>_<columname>``
- scheme::
+ the names of the individual columns using a
+ ``<tablename>_<columnname>`` scheme::
j.c.table_a_id
j.c.table_b_a_id
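A brief sketch contrasting the two aliasing modes discussed here
(``table_a`` and ``table_b`` with the columns shown are assumptions)::

    j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)

    a1 = j.alias()           # wraps the join in a SELECT subquery
    a2 = j.alias(flat=True)  # aliases each side in place; no subquery

    a1.c.table_a_id          # combined <tablename>_<columnname> naming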
@@ -941,26 +954,26 @@ class Join(FromClause):
if flat:
assert name is None, "Can't send name argument with flat"
left_a, right_a = self.left.alias(flat=True), \
- self.right.alias(flat=True)
+ self.right.alias(flat=True)
adapter = sqlutil.ClauseAdapter(left_a).\
- chain(sqlutil.ClauseAdapter(right_a))
+ chain(sqlutil.ClauseAdapter(right_a))
- return left_a.join(right_a,
- adapter.traverse(self.onclause), isouter=self.isouter)
+ return left_a.join(right_a, adapter.traverse(self.onclause),
+ isouter=self.isouter)
else:
return self.select(use_labels=True, correlate=False).alias(name)
@property
def _hide_froms(self):
return itertools.chain(*[_from_objects(x.left, x.right)
- for x in self._cloned_set])
+ for x in self._cloned_set])
@property
def _from_objects(self):
return [self] + \
- self.onclause._from_objects + \
- self.left._from_objects + \
- self.right._from_objects
+ self.onclause._from_objects + \
+ self.left._from_objects + \
+ self.right._from_objects
class Alias(FromClause):
@@ -970,9 +983,9 @@ class Alias(FromClause):
sub-select within a SQL statement using the ``AS`` keyword (or
without the keyword on certain databases such as Oracle).
- This object is constructed from the :func:`~.expression.alias` module level
- function as well as the :meth:`.FromClause.alias` method available on all
- :class:`.FromClause` subclasses.
+ This object is constructed from the :func:`~.expression.alias` module
+ level function as well as the :meth:`.FromClause.alias` method available
+ on all :class:`.FromClause` subclasses.
"""
@@ -994,10 +1007,9 @@ class Alias(FromClause):
if self.original.named_with_column:
name = getattr(self.original, 'name', None)
name = _anonymous_label('%%(%d %s)s' % (id(self), name
- or 'anon'))
+ or 'anon'))
self.name = name
-
@property
def description(self):
if util.py3k:
@@ -1072,10 +1084,10 @@ class CTE(Alias):
__visit_name__ = 'cte'
def __init__(self, selectable,
- name=None,
- recursive=False,
- _cte_alias=None,
- _restates=frozenset()):
+ name=None,
+ recursive=False,
+ _cte_alias=None,
+ _restates=frozenset()):
self.recursive = recursive
self._cte_alias = _cte_alias
self._restates = _restates
@@ -1087,7 +1099,7 @@ class CTE(Alias):
name=name,
recursive=self.recursive,
_cte_alias=self,
- )
+ )
def union(self, other):
return CTE(
@@ -1106,8 +1118,6 @@ class CTE(Alias):
)
-
-
class FromGrouping(FromClause):
"""Represent a grouping of a FROM clause"""
__visit_name__ = 'grouping'
@@ -1159,6 +1169,7 @@ class FromGrouping(FromClause):
def __setstate__(self, state):
self.element = state['element']
+
class TableClause(Immutable, FromClause):
"""Represents a minimal "table" construct.
@@ -1257,10 +1268,10 @@ class TableClause(Immutable, FromClause):
else:
col = list(self.columns)[0]
return Select(
- [functions.func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
+ [functions.func.count(col).label('tbl_row_count')],
+ whereclause,
+ from_obj=[self],
+ **params)
@util.dependencies("sqlalchemy.sql.dml")
def insert(self, dml, values=None, inline=False, **kwargs):
@@ -1278,7 +1289,8 @@ class TableClause(Immutable, FromClause):
return dml.Insert(self, values=values, inline=inline, **kwargs)
@util.dependencies("sqlalchemy.sql.dml")
- def update(self, dml, whereclause=None, values=None, inline=False, **kwargs):
+ def update(
+ self, dml, whereclause=None, values=None, inline=False, **kwargs):
"""Generate an :func:`.update` construct against this
:class:`.TableClause`.
@@ -1291,7 +1303,7 @@ class TableClause(Immutable, FromClause):
"""
return dml.Update(self, whereclause=whereclause,
- values=values, inline=inline, **kwargs)
+ values=values, inline=inline, **kwargs)
@util.dependencies("sqlalchemy.sql.dml")
def delete(self, dml, whereclause=None, **kwargs):
@@ -1377,7 +1389,7 @@ class ForUpdateArg(ClauseElement):
self.read = read
if of is not None:
self.of = [_interpret_as_column_or_from(elem)
- for elem in util.to_list(of)]
+ for elem in util.to_list(of)]
else:
self.of = None
@@ -1405,7 +1417,6 @@ class SelectBase(Executable, FromClause):
"""
return ScalarSelect(self)
-
def label(self, name):
"""return a 'scalar' representation of this selectable, embedded as a
subquery with a label.
@@ -1449,8 +1460,8 @@ class SelectBase(Executable, FromClause):
Example 1, non recursive::
- from sqlalchemy import Table, Column, String, Integer, MetaData, \\
- select, func
+ from sqlalchemy import (Table, Column, String, Integer,
+ MetaData, select, func)
metadata = MetaData()
@@ -1488,8 +1499,8 @@ class SelectBase(Executable, FromClause):
Example 2, WITH RECURSIVE::
- from sqlalchemy import Table, Column, String, Integer, MetaData, \\
- select, func
+ from sqlalchemy import (Table, Column, String, Integer,
+ MetaData, select, func)
metadata = MetaData()
@@ -1530,7 +1541,8 @@ class SelectBase(Executable, FromClause):
.. seealso::
- :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`.
+ :meth:`.orm.query.Query.cte` - ORM version of
+ :meth:`.SelectBase.cte`.
"""
return CTE(self, name=name, recursive=recursive)
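A compact, illustrative CTE echoing the fuller examples above (an
``orders`` table with ``region`` and ``amount`` columns is assumed)::

    from sqlalchemy import select, func

    regional = select([
            orders.c.region,
            func.sum(orders.c.amount).label('total')
        ]).group_by(orders.c.region).cte("regional_sales")

    stmt = select([regional.c.region]).where(regional.c.total > 500)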
@@ -1561,21 +1573,22 @@ class SelectBase(Executable, FromClause):
def _from_objects(self):
return [self]
+
class GenerativeSelect(SelectBase):
"""Base class for SELECT statements where additional elements can be
added.
This serves as the base for :class:`.Select` and :class:`.CompoundSelect`
- where elements such as ORDER BY, GROUP BY can be added and column rendering
- can be controlled. Compare to :class:`.TextAsFrom`, which, while it
- subclasses :class:`.SelectBase` and is also a SELECT construct, represents
- a fixed textual string which cannot be altered at this level, only
- wrapped as a subquery.
+ where elements such as ORDER BY, GROUP BY can be added and column
+ rendering can be controlled. Compare to :class:`.TextAsFrom`, which,
+ while it subclasses :class:`.SelectBase` and is also a SELECT construct,
+ represents a fixed textual string which cannot be altered at this level,
+ only wrapped as a subquery.
.. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to
- provide functionality specific to :class:`.Select` and :class:`.CompoundSelect`
- while allowing :class:`.SelectBase` to be used for other SELECT-like
- objects, e.g. :class:`.TextAsFrom`.
+ provide functionality specific to :class:`.Select` and
+ :class:`.CompoundSelect` while allowing :class:`.SelectBase` to be
+ used for other SELECT-like objects, e.g. :class:`.TextAsFrom`.
"""
_order_by_clause = ClauseList()
@@ -1585,18 +1598,19 @@ class GenerativeSelect(SelectBase):
_for_update_arg = None
def __init__(self,
- use_labels=False,
- for_update=False,
- limit=None,
- offset=None,
- order_by=None,
- group_by=None,
- bind=None,
- autocommit=None):
+ use_labels=False,
+ for_update=False,
+ limit=None,
+ offset=None,
+ order_by=None,
+ group_by=None,
+ bind=None,
+ autocommit=None):
self.use_labels = use_labels
if for_update is not False:
- self._for_update_arg = ForUpdateArg.parse_legacy_select(for_update)
+ self._for_update_arg = (ForUpdateArg.
+ parse_legacy_select(for_update))
if autocommit is not None:
util.warn_deprecated('autocommit on select() is '
@@ -1604,7 +1618,7 @@ class GenerativeSelect(SelectBase):
'utocommit=True)')
self._execution_options = \
self._execution_options.union(
- {'autocommit': autocommit})
+ {'autocommit': autocommit})
if limit is not None:
self._limit_clause = _offset_or_limit_clause(limit)
if offset is not None:
@@ -1652,8 +1666,8 @@ class GenerativeSelect(SelectBase):
provided which allow for common database-specific
variants.
- :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle and
- Postgresql dialects.
+ :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle
+ and Postgresql dialects.
:param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL,
``FOR SHARE`` on Postgresql. On Postgresql, when combined with
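A minimal sketch of the variants above (a ``users`` table is assumed)::

    stmt = select([users]).with_for_update(nowait=True)
    # renders SELECT ... FOR UPDATE NOWAIT on Oracle and Postgresql

    stmt = select([users]).with_for_update(read=True)
    # renders LOCK IN SHARE MODE on MySQL, FOR SHARE on Postgresql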
@@ -1761,8 +1775,8 @@ class GenerativeSelect(SelectBase):
The criterion will be appended to any pre-existing ORDER BY criterion.
This is an **in-place** mutation method; the
- :meth:`~.GenerativeSelect.order_by` method is preferred, as it provides standard
- :term:`method chaining`.
+ :meth:`~.GenerativeSelect.order_by` method is preferred, as it
+ provides standard :term:`method chaining`.
"""
if len(clauses) == 1 and clauses[0] is None:
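Sketch of the distinction, again assuming a ``users`` table::

    stmt = select([users]).order_by(users.c.name)  # generative; preferred
    stmt.append_order_by(users.c.id)               # mutates stmt in place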
@@ -1778,8 +1792,8 @@ class GenerativeSelect(SelectBase):
The criterion will be appended to any pre-existing GROUP BY criterion.
This is an **in-place** mutation method; the
- :meth:`~.GenerativeSelect.group_by` method is preferred, as it provides standard
- :term:`method chaining`.
+ :meth:`~.GenerativeSelect.group_by` method is preferred, as it
+ provides standard :term:`method chaining`.
"""
if len(clauses) == 1 and clauses[0] is None:
@@ -1789,13 +1803,13 @@ class GenerativeSelect(SelectBase):
clauses = list(self._group_by_clause) + list(clauses)
self._group_by_clause = ClauseList(*clauses)
-
def _copy_internals(self, clone=_clone, **kw):
if self._limit_clause is not None:
self._limit_clause = clone(self._limit_clause, **kw)
if self._offset_clause is not None:
self._offset_clause = clone(self._offset_clause, **kw)
+
class CompoundSelect(GenerativeSelect):
"""Forms the basis of ``UNION``, ``UNION ALL``, and other
SELECT-based set operations.
@@ -1842,11 +1856,14 @@ class CompoundSelect(GenerativeSelect):
if not numcols:
numcols = len(s.c._all_columns)
elif len(s.c._all_columns) != numcols:
- raise exc.ArgumentError('All selectables passed to '
- 'CompoundSelect must have identical numbers of '
- 'columns; select #%d has %d columns, select '
- '#%d has %d' % (1, len(self.selects[0].c._all_columns), n
- + 1, len(s.c._all_columns)))
+ raise exc.ArgumentError(
+ 'All selectables passed to '
+ 'CompoundSelect must have identical numbers of '
+ 'columns; select #%d has %d columns, select '
+ '#%d has %d' %
+ (1, len(self.selects[0].c._all_columns),
+ n + 1, len(s.c._all_columns))
+ )
self.selects.append(s.self_group(self))
@@ -1892,7 +1909,6 @@ class CompoundSelect(GenerativeSelect):
"""
return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
-
@classmethod
def _create_except(cls, *selects, **kwargs):
"""Return an ``EXCEPT`` of multiple selectables.
@@ -1910,7 +1926,6 @@ class CompoundSelect(GenerativeSelect):
"""
return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
-
@classmethod
def _create_except_all(cls, *selects, **kwargs):
"""Return an ``EXCEPT ALL`` of multiple selectables.
@@ -1928,7 +1943,6 @@ class CompoundSelect(GenerativeSelect):
"""
return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
-
@classmethod
def _create_intersect(cls, *selects, **kwargs):
"""Return an ``INTERSECT`` of multiple selectables.
@@ -1946,7 +1960,6 @@ class CompoundSelect(GenerativeSelect):
"""
return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
-
@classmethod
def _create_intersect_all(cls, *selects, **kwargs):
"""Return an ``INTERSECT ALL`` of multiple selectables.
@@ -1962,8 +1975,8 @@ class CompoundSelect(GenerativeSelect):
:func:`select`.
"""
- return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
-
+ return CompoundSelect(
+ CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
def _scalar_type(self):
return self.selects[0]._scalar_type()
@@ -1990,9 +2003,9 @@ class CompoundSelect(GenerativeSelect):
# ForeignKeys in. this would allow the union() to have all
# those fks too.
- proxy = cols[0]._make_proxy(self,
- name=cols[0]._label if self.use_labels else None,
- key=cols[0]._key_label if self.use_labels else None)
+ proxy = cols[0]._make_proxy(
+ self, name=cols[0]._label if self.use_labels else None,
+ key=cols[0]._key_label if self.use_labels else None)
# hand-construct the "_proxies" collection to include all
# derived columns place a 'weight' annotation corresponding
@@ -2000,8 +2013,8 @@ class CompoundSelect(GenerativeSelect):
# that the corresponding_column() operation can resolve
# conflicts
- proxy._proxies = [c._annotate({'weight': i + 1}) for (i,
- c) in enumerate(cols)]
+ proxy._proxies = [
+ c._annotate({'weight': i + 1}) for (i, c) in enumerate(cols)]
def _refresh_for_new_column(self, column):
for s in self.selects:
@@ -2011,7 +2024,8 @@ class CompoundSelect(GenerativeSelect):
return None
raise NotImplementedError("CompoundSelect constructs don't support "
- "addition of columns to underlying selectables")
+ "addition of columns to underlying "
+ "selectables")
def _copy_internals(self, clone=_clone, **kw):
super(CompoundSelect, self)._copy_internals(clone, **kw)
@@ -2019,7 +2033,8 @@ class CompoundSelect(GenerativeSelect):
self.selects = [clone(s, **kw) for s in self.selects]
if hasattr(self, '_col_map'):
del self._col_map
- for attr in ('_order_by_clause', '_group_by_clause', '_for_update_arg'):
+ for attr in (
+ '_order_by_clause', '_group_by_clause', '_for_update_arg'):
if getattr(self, attr) is not None:
setattr(self, attr, clone(getattr(self, attr), **kw))
@@ -2072,13 +2087,12 @@ class HasPrefixes(object):
dialect = kw.pop('dialect', None)
if kw:
raise exc.ArgumentError("Unsupported argument(s): %s" %
- ",".join(kw))
+ ",".join(kw))
self._setup_prefixes(expr, dialect)
def _setup_prefixes(self, prefixes, dialect=None):
self._prefixes = self._prefixes + tuple(
- [(_literal_as_text(p), dialect) for p in prefixes])
-
+ [(_literal_as_text(p), dialect) for p in prefixes])
class Select(HasPrefixes, GenerativeSelect):
@@ -2098,21 +2112,21 @@ class Select(HasPrefixes, GenerativeSelect):
_is_select = True
def __init__(self,
- columns=None,
- whereclause=None,
- from_obj=None,
- distinct=False,
- having=None,
- correlate=True,
- prefixes=None,
- **kwargs):
+ columns=None,
+ whereclause=None,
+ from_obj=None,
+ distinct=False,
+ having=None,
+ correlate=True,
+ prefixes=None,
+ **kwargs):
"""Construct a new :class:`.Select`.
- Similar functionality is also available via the :meth:`.FromClause.select`
- method on any :class:`.FromClause`.
+ Similar functionality is also available via the
+ :meth:`.FromClause.select` method on any :class:`.FromClause`.
- All arguments which accept :class:`.ClauseElement` arguments also accept
- string arguments, which will be converted as appropriate into
+ All arguments which accept :class:`.ClauseElement` arguments also
+ accept string arguments, which will be converted as appropriate into
either :func:`text()` or :func:`literal_column()` constructs.
.. seealso::
@@ -2124,12 +2138,12 @@ class Select(HasPrefixes, GenerativeSelect):
A list of :class:`.ClauseElement` objects, typically
:class:`.ColumnElement` objects or subclasses, which will form the
columns clause of the resulting statement. For all members which are
- instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
- members of the :class:`.Selectable` will be added individually to the
- columns clause. For example, specifying a
+ instances of :class:`.Selectable`, the individual
+ :class:`.ColumnElement` members of the :class:`.Selectable` will be
+ added individually to the columns clause. For example, specifying a
:class:`~sqlalchemy.schema.Table` instance will result in all the
- contained :class:`~sqlalchemy.schema.Column` objects within to be added
- to the columns clause.
+ contained :class:`~sqlalchemy.schema.Column` objects within being
+ added to the columns clause.
This argument is not present on the form of :func:`select()`
available on :class:`~sqlalchemy.schema.Table`.
@@ -2140,14 +2154,14 @@ class Select(HasPrefixes, GenerativeSelect):
:param from_obj:
A list of :class:`.ClauseElement` objects which will be added to the
- ``FROM`` clause of the resulting statement. Note that "from" objects are
- automatically located within the columns and whereclause ClauseElements.
- Use this parameter to explicitly specify "from" objects which are not
- automatically locatable. This could include
- :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
- or :class:`.Join` objects whose presence will supersede that of the
- :class:`~sqlalchemy.schema.Table` objects already located in the other
- clauses.
+ ``FROM`` clause of the resulting statement. Note that "from" objects
+ are automatically located within the columns and whereclause
+ ClauseElements. Use this parameter to explicitly specify "from"
+ objects which are not automatically locatable. This could include
+ :class:`~sqlalchemy.schema.Table` objects that aren't otherwise
+ present, or :class:`.Join` objects whose presence will supersede
+ that of the :class:`~sqlalchemy.schema.Table` objects already
+ located in the other clauses.
:param autocommit:
Deprecated. Use .execution_options(autocommit=<True|False>)
@@ -2156,19 +2170,19 @@ class Select(HasPrefixes, GenerativeSelect):
:param bind=None:
an :class:`~.Engine` or :class:`~.Connection` instance
to which the
- resulting :class:`.Select` object will be bound. The :class:`.Select`
- object will otherwise automatically bind to whatever
- :class:`~.base.Connectable` instances can be located within its contained
- :class:`.ClauseElement` members.
+ resulting :class:`.Select` object will be bound. The
+ :class:`.Select` object will otherwise automatically bind to
+ whatever :class:`~.base.Connectable` instances can be located within
+ its contained :class:`.ClauseElement` members.
:param correlate=True:
indicates that this :class:`.Select` object should have its
contained :class:`.FromClause` elements "correlated" to an enclosing
- :class:`.Select` object. This means that any :class:`.ClauseElement`
- instance within the "froms" collection of this :class:`.Select`
- which is also present in the "froms" collection of an
- enclosing select will not be rendered in the ``FROM`` clause
- of this select statement.
+ :class:`.Select` object. This means that any
+ :class:`.ClauseElement` instance within the "froms" collection of
+ this :class:`.Select` which is also present in the "froms"
+ collection of an enclosing select will not be rendered in the
+ ``FROM`` clause of this select statement.
:param distinct=False:
when ``True``, applies a ``DISTINCT`` qualifier to the columns
@@ -2186,8 +2200,9 @@ class Select(HasPrefixes, GenerativeSelect):
when ``True``, applies ``FOR UPDATE`` to the end of the
resulting statement.
- .. deprecated:: 0.9.0 - use :meth:`.GenerativeSelect.with_for_update`
- to specify the structure of the ``FOR UPDATE`` clause.
+ .. deprecated:: 0.9.0 - use
+ :meth:`.GenerativeSelect.with_for_update` to specify the
+ structure of the ``FOR UPDATE`` clause.
``for_update`` accepts various string values interpreted by
specific backends, including:
@@ -2237,8 +2252,8 @@ class Select(HasPrefixes, GenerativeSelect):
collection of the resulting :class:`.Select` object will use these
names as well for targeting column members.
- use_labels is also available via the :meth:`~.GenerativeSelect.apply_labels`
- generative method.
+ use_labels is also available via the
+ :meth:`~.GenerativeSelect.apply_labels` generative method.
"""
self._auto_correlate = correlate
@@ -2247,14 +2262,14 @@ class Select(HasPrefixes, GenerativeSelect):
self._distinct = True
else:
self._distinct = [
- _literal_as_text(e)
- for e in util.to_list(distinct)
- ]
+ _literal_as_text(e)
+ for e in util.to_list(distinct)
+ ]
if from_obj is not None:
self._from_obj = util.OrderedSet(
- _interpret_as_from(f)
- for f in util.to_list(from_obj))
+ _interpret_as_from(f)
+ for f in util.to_list(from_obj))
else:
self._from_obj = util.OrderedSet()
@@ -2262,7 +2277,7 @@ class Select(HasPrefixes, GenerativeSelect):
cols_present = bool(columns)
except TypeError:
raise exc.ArgumentError("columns argument to select() must "
- "be a Python list or other iterable")
+ "be a Python list or other iterable")
if cols_present:
self._raw_columns = []
@@ -2275,12 +2290,14 @@ class Select(HasPrefixes, GenerativeSelect):
self._raw_columns = []
if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause).self_group(against=operators._asbool)
+ self._whereclause = _literal_as_text(
+ whereclause).self_group(against=operators._asbool)
else:
self._whereclause = None
if having is not None:
- self._having = _literal_as_text(having).self_group(against=operators._asbool)
+ self._having = _literal_as_text(
+ having).self_group(against=operators._asbool)
else:
self._having = None
@@ -2303,7 +2320,7 @@ class Select(HasPrefixes, GenerativeSelect):
for item in items:
if item is self:
raise exc.InvalidRequestError(
- "select() construct refers to itself as a FROM")
+ "select() construct refers to itself as a FROM")
if translate and item in translate:
item = translate[item]
if not seen.intersection(item._cloned_set):
@@ -2318,7 +2335,7 @@ class Select(HasPrefixes, GenerativeSelect):
return froms
def _get_display_froms(self, explicit_correlate_froms=None,
- implicit_correlate_froms=None):
+ implicit_correlate_froms=None):
"""Return the full list of 'from' clauses to be displayed.
Takes into account a set of existing froms which may be
@@ -2330,8 +2347,8 @@ class Select(HasPrefixes, GenerativeSelect):
froms = self._froms
toremove = set(itertools.chain(*[
- _expand_cloned(f._hide_froms)
- for f in froms]))
+ _expand_cloned(f._hide_froms)
+ for f in froms]))
if toremove:
# if we're maintaining clones of froms,
# add the copies out to the toremove list. only include
@@ -2352,7 +2369,8 @@ class Select(HasPrefixes, GenerativeSelect):
froms = [
f for f in froms if f not in
_cloned_intersection(
- _cloned_intersection(froms, explicit_correlate_froms or ()),
+ _cloned_intersection(
+ froms, explicit_correlate_froms or ()),
to_correlate
)
]
@@ -2362,14 +2380,15 @@ class Select(HasPrefixes, GenerativeSelect):
froms = [
f for f in froms if f not in
_cloned_difference(
- _cloned_intersection(froms, explicit_correlate_froms or ()),
+ _cloned_intersection(
+ froms, explicit_correlate_froms or ()),
self._correlate_except
)
]
if self._auto_correlate and \
- implicit_correlate_froms and \
- len(froms) > 1:
+ implicit_correlate_froms and \
+ len(froms) > 1:
froms = [
f for f in froms if f not in
@@ -2378,10 +2397,11 @@ class Select(HasPrefixes, GenerativeSelect):
if not len(froms):
raise exc.InvalidRequestError("Select statement '%s"
- "' returned no FROM clauses due to "
- "auto-correlation; specify "
- "correlate(<tables>) to control "
- "correlation manually." % self)
+ "' returned no FROM clauses "
+ "due to auto-correlation; "
+ "specify correlate(<tables>) "
+ "to control correlation "
+ "manually." % self)
return froms
@@ -2422,18 +2442,20 @@ class Select(HasPrefixes, GenerativeSelect):
and Sybase simultaneously::
select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
+ with_hint(
+ mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
"""
self._hints = self._hints.union(
- {(selectable, dialect_name): text})
+ {(selectable, dialect_name): text})
@property
def type(self):
raise exc.InvalidRequestError("Select objects don't have a type. "
- "Call as_scalar() on this Select object "
- "to return a 'scalar' version of this Select.")
+ "Call as_scalar() on this Select "
+ "object to return a 'scalar' version "
+ "of this Select.")
@_memoized_property.method
def locate_all_froms(self):
@@ -2478,12 +2500,12 @@ class Select(HasPrefixes, GenerativeSelect):
# as of 0.7.4 we also put the current version of _froms, which
# gets cleared on each generation. previously we were "baking"
# _froms into self._from_obj.
- self._from_cloned = from_cloned = dict((f, clone(f, **kw))
- for f in self._from_obj.union(self._froms))
+ self._from_cloned = from_cloned = dict(
+ (f, clone(f, **kw)) for f in self._from_obj.union(self._froms))
# 3. update persistent _from_obj with the cloned versions.
self._from_obj = util.OrderedSet(from_cloned[f] for f in
- self._from_obj)
+ self._from_obj)
# the _correlate collection is done separately, what can happen
# here is the same item is _correlate as in _from_obj but the
@@ -2501,7 +2523,7 @@ class Select(HasPrefixes, GenerativeSelect):
# present here.
self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
for attr in '_whereclause', '_having', '_order_by_clause', \
- '_group_by_clause', '_for_update_arg':
+ '_group_by_clause', '_for_update_arg':
if getattr(self, attr) is not None:
setattr(self, attr, clone(getattr(self, attr), **kw))
@@ -2517,7 +2539,7 @@ class Select(HasPrefixes, GenerativeSelect):
[x for x in
(self._whereclause, self._having,
self._order_by_clause, self._group_by_clause)
- if x is not None]
+ if x is not None]
@_generative
def column(self, column):
@@ -2552,12 +2574,12 @@ class Select(HasPrefixes, GenerativeSelect):
"""
return self.with_only_columns(
- sqlutil.reduce_columns(
- self.inner_columns,
- only_synonyms=only_synonyms,
- *(self._whereclause, ) + tuple(self._from_obj)
- )
+ sqlutil.reduce_columns(
+ self.inner_columns,
+ only_synonyms=only_synonyms,
+ *(self._whereclause, ) + tuple(self._from_obj)
)
+ )
@_generative
def with_only_columns(self, columns):
@@ -2764,16 +2786,16 @@ class Select(HasPrefixes, GenerativeSelect):
:meth:`.Select.correlate`.
.. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
- longer unconditionally removes entries from the FROM clause; instead,
- the candidate FROM entries must also be matched by a FROM entry
- located in an enclosing :class:`.Select`, which ultimately encloses
- this one as present in the WHERE clause, ORDER BY clause, HAVING
- clause, or columns clause of an enclosing :meth:`.Select`.
+ longer unconditionally removes entries from the FROM clause;
+ instead, the candidate FROM entries must also be matched by a FROM
+ entry located in an enclosing :class:`.Select`, which ultimately
+ encloses this one as present in the WHERE clause, ORDER BY clause,
+ HAVING clause, or columns clause of an enclosing :meth:`.Select`.
.. versionchanged:: 0.8.2 explicit correlation takes place
via any level of nesting of :class:`.Select` objects; in previous
- 0.8 versions, correlation would only occur relative to the immediate
- enclosing :class:`.Select` construct.
+ 0.8 versions, correlation would only occur relative to the
+ immediate enclosing :class:`.Select` construct.
.. seealso::
@@ -2787,7 +2809,7 @@ class Select(HasPrefixes, GenerativeSelect):
self._correlate = ()
else:
self._correlate = set(self._correlate).union(
- _interpret_as_from(f) for f in fromclauses)
+ _interpret_as_from(f) for f in fromclauses)
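A hedged sketch of explicit correlation (``users`` and ``addresses``
tables are assumptions)::

    from sqlalchemy import exists, select

    inner = select([addresses.c.id])
    inner = inner.where(addresses.c.user_id == users.c.id)
    inner = inner.correlate(users)

    # "users" then renders only in the enclosing SELECT's FROM list
    stmt = select([users.c.name]).where(exists(inner))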
@_generative
def correlate_except(self, *fromclauses):
@@ -2829,21 +2851,21 @@ class Select(HasPrefixes, GenerativeSelect):
self._correlate_except = ()
else:
self._correlate_except = set(self._correlate_except or ()).union(
- _interpret_as_from(f) for f in fromclauses)
+ _interpret_as_from(f) for f in fromclauses)
def append_correlation(self, fromclause):
"""append the given correlation expression to this select()
construct.
This is an **in-place** mutation method; the
- :meth:`~.Select.correlate` method is preferred, as it provides standard
- :term:`method chaining`.
+ :meth:`~.Select.correlate` method is preferred, as it provides
+ standard :term:`method chaining`.
"""
self._auto_correlate = False
self._correlate = set(self._correlate).union(
- _interpret_as_from(f) for f in fromclause)
+ _interpret_as_from(f) for f in fromclause)
def append_column(self, column):
"""append the given column expression to the columns clause of this
@@ -2867,8 +2889,8 @@ class Select(HasPrefixes, GenerativeSelect):
construct.
This is an **in-place** mutation method; the
- :meth:`~.Select.prefix_with` method is preferred, as it provides standard
- :term:`method chaining`.
+ :meth:`~.Select.prefix_with` method is preferred, as it provides
+ standard :term:`method chaining`.
"""
clause = _literal_as_text(clause)
@@ -2887,7 +2909,8 @@ class Select(HasPrefixes, GenerativeSelect):
"""
self._reset_exported()
- self._whereclause = and_(True_._ifnone(self._whereclause), whereclause)
+ self._whereclause = and_(
+ True_._ifnone(self._whereclause), whereclause)
def append_having(self, having):
"""append the given expression to this select() construct's HAVING
@@ -2908,19 +2931,19 @@ class Select(HasPrefixes, GenerativeSelect):
FROM clause.
This is an **in-place** mutation method; the
- :meth:`~.Select.select_from` method is preferred, as it provides standard
- :term:`method chaining`.
+ :meth:`~.Select.select_from` method is preferred, as it provides
+ standard :term:`method chaining`.
"""
self._reset_exported()
fromclause = _interpret_as_from(fromclause)
self._from_obj = self._from_obj.union([fromclause])
-
@_memoized_property
def _columns_plus_names(self):
if self.use_labels:
names = set()
+
def name_for_col(c):
if c._label is None:
return (None, c)
@@ -2933,12 +2956,14 @@ class Select(HasPrefixes, GenerativeSelect):
return [
name_for_col(c)
- for c in util.unique_list(_select_iterables(self._raw_columns))
+ for c in util.unique_list(
+ _select_iterables(self._raw_columns))
]
else:
return [
(None, c)
- for c in util.unique_list(_select_iterables(self._raw_columns))
+ for c in util.unique_list(
+ _select_iterables(self._raw_columns))
]
def _populate_column_collection(self):
@@ -2955,8 +2980,8 @@ class Select(HasPrefixes, GenerativeSelect):
key = None
c._make_proxy(self, key=key,
- name=name,
- name_is_truncatable=True)
+ name=name,
+ name_is_truncatable=True)
def _refresh_for_new_column(self, column):
for fromclause in self._froms:
@@ -2965,7 +2990,8 @@ class Select(HasPrefixes, GenerativeSelect):
if col in self.inner_columns and self._cols_populated:
our_label = col._key_label if self.use_labels else col.key
if our_label not in self.c:
- return col._make_proxy(self,
+ return col._make_proxy(
+ self,
name=col._label if self.use_labels else None,
key=col._key_label if self.use_labels else None,
name_is_truncatable=True)
@@ -3059,8 +3085,8 @@ class ScalarSelect(Generative, Grouping):
@property
def columns(self):
raise exc.InvalidRequestError('Scalar Select expression has no '
- 'columns; use this object directly within a '
- 'column-level expression.')
+ 'columns; use this object directly '
+ 'within a column-level expression.')
c = columns
@_generative
@@ -3082,7 +3108,6 @@ class Exists(UnaryExpression):
__visit_name__ = UnaryExpression.__visit_name__
_from_objects = []
-
def __init__(self, *args, **kwargs):
"""Construct a new :class:`.Exists` against an existing
:class:`.Select` object.
@@ -3109,7 +3134,7 @@ class Exists(UnaryExpression):
s = Select(*args, **kwargs).as_scalar().self_group()
UnaryExpression.__init__(self, s, operator=operators.exists,
- type_=type_api.BOOLEANTYPE)
+ type_=type_api.BOOLEANTYPE)
def select(self, whereclause=None, **params):
return Select([self], whereclause, **params)
@@ -3148,8 +3173,8 @@ class TextAsFrom(SelectBase):
"""Wrap a :class:`.TextClause` construct within a :class:`.SelectBase`
interface.
- This allows the :class:`.TextClause` object to gain a ``.c`` collection and
- other FROM-like capabilities such as :meth:`.FromClause.alias`,
+ This allows the :class:`.TextClause` object to gain a ``.c`` collection
+ and other FROM-like capabilities such as :meth:`.FromClause.alias`,
:meth:`.SelectBase.cte`, etc.
The :class:`.TextAsFrom` construct is produced via the
@@ -3191,11 +3216,10 @@ class TextAsFrom(SelectBase):
def _scalar_type(self):
return self.column_args[0].type
+
class AnnotatedFromClause(Annotated):
def __init__(self, element, values):
# force FromClause to generate their internal
# collections into __dict__
element.c
Annotated.__init__(self, element, values)
-
-
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
index 52e5053ef..a7f25bbfa 100644
--- a/lib/sqlalchemy/sql/sqltypes.py
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -25,7 +25,9 @@ import decimal
if util.jython:
import array
+
class _DateAffinity(object):
+
"""Mixin date/time specific expression adaptations.
Rules are implemented within Date, Time, Interval, DateTime, Numeric,
@@ -43,19 +45,28 @@ class _DateAffinity(object):
def _adapt_expression(self, op, other_comparator):
othertype = other_comparator.type._type_affinity
- return op, \
- to_instance(self.type._expression_adaptations.get(op, self._blank_dict).\
+ return (
+ op, to_instance(
+ self.type._expression_adaptations.
+ get(op, self._blank_dict).
get(othertype, NULLTYPE))
+ )
comparator_factory = Comparator
+
class Concatenable(object):
+
"""A mixin that marks a type as supporting 'concatenation',
typically strings."""
class Comparator(TypeEngine.Comparator):
+
def _adapt_expression(self, op, other_comparator):
- if op is operators.add and isinstance(other_comparator,
- (Concatenable.Comparator, NullType.Comparator)):
+ if (op is operators.add and
+ isinstance(
+ other_comparator,
+ (Concatenable.Comparator, NullType.Comparator)
+ )):
return operators.concat_op, self.expr.type
else:
return op, self.expr.type
@@ -64,6 +75,7 @@ class Concatenable(object):
class String(Concatenable, TypeEngine):
+
"""The base for all string and character types.
In SQL, corresponds to VARCHAR. Can also take Python unicode objects
@@ -79,10 +91,10 @@ class String(Concatenable, TypeEngine):
__visit_name__ = 'string'
def __init__(self, length=None, collation=None,
- convert_unicode=False,
- unicode_error=None,
- _warn_on_bytestring=False
- ):
+ convert_unicode=False,
+ unicode_error=None,
+ _warn_on_bytestring=False
+ ):
"""
Create a string-holding type.
@@ -147,7 +159,7 @@ class String(Concatenable, TypeEngine):
"""
if unicode_error is not None and convert_unicode != 'force':
raise exc.ArgumentError("convert_unicode must be 'force' "
- "when unicode_error is set.")
+ "when unicode_error is set.")
self.length = length
self.collation = collation
@@ -164,12 +176,12 @@ class String(Concatenable, TypeEngine):
def bind_processor(self, dialect):
if self.convert_unicode or dialect.convert_unicode:
if dialect.supports_unicode_binds and \
- self.convert_unicode != 'force':
+ self.convert_unicode != 'force':
if self._warn_on_bytestring:
def process(value):
if isinstance(value, util.binary_type):
- util.warn("Unicode type received non-unicode bind "
- "param value.")
+ util.warn("Unicode type received non-unicode"
+ "bind param value.")
return value
return process
else:
@@ -192,23 +204,23 @@ class String(Concatenable, TypeEngine):
def result_processor(self, dialect, coltype):
wants_unicode = self.convert_unicode or dialect.convert_unicode
needs_convert = wants_unicode and \
- (dialect.returns_unicode_strings is not True or
- self.convert_unicode in ('force', 'force_nocheck'))
+ (dialect.returns_unicode_strings is not True or
+ self.convert_unicode in ('force', 'force_nocheck'))
needs_isinstance = (
- needs_convert and
- dialect.returns_unicode_strings and
- self.convert_unicode != 'force_nocheck'
- )
+ needs_convert and
+ dialect.returns_unicode_strings and
+ self.convert_unicode != 'force_nocheck'
+ )
if needs_convert:
to_unicode = processors.to_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
+ dialect.encoding, self.unicode_error)
if needs_isinstance:
return processors.to_conditional_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
+ dialect.encoding, self.unicode_error)
else:
return processors.to_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
+ dialect.encoding, self.unicode_error)
else:
return None
@@ -224,6 +236,7 @@ class String(Concatenable, TypeEngine):
class Text(String):
+
"""A variably sized string type.
In SQL, usually corresponds to CLOB or TEXT. Can also take Python
@@ -237,6 +250,7 @@ class Text(String):
class Unicode(String):
+
"""A variable length Unicode string type.
The :class:`.Unicode` type is a :class:`.String` subclass
@@ -308,6 +322,7 @@ class Unicode(String):
class UnicodeText(Text):
+
"""An unbounded-length Unicode string type.
See :class:`.Unicode` for details on the unicode
@@ -336,6 +351,7 @@ class UnicodeText(Text):
class Integer(_DateAffinity, TypeEngine):
+
"""A type for ``int`` integers."""
__visit_name__ = 'integer'
@@ -382,8 +398,8 @@ class Integer(_DateAffinity, TypeEngine):
}
-
class SmallInteger(Integer):
+
"""A type for smaller ``int`` integers.
Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
@@ -395,6 +411,7 @@ class SmallInteger(Integer):
class BigInteger(Integer):
+
"""A type for bigger ``int`` integers.
Typically generates a ``BIGINT`` in DDL, and otherwise acts like
@@ -405,13 +422,13 @@ class BigInteger(Integer):
__visit_name__ = 'big_integer'
-
class Numeric(_DateAffinity, TypeEngine):
+
"""A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``.
- This type returns Python ``decimal.Decimal`` objects by default, unless the
- :paramref:`.Numeric.asdecimal` flag is set to False, in which case they
- are coerced to Python ``float`` objects.
+ This type returns Python ``decimal.Decimal`` objects by default, unless
+ the :paramref:`.Numeric.asdecimal` flag is set to False, in which case
+ they are coerced to Python ``float`` objects.
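In short (a sketch; both forms are valid)::

    Numeric(10, 2)                   # results as decimal.Decimal (default)
    Numeric(10, 2, asdecimal=False)  # results coerced to float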
.. note::
@@ -421,8 +438,8 @@ class Numeric(_DateAffinity, TypeEngine):
type (e.g. ``FLOAT``, ``REAL``, others).
If the database column on the server is in fact a floating-point type
type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float`
- type or a subclass, otherwise numeric coercion between ``float``/``Decimal``
- may or may not function as expected.
+ type or a subclass, otherwise numeric coercion between
+ ``float``/``Decimal`` may or may not function as expected.
.. note::
@@ -450,7 +467,7 @@ class Numeric(_DateAffinity, TypeEngine):
_default_decimal_return_scale = 10
def __init__(self, precision=None, scale=None,
- decimal_return_scale=None, asdecimal=True):
+ decimal_return_scale=None, asdecimal=True):
"""
Construct a Numeric.
@@ -471,9 +488,10 @@ class Numeric(_DateAffinity, TypeEngine):
database types don't have a notion of "scale", so by default the
float type looks for the first ten decimal places when converting.
Specifying this value will override that length. Types which
- do include an explicit ".scale" value, such as the base :class:`.Numeric`
- as well as the MySQL float types, will use the value of ".scale"
- as the default for decimal_return_scale, if not otherwise specified.
+ do include an explicit ".scale" value, such as the base
+ :class:`.Numeric` as well as the MySQL float types, will use the
+ value of ".scale" as the default for decimal_return_scale, if not
+ otherwise specified.
.. versionadded:: 0.9.0
@@ -545,9 +563,9 @@ class Numeric(_DateAffinity, TypeEngine):
# we're a "numeric", DBAPI returns floats, convert.
return processors.to_decimal_processor_factory(
- decimal.Decimal,
- self.scale if self.scale is not None
- else self._default_decimal_return_scale)
+ decimal.Decimal,
+ self.scale if self.scale is not None
+ else self._default_decimal_return_scale)
else:
if dialect.supports_native_decimal:
return processors.to_float
@@ -582,6 +600,7 @@ class Numeric(_DateAffinity, TypeEngine):
class Float(Numeric):
+
"""Type representing floating point types, such as ``FLOAT`` or ``REAL``.
This type returns Python ``float`` objects by default, unless the
@@ -596,8 +615,8 @@ class Float(Numeric):
and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others).
If the database column on the server is in fact a Numeric
type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric`
- type or a subclass, otherwise numeric coercion between ``float``/``Decimal``
- may or may not function as expected.
+ type or a subclass, otherwise numeric coercion between
+ ``float``/``Decimal`` may or may not function as expected.
"""
@@ -606,7 +625,7 @@ class Float(Numeric):
scale = None
def __init__(self, precision=None, asdecimal=False,
- decimal_return_scale=None, **kwargs):
+ decimal_return_scale=None, **kwargs):
"""
Construct a Float.
@@ -640,13 +659,13 @@ class Float(Numeric):
self.decimal_return_scale = decimal_return_scale
if kwargs:
util.warn_deprecated("Additional keyword arguments "
- "passed to Float ignored.")
+ "passed to Float ignored.")
def result_processor(self, dialect, coltype):
if self.asdecimal:
return processors.to_decimal_processor_factory(
- decimal.Decimal,
- self._effective_decimal_return_scale)
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
else:
return None
@@ -673,6 +692,7 @@ class Float(Numeric):
class DateTime(_DateAffinity, TypeEngine):
+
"""A type for ``datetime.datetime()`` objects.
Date and time types return objects from the Python ``datetime``
@@ -717,6 +737,7 @@ class DateTime(_DateAffinity, TypeEngine):
class Date(_DateAffinity, TypeEngine):
+
"""A type for ``datetime.date()`` objects."""
__visit_name__ = 'date'
@@ -754,6 +775,7 @@ class Date(_DateAffinity, TypeEngine):
class Time(_DateAffinity, TypeEngine):
+
"""A type for ``datetime.time()`` objects."""
__visit_name__ = 'time'
@@ -783,6 +805,7 @@ class Time(_DateAffinity, TypeEngine):
class _Binary(TypeEngine):
+
"""Define base behavior for binary types."""
def __init__(self, length=None):
@@ -850,6 +873,7 @@ class _Binary(TypeEngine):
class LargeBinary(_Binary):
+
"""A type for large binary byte data.
The Binary type generates BLOB or BYTEA when tables are created,
@@ -878,6 +902,7 @@ class LargeBinary(_Binary):
class Binary(LargeBinary):
+
"""Deprecated. Renamed to LargeBinary."""
def __init__(self, *arg, **kw):
@@ -886,8 +911,8 @@ class Binary(LargeBinary):
LargeBinary.__init__(self, *arg, **kw)
-
class SchemaType(SchemaEventTarget):
+
"""Mark a type as possibly requiring schema-level DDL for usage.
Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
@@ -910,7 +935,7 @@ class SchemaType(SchemaEventTarget):
"""
def __init__(self, name=None, schema=None, metadata=None,
- inherit_schema=False, quote=None):
+ inherit_schema=False, quote=None):
if name is not None:
self.name = quoted_name(name, quote)
else:
@@ -941,8 +966,8 @@ class SchemaType(SchemaEventTarget):
event.listen(
table,
"before_create",
- util.portable_instancemethod(
- self._on_table_create)
+ util.portable_instancemethod(
+ self._on_table_create)
)
event.listen(
table,
@@ -974,10 +999,10 @@ class SchemaType(SchemaEventTarget):
# listeners
metadata = kw.pop('metadata', None)
return impltype(name=self.name,
- schema=schema,
- metadata=metadata,
- inherit_schema=self.inherit_schema,
- **kw)
+ schema=schema,
+ metadata=metadata,
+ inherit_schema=self.inherit_schema,
+ **kw)
@property
def bind(self):
@@ -1021,7 +1046,9 @@ class SchemaType(SchemaEventTarget):
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
t._on_metadata_drop(target, bind, **kw)
+
class Enum(String, SchemaType):
+
"""Generic Enum Type.
The Enum type provides a set of possible string values which the
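A one-line illustrative usage (the column and value names here are
placeholders)::

    Column('status', Enum('pending', 'active', 'closed', name='status_types'))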
@@ -1118,12 +1145,12 @@ class Enum(String, SchemaType):
def __repr__(self):
return util.generic_repr(self,
- to_inspect=[Enum, SchemaType],
- )
+ to_inspect=[Enum, SchemaType],
+ )
def _should_create_constraint(self, compiler):
return not self.native_enum or \
- not compiler.dialect.supports_native_enum
+ not compiler.dialect.supports_native_enum
@util.dependencies("sqlalchemy.sql.schema")
def _set_table(self, schema, column, table):
@@ -1131,11 +1158,11 @@ class Enum(String, SchemaType):
SchemaType._set_table(self, column, table)
e = schema.CheckConstraint(
- type_coerce(column, self).in_(self.enums),
- name=_defer_name(self.name),
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
+ type_coerce(column, self).in_(self.enums),
+ name=_defer_name(self.name),
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
+ )
assert e.table is table
def adapt(self, impltype, **kw):
@@ -1143,18 +1170,19 @@ class Enum(String, SchemaType):
metadata = kw.pop('metadata', None)
if issubclass(impltype, Enum):
return impltype(name=self.name,
- schema=schema,
- metadata=metadata,
- convert_unicode=self.convert_unicode,
- native_enum=self.native_enum,
- inherit_schema=self.inherit_schema,
- *self.enums,
- **kw)
+ schema=schema,
+ metadata=metadata,
+ convert_unicode=self.convert_unicode,
+ native_enum=self.native_enum,
+ inherit_schema=self.inherit_schema,
+ *self.enums,
+ **kw)
else:
return super(Enum, self).adapt(impltype, **kw)
class PickleType(TypeDecorator):
+
"""Holds Python objects, which are serialized using pickle.
PickleType builds upon the Binary type to apply Python's
@@ -1170,7 +1198,7 @@ class PickleType(TypeDecorator):
impl = LargeBinary
def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
- pickler=None, comparator=None):
+ pickler=None, comparator=None):
"""
Construct a PickleType.
@@ -1235,6 +1263,7 @@ class PickleType(TypeDecorator):
class Boolean(TypeEngine, SchemaType):
+
"""A bool datatype.
Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on
@@ -1267,11 +1296,11 @@ class Boolean(TypeEngine, SchemaType):
return
e = schema.CheckConstraint(
- type_coerce(column, self).in_([0, 1]),
- name=_defer_name(self.name),
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
+ type_coerce(column, self).in_([0, 1]),
+ name=_defer_name(self.name),
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
+ )
assert e.table is table
@property
@@ -1301,6 +1330,7 @@ class Boolean(TypeEngine, SchemaType):
class Interval(_DateAffinity, TypeDecorator):
+
"""A type for ``datetime.timedelta()`` objects.
The Interval type deals with ``datetime.timedelta`` objects. In
@@ -1321,8 +1351,8 @@ class Interval(_DateAffinity, TypeDecorator):
epoch = dt.datetime.utcfromtimestamp(0)
def __init__(self, native=True,
- second_precision=None,
- day_precision=None):
+ second_precision=None,
+ day_precision=None):
"""Construct an Interval object.
:param native: when True, use the actual
@@ -1349,10 +1379,10 @@ class Interval(_DateAffinity, TypeDecorator):
return cls._adapt_from_generic_interval(self, **kw)
else:
return self.__class__(
- native=self.native,
- second_precision=self.second_precision,
- day_precision=self.day_precision,
- **kw)
+ native=self.native,
+ second_precision=self.second_precision,
+ day_precision=self.day_precision,
+ **kw)
@property
def python_type(self):
@@ -1423,30 +1453,35 @@ class Interval(_DateAffinity, TypeDecorator):
class REAL(Float):
+
"""The SQL REAL type."""
__visit_name__ = 'REAL'
class FLOAT(Float):
+
"""The SQL FLOAT type."""
__visit_name__ = 'FLOAT'
class NUMERIC(Numeric):
+
"""The SQL NUMERIC type."""
__visit_name__ = 'NUMERIC'
class DECIMAL(Numeric):
+
"""The SQL DECIMAL type."""
__visit_name__ = 'DECIMAL'
class INTEGER(Integer):
+
"""The SQL INT or INTEGER type."""
__visit_name__ = 'INTEGER'
@@ -1454,18 +1489,21 @@ INT = INTEGER
class SMALLINT(SmallInteger):
+
"""The SQL SMALLINT type."""
__visit_name__ = 'SMALLINT'
class BIGINT(BigInteger):
+
"""The SQL BIGINT type."""
__visit_name__ = 'BIGINT'
class TIMESTAMP(DateTime):
+
"""The SQL TIMESTAMP type."""
__visit_name__ = 'TIMESTAMP'
@@ -1475,30 +1513,35 @@ class TIMESTAMP(DateTime):
class DATETIME(DateTime):
+
"""The SQL DATETIME type."""
__visit_name__ = 'DATETIME'
class DATE(Date):
+
"""The SQL DATE type."""
__visit_name__ = 'DATE'
class TIME(Time):
+
"""The SQL TIME type."""
__visit_name__ = 'TIME'
class TEXT(Text):
+
"""The SQL TEXT type."""
__visit_name__ = 'TEXT'
class CLOB(Text):
+
"""The CLOB type.
This type is found in Oracle and Informix.
@@ -1508,53 +1551,63 @@ class CLOB(Text):
class VARCHAR(String):
+
"""The SQL VARCHAR type."""
__visit_name__ = 'VARCHAR'
class NVARCHAR(Unicode):
+
"""The SQL NVARCHAR type."""
__visit_name__ = 'NVARCHAR'
class CHAR(String):
+
"""The SQL CHAR type."""
__visit_name__ = 'CHAR'
class NCHAR(Unicode):
+
"""The SQL NCHAR type."""
__visit_name__ = 'NCHAR'
class BLOB(LargeBinary):
+
"""The SQL BLOB type."""
__visit_name__ = 'BLOB'
class BINARY(_Binary):
+
"""The SQL BINARY type."""
__visit_name__ = 'BINARY'
class VARBINARY(_Binary):
+
"""The SQL VARBINARY type."""
__visit_name__ = 'VARBINARY'
class BOOLEAN(Boolean):
+
"""The SQL BOOLEAN type."""
__visit_name__ = 'BOOLEAN'
+
class NullType(TypeEngine):
+
"""An unknown type.
:class:`.NullType` is used as a default type for those cases where
@@ -1568,12 +1621,13 @@ class NullType(TypeEngine):
as ``None`` or is not passed at all.
The :class:`.NullType` can be used within SQL expression invocation
- without issue, it just has no behavior either at the expression construction
- level or at the bind-parameter/result processing level. :class:`.NullType`
- will result in a :exc:`.CompileError` if the compiler is asked to render
- the type itself, such as if it is used in a :func:`.cast` operation
- or within a schema creation operation such as that invoked by
- :meth:`.MetaData.create_all` or the :class:`.CreateTable` construct.
+ without issue; it just has no behavior either at the expression
+ construction level or at the bind-parameter/result processing level.
+ :class:`.NullType` will result in a :exc:`.CompileError` if the compiler
+ is asked to render the type itself, such as if it is used in a
+ :func:`.cast` operation or within a schema creation operation such as that
+ invoked by :meth:`.MetaData.create_all` or the :class:`.CreateTable`
+ construct.
"""
__visit_name__ = 'null'
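Rough illustration of the behavior described above (names are
placeholders)::

    from sqlalchemy import column, cast

    expr = column('x') == 5   # 'x' carries NullType; the expression is fine
    # cast(column('x'), NullType()) would fail at compile time, since
    # NullType itself cannot be rendered as a type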
@@ -1586,9 +1640,10 @@ class NullType(TypeEngine):
return process
class Comparator(TypeEngine.Comparator):
+
def _adapt_expression(self, op, other_comparator):
if isinstance(other_comparator, NullType.Comparator) or \
- not operators.is_commutative(op):
+ not operators.is_commutative(op):
return op, self.expr.type
else:
return other_comparator._adapt_expression(op, self)
@@ -1633,11 +1688,14 @@ type_api._type_map = _type_map
# the expression element system, as you might expect. We can use
# importlaters or whatnot, but the typing system just necessarily has
# to have some kind of connection like this. right now we're injecting the
-# _DefaultColumnComparator implementation into the TypeEngine.Comparator interface.
-# Alternatively TypeEngine.Comparator could have an "impl" injected, though
-# just injecting the base is simpler, error free, and more performant.
+# _DefaultColumnComparator implementation into the TypeEngine.Comparator
+# interface. Alternatively TypeEngine.Comparator could have an "impl"
+ # injected, though just injecting the base is simpler, error-free, and more
+# performant.
+
+
class Comparator(_DefaultColumnComparator):
BOOLEANTYPE = BOOLEANTYPE
-TypeEngine.Comparator.__bases__ = (Comparator, ) + TypeEngine.Comparator.__bases__
-
+TypeEngine.Comparator.__bases__ = (
+ Comparator, ) + TypeEngine.Comparator.__bases__
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index f9af21baa..77c6e1b1e 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -20,13 +20,15 @@ INTEGERTYPE = None
NULLTYPE = None
STRINGTYPE = None
+
class TypeEngine(Visitable):
"""The ultimate base class for all SQL datatypes.
Common subclasses of :class:`.TypeEngine` include
:class:`.String`, :class:`.Integer`, and :class:`.Boolean`.
- For an overview of the SQLAlchemy typing system, see :ref:`types_toplevel`.
+ For an overview of the SQLAlchemy typing system, see
+ :ref:`types_toplevel`.
.. seealso::
@@ -50,7 +52,6 @@ class TypeEngine(Visitable):
def __reduce__(self):
return _reconstitute_comparator, (self.expr, )
-
hashable = True
"""Flag, if False, means values from this type aren't hashable.
@@ -265,7 +266,6 @@ class TypeEngine(Visitable):
"""
return Variant(self, {dialect_name: to_instance(type_)})
-
@util.memoized_property
def _type_affinity(self):
"""Return a rudimental 'affinity' value expressing the general class
@@ -290,7 +290,6 @@ class TypeEngine(Visitable):
except KeyError:
return self._dialect_info(dialect)['impl']
-
def _cached_literal_processor(self, dialect):
"""Return a dialect-specific literal processor for this type."""
try:
@@ -352,7 +351,6 @@ class TypeEngine(Visitable):
"""
return util.constructor_copy(self, cls, **kw)
-
def coerce_compared_value(self, op, value):
"""Suggest a type for a 'coerced' Python value in an expression.
@@ -374,7 +372,7 @@ class TypeEngine(Visitable):
"""
_coerced_type = _type_map.get(type(value), NULLTYPE)
if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
- is self._type_affinity:
+ is self._type_affinity:
return self
else:
return _coerced_type
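A sketch of overriding this hook in a user-defined type (``MyJSON`` is a
hypothetical name, not part of the library)::

    from sqlalchemy import Integer, VARCHAR
    from sqlalchemy.types import TypeDecorator

    class MyJSON(TypeDecorator):
        impl = VARCHAR

        def coerce_compared_value(self, op, value):
            # compare against plain integers as Integer, not as this type
            if isinstance(value, int):
                return Integer()
            return self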
@@ -411,13 +409,14 @@ class TypeEngine(Visitable):
def __str__(self):
if util.py2k:
return unicode(self.compile()).\
- encode('ascii', 'backslashreplace')
+ encode('ascii', 'backslashreplace')
else:
return str(self.compile())
def __repr__(self):
return util.generic_repr(self)
+
class UserDefinedType(TypeEngine):
"""Base for user defined types.
@@ -454,16 +453,15 @@ class UserDefinedType(TypeEngine):
"""
__visit_name__ = "user_defined"
-
class Comparator(TypeEngine.Comparator):
def _adapt_expression(self, op, other_comparator):
if hasattr(self.type, 'adapt_operator'):
util.warn_deprecated(
"UserDefinedType.adapt_operator is deprecated. Create "
- "a UserDefinedType.Comparator subclass instead which "
- "generates the desired expression constructs, given a "
- "particular operator."
- )
+ "a UserDefinedType.Comparator subclass instead which "
+ "generates the desired expression constructs, given a "
+ "particular operator."
+ )
return self.type.adapt_operator(op), self.type
else:
return op, self.type
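
A sketch of the replacement pattern named in the deprecation message; MyGeometry and its operator rule are hypothetical:

    from sqlalchemy import types

    class MyGeometry(types.UserDefinedType):
        def get_col_spec(self):
            return "GEOMETRY"

        class Comparator(types.UserDefinedType.Comparator):
            def _adapt_expression(self, op, other_comparator):
                # hypothetical rule: any operation against this type
                # yields an expression of the same type
                return op, self.type

        comparator_factory = Comparator
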
@@ -602,7 +600,8 @@ class TypeDecorator(TypeEngine):
level to "IS <constant>" when compared using ``==`` (and same for
``IS NOT`` in conjunction with ``!=``).
- For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``.
+ For most SQLAlchemy types, this includes ``NoneType``, as well as
+ ``bool``.
:class:`.TypeDecorator` modifies this list to only include ``NoneType``,
as typedecorator implementations that deal with boolean types are common.
@@ -622,18 +621,18 @@ class TypeDecorator(TypeEngine):
def operate(self, op, *other, **kwargs):
kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
return super(TypeDecorator.Comparator, self).operate(
- op, *other, **kwargs)
+ op, *other, **kwargs)
def reverse_operate(self, op, other, **kwargs):
kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
return super(TypeDecorator.Comparator, self).reverse_operate(
- op, other, **kwargs)
+ op, other, **kwargs)
@property
def comparator_factory(self):
return type("TDComparator",
- (TypeDecorator.Comparator, self.impl.comparator_factory),
- {})
+ (TypeDecorator.Comparator, self.impl.comparator_factory),
+ {})
def _gen_dialect_impl(self, dialect):
"""
@@ -651,8 +650,8 @@ class TypeDecorator(TypeEngine):
if not isinstance(tt, self.__class__):
raise AssertionError('Type object %s does not properly '
'implement the copy() method, it must '
- 'return an object of type %s' % (self,
- self.__class__))
+ 'return an object of type %s' %
+ (self, self.__class__))
tt.impl = typedesc
return tt
@@ -676,7 +675,7 @@ class TypeDecorator(TypeEngine):
"""
adapted = dialect.type_descriptor(self)
- if type(adapted) is not type(self):
+ if not isinstance(adapted, type(self)):
return adapted
elif isinstance(self.impl, TypeDecorator):
return self.impl.type_engine(dialect)
@@ -796,16 +795,19 @@ class TypeDecorator(TypeEngine):
"""Provide a literal processing function for the given
:class:`.Dialect`.
- Subclasses here will typically override :meth:`.TypeDecorator.process_literal_param`
- instead of this method directly.
+ Subclasses here will typically override
+ :meth:`.TypeDecorator.process_literal_param` instead of this method
+ directly.
- By default, this method makes use of :meth:`.TypeDecorator.process_bind_param`
- if that method is implemented, where :meth:`.TypeDecorator.process_literal_param`
- is not. The rationale here is that :class:`.TypeDecorator` typically deals
- with Python conversions of data that are above the layer of database
- presentation. With the value converted by :meth:`.TypeDecorator.process_bind_param`,
- the underlying type will then handle whether it needs to be presented to the
- DBAPI as a bound parameter or to the database as an inline SQL value.
+ By default, this method makes use of
+ :meth:`.TypeDecorator.process_bind_param` if that method is
+ implemented, where :meth:`.TypeDecorator.process_literal_param` is
+ not. The rationale here is that :class:`.TypeDecorator` typically
+ deals with Python conversions of data that are above the layer of
+ database presentation. With the value converted by
+ :meth:`.TypeDecorator.process_bind_param`, the underlying type will
+ then handle whether it needs to be presented to the DBAPI as a bound
+ parameter or to the database as an inline SQL value.
.. versionadded:: 0.9.0
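
A sketch of the fallback being described: a TypeDecorator that implements only process_bind_param still renders usable inline literals, since the converted value is handed to the underlying type. The JSONEncoded type here is illustrative only:

    import json
    from sqlalchemy import types

    class JSONEncoded(types.TypeDecorator):
        """Stores Python structures as JSON text (illustrative only)."""
        impl = types.VARCHAR

        def process_bind_param(self, value, dialect):
            return json.dumps(value) if value is not None else None

        def process_result_value(self, value, dialect):
            return json.loads(value) if value is not None else None
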
@@ -903,7 +905,7 @@ class TypeDecorator(TypeEngine):
if self._has_result_processor:
process_value = self.process_result_value
impl_processor = self.impl.result_processor(dialect,
- coltype)
+ coltype)
if impl_processor:
def process(value):
return process_value(impl_processor(value), dialect)
@@ -1032,6 +1034,7 @@ class Variant(TypeDecorator):
"""express comparison behavior in terms of the base type"""
return self.impl.comparator_factory
+
def _reconstitute_comparator(expression):
return expression.comparator
@@ -1066,5 +1069,3 @@ def adapt_type(typeobj, colspecs):
if (issubclass(typeobj.__class__, impltype)):
return typeobj
return typeobj.adapt(impltype)
-
-
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index b4ed7a5f3..8bbae8b93 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -16,13 +16,13 @@ from itertools import chain
from collections import deque
from .elements import BindParameter, ColumnClause, ColumnElement, \
- Null, UnaryExpression, literal_column, Label
+ Null, UnaryExpression, literal_column, Label
from .selectable import ScalarSelect, Join, FromClause, FromGrouping
from .schema import Column
join_condition = util.langhelpers.public_factory(
- Join._join_condition,
- ".sql.util.join_condition")
+ Join._join_condition,
+ ".sql.util.join_condition")
# names that are still being imported from the outside
from .annotation import _shallow_annotate, _deep_annotate, _deep_deannotate
@@ -99,7 +99,7 @@ def visit_binary_product(fn, expr):
# those are just column elements by themselves
yield element
elif element.__visit_name__ == 'binary' and \
- operators.is_comparison(element.operator):
+ operators.is_comparison(element.operator):
stack.insert(0, element)
for l in visit(element.left):
for r in visit(element.right):
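
A rough usage sketch for this private helper (internal API, subject to change); the tables are illustrative. fn receives each comparison along with one left/right element pair per product:

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.sql.util import visit_binary_product

    m = MetaData()
    a = Table('a', m, Column('x', Integer), Column('y', Integer))
    b = Table('b', m, Column('x', Integer))

    def fn(binary, left, right):
        print("%s / %s" % (left, right))

    # emits "a.x / b.x" and "a.y / b.x"
    visit_binary_product(fn, (a.c.x + a.c.y) == b.c.x)
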
@@ -135,7 +135,7 @@ def find_tables(clause, check_columns=False,
if include_crud:
_visitors['insert'] = _visitors['update'] = \
- _visitors['delete'] = lambda ent: tables.append(ent.table)
+ _visitors['delete'] = lambda ent: tables.append(ent.table)
if check_columns:
def visit_column(column):
@@ -148,7 +148,6 @@ def find_tables(clause, check_columns=False,
return tables
-
def unwrap_order_by(clause):
"""Break up an 'order by' expression into individual column-expressions,
without DESC/ASC/NULLS FIRST/NULLS LAST"""
@@ -159,9 +158,9 @@ def unwrap_order_by(clause):
t = stack.popleft()
if isinstance(t, ColumnElement) and \
(
- not isinstance(t, UnaryExpression) or \
+ not isinstance(t, UnaryExpression) or
not operators.is_ordering_modifier(t.modifier)
- ):
+ ):
cols.add(t)
else:
for c in t.get_children():
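
For instance, a sketch (the return value is a column collection):

    from sqlalchemy import Column, Integer, MetaData, Table, desc
    from sqlalchemy.sql.util import unwrap_order_by

    t = Table('t', MetaData(), Column('q', Integer))
    # the DESC modifier is stripped, leaving the bare t.q column
    print(unwrap_order_by(desc(t.c.q)))
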
@@ -184,6 +183,7 @@ def clause_is_present(clause, search):
else:
return False
+
def surface_selectables(clause):
stack = [clause]
while stack:
@@ -194,14 +194,16 @@ def surface_selectables(clause):
elif isinstance(elem, FromGrouping):
stack.append(elem.element)
+
def selectables_overlap(left, right):
"""Return True if left/right have some overlapping selectable"""
return bool(
- set(surface_selectables(left)).intersection(
- surface_selectables(right)
- )
- )
+ set(surface_selectables(left)).intersection(
+ surface_selectables(right)
+ )
+ )
+
def bind_values(clause):
"""Return an ordered list of "bound" values in the given clause.
@@ -237,26 +239,25 @@ class _repr_params(object):
display to the given number of 'multi' parameter sets.
"""
+
def __init__(self, params, batches):
self.params = params
self.batches = batches
def __repr__(self):
if isinstance(self.params, (list, tuple)) and \
- len(self.params) > self.batches and \
- isinstance(self.params[0], (list, dict, tuple)):
+ len(self.params) > self.batches and \
+ isinstance(self.params[0], (list, dict, tuple)):
msg = " ... displaying %i of %i total bound parameter sets ... "
return ' '.join((
- repr(self.params[:self.batches - 2])[0:-1],
- msg % (self.batches, len(self.params)),
- repr(self.params[-2:])[1:]
- ))
+ repr(self.params[:self.batches - 2])[0:-1],
+ msg % (self.batches, len(self.params)),
+ repr(self.params[-2:])[1:]
+ ))
else:
return repr(self.params)
-
-
def adapt_criterion_to_null(crit, nulls):
"""given criterion containing bind params, convert selected elements
to IS NULL.
@@ -265,14 +266,14 @@ def adapt_criterion_to_null(crit, nulls):
def visit_binary(binary):
if isinstance(binary.left, BindParameter) \
- and binary.left._identifying_key in nulls:
+ and binary.left._identifying_key in nulls:
# reverse order if the NULL is on the left side
binary.left = binary.right
binary.right = Null()
binary.operator = operators.is_
binary.negate = operators.isnot
elif isinstance(binary.right, BindParameter) \
- and binary.right._identifying_key in nulls:
+ and binary.right._identifying_key in nulls:
binary.right = Null()
binary.operator = operators.is_
binary.negate = operators.isnot
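
A sketch of the rewrite, assuming explicitly-keyed bindparams; keys listed in `nulls` become IS NULL comparisons:

    from sqlalchemy import Column, Integer, MetaData, Table, bindparam
    from sqlalchemy.sql.util import adapt_criterion_to_null

    t = Table('t', MetaData(), Column('a', Integer), Column('b', Integer))
    crit = (t.c.a == bindparam('x')) & (t.c.b == bindparam('y'))
    print(adapt_criterion_to_null(crit, ['x']))
    # t.a IS NULL AND t.b = :y
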
@@ -320,8 +321,8 @@ def reduce_columns(columns, *clauses, **kw):
\**kw may specify 'ignore_nonexistent_tables' to ignore foreign keys
whose tables are not yet configured, or columns that aren't yet present.
- This function is primarily used to determine the most minimal "primary key"
- from a selectable, by reducing the set of primary key columns present
+ This function is primarily used to determine the most minimal "primary
+ key" from a selectable, by reducing the set of primary key columns present
in the selectable to just those that are not repeated.
"""
@@ -353,21 +354,21 @@ def reduce_columns(columns, *clauses, **kw):
else:
raise
if fk_col.shares_lineage(c) and \
- (not only_synonyms or \
- c.name == col.name):
+ (not only_synonyms or
+ c.name == col.name):
omit.add(col)
break
if clauses:
def visit_binary(binary):
if binary.operator == operators.eq:
- cols = util.column_set(chain(*[c.proxy_set
- for c in columns.difference(omit)]))
+ cols = util.column_set(
+ chain(*[c.proxy_set for c in columns.difference(omit)]))
if binary.left in cols and binary.right in cols:
for c in reversed(columns):
if c.shares_lineage(binary.right) and \
- (not only_synonyms or \
- c.name == binary.left.name):
+ (not only_synonyms or
+ c.name == binary.left.name):
omit.add(c)
break
for clause in clauses:
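
For example, a sketch: a primary key column that is also a foreign key to another selected column is dropped from the reduced set:

    from sqlalchemy import Column, ForeignKey, Integer, MetaData, Table
    from sqlalchemy.sql.util import reduce_columns

    m = MetaData()
    p = Table('p', m, Column('id', Integer, primary_key=True))
    c = Table('c', m, Column('id', Integer, ForeignKey('p.id'),
                             primary_key=True))

    # c.id references p.id, so only p.id survives
    print(list(reduce_columns([p.c.id, c.c.id])))
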
@@ -378,7 +379,7 @@ def reduce_columns(columns, *clauses, **kw):
def criterion_as_pairs(expression, consider_as_foreign_keys=None,
- consider_as_referenced_keys=None, any_operator=False):
+ consider_as_referenced_keys=None, any_operator=False):
"""traverse an expression and locate binary criterion pairs."""
if consider_as_foreign_keys and consider_as_referenced_keys:
@@ -387,37 +388,37 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
"'consider_as_referenced_keys'")
def col_is(a, b):
- #return a is b
+ # return a is b
return a.compare(b)
def visit_binary(binary):
if not any_operator and binary.operator is not operators.eq:
return
if not isinstance(binary.left, ColumnElement) or \
- not isinstance(binary.right, ColumnElement):
+ not isinstance(binary.right, ColumnElement):
return
if consider_as_foreign_keys:
if binary.left in consider_as_foreign_keys and \
- (col_is(binary.right, binary.left) or
- binary.right not in consider_as_foreign_keys):
+ (col_is(binary.right, binary.left) or
+ binary.right not in consider_as_foreign_keys):
pairs.append((binary.right, binary.left))
elif binary.right in consider_as_foreign_keys and \
- (col_is(binary.left, binary.right) or
- binary.left not in consider_as_foreign_keys):
+ (col_is(binary.left, binary.right) or
+ binary.left not in consider_as_foreign_keys):
pairs.append((binary.left, binary.right))
elif consider_as_referenced_keys:
if binary.left in consider_as_referenced_keys and \
- (col_is(binary.right, binary.left) or
- binary.right not in consider_as_referenced_keys):
+ (col_is(binary.right, binary.left) or
+ binary.right not in consider_as_referenced_keys):
pairs.append((binary.left, binary.right))
elif binary.right in consider_as_referenced_keys and \
- (col_is(binary.left, binary.right) or
- binary.left not in consider_as_referenced_keys):
+ (col_is(binary.left, binary.right) or
+ binary.left not in consider_as_referenced_keys):
pairs.append((binary.right, binary.left))
else:
if isinstance(binary.left, Column) and \
- isinstance(binary.right, Column):
+ isinstance(binary.right, Column):
if binary.left.references(binary.right):
pairs.append((binary.right, binary.left))
elif binary.right.references(binary.left):
@@ -427,7 +428,6 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
return pairs
-
class AliasedRow(object):
"""Wrap a RowProxy with a translation map.
@@ -435,6 +435,7 @@ class AliasedRow(object):
to those present in a RowProxy.
"""
+
def __init__(self, row, map):
# AliasedRow objects don't nest, so un-nest
# if another AliasedRow was passed
@@ -483,10 +484,11 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
s.c.col1 == table2.c.col1
"""
+
def __init__(self, selectable, equivalents=None,
- include=None, exclude=None,
- include_fn=None, exclude_fn=None,
- adapt_on_names=False):
+ include=None, exclude=None,
+ include_fn=None, exclude_fn=None,
+ adapt_on_names=False):
self.__traverse_options__ = {'stop_on': [selectable]}
self.selectable = selectable
if include:
@@ -505,13 +507,13 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
def _corresponding_column(self, col, require_embedded,
_seen=util.EMPTY_SET):
newcol = self.selectable.corresponding_column(
- col,
- require_embedded=require_embedded)
+ col,
+ require_embedded=require_embedded)
if newcol is None and col in self.equivalents and col not in _seen:
for equiv in self.equivalents[col]:
- newcol = self._corresponding_column(equiv,
- require_embedded=require_embedded,
- _seen=_seen.union([col]))
+ newcol = self._corresponding_column(
+ equiv, require_embedded=require_embedded,
+ _seen=_seen.union([col]))
if newcol is not None:
return newcol
if self.adapt_on_names and newcol is None:
@@ -519,9 +521,10 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
return newcol
magic_flag = False
+
def replace(self, col):
if not self.magic_flag and isinstance(col, FromClause) and \
- self.selectable.is_derived_from(col):
+ self.selectable.is_derived_from(col):
return self.selectable
elif not isinstance(col, ColumnElement):
return None
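
Per the docstring example further above, a sketch of the adapter rewriting an expression in terms of an alias:

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.sql.util import ClauseAdapter

    t = Table('t', MetaData(), Column('c1', Integer))
    a = t.alias()
    # t.c1 = :c1_1  becomes  t_1.c1 = :c1_1
    print(ClauseAdapter(a).traverse(t.c.c1 == 5))
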
@@ -542,10 +545,12 @@ class ColumnAdapter(ClauseAdapter):
adapted_row() factory.
"""
+
def __init__(self, selectable, equivalents=None,
- chain_to=None, include=None,
- exclude=None, adapt_required=False):
- ClauseAdapter.__init__(self, selectable, equivalents, include, exclude)
+ chain_to=None, include=None,
+ exclude=None, adapt_required=False):
+ ClauseAdapter.__init__(self, selectable, equivalents,
+ include, exclude)
if chain_to:
self.chain(chain_to)
self.columns = util.populate_column_dict(self._locate_col)
@@ -599,4 +604,3 @@ class ColumnAdapter(ClauseAdapter):
def __setstate__(self, state):
self.__dict__.update(state)
self.columns = util.PopulateDict(self._locate_col)
-
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index ddf469d47..bb525744a 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -30,10 +30,10 @@ import operator
from .. import exc
__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
- 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
- 'iterate_depthfirst', 'traverse_using', 'traverse',
- 'traverse_depthfirst',
- 'cloned_traverse', 'replacement_traverse']
+ 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
+ 'iterate_depthfirst', 'traverse_using', 'traverse',
+ 'traverse_depthfirst',
+ 'cloned_traverse', 'replacement_traverse']
class VisitableType(type):
@@ -92,7 +92,7 @@ def _generate_dispatch(cls):
return meth(self, **kw)
_compiler_dispatch.__doc__ = \
- """Look for an attribute named "visit_" + self.__visit_name__
+ """Look for an attribute named "visit_" + self.__visit_name__
on the visitor, and call it with the same kw params.
"""
cls._compiler_dispatch = _compiler_dispatch
@@ -297,7 +297,7 @@ def replacement_traverse(obj, opts, replace):
def clone(elem, **kw):
if id(elem) in stop_on or \
- 'no_replacement_traverse' in elem._annotations:
+ 'no_replacement_traverse' in elem._annotations:
return elem
else:
newelem = replace(elem)
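
A sketch of the non-cloning entry point exported by this module: traverse() dispatches to visitor callables keyed by __visit_name__:

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.sql.visitors import traverse

    t = Table('t', MetaData(), Column('x', Integer))
    seen = []
    # collect every 'column' element within the expression
    traverse(t.c.x == 5, {}, {'column': seen.append})
    print(seen)  # [Column('x', ...)]
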
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index 453f2329f..8f8f56412 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -19,9 +19,9 @@ def against(*queries):
return _against(config._current, *queries)
from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
- eq_, ne_, is_, is_not_, startswith_, assert_raises, \
- assert_raises_message, AssertsCompiledSQL, ComparesTables, \
- AssertsExecutionResults, expect_deprecated
+ eq_, ne_, is_, is_not_, startswith_, assert_raises, \
+ assert_raises_message, AssertsCompiledSQL, ComparesTables, \
+ AssertsExecutionResults, expect_deprecated
from .util import run_as_contextmanager, rowset, fail, provide_metadata, adict
@@ -30,4 +30,4 @@ crashes = skip
from .config import db
from .config import requirements as requires
-from . import mock
\ No newline at end of file
+from . import mock
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index bc75621e9..f9331a73e 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -44,12 +44,12 @@ def emits_warning(*messages):
category=sa_exc.SAPendingDeprecationWarning)]
if not messages:
filters.append(dict(action='ignore',
- category=sa_exc.SAWarning))
+ category=sa_exc.SAWarning))
else:
filters.extend(dict(action='ignore',
- message=message,
- category=sa_exc.SAWarning)
- for message in messages)
+ message=message,
+ category=sa_exc.SAWarning)
+ for message in messages)
for f in filters:
warnings.filterwarnings(**f)
try:
@@ -103,6 +103,7 @@ def uses_deprecated(*messages):
return fn(*args, **kw)
return decorate
+
@contextlib.contextmanager
def expect_deprecated(*messages):
# todo: should probably be strict about this, too
@@ -118,8 +119,8 @@ def expect_deprecated(*messages):
category=sa_exc.SADeprecationWarning)
for message in
[(m.startswith('//') and
- ('Call to deprecated function ' + m[2:]) or m)
- for m in messages]])
+ ('Call to deprecated function ' + m[2:]) or m)
+ for m in messages]])
for f in filters:
warnings.filterwarnings(**f)
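
Hypothetical usage of the context manager above; old_thing stands in for any call that emits SADeprecationWarning:

    from sqlalchemy.testing.assertions import expect_deprecated

    def test_old_api():
        with expect_deprecated("old_thing is deprecated"):
            old_thing()  # hypothetical deprecated call
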
@@ -140,6 +141,8 @@ def global_cleanup_assertions():
_assert_no_stray_pool_connections()
_STRAY_CONNECTION_FAILURES = 0
+
+
def _assert_no_stray_pool_connections():
global _STRAY_CONNECTION_FAILURES
@@ -156,7 +159,7 @@ def _assert_no_stray_pool_connections():
_STRAY_CONNECTION_FAILURES += 1
print("Encountered a stray connection in test cleanup: %s"
- % str(pool._refs))
+ % str(pool._refs))
# then do a real GC sweep. We shouldn't even be here
# so a single sweep should really be doing it, otherwise
# there's probably a real unreachable cycle somewhere.
@@ -218,17 +221,18 @@ def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
callable_(*args, **kwargs)
assert False, "Callable did not raise an exception"
except except_cls as e:
- assert re.search(msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e)
+ assert re.search(
+ msg, util.text_type(e), re.UNICODE), "%r !~ %s" % (msg, e)
print(util.text_type(e).encode('utf-8'))
class AssertsCompiledSQL(object):
def assert_compile(self, clause, result, params=None,
- checkparams=None, dialect=None,
- checkpositional=None,
- use_default_dialect=False,
- allow_dialect_select=False,
- literal_binds=False):
+ checkparams=None, dialect=None,
+ checkpositional=None,
+ use_default_dialect=False,
+ allow_dialect_select=False,
+ literal_binds=False):
if use_default_dialect:
dialect = default.DefaultDialect()
elif allow_dialect_select:
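
A sketch of assert_compile in a test class (MyCompileTest is hypothetical; the 'default' dialect string is resolved by the branches below):

    from sqlalchemy import Column, Integer, MetaData, Table, select
    from sqlalchemy.testing.assertions import AssertsCompiledSQL

    class MyCompileTest(AssertsCompiledSQL):
        __dialect__ = 'default'

        def test_select(self):
            t = Table('t', MetaData(), Column('x', Integer))
            self.assert_compile(select([t.c.x]), "SELECT t.x FROM t")
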
@@ -244,7 +248,6 @@ class AssertsCompiledSQL(object):
elif isinstance(dialect, util.string_types):
dialect = url.URL(dialect).get_dialect()()
-
kw = {}
compile_kwargs = {}
@@ -268,10 +271,15 @@ class AssertsCompiledSQL(object):
if util.py3k:
param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
- print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8'))
+ print(
+ ("\nSQL String:\n" +
+ util.text_type(c) +
+ param_str).encode('utf-8'))
else:
- print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str)
-
+ print(
+ "\nSQL String:\n" +
+ util.text_type(c).encode('utf-8') +
+ param_str)
cc = re.sub(r'[\n\t]', '', util.text_type(c))
@@ -296,7 +304,7 @@ class ComparesTables(object):
if strict_types:
msg = "Type '%s' doesn't correspond to type '%s'"
- assert type(reflected_c.type) is type(c.type), \
+ assert isinstance(reflected_c.type, type(c.type)), \
msg % (reflected_c.type, c.type)
else:
self.assert_types_base(reflected_c, c)
@@ -318,8 +326,8 @@ class ComparesTables(object):
def assert_types_base(self, c1, c2):
assert c1.type._compare_type_affinity(c2.type),\
- "On column %r, type '%s' doesn't correspond to type '%s'" % \
- (c1.name, c1.type, c2.type)
+ "On column %r, type '%s' doesn't correspond to type '%s'" % \
+ (c1.name, c1.type, c2.type)
class AssertsExecutionResults(object):
@@ -363,7 +371,8 @@ class AssertsExecutionResults(object):
found = util.IdentitySet(result)
expected = set([immutabledict(e) for e in expected])
- for wrong in util.itertools_filterfalse(lambda o: type(o) == cls, found):
+ for wrong in util.itertools_filterfalse(lambda o:
+ isinstance(o, cls), found):
fail('Unexpected type "%s", expected "%s"' % (
type(wrong).__name__, cls.__name__))
@@ -394,7 +403,7 @@ class AssertsExecutionResults(object):
else:
fail(
"Expected %s instance with attributes %s not found." % (
- cls.__name__, repr(expected_item)))
+ cls.__name__, repr(expected_item)))
return True
def assert_sql_execution(self, db, callable_, *rules):
@@ -406,7 +415,8 @@ class AssertsExecutionResults(object):
assertsql.asserter.clear_rules()
def assert_sql(self, db, callable_, list_, with_sequences=None):
- if with_sequences is not None and config.db.dialect.supports_sequences:
+ if (with_sequences is not None and
+ config.db.dialect.supports_sequences):
rules = with_sequences
else:
rules = list_
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index 7b4630c05..bcc999fe3 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -78,7 +78,7 @@ class ExactSQL(SQLMatchRule):
return
_received_statement = \
_process_engine_statement(context.unicode_statement,
- context)
+ context)
_received_parameters = context.compiled_parameters
# TODO: remove this step once all unit tests are migrated, as
@@ -99,10 +99,10 @@ class ExactSQL(SQLMatchRule):
params = {}
self._result = equivalent
if not self._result:
- self._errmsg = \
- 'Testing for exact statement %r exact params %r, '\
- 'received %r with params %r' % (sql, params,
- _received_statement, _received_parameters)
+ self._errmsg = (
+ 'Testing for exact statement %r exact params %r, '
+ 'received %r with params %r' %
+ (sql, params, _received_statement, _received_parameters))
class RegexSQL(SQLMatchRule):
@@ -119,7 +119,7 @@ class RegexSQL(SQLMatchRule):
return
_received_statement = \
_process_engine_statement(context.unicode_statement,
- context)
+ context)
_received_parameters = context.compiled_parameters
equivalent = bool(self.regex.match(_received_statement))
if self.params:
@@ -168,9 +168,11 @@ class CompiledSQL(SQLMatchRule):
compiled = \
context.compiled.statement.compile(dialect=DefaultDialect())
else:
- compiled = \
- context.compiled.statement.compile(dialect=DefaultDialect(),
- column_keys=context.compiled.column_keys)
+ compiled = (
+ context.compiled.statement.compile(
+ dialect=DefaultDialect(),
+ column_keys=context.compiled.column_keys)
+ )
_received_statement = re.sub(r'[\n\t]', '', str(compiled))
equivalent = self.statement == _received_statement
if self.params:
@@ -201,17 +203,19 @@ class CompiledSQL(SQLMatchRule):
all_received = []
self._result = equivalent
if not self._result:
- print('Testing for compiled statement %r partial params '\
- '%r, received %r with params %r' % (self.statement,
- all_params, _received_statement, all_received))
- self._errmsg = \
- 'Testing for compiled statement %r partial params %r, '\
- 'received %r with params %r' % (self.statement,
- all_params, _received_statement, all_received)
-
+ print('Testing for compiled statement %r partial params '
+ '%r, received %r with params %r' %
+ (self.statement, all_params,
+ _received_statement, all_received))
+ self._errmsg = (
+ 'Testing for compiled statement %r partial params %r, '
+ 'received %r with params %r' %
+ (self.statement, all_params,
+ _received_statement, all_received))
# print self._errmsg
+
class CountStatements(AssertRule):
def __init__(self, count):
@@ -248,7 +252,7 @@ class AllOf(AssertRule):
executemany):
for rule in self.rules:
rule.process_cursor_execute(statement, parameters, context,
- executemany)
+ executemany)
def is_consumed(self):
if not self.rules:
@@ -265,6 +269,7 @@ class AllOf(AssertRule):
def consume_final(self):
return len(self.rules) == 0
+
class Or(AllOf):
def __init__(self, *rules):
self.rules = set(rules)
@@ -282,6 +287,7 @@ class Or(AllOf):
def consume_final(self):
assert self._consume_final, "Unsatisfied rules remain"
+
def _process_engine_statement(query, context):
if util.jython:
@@ -289,7 +295,7 @@ def _process_engine_statement(query, context):
query = str(query)
if context.engine.name == 'mssql' \
- and query.endswith('; select scope_identity()'):
+ and query.endswith('; select scope_identity()'):
query = query[:-25]
query = re.sub(r'\n', '', query)
return query
@@ -348,6 +354,6 @@ class SQLAssert(object):
if self.rules:
rule = self.rules[0]
rule.process_cursor_execute(statement, parameters, context,
- executemany)
+ executemany)
asserter = SQLAssert()
diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py
index 66bfbc892..c914434b4 100644
--- a/lib/sqlalchemy/testing/config.py
+++ b/lib/sqlalchemy/testing/config.py
@@ -15,6 +15,7 @@ file_config = None
_current = None
+
class Config(object):
def __init__(self, db, db_opts, options, file_config):
self.db = db
@@ -52,7 +53,8 @@ class Config(object):
def push_engine(cls, db, namespace):
assert _current, "Can't push without a default Config set up"
cls.push(
- Config(db, _current.db_opts, _current.options, _current.file_config),
+ Config(
+ db, _current.db_opts, _current.options, _current.file_config),
namespace
)
diff --git a/lib/sqlalchemy/testing/distutils_run.py b/lib/sqlalchemy/testing/distutils_run.py
index d8f8f5931..ecec3ffd5 100644
--- a/lib/sqlalchemy/testing/distutils_run.py
+++ b/lib/sqlalchemy/testing/distutils_run.py
@@ -5,6 +5,7 @@ custom setuptools/distutils code.
import unittest
import pytest
+
class TestSuite(unittest.TestCase):
def test_sqlalchemy(self):
pytest.main()
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 4136e5292..9052df570 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -17,6 +17,7 @@ import re
import warnings
from .. import util
+
class ConnectionKiller(object):
def __init__(self):
@@ -43,8 +44,8 @@ class ConnectionKiller(object):
raise
except Exception as e:
warnings.warn(
- "testing_reaper couldn't "
- "rollback/close connection: %s" % e)
+ "testing_reaper couldn't "
+ "rollback/close connection: %s" % e)
def rollback_all(self):
for rec in list(self.proxy_refs):
@@ -174,8 +175,8 @@ class ReconnectFixture(object):
raise
except Exception as e:
warnings.warn(
- "ReconnectFixture couldn't "
- "close connection: %s" % e)
+ "ReconnectFixture couldn't "
+ "close connection: %s" % e)
def shutdown(self):
# TODO: this doesn't cover all cases
@@ -236,8 +237,6 @@ def testing_engine(url=None, options=None):
return engine
-
-
def mock_engine(dialect_name=None):
"""Provides a mocking engine based on the current testing.db.
@@ -262,7 +261,7 @@ def mock_engine(dialect_name=None):
def assert_sql(stmts):
recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer]
- assert recv == stmts, recv
+ assert recv == stmts, recv
def print_sql():
d = engine.dialect
@@ -287,6 +286,7 @@ class DBAPIProxyCursor(object):
DBAPI-level cursor operations.
"""
+
def __init__(self, engine, conn):
self.engine = engine
self.connection = conn
@@ -312,6 +312,7 @@ class DBAPIProxyConnection(object):
DBAPI-level connection operations.
"""
+
def __init__(self, engine, cursor_cls):
self.conn = self._sqla_unwrap = engine.pool._creator()
self.engine = engine
@@ -352,20 +353,20 @@ class ReplayableSession(object):
if util.py2k:
Natives = set([getattr(types, t)
- for t in dir(types) if not t.startswith('_')]).\
- difference([getattr(types, t)
- for t in ('FunctionType', 'BuiltinFunctionType',
- 'MethodType', 'BuiltinMethodType',
- 'LambdaType', 'UnboundMethodType',)])
+ for t in dir(types) if not t.startswith('_')]).\
+ difference([getattr(types, t)
+ for t in ('FunctionType', 'BuiltinFunctionType',
+ 'MethodType', 'BuiltinMethodType',
+ 'LambdaType', 'UnboundMethodType',)])
else:
Natives = set([getattr(types, t)
for t in dir(types) if not t.startswith('_')]).\
- union([type(t) if not isinstance(t, type)
- else t for t in __builtins__.values()]).\
- difference([getattr(types, t)
- for t in ('FunctionType', 'BuiltinFunctionType',
- 'MethodType', 'BuiltinMethodType',
- 'LambdaType', )])
+ union([type(t) if not isinstance(t, type)
+ else t for t in __builtins__.values()]).\
+ difference([getattr(types, t)
+ for t in ('FunctionType', 'BuiltinFunctionType',
+ 'MethodType', 'BuiltinMethodType',
+ 'LambdaType', )])
def __init__(self):
self.buffer = deque()
diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py
index 7bf99918a..3e42955e6 100644
--- a/lib/sqlalchemy/testing/entities.py
+++ b/lib/sqlalchemy/testing/entities.py
@@ -86,7 +86,8 @@ class ComparableEntity(BasicEntity):
return False
if hasattr(value, '__iter__'):
- if hasattr(value, '__getitem__') and not hasattr(value, 'keys'):
+ if hasattr(value, '__getitem__') and not hasattr(
+ value, 'keys'):
if list(value) != list(battr):
return False
else:
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index 41337ea4d..fd43865aa 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -14,6 +14,7 @@ from .. import util
import contextlib
import inspect
+
class skip_if(object):
def __init__(self, predicate, reason=None):
self.predicate = _as_predicate(predicate)
@@ -55,13 +56,13 @@ class skip_if(object):
if self.predicate(config._current):
if self.reason:
msg = "'%s' : %s" % (
- fn.__name__,
- self.reason
- )
+ fn.__name__,
+ self.reason
+ )
else:
msg = "'%s': %s" % (
- fn.__name__, self.predicate
- )
+ fn.__name__, self.predicate
+ )
raise SkipTest(msg)
else:
if self._fails_on:
@@ -79,6 +80,7 @@ class skip_if(object):
self._fails_on = skip_if(fails_on_everything_except(*dbs))
return self
+
class fails_if(skip_if):
def __call__(self, fn):
@decorator
@@ -150,15 +152,15 @@ class SpecPredicate(Predicate):
self.description = description
_ops = {
- '<': operator.lt,
- '>': operator.gt,
- '==': operator.eq,
- '!=': operator.ne,
- '<=': operator.le,
- '>=': operator.ge,
- 'in': operator.contains,
- 'between': lambda val, pair: val >= pair[0] and val <= pair[1],
- }
+ '<': operator.lt,
+ '>': operator.gt,
+ '==': operator.eq,
+ '!=': operator.ne,
+ '<=': operator.le,
+ '>=': operator.ge,
+ 'in': operator.contains,
+ 'between': lambda val, pair: val >= pair[0] and val <= pair[1],
+ }
def __call__(self, config):
engine = config.db
@@ -178,7 +180,7 @@ class SpecPredicate(Predicate):
version = _server_version(engine)
oper = hasattr(self.op, '__call__') and self.op \
- or self._ops[self.op]
+ or self._ops[self.op]
return oper(version, self.spec)
else:
return True
@@ -194,16 +196,16 @@ class SpecPredicate(Predicate):
else:
if negate:
return "not %s %s %s" % (
- self.db,
- self.op,
- self.spec
- )
+ self.db,
+ self.op,
+ self.spec
+ )
else:
return "%s %s %s" % (
- self.db,
- self.op,
- self.spec
- )
+ self.db,
+ self.op,
+ self.spec
+ )
def __str__(self):
return self._as_string()
@@ -270,7 +272,7 @@ class OrPredicate(Predicate):
else:
conjunction = " or "
return conjunction.join(p._as_string(negate=negate)
- for p in self.predicates)
+ for p in self.predicates)
else:
return self._str._as_string(negate=negate)
@@ -311,8 +313,8 @@ def _server_version(engine):
def db_spec(*dbs):
return OrPredicate(
- [Predicate.as_predicate(db) for db in dbs]
- )
+ [Predicate.as_predicate(db) for db in dbs]
+ )
def open():
@@ -322,9 +324,11 @@ def open():
def closed():
return skip_if(BooleanPredicate(True, "marked as skip"))
+
def fails():
return fails_if(BooleanPredicate(True, "expected to fail"))
+
@decorator
def future(fn, *arg):
return fails_if(LambdaPredicate(fn), "Future feature")
@@ -336,10 +340,10 @@ def fails_on(db, reason=None):
def fails_on_everything_except(*dbs):
return succeeds_if(
- OrPredicate([
+ OrPredicate([
SpecPredicate(db) for db in dbs
])
- )
+ )
def skip(db, reason=None):
@@ -348,7 +352,7 @@ def skip(db, reason=None):
def only_on(dbs, reason=None):
return only_if(
- OrPredicate([SpecPredicate(db) for db in util.to_list(dbs)])
+ OrPredicate([SpecPredicate(db) for db in util.to_list(dbs)])
)
@@ -359,6 +363,6 @@ def exclude(db, op, spec, reason=None):
def against(config, *queries):
assert queries, "no queries sent!"
return OrPredicate([
- Predicate.as_predicate(query)
- for query in queries
- ])(config)
+ Predicate.as_predicate(query)
+ for query in queries
+ ])(config)
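
Hypothetical tests using these helpers as decorators; the spec tuple form maps directly onto SpecPredicate(db, op, spec):

    from sqlalchemy.testing import exclusions

    class SomeTest(object):
        @exclusions.only_on('postgresql', 'feature is PG-only')
        def test_pg_feature(self):
            pass

        @exclusions.skip_if(('mysql', '<', (5, 6)), 'needs 5.6 behavior')
        def test_newer_mysql(self):
            pass
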
diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py
index 23d010ec9..7c7b00998 100644
--- a/lib/sqlalchemy/testing/fixtures.py
+++ b/lib/sqlalchemy/testing/fixtures.py
@@ -18,6 +18,7 @@ from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
# whether or not we use unittest changes things dramatically,
# as far as how py.test collection works.
+
class TestBase(object):
# A sequence of database names to always run, regardless of the
# constraints below.
@@ -49,6 +50,7 @@ class TestBase(object):
if hasattr(self, "tearDown"):
self.tearDown()
+
class TablesTest(TestBase):
# 'once', None
@@ -222,6 +224,8 @@ class TablesTest(TestBase):
for column_values in rows[table]])
from sqlalchemy import event
+
+
class RemovesEvents(object):
@util.memoized_property
def _event_fns(self):
@@ -239,7 +243,6 @@ class RemovesEvents(object):
super_.teardown()
-
class _ORMTest(object):
@classmethod
@@ -366,14 +369,14 @@ class DeclarativeMappedTest(MappedTest):
def __init__(cls, classname, bases, dict_):
cls_registry[classname] = cls
return DeclarativeMeta.__init__(
- cls, classname, bases, dict_)
+ cls, classname, bases, dict_)
class DeclarativeBasic(object):
__table_cls__ = schema.Table
_DeclBase = declarative_base(metadata=cls.metadata,
- metaclass=FindFixtureDeclarative,
- cls=DeclarativeBasic)
+ metaclass=FindFixtureDeclarative,
+ cls=DeclarativeBasic)
cls.DeclarativeBasic = _DeclBase
fn()
diff --git a/lib/sqlalchemy/testing/mock.py b/lib/sqlalchemy/testing/mock.py
index ccbe8aa92..c6a4d4360 100644
--- a/lib/sqlalchemy/testing/mock.py
+++ b/lib/sqlalchemy/testing/mock.py
@@ -17,6 +17,5 @@ else:
from mock import MagicMock, Mock, call, patch
except ImportError:
raise ImportError(
- "SQLAlchemy's test suite requires the "
- "'mock' library as of 0.8.2.")
-
+ "SQLAlchemy's test suite requires the "
+ "'mock' library as of 0.8.2.")
diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py
index fe68457e8..5a903aae7 100644
--- a/lib/sqlalchemy/testing/pickleable.py
+++ b/lib/sqlalchemy/testing/pickleable.py
@@ -63,8 +63,8 @@ class Foo(object):
def __eq__(self, other):
return other.data == self.data and \
- other.stuff == self.stuff and \
- other.moredata == self.moredata
+ other.stuff == self.stuff and \
+ other.moredata == self.moredata
class Bar(object):
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index 7262adb4b..e362d6141 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -21,9 +21,10 @@ fixtures = None
# no package imports yet! this prevents us from tripping coverage
# too soon.
path = os.path.join(os.path.dirname(__file__), "plugin_base.py")
-if sys.version_info >= (3,3):
+if sys.version_info >= (3, 3):
from importlib import machinery
- plugin_base = machinery.SourceFileLoader("plugin_base", path).load_module()
+ plugin_base = machinery.SourceFileLoader(
+ "plugin_base", path).load_module()
else:
import imp
plugin_base = imp.load_source("plugin_base", path)
@@ -76,20 +77,20 @@ class NoseSQLAlchemy(Plugin):
def beforeTest(self, test):
plugin_base.before_test(test,
- test.test.cls.__module__,
- test.test.cls, test.test.method.__name__)
+ test.test.cls.__module__,
+ test.test.cls, test.test.method.__name__)
def afterTest(self, test):
plugin_base.after_test(test)
def startContext(self, ctx):
if not isinstance(ctx, type) \
- or not issubclass(ctx, fixtures.TestBase):
+ or not issubclass(ctx, fixtures.TestBase):
return
plugin_base.start_test_class(ctx)
def stopContext(self, ctx):
if not isinstance(ctx, type) \
- or not issubclass(ctx, fixtures.TestBase):
+ or not issubclass(ctx, fixtures.TestBase):
return
plugin_base.stop_test_class(ctx)
diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py
index b91fa4d50..2590f3b1e 100644
--- a/lib/sqlalchemy/testing/plugin/plugin_base.py
+++ b/lib/sqlalchemy/testing/plugin/plugin_base.py
@@ -8,8 +8,8 @@
"""Testing extensions.
this module is designed to work as a testing-framework-agnostic library,
-so that we can continue to support nose and also begin adding new functionality
-via py.test.
+so that we can continue to support nose and also begin adding new
+functionality via py.test.
"""
@@ -50,50 +50,62 @@ logging = None
db_opts = {}
options = None
+
def setup_options(make_option):
make_option("--log-info", action="callback", type="string", callback=_log,
- help="turn on info logging for <LOG> (multiple OK)")
- make_option("--log-debug", action="callback", type="string", callback=_log,
- help="turn on debug logging for <LOG> (multiple OK)")
+ help="turn on info logging for <LOG> (multiple OK)")
+ make_option("--log-debug", action="callback",
+ type="string", callback=_log,
+ help="turn on debug logging for <LOG> (multiple OK)")
make_option("--db", action="append", type="string", dest="db",
help="Use prefab database uri. Multiple OK, "
- "first one is run by default.")
+ "first one is run by default.")
make_option('--dbs', action='callback', callback=_list_dbs,
- help="List available prefab dbs")
+ help="List available prefab dbs")
make_option("--dburi", action="append", type="string", dest="dburi",
- help="Database uri. Multiple OK, first one is run by default.")
+ help="Database uri. Multiple OK, "
+ "first one is run by default.")
make_option("--dropfirst", action="store_true", dest="dropfirst",
- help="Drop all tables in the target database first")
+ help="Drop all tables in the target database first")
make_option("--backend-only", action="store_true", dest="backend_only",
- help="Run only tests marked with __backend__")
+ help="Run only tests marked with __backend__")
make_option("--mockpool", action="store_true", dest="mockpool",
- help="Use mock pool (asserts only one connection used)")
- make_option("--low-connections", action="store_true", dest="low_connections",
- help="Use a low number of distinct connections - i.e. for Oracle TNS"
- )
- make_option("--reversetop", action="store_true", dest="reversetop", default=False,
- help="Use a random-ordering set implementation in the ORM (helps "
- "reveal dependency issues)")
+ help="Use mock pool (asserts only one connection used)")
+ make_option("--low-connections", action="store_true",
+ dest="low_connections",
+ help="Use a low number of distinct connections - "
+ "i.e. for Oracle TNS")
+ make_option("--reversetop", action="store_true",
+ dest="reversetop", default=False,
+ help="Use a random-ordering set implementation in the ORM "
+ "(helps reveal dependency issues)")
make_option("--requirements", action="callback", type="string",
- callback=_requirements_opt,
- help="requirements class for testing, overrides setup.cfg")
- make_option("--with-cdecimal", action="store_true", dest="cdecimal", default=False,
- help="Monkeypatch the cdecimal library into Python 'decimal' for all tests")
- make_option("--serverside", action="callback", callback=_server_side_cursors,
- help="Turn on server side cursors for PG")
- make_option("--mysql-engine", action="store", dest="mysql_engine", default=None,
- help="Use the specified MySQL storage engine for all tables, default is "
- "a db-default/InnoDB combo.")
+ callback=_requirements_opt,
+ help="requirements class for testing, overrides setup.cfg")
+ make_option("--with-cdecimal", action="store_true",
+ dest="cdecimal", default=False,
+ help="Monkeypatch the cdecimal library into Python 'decimal' "
+ "for all tests")
+ make_option("--serverside", action="callback",
+ callback=_server_side_cursors,
+ help="Turn on server side cursors for PG")
+ make_option("--mysql-engine", action="store",
+ dest="mysql_engine", default=None,
+ help="Use the specified MySQL storage engine for all tables, "
+ "default is a db-default/InnoDB combo.")
make_option("--tableopts", action="append", dest="tableopts", default=[],
- help="Add a dialect-specific table option, key=value")
- make_option("--write-profiles", action="store_true", dest="write_profiles", default=False,
- help="Write/update profiling data.")
+ help="Add a dialect-specific table option, key=value")
+ make_option("--write-profiles", action="store_true",
+ dest="write_profiles", default=False,
+ help="Write/update profiling data.")
+
def read_config():
global file_config
file_config = configparser.ConfigParser()
file_config.read(['setup.cfg', 'test.cfg'])
+
def pre_begin(opt):
"""things to set up early, before coverage might be setup."""
global options
@@ -101,9 +113,11 @@ def pre_begin(opt):
for fn in pre_configure:
fn(options, file_config)
+
def set_coverage_flag(value):
options.has_coverage = value
+
def post_begin():
"""things to set up later, once we know coverage is running."""
# Lazy setup of other options (post coverage)
@@ -113,11 +127,11 @@ def post_begin():
# late imports, has to happen after config as well
# as nose plugins like coverage
global util, fixtures, engines, exclusions, \
- assertions, warnings, profiling,\
- config, testing
+ assertions, warnings, profiling,\
+ config, testing
from sqlalchemy import testing
from sqlalchemy.testing import fixtures, engines, exclusions, \
- assertions, warnings, profiling, config
+ assertions, warnings, profiling, config
from sqlalchemy import util
@@ -143,6 +157,7 @@ def _list_dbs(*args):
def _server_side_cursors(opt_str, value, parser):
db_opts['server_side_cursors'] = True
+
def _requirements_opt(opt_str, value, parser):
_setup_requirements(value)
@@ -189,8 +204,9 @@ def _engine_uri(options, file_config):
for db in re.split(r'[,\s]+', db_token):
if db not in file_config.options('db'):
raise RuntimeError(
- "Unknown URI specifier '%s'. Specify --dbs for known uris."
- % db)
+ "Unknown URI specifier '%s'. "
+ "Specify --dbs for known uris."
+ % db)
else:
db_urls.append(file_config.get('db', db))
@@ -211,12 +227,14 @@ def _engine_pool(options, file_config):
from sqlalchemy import pool
db_opts['poolclass'] = pool.AssertionPool
+
@post
def _requirements(options, file_config):
requirement_cls = file_config.get('sqla_testing', "requirement_cls")
_setup_requirements(requirement_cls)
+
def _setup_requirements(argument):
from sqlalchemy.testing import config
from sqlalchemy import testing
@@ -235,6 +253,7 @@ def _setup_requirements(argument):
config.requirements = testing.requires = req_cls()
+
@post
def _prep_testing_database(options, file_config):
from sqlalchemy.testing import config
@@ -250,27 +269,36 @@ def _prep_testing_database(options, file_config):
pass
else:
for vname in view_names:
- e.execute(schema._DropView(schema.Table(vname, schema.MetaData())))
+ e.execute(schema._DropView(
+ schema.Table(vname, schema.MetaData())
+ ))
if config.requirements.schemas.enabled_for_config(cfg):
try:
- view_names = inspector.get_view_names(schema="test_schema")
+ view_names = inspector.get_view_names(
+ schema="test_schema")
except NotImplementedError:
pass
else:
for vname in view_names:
e.execute(schema._DropView(
- schema.Table(vname,
- schema.MetaData(), schema="test_schema")))
+ schema.Table(vname, schema.MetaData(),
+ schema="test_schema")
+ ))
- for tname in reversed(inspector.get_table_names(order_by="foreign_key")):
- e.execute(schema.DropTable(schema.Table(tname, schema.MetaData())))
+ for tname in reversed(inspector.get_table_names(
+ order_by="foreign_key")):
+ e.execute(schema.DropTable(
+ schema.Table(tname, schema.MetaData())
+ ))
if config.requirements.schemas.enabled_for_config(cfg):
for tname in reversed(inspector.get_table_names(
- order_by="foreign_key", schema="test_schema")):
+ order_by="foreign_key", schema="test_schema")):
e.execute(schema.DropTable(
- schema.Table(tname, schema.MetaData(), schema="test_schema")))
+ schema.Table(tname, schema.MetaData(),
+ schema="test_schema")
+ ))
@post
@@ -304,7 +332,7 @@ def _post_setup_options(opt, file_config):
def _setup_profiling(options, file_config):
from sqlalchemy.testing import profiling
profiling._profile_stats = profiling.ProfileStatsFile(
- file_config.get('sqla_testing', 'profile_file'))
+ file_config.get('sqla_testing', 'profile_file'))
def want_class(cls):
@@ -312,22 +340,24 @@ def want_class(cls):
return False
elif cls.__name__.startswith('_'):
return False
- elif config.options.backend_only and not getattr(cls, '__backend__', False):
+ elif config.options.backend_only and not getattr(cls, '__backend__',
+ False):
return False
else:
return True
+
def generate_sub_tests(cls, module):
if getattr(cls, '__backend__', False):
for cfg in _possible_configs_for_cls(cls):
name = "%s_%s_%s" % (cls.__name__, cfg.db.name, cfg.db.driver)
subcls = type(
- name,
- (cls, ),
- {
- "__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)),
- "__backend__": False}
- )
+ name,
+ (cls, ),
+ {
+ "__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)),
+ "__backend__": False}
+ )
setattr(module, name, subcls)
yield subcls
else:
@@ -338,20 +368,24 @@ def start_test_class(cls):
_do_skips(cls)
_setup_engine(cls)
+
def stop_test_class(cls):
engines.testing_reaper._stop_test_ctx()
if not options.low_connections:
assertions.global_cleanup_assertions()
_restore_engine()
+
def _restore_engine():
config._current.reset(testing)
+
def _setup_engine(cls):
if getattr(cls, '__engine_options__', None):
eng = engines.testing_engine(options=cls.__engine_options__)
config._current.push_engine(eng, testing)
+
def before_test(test, test_module_name, test_class, test_name):
# like a nose id, e.g.:
@@ -367,10 +401,12 @@ def before_test(test, test_module_name, test_class, test_name):
warnings.resetwarnings()
profiling._current_test = id_
+
def after_test(test):
engines.testing_reaper._after_test_ctx()
warnings.resetwarnings()
+
def _possible_configs_for_cls(cls):
all_configs = set(config.Config.all_configs())
if cls.__unsupported_on__:
@@ -378,16 +414,14 @@ def _possible_configs_for_cls(cls):
for config_obj in list(all_configs):
if spec(config_obj):
all_configs.remove(config_obj)
-
if getattr(cls, '__only_on__', None):
spec = exclusions.db_spec(*util.to_list(cls.__only_on__))
for config_obj in list(all_configs):
if not spec(config_obj):
all_configs.remove(config_obj)
-
-
return all_configs
+
def _do_skips(cls):
all_configs = _possible_configs_for_cls(cls)
reasons = []
@@ -427,19 +461,17 @@ def _do_skips(cls):
for config_obj in list(all_configs):
if exclusions.skip_if(
exclusions.SpecPredicate(db_spec, op, spec)
- ).predicate(config_obj):
+ ).predicate(config_obj):
all_configs.remove(config_obj)
-
-
if not all_configs:
raise SkipTest(
"'%s' unsupported on DB implementation %s%s" % (
cls.__name__,
- ", ".join("'%s' = %s" % (
- config_obj.db.name,
- config_obj.db.dialect.server_version_info)
- for config_obj in config.Config.all_configs()
- ),
+ ", ".join("'%s' = %s"
+ % (config_obj.db.name,
+ config_obj.db.dialect.server_version_info)
+ for config_obj in config.Config.all_configs()
+ ),
", ".join(reasons)
)
)
@@ -455,6 +487,6 @@ def _do_skips(cls):
if config._current not in all_configs:
_setup_config(all_configs.pop(), cls)
+
def _setup_config(config_obj, ctx):
config._current.push(config_obj, testing)
-
diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py
index 74d5cc083..11238bbac 100644
--- a/lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -4,6 +4,7 @@ import inspect
from . import plugin_base
import collections
+
def pytest_addoption(parser):
group = parser.getgroup("sqlalchemy")
@@ -11,7 +12,8 @@ def pytest_addoption(parser):
callback_ = kw.pop("callback", None)
if callback_:
class CallableAction(argparse.Action):
- def __call__(self, parser, namespace, values, option_string=None):
+ def __call__(self, parser, namespace,
+ values, option_string=None):
callback_(option_string, values, parser)
kw["action"] = CallableAction
@@ -20,10 +22,12 @@ def pytest_addoption(parser):
plugin_base.setup_options(make_option)
plugin_base.read_config()
+
def pytest_configure(config):
plugin_base.pre_begin(config.option)
- plugin_base.set_coverage_flag(bool(getattr(config.option, "cov_source", False)))
+ plugin_base.set_coverage_flag(bool(getattr(config.option,
+ "cov_source", False)))
plugin_base.post_begin()
@@ -42,12 +46,14 @@ def pytest_collection_modifyitems(session, config, items):
rebuilt_items = collections.defaultdict(list)
test_classes = set(item.parent for item in items)
for test_class in test_classes:
- for sub_cls in plugin_base.generate_sub_tests(test_class.cls, test_class.parent.module):
+ for sub_cls in plugin_base.generate_sub_tests(
+ test_class.cls, test_class.parent.module):
if sub_cls is not test_class.cls:
list_ = rebuilt_items[test_class.cls]
- for inst in pytest.Class(sub_cls.__name__,
- parent=test_class.parent.parent).collect():
+ for inst in pytest.Class(
+ sub_cls.__name__,
+ parent=test_class.parent.parent).collect():
list_.extend(inst.collect())
newitems = []
@@ -61,12 +67,10 @@ def pytest_collection_modifyitems(session, config, items):
# seems like the functions attached to a test class aren't sorted already?
# is that true and why's that? (when using unittest, they're sorted)
items[:] = sorted(newitems, key=lambda item: (
- item.parent.parent.parent.name,
- item.parent.parent.name,
- item.name
- )
- )
-
+ item.parent.parent.parent.name,
+ item.parent.parent.name,
+ item.name
+ ))
def pytest_pycollect_makeitem(collector, name, obj):
@@ -82,6 +86,7 @@ def pytest_pycollect_makeitem(collector, name, obj):
_current_class = None
+
def pytest_runtest_setup(item):
# here we seem to get called only based on what we collected
# in pytest_collection_modifyitems. So to do class-based stuff
@@ -100,10 +105,12 @@ def pytest_runtest_setup(item):
# this is needed for the class-level, to ensure that the
# teardown runs after the class is completed with its own
# class-level teardown...
- item.parent.parent.addfinalizer(lambda: class_teardown(item.parent.parent))
+ item.parent.parent.addfinalizer(
+ lambda: class_teardown(item.parent.parent))
test_setup(item)
+
def pytest_runtest_teardown(item):
# ...but this works better as the hook here rather than
# using a finalizer, as the finalizer seems to get in the way
@@ -111,15 +118,19 @@ def pytest_runtest_teardown(item):
# py.test assertion stuff instead)
test_teardown(item)
+
def test_setup(item):
- plugin_base.before_test(item,
- item.parent.module.__name__, item.parent.cls, item.name)
+ plugin_base.before_test(item, item.parent.module.__name__,
+ item.parent.cls, item.name)
+
def test_teardown(item):
plugin_base.after_test(item)
+
def class_setup(item):
plugin_base.start_test_class(item.cls)
+
def class_teardown(item):
plugin_base.stop_test_class(item.cls)
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index b818e4e15..75baec987 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -43,12 +43,12 @@ def profiled(target=None, **target_opts):
"""
profile_config = {'targets': set(),
- 'report': True,
- 'print_callers': False,
- 'print_callees': False,
- 'graphic': False,
- 'sort': ('time', 'calls'),
- 'limit': None}
+ 'report': True,
+ 'print_callers': False,
+ 'print_callees': False,
+ 'graphic': False,
+ 'sort': ('time', 'calls'),
+ 'limit': None}
if target is None:
target = 'anonymous_target'
@@ -67,7 +67,7 @@ def profiled(target=None, **target_opts):
limit = target_opts.get('limit', profile_config['limit'])
print(("Profile report for target '%s'" % (
target, )
- ))
+ ))
stats = load_stats()
stats.sort_stats(*sort_)
@@ -97,6 +97,7 @@ class ProfileStatsFile(object):
so no json lib :( need to roll something silly
"""
+
def __init__(self, filename):
self.write = (
config.options is not None and
@@ -177,19 +178,19 @@ class ProfileStatsFile(object):
def _header(self):
return \
- "# %s\n"\
- "# This file is written out on a per-environment basis.\n"\
- "# For each test in aaa_profiling, the corresponding function and \n"\
- "# environment is located within this file. If it doesn't exist,\n"\
- "# the test is skipped.\n"\
- "# If a callcount does exist, it is compared to what we received. \n"\
- "# assertions are raised if the counts do not match.\n"\
- "# \n"\
- "# To add a new callcount test, apply the function_call_count \n"\
- "# decorator and re-run the tests using the --write-profiles \n"\
- "# option - this file will be rewritten including the new count.\n"\
- "# \n"\
- "" % (self.fname)
+ "# %s\n"\
+ "# This file is written out on a per-environment basis.\n"\
+ "# For each test in aaa_profiling, the corresponding function and \n"\
+ "# environment is located within this file. If it doesn't exist,\n"\
+ "# the test is skipped.\n"\
+ "# If a callcount does exist, it is compared to what we received. \n"\
+ "# assertions are raised if the counts do not match.\n"\
+ "# \n"\
+ "# To add a new callcount test, apply the function_call_count \n"\
+ "# decorator and re-run the tests using the --write-profiles \n"\
+ "# option - this file will be rewritten including the new count.\n"\
+ "# \n"\
+ "" % (self.fname)
def _read(self):
try:
@@ -225,7 +226,6 @@ class ProfileStatsFile(object):
profile_f.close()
-
def function_call_count(variance=0.05):
"""Assert a target for a test case's function call count.
@@ -248,9 +248,9 @@ def function_call_count(variance=0.05):
# (not a great idea but we have these in test_zoomark)
fn(*args, **kw)
raise SkipTest("No profiling stats available on this "
- "platform for this function. Run tests with "
- "--write-profiles to add statistics to %s for "
- "this platform." % _profile_stats.short_fname)
+ "platform for this function. Run tests with "
+ "--write-profiles to add statistics to %s for "
+ "this platform." % _profile_stats.short_fname)
gc_collect()
@@ -267,12 +267,12 @@ def function_call_count(variance=0.05):
line_no, expected_count = expected
print(("Pstats calls: %d Expected %s" % (
- callcount,
- expected_count
- )
+ callcount,
+ expected_count
+ )
))
stats.print_stats()
- #stats.print_callers()
+ # stats.print_callers()
if expected_count:
deviance = int(callcount * variance)
@@ -287,8 +287,8 @@ def function_call_count(variance=0.05):
"of expected %s. Rerun with --write-profiles to "
"regenerate this callcount."
% (
- callcount, (variance * 100),
- expected_count))
+ callcount, (variance * 100),
+ expected_count))
return fn_result
return update_wrapper(wrap, fn)
return decorate
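
A hypothetical profiled test using the decorator above; do_work stands in for the code path under measurement, and the expected callcount lives in the per-environment profiles file:

    from sqlalchemy.testing import profiling

    class ATest(object):
        @profiling.function_call_count(variance=0.10)
        def test_hot_path(self):
            do_work()  # hypothetical code path under measurement
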
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index 59578ce7f..3413c0d30 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -21,6 +21,7 @@ from . import exclusions
class Requirements(object):
pass
+
class SuiteRequirements(Requirements):
@property
@@ -64,9 +65,9 @@ class SuiteRequirements(Requirements):
# somehow only_if([x, y]) isn't working here, negation/conjunctions
# getting confused.
return exclusions.only_if(
- lambda: self.on_update_cascade.enabled or self.deferrable_fks.enabled
- )
-
+ lambda: self.on_update_cascade.enabled or
+ self.deferrable_fks.enabled
+ )
@property
def self_referential_foreign_keys(self):
@@ -94,13 +95,17 @@ class SuiteRequirements(Requirements):
@property
def offset(self):
- """target database can render OFFSET, or an equivalent, in a SELECT."""
+ """target database can render OFFSET, or an equivalent, in a
+ SELECT.
+ """
return exclusions.open()
@property
def bound_limit_offset(self):
- """target database can render LIMIT and/or OFFSET using a bound parameter"""
+ """target database can render LIMIT and/or OFFSET using a bound
+ parameter
+ """
return exclusions.open()
@@ -159,17 +164,16 @@ class SuiteRequirements(Requirements):
return exclusions.open()
-
@property
def empty_inserts(self):
"""target platform supports INSERT with no values, i.e.
INSERT DEFAULT VALUES or equivalent."""
return exclusions.only_if(
- lambda config: config.db.dialect.supports_empty_insert or \
- config.db.dialect.supports_default_values,
- "empty inserts not supported"
- )
+ lambda config: config.db.dialect.supports_empty_insert or
+ config.db.dialect.supports_default_values,
+ "empty inserts not supported"
+ )
@property
def insert_from_select(self):
@@ -182,9 +186,9 @@ class SuiteRequirements(Requirements):
"""target platform supports RETURNING."""
return exclusions.only_if(
- lambda config: config.db.dialect.implicit_returning,
- "'returning' not supported by database"
- )
+ lambda config: config.db.dialect.implicit_returning,
+ "'returning' not supported by database"
+ )
@property
def duplicate_names_in_cursor_description(self):
@@ -199,9 +203,9 @@ class SuiteRequirements(Requirements):
UPPERCASE as case insensitive names."""
return exclusions.skip_if(
- lambda config: not config.db.dialect.requires_name_normalize,
- "Backend does not require denormalized names."
- )
+ lambda config: not config.db.dialect.requires_name_normalize,
+ "Backend does not require denormalized names."
+ )
@property
def multivalues_inserts(self):
@@ -209,10 +213,9 @@ class SuiteRequirements(Requirements):
INSERT statement."""
return exclusions.skip_if(
- lambda config: not config.db.dialect.supports_multivalues_insert,
- "Backend does not support multirow inserts."
- )
-
+ lambda config: not config.db.dialect.supports_multivalues_insert,
+ "Backend does not support multirow inserts."
+ )
@property
def implements_get_lastrowid(self):
@@ -260,8 +263,8 @@ class SuiteRequirements(Requirements):
"""Target database must support SEQUENCEs."""
return exclusions.only_if([
- lambda config: config.db.dialect.supports_sequences
- ], "no sequence support")
+ lambda config: config.db.dialect.supports_sequences
+ ], "no sequence support")
@property
def sequences_optional(self):
@@ -269,13 +272,9 @@ class SuiteRequirements(Requirements):
as a means of generating new PK values."""
return exclusions.only_if([
- lambda config: config.db.dialect.supports_sequences and \
- config.db.dialect.sequences_optional
- ], "no sequence support, or sequences not optional")
-
-
-
-
+ lambda config: config.db.dialect.supports_sequences and
+ config.db.dialect.sequences_optional
+ ], "no sequence support, or sequences not optional")
@property
def reflects_pk_names(self):
@@ -339,7 +338,9 @@ class SuiteRequirements(Requirements):
@property
def unicode_ddl(self):
- """Target driver must support some degree of non-ascii symbol names."""
+ """Target driver must support some degree of non-ascii symbol
+ names.
+ """
return exclusions.closed()
@property
@@ -531,7 +532,6 @@ class SuiteRequirements(Requirements):
return exclusions.closed()
-
@property
def update_from(self):
"""Target must support UPDATE..FROM syntax"""
@@ -587,7 +587,9 @@ class SuiteRequirements(Requirements):
@property
def unicode_connections(self):
- """Target driver must support non-ASCII characters being passed at all."""
+ """Target driver must support non-ASCII characters being passed at
+ all.
+ """
return exclusions.open()
@property
@@ -600,11 +602,12 @@ class SuiteRequirements(Requirements):
"""Test environment must allow ad-hoc engine/connection creation.
DBs that scale poorly for many connections, even when closed, i.e.
- Oracle, may use the "--low-connections" option which flags this requirement
- as not present.
+ Oracle, may use the "--low-connections" option which flags this
+ requirement as not present.
"""
- return exclusions.skip_if(lambda config: config.options.low_connections)
+ return exclusions.skip_if(
+ lambda config: config.options.low_connections)
def _has_mysql_on_windows(self, config):
return False
@@ -619,8 +622,8 @@ class SuiteRequirements(Requirements):
@property
def cextensions(self):
return exclusions.skip_if(
- lambda: not self._has_cextensions(), "C extensions not installed"
- )
+ lambda: not self._has_cextensions(), "C extensions not installed"
+ )
def _has_sqlite(self):
from sqlalchemy import create_engine
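
All of the requirements.py hunks re-wrap lambdas handed to
exclusions.only_if() / exclusions.skip_if(); each @property returns a rule
that is later evaluated against the test config. A stripped-down sketch of
that predicate pattern (only_if, FakeDialect and FakeConfig below are
stand-ins, not the real sqlalchemy.testing.exclusions API):

    class FakeDialect(object):
        implicit_returning = True

    class FakeConfig(object):
        class db(object):
            dialect = FakeDialect()

    def only_if(predicate, reason=""):
        # Package a config-level predicate with its skip reason.
        def check(config):
            return bool(predicate(config)), reason
        return check

    returning = only_if(
        lambda config: config.db.dialect.implicit_returning,
        "'returning' not supported by database")

    enabled, why = returning(FakeConfig())
    print(enabled)  # True
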
diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py
index d0c9afeeb..df254520b 100644
--- a/lib/sqlalchemy/testing/runner.py
+++ b/lib/sqlalchemy/testing/runner.py
@@ -38,6 +38,7 @@ import nose
def main():
nose.main(addplugins=[NoseSQLAlchemy()])
+
def setup_py_test():
"""Runner to use for the 'test_suite' entry of your setup.py.
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index 2e2a9b5ee..1cb356dd7 100644
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -72,7 +72,7 @@ def Column(*args, **kw):
col = schema.Column(*args, **kw)
if 'test_needs_autoincrement' in test_opts and \
- kw.get('primary_key', False):
+ kw.get('primary_key', False):
# allow any test suite to pick up on this
col.info['test_needs_autoincrement'] = True
@@ -83,19 +83,16 @@ def Column(*args, **kw):
def add_seq(c, tbl):
c._init_items(
schema.Sequence(_truncate_name(
- config.db.dialect, tbl.name + '_' + c.name + '_seq'),
+ config.db.dialect, tbl.name + '_' + c.name + '_seq'),
optional=True)
)
event.listen(col, 'after_parent_attach', add_seq, propagate=True)
return col
-
-
-
def _truncate_name(dialect, name):
if len(name) > dialect.max_identifier_length:
return name[0:max(dialect.max_identifier_length - 6, 0)] + \
- "_" + hex(hash(name) % 64)[2:]
+ "_" + hex(hash(name) % 64)[2:]
else:
return name
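
_truncate_name above keeps generated sequence names within a dialect's
max_identifier_length by chopping the name and appending a short hash
suffix so truncated names stay distinct. The same logic as a standalone
sketch:

    def truncate_name(name, max_length):
        # Reserve 6 characters at the end for "_" plus a short hex hash.
        if len(name) > max_length:
            return name[0:max(max_length - 6, 0)] + \
                "_" + hex(hash(name) % 64)[2:]
        return name

    print(truncate_name("a_very_long_generated_sequence_name_seq", 30))
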
diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py
index 2dca1443d..1d8010c8a 100644
--- a/lib/sqlalchemy/testing/suite/test_ddl.py
+++ b/lib/sqlalchemy/testing/suite/test_ddl.py
@@ -12,15 +12,17 @@ class TableDDLTest(fixtures.TestBase):
def _simple_fixture(self):
return Table('test_table', self.metadata,
- Column('id', Integer, primary_key=True, autoincrement=False),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ autoincrement=False),
+ Column('data', String(50))
+ )
def _underscore_fixture(self):
return Table('_test_table', self.metadata,
- Column('id', Integer, primary_key=True, autoincrement=False),
- Column('_data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ autoincrement=False),
+ Column('_data', String(50))
+ )
def _simple_roundtrip(self, table):
with config.db.begin() as conn:
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index 3444e15c8..92d3d93e5 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -21,15 +21,15 @@ class LastrowidTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('autoinc_pk', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50))
+ )
Table('manual_pk', metadata,
- Column('id', Integer, primary_key=True, autoincrement=False),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True, autoincrement=False),
+ Column('data', String(50))
+ )
def _assert_round_trip(self, table, conn):
row = conn.execute(table.select()).first()
@@ -59,8 +59,9 @@ class LastrowidTest(fixtures.TablesTest):
)
# failed on pypy1.9 but seems to be OK on pypy 2.1
- #@exclusions.fails_if(lambda: util.pypy, "lastrowid not maintained after "
- # "connection close")
+ # @exclusions.fails_if(lambda: util.pypy,
+ # "lastrowid not maintained after "
+ # "connection close")
@requirements.dbapi_lastrowid
def test_native_lastrowid_autoinc(self):
r = config.db.execute(
@@ -81,19 +82,19 @@ class InsertBehaviorTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('autoinc_pk', metadata,
- Column('id', Integer, primary_key=True, \
- test_needs_autoincrement=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50))
+ )
Table('manual_pk', metadata,
- Column('id', Integer, primary_key=True, autoincrement=False),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True, autoincrement=False),
+ Column('data', String(50))
+ )
def test_autoclose_on_insert(self):
if requirements.returning.enabled:
engine = engines.testing_engine(
- options={'implicit_returning': False})
+ options={'implicit_returning': False})
else:
engine = config.db
@@ -119,12 +120,12 @@ class InsertBehaviorTest(fixtures.TablesTest):
def test_empty_insert(self):
r = config.db.execute(
self.tables.autoinc_pk.insert(),
- )
+ )
assert r.closed
r = config.db.execute(
- self.tables.autoinc_pk.select().\
- where(self.tables.autoinc_pk.c.id != None)
+ self.tables.autoinc_pk.select().
+ where(self.tables.autoinc_pk.c.id != None)
)
assert len(r.fetchall())
@@ -133,21 +134,20 @@ class InsertBehaviorTest(fixtures.TablesTest):
def test_insert_from_select(self):
table = self.tables.manual_pk
config.db.execute(
- table.insert(),
- [
- dict(id=1, data="data1"),
- dict(id=2, data="data2"),
- dict(id=3, data="data3"),
- ]
+ table.insert(),
+ [
+ dict(id=1, data="data1"),
+ dict(id=2, data="data2"),
+ dict(id=3, data="data3"),
+ ]
)
-
config.db.execute(
- table.insert(inline=True).
- from_select(
- ("id", "data",), select([table.c.id + 5, table.c.data]).where(
- table.c.data.in_(["data2", "data3"]))
- ),
+ table.insert(inline=True).
+ from_select(("id", "data",),
+ select([table.c.id + 5, table.c.data]).
+ where(table.c.data.in_(["data2", "data3"]))
+ ),
)
eq_(
@@ -158,6 +158,7 @@ class InsertBehaviorTest(fixtures.TablesTest):
("data3", ), ("data3", )]
)
+
class ReturningTest(fixtures.TablesTest):
run_create_tables = 'each'
__requires__ = 'returning', 'autoincrement_insert'
@@ -175,10 +176,10 @@ class ReturningTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('autoinc_pk', metadata,
- Column('id', Integer, primary_key=True, \
- test_needs_autoincrement=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('data', String(50))
+ )
@requirements.fetch_rows_post_commit
def test_explicit_returning_pk_autocommit(self):
@@ -186,7 +187,7 @@ class ReturningTest(fixtures.TablesTest):
table = self.tables.autoinc_pk
r = engine.execute(
table.insert().returning(
- table.c.id),
+ table.c.id),
data="some data"
)
pk = r.first()[0]
@@ -199,7 +200,7 @@ class ReturningTest(fixtures.TablesTest):
with engine.begin() as conn:
r = conn.execute(
table.insert().returning(
- table.c.id),
+ table.c.id),
data="some data"
)
pk = r.first()[0]
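
test_insert_from_select above is only re-wrapped; the statement itself is
unchanged. A runnable sketch of the same INSERT ... FROM SELECT round trip
against in-memory SQLite, in the pre-1.0 API style this patch targets
(engine.execute(), list-form select()):

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, String, select)

    engine = create_engine("sqlite://")
    metadata = MetaData()
    t = Table("manual_pk", metadata,
              Column("id", Integer, primary_key=True, autoincrement=False),
              Column("data", String(50)))
    metadata.create_all(engine)

    engine.execute(t.insert(), [{"id": 1, "data": "data1"},
                                {"id": 2, "data": "data2"}])
    # Copy matching rows back into the same table with shifted ids.
    engine.execute(
        t.insert(inline=True).from_select(
            ("id", "data"),
            select([t.c.id + 5, t.c.data]).where(t.c.data == "data2")))
    print(engine.execute(
        select([t.c.id, t.c.data]).order_by(t.c.id)).fetchall())
    # [(1, 'data1'), (2, 'data2'), (7, 'data2')]
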
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 762c9955c..7cc5fd160 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -24,9 +24,9 @@ class HasTableTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('test_table', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50))
+ )
def test_has_table(self):
with config.db.begin() as conn:
@@ -34,8 +34,6 @@ class HasTableTest(fixtures.TablesTest):
assert not config.db.dialect.has_table(conn, "nonexistent_table")
-
-
class ComponentReflectionTest(fixtures.TablesTest):
run_inserts = run_deletes = None
@@ -56,41 +54,42 @@ class ComponentReflectionTest(fixtures.TablesTest):
if testing.requires.self_referential_foreign_keys.enabled:
users = Table('users', metadata,
- Column('user_id', sa.INT, primary_key=True),
- Column('test1', sa.CHAR(5), nullable=False),
- Column('test2', sa.Float(5), nullable=False),
- Column('parent_user_id', sa.Integer,
- sa.ForeignKey('%susers.user_id' % schema_prefix)),
- schema=schema,
- test_needs_fk=True,
- )
+ Column('user_id', sa.INT, primary_key=True),
+ Column('test1', sa.CHAR(5), nullable=False),
+ Column('test2', sa.Float(5), nullable=False),
+ Column('parent_user_id', sa.Integer,
+ sa.ForeignKey('%susers.user_id' %
+ schema_prefix)),
+ schema=schema,
+ test_needs_fk=True,
+ )
else:
users = Table('users', metadata,
- Column('user_id', sa.INT, primary_key=True),
- Column('test1', sa.CHAR(5), nullable=False),
- Column('test2', sa.Float(5), nullable=False),
- schema=schema,
- test_needs_fk=True,
- )
+ Column('user_id', sa.INT, primary_key=True),
+ Column('test1', sa.CHAR(5), nullable=False),
+ Column('test2', sa.Float(5), nullable=False),
+ schema=schema,
+ test_needs_fk=True,
+ )
Table("dingalings", metadata,
- Column('dingaling_id', sa.Integer, primary_key=True),
- Column('address_id', sa.Integer,
- sa.ForeignKey('%semail_addresses.address_id' %
- schema_prefix)),
- Column('data', sa.String(30)),
- schema=schema,
- test_needs_fk=True,
- )
+ Column('dingaling_id', sa.Integer, primary_key=True),
+ Column('address_id', sa.Integer,
+ sa.ForeignKey('%semail_addresses.address_id' %
+ schema_prefix)),
+ Column('data', sa.String(30)),
+ schema=schema,
+ test_needs_fk=True,
+ )
Table('email_addresses', metadata,
- Column('address_id', sa.Integer),
- Column('remote_user_id', sa.Integer,
- sa.ForeignKey(users.c.user_id)),
- Column('email_address', sa.String(20)),
- sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'),
- schema=schema,
- test_needs_fk=True,
- )
+ Column('address_id', sa.Integer),
+ Column('remote_user_id', sa.Integer,
+ sa.ForeignKey(users.c.user_id)),
+ Column('email_address', sa.String(20)),
+ sa.PrimaryKeyConstraint('address_id', name='email_ad_pk'),
+ schema=schema,
+ test_needs_fk=True,
+ )
if testing.requires.index_reflection.enabled:
cls.define_index(metadata, users)
@@ -110,7 +109,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
fullname = "%s.%s" % (schema, table_name)
view_name = fullname + '_v'
query = "CREATE VIEW %s AS SELECT * FROM %s" % (
- view_name, fullname)
+ view_name, fullname)
event.listen(
metadata,
@@ -146,7 +145,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
order_by=None):
meta = self.metadata
users, addresses, dingalings = self.tables.users, \
- self.tables.email_addresses, self.tables.dingalings
+ self.tables.email_addresses, self.tables.dingalings
insp = inspect(meta.bind)
if table_type == 'view':
table_names = insp.get_view_names(schema)
@@ -195,13 +194,13 @@ class ComponentReflectionTest(fixtures.TablesTest):
def _test_get_columns(self, schema=None, table_type='table'):
meta = MetaData(testing.db)
users, addresses, dingalings = self.tables.users, \
- self.tables.email_addresses, self.tables.dingalings
+ self.tables.email_addresses, self.tables.dingalings
table_names = ['users', 'email_addresses']
if table_type == 'view':
table_names = ['users_v', 'email_addresses_v']
insp = inspect(meta.bind)
for table_name, table in zip(table_names, (users,
- addresses)):
+ addresses)):
schema_name = schema
cols = insp.get_columns(table_name, schema=schema_name)
self.assert_(len(cols) > 0, len(cols))
@@ -218,23 +217,24 @@ class ComponentReflectionTest(fixtures.TablesTest):
# Oracle returns Date for DateTime.
if testing.against('oracle') and ctype_def \
- in (sql_types.Date, sql_types.DateTime):
+ in (sql_types.Date, sql_types.DateTime):
ctype_def = sql_types.Date
# assert that the desired type and return type share
# a base within one of the generic types.
self.assert_(len(set(ctype.__mro__).
- intersection(ctype_def.__mro__).intersection([
- sql_types.Integer,
- sql_types.Numeric,
- sql_types.DateTime,
- sql_types.Date,
- sql_types.Time,
- sql_types.String,
- sql_types._Binary,
- ])) > 0, '%s(%s), %s(%s)' % (col.name,
- col.type, cols[i]['name'], ctype))
+ intersection(ctype_def.__mro__).
+ intersection([
+ sql_types.Integer,
+ sql_types.Numeric,
+ sql_types.DateTime,
+ sql_types.Date,
+ sql_types.Time,
+ sql_types.String,
+ sql_types._Binary,
+ ])) > 0, '%s(%s), %s(%s)' %
+ (col.name, col.type, cols[i]['name'], ctype))
if not col.primary_key:
assert cols[i]['default'] is None
@@ -246,11 +246,11 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.provide_metadata
def _type_round_trip(self, *types):
t = Table('t', self.metadata,
- *[
- Column('t%d' % i, type_)
- for i, type_ in enumerate(types)
- ]
- )
+ *[
+ Column('t%d' % i, type_)
+ for i, type_ in enumerate(types)
+ ]
+ )
t.create()
return [
@@ -261,8 +261,8 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.requires.table_reflection
def test_numeric_reflection(self):
for typ in self._type_round_trip(
- sql_types.Numeric(18, 5),
- ):
+ sql_types.Numeric(18, 5),
+ ):
assert isinstance(typ, sql_types.Numeric)
eq_(typ.precision, 18)
eq_(typ.scale, 5)
@@ -277,8 +277,8 @@ class ComponentReflectionTest(fixtures.TablesTest):
@testing.provide_metadata
def test_nullable_reflection(self):
t = Table('t', self.metadata,
- Column('a', Integer, nullable=True),
- Column('b', Integer, nullable=False))
+ Column('a', Integer, nullable=True),
+ Column('b', Integer, nullable=False))
t.create()
eq_(
dict(
@@ -288,7 +288,6 @@ class ComponentReflectionTest(fixtures.TablesTest):
{"a": True, "b": False}
)
-
@testing.requires.table_reflection
@testing.requires.schemas
def test_get_columns_with_schema(self):
@@ -311,11 +310,11 @@ class ComponentReflectionTest(fixtures.TablesTest):
users_cons = insp.get_pk_constraint(users.name, schema=schema)
users_pkeys = users_cons['constrained_columns']
- eq_(users_pkeys, ['user_id'])
+ eq_(users_pkeys, ['user_id'])
addr_cons = insp.get_pk_constraint(addresses.name, schema=schema)
addr_pkeys = addr_cons['constrained_columns']
- eq_(addr_pkeys, ['address_id'])
+ eq_(addr_pkeys, ['address_id'])
with testing.requires.reflects_pk_names.fail_if():
eq_(addr_cons['name'], 'email_ad_pk')
@@ -347,7 +346,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
def _test_get_foreign_keys(self, schema=None):
meta = self.metadata
users, addresses, dingalings = self.tables.users, \
- self.tables.email_addresses, self.tables.dingalings
+ self.tables.email_addresses, self.tables.dingalings
insp = inspect(meta.bind)
expected_schema = schema
# users
@@ -366,7 +365,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
if testing.requires.self_referential_foreign_keys.enabled:
eq_(fkey1['constrained_columns'], ['parent_user_id'])
- #addresses
+ # addresses
addr_fkeys = insp.get_foreign_keys(addresses.name,
schema=schema)
fkey1 = addr_fkeys[0]
@@ -392,7 +391,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
def _test_get_indexes(self, schema=None):
meta = self.metadata
users, addresses, dingalings = self.tables.users, \
- self.tables.email_addresses, self.tables.dingalings
+ self.tables.email_addresses, self.tables.dingalings
# The database may decide to create indexes for foreign keys, etc.
# so there may be more indexes than expected.
insp = inspect(meta.bind)
@@ -421,7 +420,6 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_indexes_with_schema(self):
self._test_get_indexes(schema='test_schema')
-
@testing.requires.unique_constraint_reflection
def test_get_unique_constraints(self):
self._test_get_unique_constraints()
@@ -468,12 +466,11 @@ class ComponentReflectionTest(fixtures.TablesTest):
for orig, refl in zip(uniques, reflected):
eq_(orig, refl)
-
@testing.provide_metadata
def _test_get_view_definition(self, schema=None):
meta = self.metadata
users, addresses, dingalings = self.tables.users, \
- self.tables.email_addresses, self.tables.dingalings
+ self.tables.email_addresses, self.tables.dingalings
view_name1 = 'users_v'
view_name2 = 'email_addresses_v'
insp = inspect(meta.bind)
@@ -496,7 +493,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
def _test_get_table_oid(self, table_name, schema=None):
meta = self.metadata
users, addresses, dingalings = self.tables.users, \
- self.tables.email_addresses, self.tables.dingalings
+ self.tables.email_addresses, self.tables.dingalings
insp = inspect(meta.bind)
oid = insp.get_table_oid(table_name, schema)
self.assert_(isinstance(oid, int))
@@ -527,14 +524,13 @@ class ComponentReflectionTest(fixtures.TablesTest):
insp = inspect(meta.bind)
for tname, cname in [
- ('users', 'user_id'),
- ('email_addresses', 'address_id'),
- ('dingalings', 'dingaling_id'),
- ]:
+ ('users', 'user_id'),
+ ('email_addresses', 'address_id'),
+ ('dingalings', 'dingaling_id'),
+ ]:
cols = insp.get_columns(tname)
id_ = dict((c['name'], c) for c in cols)[cname]
assert id_.get('autoincrement', True)
-
__all__ = ('ComponentReflectionTest', 'HasTableTest')
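
Every reflection test above funnels through inspect(); the hunks change
indentation only. A compact illustration of the inspector calls being
exercised (get_table_names, get_columns, get_pk_constraint), against
in-memory SQLite:

    from sqlalchemy import (create_engine, inspect, MetaData, Table,
                            Column, Integer, String)

    engine = create_engine("sqlite://")
    m = MetaData()
    Table("users", m,
          Column("user_id", Integer, primary_key=True),
          Column("test1", String(5)))
    m.create_all(engine)

    insp = inspect(engine)
    print(insp.get_table_names())                  # ['users']
    print([c["name"] for c in insp.get_columns("users")])
    # ['user_id', 'test1']
    print(insp.get_pk_constraint("users")["constrained_columns"])
    # ['user_id']
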
diff --git a/lib/sqlalchemy/testing/suite/test_results.py b/lib/sqlalchemy/testing/suite/test_results.py
index 2fdab4d17..9ffaa6e04 100644
--- a/lib/sqlalchemy/testing/suite/test_results.py
+++ b/lib/sqlalchemy/testing/suite/test_results.py
@@ -15,13 +15,13 @@ class RowFetchTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('plain_pk', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50))
+ )
Table('has_dates', metadata,
- Column('id', Integer, primary_key=True),
- Column('today', DateTime)
- )
+ Column('id', Integer, primary_key=True),
+ Column('today', DateTime)
+ )
@classmethod
def insert_data(cls):
@@ -43,9 +43,9 @@ class RowFetchTest(fixtures.TablesTest):
def test_via_string(self):
row = config.db.execute(
- self.tables.plain_pk.select().\
- order_by(self.tables.plain_pk.c.id)
- ).first()
+ self.tables.plain_pk.select().
+ order_by(self.tables.plain_pk.c.id)
+ ).first()
eq_(
row['id'], 1
@@ -56,9 +56,9 @@ class RowFetchTest(fixtures.TablesTest):
def test_via_int(self):
row = config.db.execute(
- self.tables.plain_pk.select().\
- order_by(self.tables.plain_pk.c.id)
- ).first()
+ self.tables.plain_pk.select().
+ order_by(self.tables.plain_pk.c.id)
+ ).first()
eq_(
row[0], 1
@@ -69,9 +69,9 @@ class RowFetchTest(fixtures.TablesTest):
def test_via_col_object(self):
row = config.db.execute(
- self.tables.plain_pk.select().\
- order_by(self.tables.plain_pk.c.id)
- ).first()
+ self.tables.plain_pk.select().
+ order_by(self.tables.plain_pk.c.id)
+ ).first()
eq_(
row[self.tables.plain_pk.c.id], 1
@@ -83,15 +83,14 @@ class RowFetchTest(fixtures.TablesTest):
@requirements.duplicate_names_in_cursor_description
def test_row_with_dupe_names(self):
result = config.db.execute(
- select([self.tables.plain_pk.c.data,
- self.tables.plain_pk.c.data.label('data')]).\
- order_by(self.tables.plain_pk.c.id)
- )
+ select([self.tables.plain_pk.c.data,
+ self.tables.plain_pk.c.data.label('data')]).
+ order_by(self.tables.plain_pk.c.id)
+ )
row = result.first()
eq_(result.keys(), ['data', 'data'])
eq_(row, ('d1', 'd1'))
-
def test_row_w_scalar_select(self):
"""test that a scalar select as a column is returned as such
and that type conversion works OK.
@@ -124,12 +123,13 @@ class PercentSchemaNamesTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
cls.tables.percent_table = Table('percent%table', metadata,
- Column("percent%", Integer),
- Column("spaces % more spaces", Integer),
- )
- cls.tables.lightweight_percent_table = sql.table('percent%table',
- sql.column("percent%"),
- sql.column("spaces % more spaces"),
+ Column("percent%", Integer),
+ Column(
+ "spaces % more spaces", Integer),
+ )
+ cls.tables.lightweight_percent_table = sql.table(
+ 'percent%table', sql.column("percent%"),
+ sql.column("spaces % more spaces")
)
def test_single_roundtrip(self):
@@ -152,8 +152,8 @@ class PercentSchemaNamesTest(fixtures.TablesTest):
config.db.execute(
percent_table.insert(),
[{'percent%': 7, 'spaces % more spaces': 11},
- {'percent%': 9, 'spaces % more spaces': 10},
- {'percent%': 11, 'spaces % more spaces': 9}]
+ {'percent%': 9, 'spaces % more spaces': 10},
+ {'percent%': 11, 'spaces % more spaces': 9}]
)
self._assert_table()
@@ -162,10 +162,10 @@ class PercentSchemaNamesTest(fixtures.TablesTest):
lightweight_percent_table = self.tables.lightweight_percent_table
for table in (
- percent_table,
- percent_table.alias(),
- lightweight_percent_table,
- lightweight_percent_table.alias()):
+ percent_table,
+ percent_table.alias(),
+ lightweight_percent_table,
+ lightweight_percent_table.alias()):
eq_(
list(
config.db.execute(
@@ -184,18 +184,18 @@ class PercentSchemaNamesTest(fixtures.TablesTest):
list(
config.db.execute(
table.select().
- where(table.c['spaces % more spaces'].in_([9, 10])).
- order_by(table.c['percent%']),
+ where(table.c['spaces % more spaces'].in_([9, 10])).
+ order_by(table.c['percent%']),
)
),
- [
- (9, 10),
- (11, 9)
- ]
+ [
+ (9, 10),
+ (11, 9)
+ ]
)
- row = config.db.execute(table.select().\
- order_by(table.c['percent%'])).first()
+ row = config.db.execute(table.select().
+ order_by(table.c['percent%'])).first()
eq_(row['percent%'], 5)
eq_(row['spaces % more spaces'], 12)
@@ -211,9 +211,9 @@ class PercentSchemaNamesTest(fixtures.TablesTest):
eq_(
list(
config.db.execute(
- percent_table.\
- select().\
- order_by(percent_table.c['percent%'])
+ percent_table.
+ select().
+ order_by(percent_table.c['percent%'])
)
),
[(5, 15), (7, 15), (9, 15), (11, 15)]
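
PercentSchemaNamesTest guards against literal percent signs in table and
column names being eaten by DBAPIs whose paramstyle is %s-based. A small
round trip in the same style (SQLite's qmark paramstyle does not stress the
escaping, but the API usage is identical):

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    engine = create_engine("sqlite://")
    m = MetaData()
    pct = Table("percent%table", m,
                Column("percent%", Integer),
                Column("spaces % more spaces", Integer))
    m.create_all(engine)

    engine.execute(pct.insert(),
                   [{"percent%": 5, "spaces % more spaces": 12}])
    row = engine.execute(
        pct.select().order_by(pct.c["percent%"])).first()
    print(row["percent%"], row["spaces % more spaces"])  # 5 12
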
diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py
index 3461b1e94..3f14ada05 100644
--- a/lib/sqlalchemy/testing/suite/test_select.py
+++ b/lib/sqlalchemy/testing/suite/test_select.py
@@ -21,12 +21,12 @@ class OrderByLabelTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table("some_table", metadata,
- Column('id', Integer, primary_key=True),
- Column('x', Integer),
- Column('y', Integer),
- Column('q', String(50)),
- Column('p', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer),
+ Column('q', String(50)),
+ Column('p', String(50))
+ )
@classmethod
def insert_data(cls):
@@ -86,15 +86,16 @@ class OrderByLabelTest(fixtures.TablesTest):
[(7, ), (5, ), (3, )]
)
+
class LimitOffsetTest(fixtures.TablesTest):
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table("some_table", metadata,
- Column('id', Integer, primary_key=True),
- Column('x', Integer),
- Column('y', Integer))
+ Column('id', Integer, primary_key=True),
+ Column('x', Integer),
+ Column('y', Integer))
@classmethod
def insert_data(cls):
@@ -157,8 +158,8 @@ class LimitOffsetTest(fixtures.TablesTest):
def test_bound_limit_offset(self):
table = self.tables.some_table
self._assert_result(
- select([table]).order_by(table.c.id).\
- limit(bindparam("l")).offset(bindparam("o")),
+ select([table]).order_by(table.c.id).
+ limit(bindparam("l")).offset(bindparam("o")),
[(2, 2, 3), (3, 3, 4)],
params={"l": 2, "o": 1}
)
diff --git a/lib/sqlalchemy/testing/suite/test_sequence.py b/lib/sqlalchemy/testing/suite/test_sequence.py
index 6bc2822fc..bbb4ba65c 100644
--- a/lib/sqlalchemy/testing/suite/test_sequence.py
+++ b/lib/sqlalchemy/testing/suite/test_sequence.py
@@ -7,6 +7,7 @@ from ... import Integer, String, Sequence, schema
from ..schema import Table, Column
+
class SequenceTest(fixtures.TablesTest):
__requires__ = ('sequences',)
__backend__ = True
@@ -16,15 +17,15 @@ class SequenceTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('seq_pk', metadata,
- Column('id', Integer, Sequence('tab_id_seq'), primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, Sequence('tab_id_seq'), primary_key=True),
+ Column('data', String(50))
+ )
Table('seq_opt_pk', metadata,
- Column('id', Integer, Sequence('tab_id_seq', optional=True),
- primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, Sequence('tab_id_seq', optional=True),
+ primary_key=True),
+ Column('data', String(50))
+ )
def test_insert_roundtrip(self):
config.db.execute(
@@ -62,7 +63,6 @@ class SequenceTest(fixtures.TablesTest):
[1]
)
-
def _assert_round_trip(self, table, conn):
row = conn.execute(table.select()).first()
eq_(
@@ -80,7 +80,7 @@ class HasSequenceTest(fixtures.TestBase):
testing.db.execute(schema.CreateSequence(s1))
try:
eq_(testing.db.dialect.has_sequence(testing.db,
- 'user_id_seq'), True)
+ 'user_id_seq'), True)
finally:
testing.db.execute(schema.DropSequence(s1))
@@ -89,8 +89,8 @@ class HasSequenceTest(fixtures.TestBase):
s1 = Sequence('user_id_seq', schema="test_schema")
testing.db.execute(schema.CreateSequence(s1))
try:
- eq_(testing.db.dialect.has_sequence(testing.db,
- 'user_id_seq', schema="test_schema"), True)
+ eq_(testing.db.dialect.has_sequence(
+ testing.db, 'user_id_seq', schema="test_schema"), True)
finally:
testing.db.execute(schema.DropSequence(s1))
@@ -101,7 +101,7 @@ class HasSequenceTest(fixtures.TestBase):
@testing.requires.schemas
def test_has_sequence_schemas_neg(self):
eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq',
- schema="test_schema"),
+ schema="test_schema"),
False)
@testing.requires.schemas
@@ -110,7 +110,7 @@ class HasSequenceTest(fixtures.TestBase):
testing.db.execute(schema.CreateSequence(s1))
try:
eq_(testing.db.dialect.has_sequence(testing.db, 'user_id_seq',
- schema="test_schema"),
+ schema="test_schema"),
False)
finally:
testing.db.execute(schema.DropSequence(s1))
@@ -124,5 +124,3 @@ class HasSequenceTest(fixtures.TestBase):
False)
finally:
testing.db.execute(schema.DropSequence(s1))
-
-
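
HasSequenceTest drives dialect.has_sequence() directly, with and without an
explicit schema. The same check stated standalone; this assumes a
PostgreSQL server reachable at the illustrative URL below, since SQLite has
no sequence support:

    from sqlalchemy import create_engine, Sequence, schema

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    s1 = Sequence("user_id_seq")
    engine.execute(schema.CreateSequence(s1))
    try:
        print(engine.dialect.has_sequence(engine, "user_id_seq"))  # True
    finally:
        engine.execute(schema.DropSequence(s1))
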
diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py
index 3a5134c96..230aeb1e9 100644
--- a/lib/sqlalchemy/testing/suite/test_types.py
+++ b/lib/sqlalchemy/testing/suite/test_types.py
@@ -5,7 +5,7 @@ from ..assertions import eq_
from ..config import requirements
from sqlalchemy import Integer, Unicode, UnicodeText, select
from sqlalchemy import Date, DateTime, Time, MetaData, String, \
- Text, Numeric, Float, literal, Boolean
+ Text, Numeric, Float, literal, Boolean
from ..schema import Table, Column
from ... import testing
import decimal
@@ -28,9 +28,9 @@ class _LiteralRoundTripFixture(object):
for value in input_:
ins = t.insert().values(x=literal(value)).compile(
- dialect=testing.db.dialect,
- compile_kwargs=dict(literal_binds=True)
- )
+ dialect=testing.db.dialect,
+ compile_kwargs=dict(literal_binds=True)
+ )
testing.db.execute(ins)
for row in t.select().execute():
@@ -43,17 +43,17 @@ class _LiteralRoundTripFixture(object):
class _UnicodeFixture(_LiteralRoundTripFixture):
__requires__ = 'unicode_data',
- data = u("Alors vous imaginez ma surprise, au lever du jour, "\
- "quand une drôle de petite voix m’a réveillé. Elle "\
- "disait: « S’il vous plaît… dessine-moi un mouton! »")
+ data = u("Alors vous imaginez ma surprise, au lever du jour, "
+ "quand une drôle de petite voix m’a réveillé. Elle "
+ "disait: « S’il vous plaît… dessine-moi un mouton! »")
@classmethod
def define_tables(cls, metadata):
Table('unicode_table', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('unicode_data', cls.datatype),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('unicode_data', cls.datatype),
+ )
def test_round_trip(self):
unicode_table = self.tables.unicode_table
@@ -66,10 +66,10 @@ class _UnicodeFixture(_LiteralRoundTripFixture):
)
row = config.db.execute(
- select([
- unicode_table.c.unicode_data,
- ])
- ).first()
+ select([
+ unicode_table.c.unicode_data,
+ ])
+ ).first()
eq_(
row,
@@ -91,10 +91,10 @@ class _UnicodeFixture(_LiteralRoundTripFixture):
)
rows = config.db.execute(
- select([
- unicode_table.c.unicode_data,
- ])
- ).fetchall()
+ select([
+ unicode_table.c.unicode_data,
+ ])
+ ).fetchall()
eq_(
rows,
[(self.data, ) for i in range(3)]
@@ -110,8 +110,8 @@ class _UnicodeFixture(_LiteralRoundTripFixture):
{"unicode_data": u('')}
)
row = config.db.execute(
- select([unicode_table.c.unicode_data])
- ).first()
+ select([unicode_table.c.unicode_data])
+ ).first()
eq_(row, (u(''),))
def test_literal(self):
@@ -139,6 +139,7 @@ class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest):
def test_empty_strings_text(self):
self._test_empty_strings()
+
class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
__requires__ = 'text_type',
__backend__ = True
@@ -146,10 +147,10 @@ class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('text_table', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('text_data', Text),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('text_data', Text),
+ )
def test_text_roundtrip(self):
text_table = self.tables.text_table
@@ -159,8 +160,8 @@ class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
{"text_data": 'some text'}
)
row = config.db.execute(
- select([text_table.c.text_data])
- ).first()
+ select([text_table.c.text_data])
+ ).first()
eq_(row, ('some text',))
def test_text_empty_strings(self):
@@ -171,8 +172,8 @@ class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
{"text_data": ''}
)
row = config.db.execute(
- select([text_table.c.text_data])
- ).first()
+ select([text_table.c.text_data])
+ ).first()
eq_(row, ('',))
def test_literal(self):
@@ -186,6 +187,7 @@ class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
data = r'backslash one \ backslash two \\ end'
self._literal_round_trip(Text, [data], [data])
+
class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
__backend__ = True
@@ -194,7 +196,7 @@ class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
metadata = MetaData()
foo = Table('foo', metadata,
Column('one', String)
- )
+ )
foo.create(config.db)
foo.drop(config.db)
@@ -217,10 +219,10 @@ class _DateFixture(_LiteralRoundTripFixture):
@classmethod
def define_tables(cls, metadata):
Table('date_table', metadata,
- Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('date_data', cls.datatype),
- )
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('date_data', cls.datatype),
+ )
def test_round_trip(self):
date_table = self.tables.date_table
@@ -231,10 +233,10 @@ class _DateFixture(_LiteralRoundTripFixture):
)
row = config.db.execute(
- select([
- date_table.c.date_data,
- ])
- ).first()
+ select([
+ date_table.c.date_data,
+ ])
+ ).first()
compare = self.compare or self.data
eq_(row,
@@ -250,10 +252,10 @@ class _DateFixture(_LiteralRoundTripFixture):
)
row = config.db.execute(
- select([
- date_table.c.date_data,
- ])
- ).first()
+ select([
+ date_table.c.date_data,
+ ])
+ ).first()
eq_(row, (None,))
@testing.requires.datetime_literals
@@ -262,7 +264,6 @@ class _DateFixture(_LiteralRoundTripFixture):
self._literal_round_trip(self.datatype, [self.data], [compare])
-
class DateTimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime',
__backend__ = True
@@ -322,19 +323,22 @@ class DateHistoricTest(_DateFixture, fixtures.TablesTest):
class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
__backend__ = True
+
def test_literal(self):
self._literal_round_trip(Integer, [5], [5])
+
class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
__backend__ = True
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
@testing.provide_metadata
- def _do_test(self, type_, input_, output, filter_=None, check_scale=False):
+ def _do_test(self, type_, input_, output,
+ filter_=None, check_scale=False):
metadata = self.metadata
t = Table('t', metadata, Column('x', type_))
t.create()
- t.insert().execute([{'x':x} for x in input_])
+ t.insert().execute([{'x': x} for x in input_])
result = set([row[0] for row in t.select().execute()])
output = set(output)
@@ -348,7 +352,6 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
[str(x) for x in output],
)
-
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
def test_render_literal_numeric(self):
self._literal_round_trip(
@@ -369,17 +372,16 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
self._literal_round_trip(
Float(4),
[15.7563, decimal.Decimal("15.7563")],
- [15.7563,],
+ [15.7563, ],
filter_=lambda n: n is not None and round(n, 5) or None
)
-
@testing.requires.precision_generic_float_type
def test_float_custom_scale(self):
self._do_test(
Float(None, decimal_return_scale=7, asdecimal=True),
[15.7563827, decimal.Decimal("15.7563827")],
- [decimal.Decimal("15.7563827"),],
+ [decimal.Decimal("15.7563827"), ],
check_scale=True
)
@@ -421,7 +423,6 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
[decimal.Decimal("15.7563"), None],
)
-
def test_float_as_float(self):
self._do_test(
Float(precision=8),
@@ -430,7 +431,6 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
filter_=lambda n: n is not None and round(n, 5) or None
)
-
@testing.requires.precision_numerics_general
def test_precision_decimal(self):
numbers = set([
@@ -445,7 +445,6 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
numbers,
)
-
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal(self):
"""test exceedingly small decimals.
@@ -475,7 +474,6 @@ class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
numbers
)
-
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal_large(self):
"""test exceedingly large decimals.
@@ -526,10 +524,10 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('boolean_table', metadata,
- Column('id', Integer, primary_key=True, autoincrement=False),
- Column('value', Boolean),
- Column('unconstrained_value', Boolean(create_constraint=False)),
- )
+ Column('id', Integer, primary_key=True, autoincrement=False),
+ Column('value', Boolean),
+ Column('unconstrained_value', Boolean(create_constraint=False)),
+ )
def test_render_literal_bool(self):
self._literal_round_trip(
@@ -551,11 +549,11 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest):
)
row = config.db.execute(
- select([
- boolean_table.c.value,
- boolean_table.c.unconstrained_value
- ])
- ).first()
+ select([
+ boolean_table.c.value,
+ boolean_table.c.unconstrained_value
+ ])
+ ).first()
eq_(
row,
@@ -576,11 +574,11 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest):
)
row = config.db.execute(
- select([
- boolean_table.c.value,
- boolean_table.c.unconstrained_value
- ])
- ).first()
+ select([
+ boolean_table.c.value,
+ boolean_table.c.unconstrained_value
+ ])
+ ).first()
eq_(
row,
@@ -588,11 +586,9 @@ class BooleanTest(_LiteralRoundTripFixture, fixtures.TablesTest):
)
-
-
__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest',
- 'DateTest', 'DateTimeTest', 'TextTest',
- 'NumericTest', 'IntegerTest',
- 'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest',
- 'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest',
- 'DateHistoricTest', 'StringTest', 'BooleanTest')
+ 'DateTest', 'DateTimeTest', 'TextTest',
+ 'NumericTest', 'IntegerTest',
+ 'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest',
+ 'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest',
+ 'DateHistoricTest', 'StringTest', 'BooleanTest')
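
_LiteralRoundTripFixture near the top of this file compiles INSERT
statements with literal_binds=True so typed values render inline instead of
as bind parameters, letting each type's literal processor be verified. The
core of that compile step in isolation:

    from sqlalchemy import MetaData, Table, Column, Integer, literal
    from sqlalchemy.dialects import sqlite

    m = MetaData()
    t = Table("t", m, Column("x", Integer))

    ins = t.insert().values(x=literal(5)).compile(
        dialect=sqlite.dialect(),
        compile_kwargs=dict(literal_binds=True))
    print(ins)  # INSERT INTO t (x) VALUES (5)
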
diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py
index 88dc95355..e4c61e74a 100644
--- a/lib/sqlalchemy/testing/suite/test_update_delete.py
+++ b/lib/sqlalchemy/testing/suite/test_update_delete.py
@@ -12,18 +12,18 @@ class SimpleUpdateDeleteTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('plain_pk', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', String(50))
- )
+ Column('id', Integer, primary_key=True),
+ Column('data', String(50))
+ )
@classmethod
def insert_data(cls):
config.db.execute(
cls.tables.plain_pk.insert(),
[
- {"id":1, "data":"d1"},
- {"id":2, "data":"d2"},
- {"id":3, "data":"d3"},
+ {"id": 1, "data": "d1"},
+ {"id": 2, "data": "d2"},
+ {"id": 3, "data": "d3"},
]
)
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index 35557582e..fc8390a79 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -61,8 +61,8 @@ def round_decimal(value, prec):
# can also use shift() here but that is 2.6 only
return (value * decimal.Decimal("1" + "0" * prec)
- ).to_integral(decimal.ROUND_FLOOR) / \
- pow(10, prec)
+ ).to_integral(decimal.ROUND_FLOOR) / \
+ pow(10, prec)
class RandomSet(set):
@@ -138,7 +138,7 @@ def function_named(fn, name):
fn.__name__ = name
except TypeError:
fn = types.FunctionType(fn.__code__, fn.__globals__, name,
- fn.__defaults__, fn.__closure__)
+ fn.__defaults__, fn.__closure__)
return fn
@@ -196,6 +196,7 @@ def provide_metadata(fn, *args, **kw):
class adict(dict):
"""Dict keys available as attributes. Shadows."""
+
def __getattribute__(self, key):
try:
return self[key]
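
round_decimal above shifts a Decimal left by prec digits, floors, then
shifts back; the hunk only fixes continuation indentation. Extracted for
clarity:

    import decimal

    def round_decimal(value, prec):
        if isinstance(value, float):
            return round(value, prec)
        # Multiply by 10**prec, floor to an integral value, divide back.
        return (value * decimal.Decimal("1" + "0" * prec)
                ).to_integral(decimal.ROUND_FLOOR) / pow(10, prec)

    print(round_decimal(decimal.Decimal("2.3456"), 2))  # 2.34
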
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 33d338e8f..b3314de6e 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -12,6 +12,7 @@ from .. import exc as sa_exc
from .. import util
import re
+
def testing_warn(msg, stacklevel=3):
"""Replaces sqlalchemy.util.warn during tests."""
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index 2e61f595f..5236d0120 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -210,6 +210,7 @@ class Properties(object):
class OrderedProperties(Properties):
"""Provide a __getattr__/__setattr__ interface with an OrderedDict
as backing store."""
+
def __init__(self):
Properties.__init__(self, OrderedDict())
@@ -263,7 +264,6 @@ class OrderedDict(dict):
def __iter__(self):
return iter(self._list)
-
if py2k:
def values(self):
return [self[key] for key in self._list]
@@ -284,15 +284,15 @@ class OrderedDict(dict):
return [(key, self[key]) for key in self._list]
else:
def values(self):
- #return (self[key] for key in self)
+ # return (self[key] for key in self)
return (self[key] for key in self._list)
def keys(self):
- #return iter(self)
+ # return iter(self)
return iter(self._list)
def items(self):
- #return ((key, self[key]) for key in self)
+ # return ((key, self[key]) for key in self)
return ((key, self[key]) for key in self._list)
_debug_iter = False
@@ -304,15 +304,17 @@ class OrderedDict(dict):
for item in self._list:
yield item
assert len_ == len(self._list), \
- "Dictionary changed size during iteration"
+ "Dictionary changed size during iteration"
+
def values(self):
return (self[key] for key in self)
+
def keys(self):
return iter(self)
+
def items(self):
return ((key, self[key]) for key in self)
-
def __setitem__(self, key, object):
if key not in self:
try:
@@ -506,7 +508,7 @@ class IdentitySet(object):
if len(self) > len(other):
return False
for m in itertools_filterfalse(other._members.__contains__,
- iter(self._members.keys())):
+ iter(self._members.keys())):
return False
return True
@@ -527,7 +529,7 @@ class IdentitySet(object):
return False
for m in itertools_filterfalse(self._members.__contains__,
- iter(other._members.keys())):
+ iter(other._members.keys())):
return False
return True
@@ -668,7 +670,7 @@ class WeakSequence(object):
def __iter__(self):
return (obj for obj in
- (ref() for ref in self._storage) if obj is not None)
+ (ref() for ref in self._storage) if obj is not None)
def __getitem__(self, index):
try:
@@ -719,6 +721,7 @@ column_dict = dict
ordered_column_set = OrderedSet
populate_column_dict = PopulateDict
+
def unique_list(seq, hashfunc=None):
seen = {}
if not hashfunc:
@@ -757,12 +760,14 @@ class UniqueAppender(object):
def __iter__(self):
return iter(self.data)
+
def coerce_generator_arg(arg):
if len(arg) == 1 and isinstance(arg[0], types.GeneratorType):
return list(arg[0])
else:
return arg
+
def to_list(x, default=None):
if x is None:
return default
@@ -818,6 +823,7 @@ class LRUCache(dict):
recently used items.
"""
+
def __init__(self, capacity=100, threshold=.5):
self.capacity = capacity
self.threshold = threshold
@@ -854,8 +860,8 @@ class LRUCache(dict):
def _manage_size(self):
while len(self) > self.capacity + self.capacity * self.threshold:
by_counter = sorted(dict.values(self),
- key=operator.itemgetter(2),
- reverse=True)
+ key=operator.itemgetter(2),
+ reverse=True)
for item in by_counter[self.capacity:]:
try:
del self[item[0]]
@@ -927,6 +933,7 @@ class ThreadLocalRegistry(ScopedRegistry):
variable for storage.
"""
+
def __init__(self, createfunc):
self.createfunc = createfunc
self.registry = threading.local()
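
The LRUCache hunk re-indents _manage_size only: entries carry an access
counter, and once the dict grows past capacity * (1 + threshold) the
least-recently-counted entries beyond capacity are evicted in one sweep. A
toy version of that trimming scheme (the [key, value, [counter]] storage
layout is assumed for this sketch; only _manage_size appears in the hunk):

    class TinyLRU(dict):
        def __init__(self, capacity=100, threshold=.5):
            self.capacity = capacity
            self.threshold = threshold
            self._counter = 0

        def __getitem__(self, key):
            item = dict.__getitem__(self, key)
            self._counter += 1
            item[2][0] = self._counter  # record most recent access
            return item[1]

        def __setitem__(self, key, value):
            dict.__setitem__(self, key, [key, value, [self._counter]])
            self._manage_size()

        def _manage_size(self):
            # Evict least-recently-used items beyond capacity, but only
            # after exceeding the threshold margin, to amortize the sort.
            while len(self) > self.capacity + self.capacity * self.threshold:
                by_counter = sorted(dict.values(self),
                                    key=lambda item: item[2][0],
                                    reverse=True)
                for item in by_counter[self.capacity:]:
                    dict.pop(self, item[0], None)

    cache = TinyLRU(capacity=2, threshold=.5)
    for k in "abcde":
        cache[k] = k.upper()
    print(len(cache) <= 3)  # True: trimmed back toward capacity
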
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index 35dca92ff..7f2238a13 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -42,13 +42,14 @@ else:
safe_kwarg = str
ArgSpec = collections.namedtuple("ArgSpec",
- ["args", "varargs", "keywords", "defaults"])
+ ["args", "varargs", "keywords", "defaults"])
if py3k:
import builtins
from inspect import getfullargspec as inspect_getfullargspec
- from urllib.parse import quote_plus, unquote_plus, parse_qsl, quote, unquote
+ from urllib.parse import (quote_plus, unquote_plus,
+ parse_qsl, quote, unquote)
import configparser
from io import StringIO
@@ -56,8 +57,8 @@ if py3k:
def inspect_getargspec(func):
return ArgSpec(
- *inspect_getfullargspec(func)[0:4]
- )
+ *inspect_getfullargspec(func)[0:4]
+ )
string_types = str,
binary_type = bytes
@@ -95,10 +96,11 @@ if py3k:
itertools_imap = map
from itertools import zip_longest
-
import base64
+
def b64encode(x):
return base64.b64encode(x).decode('ascii')
+
def b64decode(x):
return base64.b64decode(x.encode('ascii'))
@@ -115,6 +117,7 @@ else:
binary_type = str
text_type = unicode
int_types = int, long
+
def iterbytes(buf):
return (ord(byte) for byte in buf)
@@ -160,7 +163,6 @@ else:
from itertools import izip_longest as zip_longest
-
import time
if win32 or jython:
time_func = time.clock
@@ -186,7 +188,7 @@ if py3k:
reraise(type(exception), exception, tb=exc_tb, cause=exc_value)
else:
exec("def reraise(tp, value, tb=None, cause=None):\n"
- " raise tp, value, tb\n")
+ " raise tp, value, tb\n")
def raise_from_cause(exception, exc_info=None):
# not as nice as that of Py3K, but at least preserves
@@ -218,10 +220,9 @@ def with_metaclass(meta, *bases):
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
+
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
-
-
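
Among the compat.py hunks is the py2-only reraise, which must live inside an
exec() string because three-argument raise is a syntax error under py3. The
surrounding pattern shown whole (the py3k branch here follows the usual
with_traceback form; only the exec branch appears in the hunk above):

    import sys

    py3k = sys.version_info >= (3, 0)

    if py3k:
        def reraise(tp, value, tb=None, cause=None):
            if cause is not None:
                value.__cause__ = cause
            if value.__traceback__ is not tb:
                raise value.with_traceback(tb)
            raise value
    else:
        # py2 syntax, compiled only when actually running py2.
        exec("def reraise(tp, value, tb=None, cause=None):\n"
             "    raise tp, value, tb\n")

    try:
        try:
            raise ValueError("original")
        except ValueError:
            reraise(*sys.exc_info())
    except ValueError as e:
        print("re-raised: %s" % e)
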
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index c9147db70..d48efbaaa 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -38,7 +38,7 @@ def deprecated(version, message=None, add_deprecation_to_docstring=True):
if add_deprecation_to_docstring:
header = ".. deprecated:: %s %s" % \
- (version, (message or ''))
+ (version, (message or ''))
else:
header = None
@@ -72,7 +72,7 @@ def pending_deprecation(version, message=None,
if add_deprecation_to_docstring:
header = ".. deprecated:: %s (pending) %s" % \
- (version, (message or ''))
+ (version, (message or ''))
else:
header = None
@@ -117,6 +117,7 @@ def _decorate_with_warning(func, wtype, message, docstring_header=None):
import textwrap
+
def _dedent_docstring(text):
split_text = text.split("\n", 1)
if len(split_text) == 1:
@@ -128,6 +129,7 @@ def _dedent_docstring(text):
else:
return textwrap.dedent(text)
+
def inject_docstring_text(doctext, injecttext, pos):
doctext = _dedent_docstring(doctext or "")
lines = doctext.split('\n')
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 7960bde7f..8d6fe5a28 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -22,6 +22,7 @@ import hashlib
from . import compat
from . import _collections
+
def md5_hex(x):
if compat.py3k:
x = x.encode('utf-8')
@@ -29,6 +30,7 @@ def md5_hex(x):
m.update(x)
return m.hexdigest()
+
class safe_reraise(object):
"""Reraise an exception after invoking some
handler code.
@@ -60,6 +62,7 @@ class safe_reraise(object):
self._exc_info = None # remove potential circular references
compat.reraise(type_, value, traceback)
+
def decode_slice(slc):
"""decode a slice object as sent to __getitem__.
@@ -73,12 +76,13 @@ def decode_slice(slc):
ret.append(x)
return tuple(ret)
+
def _unique_symbols(used, *bases):
used = set(used)
for base in bases:
pool = itertools.chain((base,),
compat.itertools_imap(lambda i: base + str(i),
- range(1000)))
+ range(1000)))
for sym in pool:
if sym not in used:
used.add(sym)
@@ -106,17 +110,19 @@ def %(name)s(%(args)s):
return %(target)s(%(fn)s, %(apply_kw)s)
""" % metadata
decorated = _exec_code_in_env(code,
- {targ_name: target, fn_name: fn},
- fn.__name__)
+ {targ_name: target, fn_name: fn},
+ fn.__name__)
decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
decorated.__wrapped__ = fn
return update_wrapper(decorated, fn)
return update_wrapper(decorate, target)
+
def _exec_code_in_env(code, env, fn_name):
exec(code, env)
return env[fn_name]
+
def public_factory(target, location):
"""Produce a wrapping function for the given cls or classmethod.
@@ -128,13 +134,13 @@ def public_factory(target, location):
fn = target.__init__
callable_ = target
doc = "Construct a new :class:`.%s` object. \n\n"\
- "This constructor is mirrored as a public API function; see :func:`~%s` "\
- "for a full usage and argument description." % (
- target.__name__, location, )
+ "This constructor is mirrored as a public API function; see :func:`~%s` "\
+ "for a full usage and argument description." % (
+ target.__name__, location, )
else:
fn = callable_ = target
doc = "This function is mirrored; see :func:`~%s` "\
- "for a description of arguments." % location
+ "for a description of arguments." % location
location_name = location.split(".")[-1]
spec = compat.inspect_getfullargspec(fn)
@@ -179,13 +185,13 @@ class PluginLoader(object):
pass
else:
for impl in pkg_resources.iter_entry_points(
- self.group, name):
+ self.group, name):
self.impls[name] = impl.load
return impl.load()
raise exc.NoSuchModuleError(
- "Can't load plugin: %s:%s" %
- (self.group, name))
+ "Can't load plugin: %s:%s" %
+ (self.group, name))
def register(self, name, modulepath, objname):
def load():
@@ -200,15 +206,15 @@ def get_cls_kwargs(cls, _set=None):
"""Return the full set of inherited kwargs for the given `cls`.
Probes a class's __init__ method, collecting all named arguments. If the
- __init__ defines a \**kwargs catch-all, then the constructor is presumed to
- pass along unrecognized keywords to its base classes, and the collection
- process is repeated recursively on each of the bases.
+ __init__ defines a \**kwargs catch-all, then the constructor is presumed
+ to pass along unrecognized keywords to its base classes, and the
+ collection process is repeated recursively on each of the bases.
Uses a subset of inspect.getargspec() to cut down on method overhead.
No anonymous tuple arguments please !
"""
- toplevel = _set == None
+ toplevel = _set is None
if toplevel:
_set = set()
@@ -233,7 +239,6 @@ def get_cls_kwargs(cls, _set=None):
return _set
-
try:
# TODO: who doesn't have this constant?
from inspect import CO_VARKEYWORDS
@@ -262,6 +267,7 @@ def get_func_kwargs(func):
return compat.inspect_getargspec(func)[0]
+
def get_callable_argspec(fn, no_self=False, _is_init=False):
"""Return the argument signature for any callable.
@@ -277,18 +283,19 @@ def get_callable_argspec(fn, no_self=False, _is_init=False):
if _is_init and no_self:
spec = compat.inspect_getargspec(fn)
return compat.ArgSpec(spec.args[1:], spec.varargs,
- spec.keywords, spec.defaults)
+ spec.keywords, spec.defaults)
else:
return compat.inspect_getargspec(fn)
elif inspect.ismethod(fn):
if no_self and (_is_init or fn.__self__):
spec = compat.inspect_getargspec(fn.__func__)
return compat.ArgSpec(spec.args[1:], spec.varargs,
- spec.keywords, spec.defaults)
+ spec.keywords, spec.defaults)
else:
return compat.inspect_getargspec(fn.__func__)
elif inspect.isclass(fn):
- return get_callable_argspec(fn.__init__, no_self=no_self, _is_init=True)
+ return get_callable_argspec(
+ fn.__init__, no_self=no_self, _is_init=True)
elif hasattr(fn, '__func__'):
return compat.inspect_getargspec(fn.__func__)
elif hasattr(fn, '__call__'):
@@ -299,6 +306,7 @@ def get_callable_argspec(fn, no_self=False, _is_init=False):
else:
raise TypeError("Can't inspect callable: %s" % fn)
+
def format_argspec_plus(fn, grouped=True):
"""Returns a dictionary of formatted, introspected function arguments.
@@ -346,7 +354,7 @@ def format_argspec_plus(fn, grouped=True):
if compat.py3k:
apply_pos = inspect.formatargspec(spec[0], spec[1],
- spec[2], None, spec[4])
+ spec[2], None, spec[4])
num_defaults = 0
if spec[3]:
num_defaults += len(spec[3])
@@ -366,7 +374,7 @@ def format_argspec_plus(fn, grouped=True):
defaulted_vals = ()
apply_kw = inspect.formatargspec(name_args, spec[1], spec[2],
- defaulted_vals,
+ defaulted_vals,
formatvalue=lambda x: '=' + x)
if grouped:
return dict(args=args, self_arg=self_arg,
@@ -393,7 +401,7 @@ def format_argspec_init(method, grouped=True):
return format_argspec_plus(method, grouped=grouped)
except TypeError:
args = (grouped and '(self, *args, **kwargs)'
- or 'self, *args, **kwargs')
+ or 'self, *args, **kwargs')
return dict(self_arg='self', args=args, apply_pos=args, apply_kw=args)
@@ -465,8 +473,8 @@ def generic_repr(obj, additional_kw=(), to_inspect=None):
if default_len:
kw_args.update([
(arg, default)
- for arg, default
- in zip(_args[-default_len:], defaults)
+ for arg, default
+ in zip(_args[-default_len:], defaults)
])
output = []
@@ -500,6 +508,7 @@ class portable_instancemethod(object):
to produce a serializable callable.
"""
+
def __init__(self, meth):
self.target = meth.__self__
self.name = meth.__name__
@@ -533,7 +542,7 @@ def class_hierarchy(cls):
if isinstance(c, types.ClassType):
continue
bases = (_ for _ in c.__bases__
- if _ not in hier and not isinstance(_, types.ClassType))
+ if _ not in hier and not isinstance(_, types.ClassType))
else:
bases = (_ for _ in c.__bases__ if _ not in hier)
@@ -545,7 +554,8 @@ def class_hierarchy(cls):
if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'):
continue
else:
- if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'):
+ if c.__module__ == '__builtin__' or not hasattr(
+ c, '__subclasses__'):
continue
for s in [_ for _ in c.__subclasses__() if _ not in hier]:
@@ -615,7 +625,8 @@ def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None,
def methods_equivalent(meth1, meth2):
"""Return True if the two methods are the same implementation."""
- return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2)
+ return getattr(meth1, '__func__', meth1) is getattr(
+ meth2, '__func__', meth2)
def as_interface(obj, cls=None, methods=None, required=None):
@@ -673,7 +684,7 @@ def as_interface(obj, cls=None, methods=None, required=None):
return obj
# No dict duck typing here.
- if not type(obj) is dict:
+ if not isinstance(obj, dict):
qualifier = complies is operator.gt and 'any of' or 'all of'
raise TypeError("%r does not implement %s: %s" % (
obj, qualifier, ', '.join(interface)))
@@ -702,6 +713,7 @@ def as_interface(obj, cls=None, methods=None, required=None):
class memoized_property(object):
"""A read-only @property that is only evaluated once."""
+
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
@@ -729,6 +741,7 @@ class memoized_instancemethod(object):
called with different arguments.
"""
+
def __init__(self, fget, doc=None):
self.fget = fget
self.__doc__ = doc or fget.__doc__
@@ -774,18 +787,19 @@ class group_expirable_memoized_property(object):
return memoized_instancemethod(fn)
-
def dependency_for(modulename):
def decorate(obj):
# TODO: would be nice to improve on this import silliness,
# unfortunately importlib doesn't work that great either
tokens = modulename.split(".")
- mod = compat.import_(".".join(tokens[0:-1]), globals(), locals(), tokens[-1])
+ mod = compat.import_(
+ ".".join(tokens[0:-1]), globals(), locals(), tokens[-1])
mod = getattr(mod, tokens[-1])
setattr(mod, obj.__name__, obj)
return obj
return decorate
+
class dependencies(object):
"""Apply imported dependencies as arguments to a function.
@@ -809,7 +823,7 @@ class dependencies(object):
for dep in deps:
tokens = dep.split(".")
self.import_deps.append(
- dependencies._importlater(
+ dependencies._importlater(
".".join(tokens[0:-1]),
tokens[-1]
)
@@ -834,8 +848,8 @@ class dependencies(object):
outer_spec = format_argspec_plus(spec, grouped=False)
code = 'lambda %(args)s: fn(%(apply_kw)s)' % {
- "args": outer_spec['args'],
- "apply_kw": inner_spec['apply_kw']
+ "args": outer_spec['args'],
+ "apply_kw": inner_spec['apply_kw']
}
decorated = eval(code, locals())
@@ -869,7 +883,6 @@ class dependencies(object):
self._il_addtl = addtl
dependencies._unresolved.add(self)
-
@property
def _full_path(self):
return self._il_path + "." + self._il_addtl
@@ -878,29 +891,29 @@ class dependencies(object):
def module(self):
if self in dependencies._unresolved:
raise ImportError(
- "importlater.resolve_all() hasn't "
- "been called (this is %s %s)"
- % (self._il_path, self._il_addtl))
+ "importlater.resolve_all() hasn't "
+ "been called (this is %s %s)"
+ % (self._il_path, self._il_addtl))
return getattr(self._initial_import, self._il_addtl)
def _resolve(self):
dependencies._unresolved.discard(self)
self._initial_import = compat.import_(
- self._il_path, globals(), locals(),
- [self._il_addtl])
+ self._il_path, globals(), locals(),
+ [self._il_addtl])
def __getattr__(self, key):
if key == 'module':
raise ImportError("Could not resolve module %s"
- % self._full_path)
+ % self._full_path)
try:
attr = getattr(self.module, key)
except AttributeError:
raise AttributeError(
- "Module %s has no attribute '%s'" %
- (self._full_path, key)
- )
+ "Module %s has no attribute '%s'" %
+ (self._full_path, key)
+ )
self.__dict__[key] = attr
return attr
@@ -945,7 +958,7 @@ def coerce_kw_type(kw, key, type_, flexi_bool=True):
when coercing to boolean.
"""
- if key in kw and type(kw[key]) is not type_ and kw[key] is not None:
+ if key in kw and not isinstance(kw[key], type_) and kw[key] is not None:
if type_ is bool and flexi_bool:
kw[key] = asbool(kw[key])
else:
@@ -1077,6 +1090,7 @@ class classproperty(property):
class hybridmethod(object):
"""Decorate a function as cls- or instance- level."""
+
def __init__(self, func, expr=None):
self.func = func
@@ -1198,6 +1212,7 @@ def only_once(fn):
once."""
once = [fn]
+
def go(*arg, **kw):
if once:
once_fn = once.pop()
@@ -1209,6 +1224,7 @@ def only_once(fn):
_SQLA_RE = re.compile(r'sqlalchemy/([a-z_]+/){0,2}[a-z_]+\.py')
_UNITTEST_RE = re.compile(r'unit(?:2|test2?/)')
+
def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE):
"""Chop extraneous lines off beginning and end of a traceback.
@@ -1216,7 +1232,8 @@ def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE):
a list of traceback lines as returned by ``traceback.format_stack()``
:param exclude_prefix:
- a regular expression object matching lines to skip at beginning of ``tb``
+ a regular expression object matching lines to skip at beginning of
+ ``tb``
:param exclude_suffix:
a regular expression object matching lines to skip at end of ``tb``
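
Several langhelpers hunks touch memoized_property, whose docstring and
__init__ are visible above. The working principle: a non-data descriptor
computes once, then plants the result in the instance __dict__, so later
lookups bypass the descriptor entirely. A condensed sketch (the __get__
body is inferred from that principle, not shown in the hunk):

    class memoized_property(object):
        def __init__(self, fget, doc=None):
            self.fget = fget
            self.__doc__ = doc or fget.__doc__
            self.__name__ = fget.__name__

        def __get__(self, obj, cls):
            if obj is None:
                return self
            # Cache in the instance dict; with no __set__ defined, the
            # cached value shadows this descriptor from now on.
            obj.__dict__[self.__name__] = result = self.fget(obj)
            return result

    class Thing(object):
        @memoized_property
        def expensive(self):
            print("computing...")
            return 42

    t = Thing()
    print(t.expensive)  # computing... then 42
    print(t.expensive)  # 42, no recompute
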
diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py
index 0296f05c1..796c6a33e 100644
--- a/lib/sqlalchemy/util/queue.py
+++ b/lib/sqlalchemy/util/queue.py
@@ -58,7 +58,6 @@ class Queue:
# a thread waiting to put is notified then.
self.not_full = threading.Condition(self.mutex)
-
def qsize(self):
"""Return the approximate size of the queue (not reliable!)."""
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index 76e041a9c..2bfcccc63 100644
--- a/lib/sqlalchemy/util/topological.py
+++ b/lib/sqlalchemy/util/topological.py
@@ -29,10 +29,10 @@ def sort_as_subsets(tuples, allitems):
if not output:
raise CircularDependencyError(
- "Circular dependency detected.",
- find_cycles(tuples, allitems),
- _gen_edges(edges)
- )
+ "Circular dependency detected.",
+ find_cycles(tuples, allitems),
+ _gen_edges(edges)
+ )
todo.difference_update(output)
yield output
@@ -91,7 +91,7 @@ def find_cycles(tuples, allitems):
def _gen_edges(edges):
return set([
- (right, left)
- for left in edges
- for right in edges[left]
- ])
+ (right, left)
+ for left in edges
+ for right in edges[left]
+ ])
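
sort_as_subsets above emits, on each pass, every node whose predecessors
have all been emitted; an empty pass while work remains signals a cycle.
The algorithm in miniature (a plain ValueError stands in for
CircularDependencyError, and the edge bookkeeping is simplified):

    def sort_as_subsets(tuples, allitems):
        edges = {}
        for parent, child in tuples:
            edges.setdefault(child, set()).add(parent)
        todo = set(allitems)
        while todo:
            # Ready nodes: no predecessor still waiting in todo.
            output = set(node for node in todo
                         if not todo.intersection(edges.get(node, ())))
            if not output:
                raise ValueError("Circular dependency detected.")
            todo.difference_update(output)
            yield output

    print(list(sort_as_subsets([("a", "b"), ("a", "c"), ("b", "c")],
                               ["a", "b", "c"])))
    # [{'a'}, {'b'}, {'c'}]
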