author    Mike Bayer <mike_mp@zzzcomputing.com>  2011-01-02 14:23:42 -0500
committer Mike Bayer <mike_mp@zzzcomputing.com>  2011-01-02 14:23:42 -0500
commit    350aed3fdb9f1e73e69655e53f44ca6a91c196da (patch)
tree      3d2a128667b5f6ca6d0b4e1f4865fc98aac6b60b /lib
parent    71f92436bdc86f30e2c21d8f5244733601e8c39e (diff)
download  sqlalchemy-350aed3fdb9f1e73e69655e53f44ca6a91c196da.tar.gz
- whitespace removal bonanza
Diffstat (limited to 'lib')
-rw-r--r--  lib/sqlalchemy/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/connectors/mxodbc.py | 10
-rw-r--r--  lib/sqlalchemy/connectors/pyodbc.py | 14
-rw-r--r--  lib/sqlalchemy/connectors/zxJDBC.py | 12
-rw-r--r--  lib/sqlalchemy/dialects/access/base.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/firebird/__init__.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/firebird/base.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/kinterbasdb.py | 24
-rw-r--r--  lib/sqlalchemy/dialects/informix/base.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/informix/informixdb.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/maxdb/base.py | 12
-rw-r--r--  lib/sqlalchemy/dialects/maxdb/sapdb.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/adodbapi.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 104
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/mssql/mxodbc.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pyodbc.py | 38
-rw-r--r--  lib/sqlalchemy/dialects/mysql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 160
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqldb.py | 22
-rw-r--r--  lib/sqlalchemy/dialects/mysql/oursql.py | 26
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pyodbc.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py | 152
-rw-r--r--  lib/sqlalchemy/dialects/oracle/cx_oracle.py | 86
-rw-r--r--  lib/sqlalchemy/dialects/postgres.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 114
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pg8000.py | 10
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 16
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 42
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlite.py | 18
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 40
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pyodbc.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pysybase.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/type_migration_guidelines.txt | 34
-rw-r--r--  lib/sqlalchemy/engine/__init__.py | 10
-rw-r--r--  lib/sqlalchemy/engine/base.py | 344
-rw-r--r--  lib/sqlalchemy/engine/ddl.py | 14
-rw-r--r--  lib/sqlalchemy/engine/default.py | 150
-rw-r--r--  lib/sqlalchemy/engine/reflection.py | 54
-rw-r--r--  lib/sqlalchemy/engine/strategies.py | 22
-rw-r--r--  lib/sqlalchemy/engine/threadlocal.py | 28
-rw-r--r--  lib/sqlalchemy/engine/url.py | 10
-rw-r--r--  lib/sqlalchemy/event.py | 106
-rw-r--r--  lib/sqlalchemy/events.py | 112
-rw-r--r--  lib/sqlalchemy/exc.py | 4
-rw-r--r--  lib/sqlalchemy/ext/associationproxy.py | 16
-rw-r--r--  lib/sqlalchemy/ext/compiler.py | 34
-rwxr-xr-x  lib/sqlalchemy/ext/declarative.py | 146
-rw-r--r--  lib/sqlalchemy/ext/horizontal_shard.py | 22
-rw-r--r--  lib/sqlalchemy/ext/hybrid.py | 32
-rw-r--r--  lib/sqlalchemy/ext/mutable.py | 110
-rw-r--r--  lib/sqlalchemy/ext/orderinglist.py | 10
-rw-r--r--  lib/sqlalchemy/ext/serializer.py | 26
-rw-r--r--  lib/sqlalchemy/ext/sqlsoup.py | 112
-rw-r--r--  lib/sqlalchemy/interfaces.py | 88
-rw-r--r--  lib/sqlalchemy/log.py | 42
-rw-r--r--  lib/sqlalchemy/orm/__init__.py | 132
-rw-r--r--  lib/sqlalchemy/orm/attributes.py | 203
-rw-r--r--  lib/sqlalchemy/orm/collections.py | 24
-rw-r--r--  lib/sqlalchemy/orm/dependency.py | 180
-rw-r--r--  lib/sqlalchemy/orm/deprecated_interfaces.py | 98
-rw-r--r--  lib/sqlalchemy/orm/descriptor_props.py | 96
-rw-r--r--  lib/sqlalchemy/orm/dynamic.py | 14
-rw-r--r--  lib/sqlalchemy/orm/events.py | 246
-rw-r--r--  lib/sqlalchemy/orm/exc.py | 12
-rw-r--r--  lib/sqlalchemy/orm/identity.py | 66
-rw-r--r--  lib/sqlalchemy/orm/instrumentation.py | 102
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py | 59
-rw-r--r--  lib/sqlalchemy/orm/mapper.py | 254
-rw-r--r--  lib/sqlalchemy/orm/properties.py | 112
-rw-r--r--  lib/sqlalchemy/orm/query.py | 306
-rw-r--r--  lib/sqlalchemy/orm/scoping.py | 10
-rw-r--r--  lib/sqlalchemy/orm/session.py | 160
-rw-r--r--  lib/sqlalchemy/orm/state.py | 124
-rw-r--r--  lib/sqlalchemy/orm/strategies.py | 244
-rw-r--r--  lib/sqlalchemy/orm/sync.py | 6
-rw-r--r--  lib/sqlalchemy/orm/unitofwork.py | 120
-rw-r--r--  lib/sqlalchemy/orm/util.py | 38
-rw-r--r--  lib/sqlalchemy/pool.py | 82
-rw-r--r--  lib/sqlalchemy/processors.py | 2
-rw-r--r--  lib/sqlalchemy/schema.py | 398
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 212
-rw-r--r--  lib/sqlalchemy/sql/expression.py | 414
-rw-r--r--  lib/sqlalchemy/sql/functions.py | 2
-rw-r--r--  lib/sqlalchemy/sql/operators.py | 4
-rw-r--r--  lib/sqlalchemy/sql/util.py | 146
-rw-r--r--  lib/sqlalchemy/sql/visitors.py | 52
-rw-r--r--  lib/sqlalchemy/types.py | 252
-rw-r--r--  lib/sqlalchemy/util/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/util/_collections.py | 62
-rw-r--r--  lib/sqlalchemy/util/compat.py | 4
-rw-r--r--  lib/sqlalchemy/util/deprecations.py | 8
-rw-r--r--  lib/sqlalchemy/util/langhelpers.py | 58
-rw-r--r--  lib/sqlalchemy/util/topological.py | 6
96 files changed, 3400 insertions, 3403 deletions
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 86fced34f..e9976cd13 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -114,7 +114,7 @@ from sqlalchemy.engine import create_engine, engine_from_config
__all__ = sorted(name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)))
-
+
__version__ = '0.7b1'
del inspect, sys
diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py
index 0c5730450..340c5b8fb 100644
--- a/lib/sqlalchemy/connectors/__init__.py
+++ b/lib/sqlalchemy/connectors/__init__.py
@@ -7,5 +7,4 @@
class Connector(object):
pass
-
- \ No newline at end of file
+
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index 63c8c1d53..f467234ca 100644
--- a/lib/sqlalchemy/connectors/mxodbc.py
+++ b/lib/sqlalchemy/connectors/mxodbc.py
@@ -26,13 +26,13 @@ from sqlalchemy.connectors import Connector
class MxODBCConnector(Connector):
driver='mxodbc'
-
+
supports_sane_multi_rowcount = False
supports_unicode_statements = False
supports_unicode_binds = False
-
+
supports_native_decimal = True
-
+
@classmethod
def dbapi(cls):
# this classmethod will normally be replaced by an instance
@@ -67,7 +67,7 @@ class MxODBCConnector(Connector):
conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
conn.errorhandler = self._error_handler()
return connect
-
+
def _error_handler(self):
""" Return a handler that adjusts mxODBC's raised Warnings to
emit Python standard warnings.
@@ -97,7 +97,7 @@ class MxODBCConnector(Connector):
The arg 'errorhandler' is not used by SQLAlchemy and will
not be populated.
-
+
"""
opts = url.translate_connect_args(username='user')
opts.update(url.query)
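
The ``_error_handler`` docstring above refers to the DB-API 2.0 ``errorhandler`` extension hook. A minimal standalone sketch of that idea, with illustrative names rather than mxODBC's actual implementation::

    import warnings

    def make_error_handler(dbapi_error_class):
        # DB-API errorhandler signature:
        # handler(connection, cursor, errorclass, errorvalue)
        def handler(connection, cursor, errorclass, errorvalue):
            if issubclass(errorclass, dbapi_error_class):
                # genuine errors still propagate as exceptions
                raise errorclass(errorvalue)
            # milder conditions are demoted to standard Python warnings
            warnings.warn(str(errorvalue), stacklevel=2)
        return handler
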
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index 4f2aa390f..c66a8a8ae 100644
--- a/lib/sqlalchemy/connectors/pyodbc.py
+++ b/lib/sqlalchemy/connectors/pyodbc.py
@@ -20,15 +20,15 @@ class PyODBCConnector(Connector):
supports_unicode_statements = supports_unicode
supports_native_decimal = True
default_paramstyle = 'named'
-
+
# for non-DSN connections, this should
# hold the desired driver name
pyodbc_driver_name = None
-
+
# will be set to True after initialize()
# if the freetds.so is detected
freetds = False
-
+
@classmethod
def dbapi(cls):
return __import__('pyodbc')
@@ -36,7 +36,7 @@ class PyODBCConnector(Connector):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
-
+
keys = opts
query = url.query
@@ -80,7 +80,7 @@ class PyODBCConnector(Connector):
connectors.extend(['%s=%s' % (k,v) for k,v in keys.iteritems()])
return [[";".join (connectors)], connect_args]
-
+
def is_disconnect(self, e):
if isinstance(e, self.dbapi.ProgrammingError):
return "The cursor's connection has been closed." in str(e) or \
@@ -93,7 +93,7 @@ class PyODBCConnector(Connector):
def initialize(self, connection):
# determine FreeTDS first. can't issue SQL easily
# without getting unicode_statements/binds set up.
-
+
pyodbc = self.dbapi
dbapi_con = connection.connection
@@ -108,7 +108,7 @@ class PyODBCConnector(Connector):
self.supports_unicode_statements = not self.freetds
self.supports_unicode_binds = not self.freetds
# end Py2K
-
+
# run other initialization which asks for user name, etc.
super(PyODBCConnector, self).initialize(connection)
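
The FreeTDS detection above deliberately avoids issuing SQL, since unicode statement support is exactly what is not yet known; ``getinfo()`` asks the ODBC driver itself. A hedged sketch of the check (``SQL_DRIVER_NAME`` is a real pyodbc constant)::

    import re
    import pyodbc

    def uses_freetds(dbapi_connection):
        # the driver name comes back as e.g. 'libtdsodbc.so' under FreeTDS
        driver_name = dbapi_connection.getinfo(pyodbc.SQL_DRIVER_NAME)
        return bool(re.match(r".*libtdsodbc.*\.so", driver_name))
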
diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py
index 941dd17ae..a9ff5ec95 100644
--- a/lib/sqlalchemy/connectors/zxJDBC.py
+++ b/lib/sqlalchemy/connectors/zxJDBC.py
@@ -9,18 +9,18 @@ from sqlalchemy.connectors import Connector
class ZxJDBCConnector(Connector):
driver = 'zxjdbc'
-
+
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
-
+
supports_unicode_binds = True
supports_unicode_statements = sys.version > '2.5.0+'
description_encoding = None
default_paramstyle = 'qmark'
-
+
jdbc_db_name = None
jdbc_driver_name = None
-
+
@classmethod
def dbapi(cls):
from com.ziclix.python.sql import zxJDBC
@@ -29,14 +29,14 @@ class ZxJDBCConnector(Connector):
def _driver_kwargs(self):
"""Return kw arg dict to be sent to connect()."""
return {}
-
+
def _create_jdbc_url(self, url):
"""Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
url.port is not None
and ':%s' % url.port or '',
url.database)
-
+
def create_connect_args(self, url):
opts = self._driver_kwargs()
opts.update(url.query)
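
The ``_create_jdbc_url`` helper above reduces to simple string formatting; a standalone sketch with an example result::

    def create_jdbc_url(db_name, host, port, database):
        # the port segment is optional and rendered only when present
        return 'jdbc:%s://%s%s/%s' % (
            db_name, host,
            ':%s' % port if port is not None else '',
            database)

    print(create_jdbc_url('mysql', 'localhost', 3306, 'test'))
    # jdbc:mysql://localhost:3306/test
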
diff --git a/lib/sqlalchemy/dialects/access/base.py b/lib/sqlalchemy/dialects/access/base.py
index dfeaf23c2..0dd09cebf 100644
--- a/lib/sqlalchemy/dialects/access/base.py
+++ b/lib/sqlalchemy/dialects/access/base.py
@@ -153,7 +153,7 @@ class AccessDialect(default.DefaultDialect):
supports_sane_multi_rowcount = False
ported_sqla_06 = False
-
+
def type_descriptor(self, typeobj):
newobj = types.adapt_type(typeobj, self.colspecs)
return newobj
@@ -341,7 +341,7 @@ class AccessCompiler(compiler.SQLCompiler):
'dow': 'w',
'week': 'ww'
})
-
+
def visit_select_precolumns(self, select):
"""Access puts TOP, it's version of LIMIT here """
s = select.distinct and "DISTINCT " or ""
diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py
index 1fdedbafa..e87b5bb5c 100644
--- a/lib/sqlalchemy/dialects/firebird/__init__.py
+++ b/lib/sqlalchemy/dialects/firebird/__init__.py
@@ -12,11 +12,11 @@ from sqlalchemy.dialects.firebird.base import \
SMALLINT, BIGINT, FLOAT, FLOAT, DATE, TIME, \
TEXT, NUMERIC, FLOAT, TIMESTAMP, VARCHAR, CHAR, BLOB,\
dialect
-
+
__all__ = (
'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME',
'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
'dialect'
)
-
-
+
+
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index d6939777b..de880171f 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -194,7 +194,7 @@ class FBTypeCompiler(compiler.GenericTypeCompiler):
def visit_VARCHAR(self, type_):
basic = super(FBTypeCompiler, self).visit_VARCHAR(type_)
return self._extend_string(type_, basic)
-
+
class FBCompiler(sql.compiler.SQLCompiler):
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index a2624534c..ad8d44262 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -13,7 +13,7 @@ The connection URL is of the form
Kinterbasedb backend specific keyword arguments are:
-* type_conv - select the kind of mapping done on the types: by default
+* type_conv - select the kind of mapping done on the types: by default
SQLAlchemy uses 200 with Unicode, datetime and decimal support (see
details__).
@@ -34,11 +34,11 @@ Kinterbasedb backend specific keyword arguments are:
SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a
per-execution basis using the `enable_rowcount` option with
:meth:`execution_options()`::
-
+
conn = engine.connect().execution_options(enable_rowcount=True)
r = conn.execute(stmt)
print r.rowcount
-
+
__ http://sourceforge.net/projects/kinterbasdb
__ http://firebirdsql.org/index.php?op=devel&sub=python
__ http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation
@@ -66,23 +66,23 @@ class FBExecutionContext_kinterbasdb(FBExecutionContext):
return self.cursor.rowcount
else:
return -1
-
+
class FBDialect_kinterbasdb(FBDialect):
driver = 'kinterbasdb'
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
execution_ctx_cls = FBExecutionContext_kinterbasdb
-
+
supports_native_decimal = True
-
+
colspecs = util.update_copy(
FBDialect.colspecs,
{
sqltypes.Numeric:_FBNumeric_kinterbasdb
}
-
+
)
-
+
def __init__(self, type_conv=200, concurrency_level=1,
enable_rowcount=True, **kwargs):
super(FBDialect_kinterbasdb, self).__init__(**kwargs)
@@ -91,7 +91,7 @@ class FBDialect_kinterbasdb(FBDialect):
self.concurrency_level = concurrency_level
if enable_rowcount:
self.supports_sane_rowcount = True
-
+
@classmethod
def dbapi(cls):
k = __import__('kinterbasdb')
@@ -103,13 +103,13 @@ class FBDialect_kinterbasdb(FBDialect):
opts['host'] = "%s/%s" % (opts['host'], opts['port'])
del opts['port']
opts.update(url.query)
-
+
util.coerce_kw_type(opts, 'type_conv', int)
-
+
type_conv = opts.pop('type_conv', self.type_conv)
concurrency_level = opts.pop('concurrency_level',
self.concurrency_level)
-
+
if self.dbapi is not None:
initialized = getattr(self.dbapi, 'initialized', None)
if initialized is None:
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
index a97b445a3..1ea8d4e39 100644
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ b/lib/sqlalchemy/dialects/informix/base.py
@@ -218,7 +218,7 @@ class InformixDialect(default.DefaultDialect):
name = 'informix'
max_identifier_length = 128 # adjusts at runtime based on server version
-
+
type_compiler = InfoTypeCompiler
statement_compiler = InfoSQLCompiler
ddl_compiler = InfoDDLCompiler
@@ -232,13 +232,13 @@ class InformixDialect(default.DefaultDialect):
def initialize(self, connection):
super(InformixDialect, self).initialize(connection)
-
+
# http://www.querix.com/support/knowledge-base/error_number_message/error_200
if self.server_version_info < (9, 2):
self.max_identifier_length = 18
else:
self.max_identifier_length = 128
-
+
def do_begin(self, connection):
cu = connection.cursor()
cu.execute('SET LOCK MODE TO WAIT')
@@ -327,7 +327,7 @@ class InformixDialect(default.DefaultDialect):
util.warn("Did not recognize type '%s' of column '%s'" %
(coltype, name))
coltype = sqltypes.NULLTYPE
-
+
column_info = dict(name=name, type=coltype, nullable=not not_nullable,
default=default, autoincrement=autoincrement,
primary_key=primary_key)
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
index 150a69d7f..c81983816 100644
--- a/lib/sqlalchemy/dialects/informix/informixdb.py
+++ b/lib/sqlalchemy/dialects/informix/informixdb.py
@@ -10,7 +10,7 @@ Support for the informixdb DBAPI.
informixdb is available at:
http://informixdb.sourceforge.net/
-
+
Connecting
^^^^^^^^^^
diff --git a/lib/sqlalchemy/dialects/maxdb/base.py b/lib/sqlalchemy/dialects/maxdb/base.py
index 4ca81b24c..abc7ff10b 100644
--- a/lib/sqlalchemy/dialects/maxdb/base.py
+++ b/lib/sqlalchemy/dialects/maxdb/base.py
@@ -323,7 +323,7 @@ class MaxDBTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
return "LONG BYTE"
-
+
def visit_numeric(self, type_):
if type_.scale and type_.precision:
return 'FIXED(%s, %s)' % (type_.precision, type_.scale)
@@ -331,10 +331,10 @@ class MaxDBTypeCompiler(compiler.GenericTypeCompiler):
return 'FIXED(%s)' % type_.precision
else:
return 'INTEGER'
-
+
def visit_BOOLEAN(self, type_):
return "BOOLEAN"
-
+
colspecs = {
sqltypes.Numeric: MaxNumeric,
sqltypes.DateTime: MaxTimestamp,
@@ -480,7 +480,7 @@ class MaxDBCompiler(compiler.SQLCompiler):
def visit_mod(self, binary, **kw):
return "mod(%s, %s)" % \
(self.process(binary.left), self.process(binary.right))
-
+
def default_from(self):
return ' FROM DUAL'
@@ -768,7 +768,7 @@ class MaxDBDDLCompiler(compiler.DDLCompiler):
Defaults to False. If true, sets NOCACHE.
"""
sequence = create.element
-
+
if (not sequence.optional and
(not self.checkfirst or
not self.dialect.has_sequence(self.connection, sequence.name))):
@@ -825,7 +825,7 @@ class MaxDBDialect(default.DefaultDialect):
colspecs = colspecs
ischema_names = ischema_names
-
+
# MaxDB-specific
datetimeformat = 'internal'
diff --git a/lib/sqlalchemy/dialects/maxdb/sapdb.py b/lib/sqlalchemy/dialects/maxdb/sapdb.py
index 4fd9dd418..da04d809f 100644
--- a/lib/sqlalchemy/dialects/maxdb/sapdb.py
+++ b/lib/sqlalchemy/dialects/maxdb/sapdb.py
@@ -8,7 +8,7 @@ from sqlalchemy.dialects.maxdb.base import MaxDBDialect
class MaxDBDialect_sapdb(MaxDBDialect):
driver = 'sapdb'
-
+
@classmethod
def dbapi(cls):
from sapdb import dbapi as _dbapi
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index d18880931..355214d89 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -31,7 +31,7 @@ class MSDialect_adodbapi(MSDialect):
supports_unicode = sys.maxunicode == 65535
supports_unicode_statements = True
driver = 'adodbapi'
-
+
@classmethod
def import_dbapi(cls):
import adodbapi as module
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index dda63080d..290cd1019 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -18,7 +18,7 @@ Auto Increment Behavior
``schema.Sequence()`` objects. In other words::
from sqlalchemy import Table, Integer, Sequence, Column
-
+
Table('test', metadata,
Column('id', Integer,
Sequence('blah',100,10), primary_key=True),
@@ -261,7 +261,7 @@ class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime):
class DATETIME2(_DateTimeBase, sqltypes.DateTime):
__visit_name__ = 'DATETIME2'
-
+
def __init__(self, precision=None, **kw):
super(DATETIME2, self).__init__(**kw)
self.precision = precision
@@ -270,7 +270,7 @@ class DATETIME2(_DateTimeBase, sqltypes.DateTime):
# TODO: is this not an Interval ?
class DATETIMEOFFSET(sqltypes.TypeEngine):
__visit_name__ = 'DATETIMEOFFSET'
-
+
def __init__(self, precision=None, **kwargs):
self.precision = precision
@@ -298,7 +298,7 @@ class NTEXT(_StringType, sqltypes.UnicodeText):
characters."""
__visit_name__ = 'NTEXT'
-
+
def __init__(self, length=None, collation=None, **kw):
"""Construct a NTEXT.
@@ -405,7 +405,7 @@ class IMAGE(sqltypes.LargeBinary):
class BIT(sqltypes.TypeEngine):
__visit_name__ = 'BIT'
-
+
class MONEY(sqltypes.TypeEngine):
__visit_name__ = 'MONEY'
@@ -487,13 +487,13 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
collation = 'COLLATE %s' % type_.collation
else:
collation = None
-
+
if not length:
length = type_.length
-
+
if length:
spec = spec + "(%s)" % length
-
+
return ' '.join([c for c in (spec, collation)
if c is not None])
@@ -535,10 +535,10 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
def visit_unicode(self, type_):
return self.visit_NVARCHAR(type_)
-
+
def visit_unicode_text(self, type_):
return self.visit_NTEXT(type_)
-
+
def visit_NTEXT(self, type_):
return self._extend("NTEXT", type_)
@@ -570,7 +570,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
return self.visit_DATETIME(type_)
else:
return self.visit_TIME(type_)
-
+
def visit_large_binary(self, type_):
return self.visit_IMAGE(type_)
@@ -600,7 +600,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
_select_lastrowid = False
_result_proxy = None
_lastrowid = None
-
+
def pre_exec(self):
"""Activate IDENTITY_INSERT if needed."""
@@ -608,25 +608,25 @@ class MSExecutionContext(default.DefaultExecutionContext):
tbl = self.compiled.statement.table
seq_column = tbl._autoincrement_column
insert_has_sequence = seq_column is not None
-
+
if insert_has_sequence:
self._enable_identity_insert = \
seq_column.key in self.compiled_parameters[0]
else:
self._enable_identity_insert = False
-
+
self._select_lastrowid = insert_has_sequence and \
not self.compiled.returning and \
not self._enable_identity_insert and \
not self.executemany
-
+
if self._enable_identity_insert:
self.cursor.execute("SET IDENTITY_INSERT %s ON" %
self.dialect.identifier_preparer.format_table(tbl))
def post_exec(self):
"""Disable IDENTITY_INSERT if enabled."""
-
+
if self._select_lastrowid:
if self.dialect.use_scope_identity:
self.cursor.execute(
@@ -640,17 +640,17 @@ class MSExecutionContext(default.DefaultExecutionContext):
if (self.isinsert or self.isupdate or self.isdelete) and \
self.compiled.returning:
self._result_proxy = base.FullyBufferedResultProxy(self)
-
+
if self._enable_identity_insert:
self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
+ "SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer.
format_table(self.compiled.statement.table)
)
-
+
def get_lastrowid(self):
return self._lastrowid
-
+
def handle_dbapi_exception(self, e):
if self._enable_identity_insert:
try:
@@ -670,7 +670,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
class MSSQLCompiler(compiler.SQLCompiler):
returning_precedes_values = True
-
+
extract_map = util.update_copy(
compiler.SQLCompiler.extract_map,
{
@@ -686,31 +686,31 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_now_func(self, fn, **kw):
return "CURRENT_TIMESTAMP"
-
+
def visit_current_date_func(self, fn, **kw):
return "GETDATE()"
-
+
def visit_length_func(self, fn, **kw):
return "LEN%s" % self.function_argspec(fn, **kw)
-
+
def visit_char_length_func(self, fn, **kw):
return "LEN%s" % self.function_argspec(fn, **kw)
-
+
def visit_concat_op(self, binary, **kw):
return "%s + %s" % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw))
-
+
def visit_match_op(self, binary, **kw):
return "CONTAINS (%s, %s)" % (
self.process(binary.left, **kw),
self.process(binary.right, **kw))
-
+
def get_select_precolumns(self, select):
""" MS-SQL puts TOP, it's version of LIMIT here """
if select._distinct or select._limit:
s = select._distinct and "DISTINCT " or ""
-
+
# ODBC drivers and possibly others
# don't support bind params in the SELECT clause on SQL Server.
# so have to use literal here.
@@ -743,7 +743,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
sql.literal_column("ROW_NUMBER() OVER (ORDER BY %s)" \
% orderby).label("mssql_rn")
).order_by(None).alias()
-
+
mssql_rn = sql.column('mssql_rn')
limitselect = sql.select([c for c in select.c if
c.key!='mssql_rn'])
@@ -853,7 +853,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
target = stmt.table.alias("inserted")
else:
target = stmt.table.alias("deleted")
-
+
adapter = sql_util.ClauseAdapter(target)
def col_label(col):
adapted = adapter.traverse(col)
@@ -861,7 +861,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
return adapted.label(c.key)
else:
return self.label_select_column(None, adapted, asfrom=False)
-
+
columns = [
self.process(
col_label(c),
@@ -896,10 +896,10 @@ class MSSQLCompiler(compiler.SQLCompiler):
class MSSQLStrictCompiler(MSSQLCompiler):
"""A subclass of MSSQLCompiler which disables the usage of bind
parameters where not allowed natively by MS-SQL.
-
+
A dialect may use this compiler on a platform where native
binds are used.
-
+
"""
ansi_bind_rules = True
@@ -927,9 +927,9 @@ class MSSQLStrictCompiler(MSSQLCompiler):
format acceptable to MSSQL. That seems to be the
so-called ODBC canonical date format which looks
like this:
-
+
yyyy-mm-dd hh:mi:ss.mmm(24h)
-
+
For other data types, call the base class implementation.
"""
# datetime and date are both subclasses of datetime.date
@@ -950,12 +950,12 @@ class MSDDLCompiler(compiler.DDLCompiler):
colspec += " NOT NULL"
else:
colspec += " NULL"
-
+
if column.table is None:
raise exc.InvalidRequestError(
"mssql requires Table-bound columns "
"in order to generate DDL")
-
+
seq_col = column.table._autoincrement_column
# install a IDENTITY Sequence if we have an implicit IDENTITY column
@@ -1015,13 +1015,13 @@ class MSDialect(default.DefaultDialect):
}
ischema_names = ischema_names
-
+
supports_native_boolean = False
supports_unicode_binds = True
postfetch_lastrowid = True
-
+
server_version_info = ()
-
+
statement_compiler = MSSQLCompiler
ddl_compiler = MSDDLCompiler
type_compiler = MSTypeCompiler
@@ -1039,7 +1039,7 @@ class MSDialect(default.DefaultDialect):
self.max_identifier_length = int(max_identifier_length or 0) or \
self.max_identifier_length
super(MSDialect, self).__init__(**opts)
-
+
def do_savepoint(self, connection, name):
util.warn("Savepoint support in mssql is experimental and "
"may lead to data loss.")
@@ -1048,7 +1048,7 @@ class MSDialect(default.DefaultDialect):
def do_release_savepoint(self, connection, name):
pass
-
+
def initialize(self, connection):
super(MSDialect, self).initialize(connection)
if self.server_version_info[0] not in range(8, 17):
@@ -1064,7 +1064,7 @@ class MSDialect(default.DefaultDialect):
if self.server_version_info >= MS_2005_VERSION and \
'implicit_returning' not in self.__dict__:
self.implicit_returning = True
-
+
def _get_default_schema_name(self, connection):
user_name = connection.scalar("SELECT user_name() as user_name;")
if user_name is not None:
@@ -1138,7 +1138,7 @@ class MSDialect(default.DefaultDialect):
# below MS 2005
if self.server_version_info < MS_2005_VERSION:
return []
-
+
current_schema = schema or self.default_schema_name
full_tname = "%s.%s" % (current_schema, tablename)
@@ -1186,7 +1186,7 @@ class MSDialect(default.DefaultDialect):
for row in rp:
if row['index_id'] in indexes:
indexes[row['index_id']]['column_names'].append(row['name'])
-
+
return indexes.values()
@reflection.cache
@@ -1315,7 +1315,7 @@ class MSDialect(default.DefaultDialect):
# the constrained column
C = ischema.key_constraints.alias('C')
# information_schema.constraint_column_usage:
- # the referenced column
+ # the referenced column
R = ischema.key_constraints.alias('R')
# Primary key constraints
@@ -1337,7 +1337,7 @@ class MSDialect(default.DefaultDialect):
#information_schema.referential_constraints
RR = ischema.ref_constraints
# information_schema.table_constraints
- TC = ischema.constraints
+ TC = ischema.constraints
# information_schema.constraint_column_usage:
# the constrained column
C = ischema.key_constraints.alias('C')
@@ -1361,12 +1361,12 @@ class MSDialect(default.DefaultDialect):
order_by = [
RR.c.constraint_name,
R.c.ordinal_position])
-
+
# group rows by constraint ID, to handle multi-column FKs
fkeys = []
fknm, scols, rcols = (None, [], [])
-
+
def fkey_rec():
return {
'name' : None,
@@ -1377,7 +1377,7 @@ class MSDialect(default.DefaultDialect):
}
fkeys = util.defaultdict(fkey_rec)
-
+
for r in connection.execute(s).fetchall():
scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
@@ -1388,11 +1388,11 @@ class MSDialect(default.DefaultDialect):
if schema is not None or current_schema != rschema:
rec['referred_schema'] = rschema
-
+
local_cols, remote_cols = \
rec['constrained_columns'],\
rec['referred_columns']
-
+
local_cols.append(scol)
remote_cols.append(rcol)
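
The ``pre_exec()``/``post_exec()``/``handle_dbapi_exception()`` trio above brackets an INSERT that supplies an explicit value for an IDENTITY column. The underlying pattern, sketched standalone with an illustrative cursor and table name::

    def insert_with_explicit_identity(cursor, table, insert_sql, params):
        # SQL Server rejects explicit IDENTITY values unless
        # IDENTITY_INSERT is switched on, one table at a time
        cursor.execute("SET IDENTITY_INSERT %s ON" % table)
        try:
            cursor.execute(insert_sql, params)
        finally:
            # always switch it back off, even if the INSERT fails
            cursor.execute("SET IDENTITY_INSERT %s OFF" % table)
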
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 5806ebfa8..87dd0a167 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -13,12 +13,12 @@ ischema = MetaData()
class CoerceUnicode(TypeDecorator):
impl = Unicode
-
+
def process_bind_param(self, value, dialect):
if isinstance(value, str):
value = value.decode(dialect.encoding)
return value
-
+
schemata = Table("SCHEMATA", ischema,
Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
@@ -74,8 +74,8 @@ ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
# TODO: is CATLOG misspelled ?
Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
- key="unique_constraint_catalog"),
-
+ key="unique_constraint_catalog"),
+
Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
key="unique_constraint_schema"),
Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index ba695ef08..6a830509a 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -20,7 +20,7 @@ Connecting
Connection is via DSN::
mssql+mxodbc://<username>:<password>@<dsnname>
-
+
Execution Modes
~~~~~~~~~~~~~~~
@@ -72,7 +72,7 @@ class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
# won't work.
class MSDialect_mxodbc(MxODBCConnector, MSDialect):
-
+
# TODO: may want to use this only if FreeTDS is not in use,
# since FreeTDS doesn't seem to use native binds.
statement_compiler = MSSQLStrictCompiler
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index aa3bf45d2..192e63366 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -12,10 +12,10 @@ This dialect supports pymssql 1.0 and greater.
pymssql is available at:
http://pymssql.sourceforge.net/
-
+
Connecting
^^^^^^^^^^
-
+
Sample connect string::
mssql+pymssql://<username>:<password>@<freetds_name>
@@ -53,7 +53,7 @@ class MSDialect_pymssql(MSDialect):
supports_sane_rowcount = False
max_identifier_length = 30
driver = 'pymssql'
-
+
colspecs = util.update_copy(
MSDialect.colspecs,
{
@@ -67,7 +67,7 @@ class MSDialect_pymssql(MSDialect):
# pymmsql doesn't have a Binary method. we use string
# TODO: monkeypatching here is less than ideal
module.Binary = str
-
+
client_ver = tuple(int(x) for x in module.__version__.split("."))
if client_ver < (1, ):
util.warn("The pymssql dialect expects at least "
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 90a43889e..9b88dce2a 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -86,15 +86,15 @@ import decimal
class _MSNumeric_pyodbc(sqltypes.Numeric):
"""Turns Decimals with adjusted() < 0 or > 7 into strings.
-
+
This is the only method that is proven to work with Pyodbc+MSSQL
without crashing (floats can be used but seem to cause sporadic
crashes).
-
+
"""
def bind_processor(self, dialect):
-
+
super_process = super(_MSNumeric_pyodbc, self).\
bind_processor(dialect)
@@ -104,7 +104,7 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
def process(value):
if self.asdecimal and \
isinstance(value, decimal.Decimal):
-
+
adjusted = value.adjusted()
if adjusted < 0:
return self._small_dec_to_string(value)
@@ -116,10 +116,10 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
else:
return value
return process
-
+
# these routines needed for older versions of pyodbc.
# as of 2.1.8 this logic is integrated.
-
+
def _small_dec_to_string(self, value):
return "%s0.%s%s" % (
(value < 0 and '-' or ''),
@@ -147,24 +147,24 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
"".join(
[str(s) for s in _int][0:value.adjusted() + 1]))
return result
-
-
+
+
class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False
-
+
def pre_exec(self):
"""where appropriate, issue "select scope_identity()" in the same
statement.
-
+
Background on why "scope_identity()" is preferable to "@@identity":
http://msdn.microsoft.com/en-us/library/ms190315.aspx
-
+
Background on why we attempt to embed "scope_identity()" into the same
statement as the INSERT:
http://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?
-
+
"""
-
+
super(MSExecutionContext_pyodbc, self).pre_exec()
# don't embed the scope_identity select into an
@@ -173,7 +173,7 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
self.dialect.use_scope_identity and \
len(self.parameters[0]):
self._embedded_scope_identity = True
-
+
self.statement += "; select scope_identity()"
def post_exec(self):
@@ -185,13 +185,13 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
try:
# fetchall() ensures the cursor is consumed
# without closing it (FreeTDS particularly)
- row = self.cursor.fetchall()[0]
+ row = self.cursor.fetchall()[0]
break
except self.dialect.dbapi.Error, e:
# no way around this - nextset() consumes the previous set
# so we need to just keep flipping
self.cursor.nextset()
-
+
self._lastrowid = int(row[0])
else:
super(MSExecutionContext_pyodbc, self).post_exec()
@@ -202,14 +202,14 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
execution_ctx_cls = MSExecutionContext_pyodbc
pyodbc_driver_name = 'SQL Server'
-
+
colspecs = util.update_copy(
MSDialect.colspecs,
{
sqltypes.Numeric:_MSNumeric_pyodbc
}
)
-
+
def __init__(self, description_encoding='latin-1', **params):
super(MSDialect_pyodbc, self).__init__(**params)
self.description_encoding = description_encoding
@@ -217,5 +217,5 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
hasattr(self.dbapi.Cursor, 'nextset')
self._need_decimal_fix = self.dbapi and \
tuple(self.dbapi.version.split(".")) < (2, 1, 8)
-
+
dialect = MSDialect_pyodbc
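
The embedded ``scope_identity()`` technique the docstring above describes amounts to batching the SELECT with the INSERT and then flipping through result sets; a hedged sketch assuming a pyodbc cursor::

    import pyodbc

    def insert_and_get_identity(cursor, insert_sql, params):
        cursor.execute(insert_sql + "; select scope_identity()", params)
        row = None
        while row is None:
            try:
                # fetchall() consumes the set without closing the
                # cursor (significant under FreeTDS)
                row = cursor.fetchall()[0]
            except pyodbc.Error:
                # nextset() discards the INSERT's own result set;
                # if no further sets remain, re-raise
                if not cursor.nextset():
                    raise
        return int(row[0])
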
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index 6eb54588a..fe1ef49b2 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -18,7 +18,7 @@ from sqlalchemy.dialects.mysql.base import \
NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \
TINYBLOB, TINYINT, TINYTEXT,\
VARBINARY, VARCHAR, YEAR, dialect
-
+
__all__ = (
'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE',
'ENUM', 'DECIMAL', 'FLOAT', 'INTEGER', 'INTEGER', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT',
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index 6585c4016..e26d83f0a 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -39,7 +39,7 @@ Connecting
----------
See the API documentation on individual drivers for details on connecting.
-
+
Connection Timeouts
-------------------
@@ -235,7 +235,7 @@ class _NumericType(object):
self.unsigned = unsigned
self.zerofill = zerofill
super(_NumericType, self).__init__(**kw)
-
+
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
if isinstance(self, (REAL, DOUBLE)) and \
@@ -274,7 +274,7 @@ class _StringType(sqltypes.String):
self.binary = binary
self.national = national
super(_StringType, self).__init__(**kw)
-
+
def __repr__(self):
attributes = inspect.getargspec(self.__init__)[0][1:]
attributes.extend(inspect.getargspec(_StringType.__init__)[0][1:])
@@ -291,9 +291,9 @@ class _StringType(sqltypes.String):
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
-
+
__visit_name__ = 'NUMERIC'
-
+
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a NUMERIC.
@@ -315,9 +315,9 @@ class NUMERIC(_NumericType, sqltypes.NUMERIC):
class DECIMAL(_NumericType, sqltypes.DECIMAL):
"""MySQL DECIMAL type."""
-
+
__visit_name__ = 'DECIMAL'
-
+
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DECIMAL.
@@ -337,7 +337,7 @@ class DECIMAL(_NumericType, sqltypes.DECIMAL):
super(DECIMAL, self).__init__(precision=precision, scale=scale,
asdecimal=asdecimal, **kw)
-
+
class DOUBLE(_FloatType):
"""MySQL DOUBLE type."""
@@ -538,12 +538,12 @@ class BIT(sqltypes.TypeEngine):
def result_processor(self, dialect, coltype):
"""Convert a MySQL's 64 bit, variable length binary string to a long.
-
+
TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector
already do this, so this logic should be moved to those dialects.
-
+
"""
-
+
def process(value):
if value is not None:
v = 0L
@@ -710,7 +710,7 @@ class LONGTEXT(_StringType):
"""
super(LONGTEXT, self).__init__(**kwargs)
-
+
class VARCHAR(_StringType, sqltypes.VARCHAR):
"""MySQL VARCHAR type, for variable-length character data."""
@@ -818,7 +818,7 @@ class NCHAR(_StringType, sqltypes.NCHAR):
class TINYBLOB(sqltypes._Binary):
"""MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
-
+
__visit_name__ = 'TINYBLOB'
class MEDIUMBLOB(sqltypes._Binary):
@@ -886,7 +886,7 @@ class ENUM(sqltypes.Enum, _StringType):
"""
self.quoting = kw.pop('quoting', 'auto')
-
+
if self.quoting == 'auto' and len(enums):
# What quoting character are we using?
q = None
@@ -919,7 +919,7 @@ class ENUM(sqltypes.Enum, _StringType):
kw.pop('native_enum', None)
_StringType.__init__(self, length=length, **kw)
sqltypes.Enum.__init__(self, *enums)
-
+
@classmethod
def _strip_enums(cls, enums):
strip_enums = []
@@ -929,7 +929,7 @@ class ENUM(sqltypes.Enum, _StringType):
a = a[1:-1].replace(a[0] * 2, a[0])
strip_enums.append(a)
return strip_enums
-
+
def bind_processor(self, dialect):
super_convert = super(ENUM, self).bind_processor(dialect)
def process(value):
@@ -941,7 +941,7 @@ class ENUM(sqltypes.Enum, _StringType):
else:
return value
return process
-
+
def adapt(self, impltype, **kw):
kw['strict'] = self.strict
return sqltypes.Enum.adapt(self, impltype, **kw)
@@ -1121,19 +1121,19 @@ class MySQLCompiler(compiler.SQLCompiler):
extract_map.update ({
'milliseconds': 'millisecond',
})
-
+
def visit_random_func(self, fn, **kw):
return "rand%s" % self.function_argspec(fn)
-
+
def visit_utc_timestamp_func(self, fn, **kw):
return "UTC_TIMESTAMP"
-
+
def visit_sysdate_func(self, fn, **kw):
return "SYSDATE()"
-
+
def visit_concat_op(self, binary, **kw):
return "concat(%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
+
def visit_match_op(self, binary, **kw):
return "MATCH (%s) AGAINST (%s IN BOOLEAN MODE)" % (self.process(binary.left), self.process(binary.right))
@@ -1170,7 +1170,7 @@ class MySQLCompiler(compiler.SQLCompiler):
# No cast until 4, no decimals until 5.
if not self.dialect._supports_cast:
return self.process(cast.clause)
-
+
type_ = self.process(cast.typeclause)
if type_ is None:
return self.process(cast.clause)
@@ -1182,7 +1182,7 @@ class MySQLCompiler(compiler.SQLCompiler):
if self.dialect._backslash_escapes:
value = value.replace('\\', '\\\\')
return value
-
+
def get_select_precolumns(self, select):
if isinstance(select._distinct, basestring):
return select._distinct.upper() + " "
@@ -1274,7 +1274,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
def create_table_constraints(self, table):
"""Get table constraints."""
constraint_string = super(MySQLDDLCompiler, self).create_table_constraints(table)
-
+
is_innodb = table.kwargs.has_key('mysql_engine') and \
table.kwargs['mysql_engine'].lower() == 'innodb'
@@ -1287,7 +1287,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string += ", \n\t"
constraint_string += "KEY `idx_autoinc_%s`(`%s`)" % (auto_inc_column.name, \
self.preparer.format_column(auto_inc_column))
-
+
return constraint_string
@@ -1301,7 +1301,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
default = self.get_column_default_string(column)
if default is not None:
colspec.append('DEFAULT ' + default)
-
+
is_timestamp = isinstance(column.type, sqltypes.TIMESTAMP)
if not column.nullable and not is_timestamp:
colspec.append('NOT NULL')
@@ -1349,7 +1349,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
def visit_drop_index(self, drop):
index = drop.element
-
+
return "\nDROP INDEX %s ON %s" % \
(self.preparer.quote(self._index_identifier(index.name), index.quote),
self.preparer.format_table(index.table))
@@ -1390,10 +1390,10 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
COLLATE annotations and MySQL specific extensions.
"""
-
+
def attr(name):
return getattr(type_, name, defaults.get(name))
-
+
if attr('charset'):
charset = 'CHARACTER SET %s' % attr('charset')
elif attr('ascii'):
@@ -1416,10 +1416,10 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
if c is not None])
return ' '.join([c for c in (spec, charset, collation)
if c is not None])
-
+
def _mysql_type(self, type_):
return isinstance(type_, (_StringType, _NumericType))
-
+
def visit_NUMERIC(self, type_):
if type_.precision is None:
return self._extend_numeric(type_, "NUMERIC")
@@ -1451,7 +1451,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
'scale' : type_.scale})
else:
return self._extend_numeric(type_, 'REAL')
-
+
def visit_FLOAT(self, type_):
if self._mysql_type(type_) and type_.scale is not None and type_.precision is not None:
return self._extend_numeric(type_, "FLOAT(%s, %s)" % (type_.precision, type_.scale))
@@ -1459,19 +1459,19 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
return self._extend_numeric(type_, "FLOAT(%s)" % (type_.precision,))
else:
return self._extend_numeric(type_, "FLOAT")
-
+
def visit_INTEGER(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
return self._extend_numeric(type_, "INTEGER(%(display_width)s)" % {'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "INTEGER")
-
+
def visit_BIGINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
return self._extend_numeric(type_, "BIGINT(%(display_width)s)" % {'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "BIGINT")
-
+
def visit_MEDIUMINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
return self._extend_numeric(type_, "MEDIUMINT(%(display_width)s)" % {'display_width': type_.display_width})
@@ -1495,7 +1495,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
return "BIT(%s)" % type_.length
else:
return "BIT"
-
+
def visit_DATETIME(self, type_):
return "DATETIME"
@@ -1513,34 +1513,34 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
return "YEAR"
else:
return "YEAR(%s)" % type_.display_width
-
+
def visit_TEXT(self, type_):
if type_.length:
return self._extend_string(type_, {}, "TEXT(%d)" % type_.length)
else:
return self._extend_string(type_, {}, "TEXT")
-
+
def visit_TINYTEXT(self, type_):
return self._extend_string(type_, {}, "TINYTEXT")
def visit_MEDIUMTEXT(self, type_):
return self._extend_string(type_, {}, "MEDIUMTEXT")
-
+
def visit_LONGTEXT(self, type_):
return self._extend_string(type_, {}, "LONGTEXT")
-
+
def visit_VARCHAR(self, type_):
if type_.length:
return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length)
else:
raise exc.InvalidRequestError("VARCHAR requires a length when rendered on MySQL")
-
+
def visit_CHAR(self, type_):
if type_.length:
return self._extend_string(type_, {}, "CHAR(%(length)s)" % {'length' : type_.length})
else:
return self._extend_string(type_, {}, "CHAR")
-
+
def visit_NVARCHAR(self, type_):
# We'll actually generate the equiv. "NATIONAL VARCHAR" instead
# of "NVARCHAR".
@@ -1548,32 +1548,32 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
return self._extend_string(type_, {'national':True}, "VARCHAR(%(length)s)" % {'length': type_.length})
else:
raise exc.InvalidRequestError("NVARCHAR requires a length when rendered on MySQL")
-
+
def visit_NCHAR(self, type_):
# We'll actually generate the equiv. "NATIONAL CHAR" instead of "NCHAR".
if type_.length:
return self._extend_string(type_, {'national':True}, "CHAR(%(length)s)" % {'length': type_.length})
else:
return self._extend_string(type_, {'national':True}, "CHAR")
-
+
def visit_VARBINARY(self, type_):
return "VARBINARY(%d)" % type_.length
-
+
def visit_large_binary(self, type_):
return self.visit_BLOB(type_)
-
+
def visit_enum(self, type_):
if not type_.native_enum:
return super(MySQLTypeCompiler, self).visit_enum(type_)
else:
return self.visit_ENUM(type_)
-
+
def visit_BLOB(self, type_):
if type_.length:
return "BLOB(%d)" % type_.length
else:
return "BLOB"
-
+
def visit_TINYBLOB(self, type_):
return "TINYBLOB"
@@ -1588,13 +1588,13 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
for e in type_.enums:
quoted_enums.append("'%s'" % e.replace("'", "''"))
return self._extend_string(type_, {}, "ENUM(%s)" % ",".join(quoted_enums))
-
+
def visit_SET(self, type_):
return self._extend_string(type_, {}, "SET(%s)" % ",".join(type_._ddl_values))
def visit_BOOLEAN(self, type):
return "BOOL"
-
+
class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
@@ -1604,7 +1604,7 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
if not server_ansiquotes:
quote = "`"
else:
- quote = '"'
+ quote = '"'
super(MySQLIdentifierPreparer, self).__init__(
dialect,
@@ -1618,34 +1618,34 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
class MySQLDialect(default.DefaultDialect):
"""Details of the MySQL dialect. Not used directly in application code."""
-
+
name = 'mysql'
supports_alter = True
-
+
# identifiers are 64, however aliases can be 255...
max_identifier_length = 255
max_index_name_length = 64
-
+
supports_native_enum = True
-
+
supports_sane_rowcount = True
supports_sane_multi_rowcount = False
-
+
default_paramstyle = 'format'
colspecs = colspecs
-
+
statement_compiler = MySQLCompiler
ddl_compiler = MySQLDDLCompiler
type_compiler = MySQLTypeCompiler
ischema_names = ischema_names
preparer = MySQLIdentifierPreparer
-
+
# default SQL compilation settings -
# these are modified upon initialize(),
# i.e. first connect
_backslash_escapes = True
_server_ansiquotes = False
-
+
def __init__(self, use_ansiquotes=None, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
@@ -1705,7 +1705,7 @@ class MySQLDialect(default.DefaultDialect):
if isinstance(e, self.dbapi.OperationalError):
return self._extract_error_code(e) in \
(2006, 2013, 2014, 2045, 2055)
- elif isinstance(e, self.dbapi.InterfaceError):
+ elif isinstance(e, self.dbapi.InterfaceError):
# if underlying connection is closed,
# this is the error you get
return "(0, '')" in str(e)
@@ -1729,7 +1729,7 @@ class MySQLDialect(default.DefaultDialect):
def _extract_error_code(self, exception):
raise NotImplementedError()
-
+
def _get_default_schema_name(self, connection):
return connection.execute('SELECT DATABASE()').scalar()
@@ -1764,7 +1764,7 @@ class MySQLDialect(default.DefaultDialect):
finally:
if rs:
rs.close()
-
+
def initialize(self, connection):
default.DefaultDialect.initialize(self, connection)
self._connection_charset = self._detect_charset(connection)
@@ -1781,7 +1781,7 @@ class MySQLDialect(default.DefaultDialect):
def _supports_cast(self):
return self.server_version_info is None or \
self.server_version_info >= (4, 0, 2)
-
+
@reflection.cache
def get_schema_names(self, connection, **kw):
rp = connection.execute("SHOW schemas")
@@ -1806,7 +1806,7 @@ class MySQLDialect(default.DefaultDialect):
return [row[0] for row in self._compat_fetchall(rp, charset=charset)\
if row[1] == 'BASE TABLE']
-
+
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
charset = self._connection_charset
@@ -1848,7 +1848,7 @@ class MySQLDialect(default.DefaultDialect):
parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
default_schema = None
-
+
fkeys = []
for spec in parsed_state.constraints:
@@ -1886,7 +1886,7 @@ class MySQLDialect(default.DefaultDialect):
def get_indexes(self, connection, table_name, schema=None, **kw):
parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
-
+
indexes = []
for spec in parsed_state.keys:
unique = False
@@ -1926,14 +1926,14 @@ class MySQLDialect(default.DefaultDialect):
schema,
info_cache=kw.get('info_cache', None)
)
-
+
@util.memoized_property
def _tabledef_parser(self):
"""return the MySQLTableDefinitionParser, generate if needed.
-
+
The deferred creation ensures that the dialect has
retrieved server version information first.
-
+
"""
if (self.server_version_info < (4, 1) and self._server_ansiquotes):
# ANSI_QUOTES doesn't affect SHOW CREATE TABLE on < 4.1
@@ -1941,7 +1941,7 @@ class MySQLDialect(default.DefaultDialect):
else:
preparer = self.identifier_preparer
return MySQLTableDefinitionParser(self, preparer)
-
+
@reflection.cache
def _setup_parser(self, connection, table_name, schema=None, **kw):
charset = self._connection_charset
@@ -1956,7 +1956,7 @@ class MySQLDialect(default.DefaultDialect):
full_name=full_name)
sql = parser._describe_to_create(table_name, columns)
return parser.parse(sql, charset)
-
+
def _adjust_casing(self, table, charset=None):
"""Adjust Table name to the server case sensitivity, if needed."""
@@ -2030,10 +2030,10 @@ class MySQLDialect(default.DefaultDialect):
mode = (mode_no | 4 == mode_no) and 'ANSI_QUOTES' or ''
self._server_ansiquotes = 'ANSI_QUOTES' in mode
-
+
# as of MySQL 5.0.1
self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode
-
+
def _show_create_table(self, connection, table, charset=None,
full_name=None):
"""Run SHOW CREATE TABLE for a ``Table``."""
@@ -2082,17 +2082,17 @@ class MySQLDialect(default.DefaultDialect):
class ReflectedState(object):
"""Stores raw information about a SHOW CREATE TABLE statement."""
-
+
def __init__(self):
self.columns = []
self.table_options = {}
self.table_name = None
self.keys = []
self.constraints = []
-
+
class MySQLTableDefinitionParser(object):
"""Parses the results of a SHOW CREATE TABLE statement."""
-
+
def __init__(self, dialect, preparer):
self.dialect = dialect
self.preparer = preparer
@@ -2125,9 +2125,9 @@ class MySQLTableDefinitionParser(object):
state.constraints.append(spec)
else:
pass
-
+
return state
-
+
def _parse_constraints(self, line):
"""Parse a KEY or CONSTRAINT line.
@@ -2278,7 +2278,7 @@ class MySQLTableDefinitionParser(object):
if default == 'NULL':
# eliminates the need to deal with this later.
default = None
-
+
col_d = dict(name=name, type=type_instance, default=default)
col_d.update(col_kw)
state.columns.append(col_d)
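
``_extend_string`` above is mostly attribute merging; a standalone sketch of the rendering order, with an example::

    def extend_string(spec, charset=None, collation=None, national=False):
        # MySQL column syntax: [NATIONAL] <type> [CHARACTER SET cs] [COLLATE co]
        if national:
            spec = 'NATIONAL ' + spec
        charset = 'CHARACTER SET %s' % charset if charset else None
        collation = 'COLLATE %s' % collation if collation else None
        return ' '.join(c for c in (spec, charset, collation) if c is not None)

    print(extend_string('VARCHAR(30)', charset='utf8', collation='utf8_bin'))
    # VARCHAR(30) CHARACTER SET utf8 COLLATE utf8_bin
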
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index ced873039..e9e1cdbba 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -9,7 +9,7 @@
MySQL-Python is available at:
http://sourceforge.net/projects/mysql-python
-
+
At least version 1.2.1 or 1.2.2 should be used.
Connecting
@@ -18,7 +18,7 @@ Connecting
Connect string format::
mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
-
+
Character Sets
--------------
@@ -42,7 +42,7 @@ Known Issues
-------------
MySQL-python at least as of version 1.2.2 has a serious memory leak related
-to unicode conversion, a feature which is disabled via ``use_unicode=0``.
+to unicode conversion, a feature which is disabled via ``use_unicode=0``.
The recommended connection form with SQLAlchemy is::
engine = create_engine('mysql://scott:tiger@localhost/test?charset=utf8&use_unicode=0', pool_recycle=3600)
@@ -60,25 +60,25 @@ from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
from sqlalchemy import processors
class MySQLExecutionContext_mysqldb(MySQLExecutionContext):
-
+
@property
def rowcount(self):
if hasattr(self, '_rowcount'):
return self._rowcount
else:
return self.cursor.rowcount
-
-
+
+
class MySQLCompiler_mysqldb(MySQLCompiler):
def visit_mod(self, binary, **kw):
return self.process(binary.left) + " %% " + self.process(binary.right)
-
+
def post_process_text(self, text):
return text.replace('%', '%%')
class MySQLIdentifierPreparer_mysqldb(MySQLIdentifierPreparer):
-
+
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace("%", "%%")
@@ -95,13 +95,13 @@ class MySQLDialect_mysqldb(MySQLDialect):
execution_ctx_cls = MySQLExecutionContext_mysqldb
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer_mysqldb
-
+
colspecs = util.update_copy(
MySQLDialect.colspecs,
{
}
)
-
+
@classmethod
def dbapi(cls):
return __import__('MySQLdb')
@@ -149,7 +149,7 @@ class MySQLDialect_mysqldb(MySQLDialect):
pass
opts['client_flag'] = client_flag
return [[], opts]
-
+
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = []
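
``visit_mod()`` and ``post_process_text()`` above exist because MySQLdb uses the ``format`` paramstyle: the driver itself interpolates ``sql % parameters``, so any literal percent sign must be doubled. Illustrated::

    def escape_percents(sql_text):
        # with paramstyle='format' the DBAPI applies sql % params,
        # so a literal % must be emitted as %%
        return sql_text.replace('%', '%%')

    print(escape_percents('SELECT a % b FROM t'))
    # SELECT a %% b FROM t
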
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index b4d9485d3..d3ef839b1 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -9,7 +9,7 @@
OurSQL is available at:
http://packages.python.org/oursql/
-
+
Connecting
-----------
@@ -61,7 +61,7 @@ class MySQLExecutionContext_oursql(MySQLExecutionContext):
@property
def plain_query(self):
return self.execution_options.get('_oursql_plain_query', False)
-
+
class MySQLDialect_oursql(MySQLDialect):
driver = 'oursql'
# Py3K
@@ -70,9 +70,9 @@ class MySQLDialect_oursql(MySQLDialect):
supports_unicode_binds = True
supports_unicode_statements = True
# end Py2K
-
+
supports_native_decimal = True
-
+
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
execution_ctx_cls = MySQLExecutionContext_oursql
@@ -132,7 +132,7 @@ class MySQLDialect_oursql(MySQLDialect):
if not is_prepared:
self.do_prepare_twophase(connection, xid)
self._xa_query(connection, 'XA COMMIT "%s"', xid)
-
+
# Q: why didn't we need all these "plain_query" overrides earlier ?
# am i on a newer/older version of OurSQL ?
def has_table(self, connection, table_name, schema=None):
@@ -140,7 +140,7 @@ class MySQLDialect_oursql(MySQLDialect):
connection.connect().\
execution_options(_oursql_plain_query=True),
table_name, schema)
-
+
def get_table_options(self, connection, table_name, schema=None, **kw):
return MySQLDialect.get_table_options(self,
connection.connect().\
@@ -159,7 +159,7 @@ class MySQLDialect_oursql(MySQLDialect):
schema=schema,
**kw
)
-
+
def get_view_names(self, connection, schema=None, **kw):
return MySQLDialect.get_view_names(self,
connection.connect().\
@@ -167,27 +167,27 @@ class MySQLDialect_oursql(MySQLDialect):
schema=schema,
**kw
)
-
+
def get_table_names(self, connection, schema=None, **kw):
return MySQLDialect.get_table_names(self,
connection.connect().\
execution_options(_oursql_plain_query=True),
schema
)
-
+
def get_schema_names(self, connection, **kw):
return MySQLDialect.get_schema_names(self,
connection.connect().\
execution_options(_oursql_plain_query=True),
**kw
)
-
+
def initialize(self, connection):
return MySQLDialect.initialize(
self,
connection.execution_options(_oursql_plain_query=True)
)
-
+
def _show_create_table(self, connection, table, charset=None,
full_name=None):
return MySQLDialect._show_create_table(self,
@@ -196,7 +196,7 @@ class MySQLDialect_oursql(MySQLDialect):
table, charset, full_name)
def is_disconnect(self, e):
- if isinstance(e, self.dbapi.ProgrammingError):
+ if isinstance(e, self.dbapi.ProgrammingError):
return e.errno is None and 'cursor' not in e.args[1] and e.args[1].endswith('closed')
else:
return e.errno in (2006, 2013, 2014, 2045, 2055)
@@ -240,7 +240,7 @@ class MySQLDialect_oursql(MySQLDialect):
def _detect_charset(self, connection):
"""Sniff out the character set in use for connection results."""
-
+
return connection.connection.charset
def _compat_fetchall(self, rp, charset=None):
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index d103c56c8..84d43cf27 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -47,7 +47,7 @@ class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
execution_ctx_cls = MySQLExecutionContext_pyodbc
pyodbc_driver_name = "MySQL"
-
+
def __init__(self, **kw):
# deal with http://code.google.com/p/pyodbc/issues/detail?id=25
kw.setdefault('convert_unicode', True)
@@ -70,7 +70,7 @@ class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
util.warn("Could not detect the connection character set. Assuming latin1.")
return 'latin1'
-
+
def _extract_error_code(self, exception):
m = re.compile(r"\((\d+)\)").search(str(exception.args))
c = m.group(1)
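
``_extract_error_code`` above pulls the native MySQL error number out of the exception text; a sketch of the same regex with a guard for the no-match case::

    import re

    _code_re = re.compile(r"\((\d+)\)")

    def extract_error_code(exception):
        # pyodbc embeds the MySQL error code in parens, e.g.
        # "(2006, 'MySQL server has gone away')"
        m = _code_re.search(str(exception.args))
        return int(m.group(1)) if m else None
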
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index defab2947..bacad3704 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -132,7 +132,7 @@ from sqlalchemy.sql import operators as sql_operators, functions as sql_function
from sqlalchemy import types as sqltypes
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, \
BLOB, CLOB, TIMESTAMP, FLOAT
-
+
RESERVED_WORDS = set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '
'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '
'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE ANY '
@@ -155,33 +155,33 @@ NVARCHAR2 = NVARCHAR
class NUMBER(sqltypes.Numeric, sqltypes.Integer):
__visit_name__ = 'NUMBER'
-
+
def __init__(self, precision=None, scale=None, asdecimal=None):
if asdecimal is None:
asdecimal = bool(scale and scale > 0)
-
+
super(NUMBER, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
-
+
def adapt(self, impltype):
ret = super(NUMBER, self).adapt(impltype)
# leave a hint for the DBAPI handler
ret._is_oracle_number = True
return ret
-
+
@property
def _type_affinity(self):
if bool(self.scale and self.scale > 0):
return sqltypes.Numeric
else:
return sqltypes.Integer
-
-
+
+
class DOUBLE_PRECISION(sqltypes.Numeric):
__visit_name__ = 'DOUBLE_PRECISION'
def __init__(self, precision=None, scale=None, asdecimal=None):
if asdecimal is None:
asdecimal = False
-
+
super(DOUBLE_PRECISION, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
class BFILE(sqltypes.LargeBinary):
@@ -192,44 +192,44 @@ class LONG(sqltypes.Text):
class INTERVAL(sqltypes.TypeEngine):
__visit_name__ = 'INTERVAL'
-
+
def __init__(self,
day_precision=None,
second_precision=None):
"""Construct an INTERVAL.
-
+
Note that only DAY TO SECOND intervals are currently supported.
This is due to a lack of support for YEAR TO MONTH intervals
within available DBAPIs (cx_oracle and zxjdbc).
-
+
:param day_precision: the day precision value. This is the number of digits
to store for the day field. Defaults to "2".
:param second_precision: the second precision value. This is the number of digits
to store for the fractional seconds field. Defaults to "6".
-
+
"""
self.day_precision = day_precision
self.second_precision = second_precision
-
+
@classmethod
def _adapt_from_generic_interval(cls, interval):
return INTERVAL(day_precision=interval.day_precision,
second_precision=interval.second_precision)
-
+
@property
def _type_affinity(self):
return sqltypes.Interval
class ROWID(sqltypes.TypeEngine):
"""Oracle ROWID type.
-
+
When used in a cast() or similar, generates ROWID.
-
+
"""
__visit_name__ = 'ROWID'
-
-
-
+
+
+
class _OracleBoolean(sqltypes.Boolean):
def get_dbapi_type(self, dbapi):
return dbapi.NUMBER
@@ -264,19 +264,19 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
# Oracle DATE == DATETIME
# Oracle does not allow milliseconds in DATE
# Oracle does not support TIME columns
-
+
def visit_datetime(self, type_):
return self.visit_DATE(type_)
-
+
def visit_float(self, type_):
return self.visit_FLOAT(type_)
-
+
def visit_unicode(self, type_):
if self.dialect._supports_nchar:
return self.visit_NVARCHAR(type_)
else:
return self.visit_VARCHAR(type_)
-
+
def visit_INTERVAL(self, type_):
return "INTERVAL DAY%s TO SECOND%s" % (
type_.day_precision is not None and
@@ -295,24 +295,24 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def visit_DOUBLE_PRECISION(self, type_):
return self._generate_numeric(type_, "DOUBLE PRECISION")
-
+
def visit_NUMBER(self, type_, **kw):
return self._generate_numeric(type_, "NUMBER", **kw)
-
+
def _generate_numeric(self, type_, name, precision=None, scale=None):
if precision is None:
precision = type_.precision
-
+
if scale is None:
scale = getattr(type_, 'scale', None)
-
+
if precision is None:
return name
elif scale is None:
return "%(name)s(%(precision)s)" % {'name':name,'precision': precision}
else:
return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale}
-
+
def visit_VARCHAR(self, type_):
if self.dialect._supports_char_length:
return "VARCHAR(%(length)s CHAR)" % {'length' : type_.length}
@@ -321,7 +321,7 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def visit_NVARCHAR(self, type_):
return "NVARCHAR2(%(length)s)" % {'length' : type_.length}
-
+
def visit_text(self, type_):
return self.visit_CLOB(type_)
@@ -336,29 +336,29 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def visit_big_integer(self, type_):
return self.visit_NUMBER(type_, precision=19)
-
+
def visit_boolean(self, type_):
return self.visit_SMALLINT(type_)
-
+
def visit_RAW(self, type_):
return "RAW(%(length)s)" % {'length' : type_.length}
def visit_ROWID(self, type_):
return "ROWID"
-
+
class OracleCompiler(compiler.SQLCompiler):
"""Oracle compiler modifies the lexical structure of Select
statements to work under non-ANSI configured Oracle databases, if
the use_ansi flag is False.
"""
-
+
compound_keywords = util.update_copy(
compiler.SQLCompiler.compound_keywords,
- {
+ {
expression.CompoundSelect.EXCEPT : 'MINUS'
}
)
-
+
def __init__(self, *args, **kwargs):
super(OracleCompiler, self).__init__(*args, **kwargs)
self.__wheres = {}
@@ -366,27 +366,27 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_mod(self, binary, **kw):
return "mod(%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
+
def visit_now_func(self, fn, **kw):
return "CURRENT_TIMESTAMP"
-
+
def visit_char_length_func(self, fn, **kw):
return "LENGTH" + self.function_argspec(fn, **kw)
-
+
def visit_match_op(self, binary, **kw):
return "CONTAINS (%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
+
def get_select_hint_text(self, byfroms):
return " ".join(
"/*+ %s */" % text for table, text in byfroms.items()
)
-
+
def function_argspec(self, fn, **kw):
if len(fn.clauses) > 0:
return compiler.SQLCompiler.function_argspec(self, fn, **kw)
else:
return ""
-
+
def default_from(self):
"""Called when a ``SELECT`` statement has no froms, and no ``FROM`` clause is to be appended.
@@ -418,15 +418,15 @@ class OracleCompiler(compiler.SQLCompiler):
{'binary':visit_binary}))
else:
clauses.append(join.onclause)
-
+
for j in join.left, join.right:
if isinstance(j, expression.Join):
visit_join(j)
-
+
for f in froms:
if isinstance(f, expression.Join):
visit_join(f)
-
+
if not clauses:
return None
else:
@@ -440,11 +440,11 @@ class OracleCompiler(compiler.SQLCompiler):
def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
"""Oracle doesn't like ``FROM table AS alias``. Is the AS standard SQL??"""
-
+
if asfrom or ashint:
alias_name = isinstance(alias.name, expression._generated_label) and \
self._truncated_identifier("alias", alias.name) or alias.name
-
+
if ashint:
return alias_name
elif asfrom:
@@ -454,19 +454,19 @@ class OracleCompiler(compiler.SQLCompiler):
return self.process(alias.original, **kwargs)
def returning_clause(self, stmt, returning_cols):
-
+
def create_out_param(col, i):
bindparam = sql.outparam("ret_%d" % i, type_=col.type)
self.binds[bindparam.key] = bindparam
return self.bindparam_string(self._truncate_bindparam(bindparam))
-
+
columnlist = list(expression._select_iterables(returning_cols))
-
+
# within_columns_clause =False so that labels (foo AS bar) don't render
columns = [self.process(c, within_columns_clause=False, result_map=self.result_map) for c in columnlist]
-
+
binds = [create_out_param(c, i) for i, c in enumerate(columnlist)]
-
+
return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
def _TODO_visit_compound_select(self, select):
@@ -484,7 +484,7 @@ class OracleCompiler(compiler.SQLCompiler):
existingfroms = self.stack[-1]['from']
else:
existingfroms = None
-
+
froms = select._get_display_froms(existingfroms)
whereclause = self._get_nonansi_join_whereclause(froms)
if whereclause is not None:
@@ -513,7 +513,7 @@ class OracleCompiler(compiler.SQLCompiler):
limitselect._oracle_visit = True
limitselect._is_wrapper = True
-
+
# If needed, add the limiting clause
if select._limit is not None:
max_row = select._limit
@@ -563,7 +563,7 @@ class OracleDDLCompiler(compiler.DDLCompiler):
text = ""
if constraint.ondelete is not None:
text += " ON DELETE %s" % constraint.ondelete
-
+
# oracle has no ON UPDATE CASCADE -
# its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html
if constraint.onupdate is not None:
@@ -571,11 +571,11 @@ class OracleDDLCompiler(compiler.DDLCompiler):
"Oracle does not contain native UPDATE CASCADE "
"functionality - onupdates will not be rendered for foreign keys. "
"Consider using deferrable=True, initially='deferred' or triggers.")
-
+
return text
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
-
+
reserved_words = set([x.lower() for x in RESERVED_WORDS])
illegal_initial_characters = set(xrange(0, 10)).union(["_", "$"])
@@ -586,18 +586,18 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer):
or value[0] in self.illegal_initial_characters
or not self.legal_characters.match(unicode(value))
)
-
+
def format_savepoint(self, savepoint):
name = re.sub(r'^_+', '', savepoint.ident)
return super(OracleIdentifierPreparer, self).format_savepoint(savepoint, name)
-
-
+
+
class OracleExecutionContext(default.DefaultExecutionContext):
def fire_sequence(self, seq):
return int(self._execute_scalar("SELECT " +
self.dialect.identifier_preparer.format_sequence(seq) +
".nextval FROM DUAL"))
-
+
class OracleDialect(default.DefaultDialect):
name = 'oracle'
supports_alter = True
@@ -610,21 +610,21 @@ class OracleDialect(default.DefaultDialect):
supports_sequences = True
sequences_optional = False
postfetch_lastrowid = False
-
+
default_paramstyle = 'named'
colspecs = colspecs
ischema_names = ischema_names
requires_name_normalize = True
-
+
supports_default_values = False
supports_empty_insert = False
-
+
statement_compiler = OracleCompiler
ddl_compiler = OracleDDLCompiler
type_compiler = OracleTypeCompiler
preparer = OracleIdentifierPreparer
execution_ctx_cls = OracleExecutionContext
-
+
reflection_options = ('oracle_resolve_synonyms', )
def __init__(self,
@@ -641,7 +641,7 @@ class OracleDialect(default.DefaultDialect):
'implicit_returning',
self.server_version_info > (10, )
)
-
+
if self._is_oracle_8:
self.colspecs = self.colspecs.copy()
self.colspecs.pop(sqltypes.Interval)
@@ -651,7 +651,7 @@ class OracleDialect(default.DefaultDialect):
def _is_oracle_8(self):
return self.server_version_info and \
self.server_version_info < (9, )
-
+
@property
def _supports_char_length(self):
return not self._is_oracle_8
@@ -659,7 +659,7 @@ class OracleDialect(default.DefaultDialect):
@property
def _supports_nchar(self):
return not self._is_oracle_8
-
+
def do_release_savepoint(self, connection, name):
# Oracle does not support RELEASE SAVEPOINT
pass
@@ -868,7 +868,7 @@ class OracleDialect(default.DefaultDialect):
def get_indexes(self, connection, table_name, schema=None,
resolve_synonyms=False, dblink='', **kw):
-
+
info_cache = kw.get('info_cache')
(table_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, table_name, schema,
@@ -883,7 +883,7 @@ class OracleDialect(default.DefaultDialect):
a.index_name = b.index_name
AND a.table_owner = b.table_owner
AND a.table_name = b.table_name
-
+
AND a.table_name = :table_name
AND a.table_owner = :schema
ORDER BY a.index_name, a.column_position""" % {'dblink': dblink})
@@ -896,7 +896,7 @@ class OracleDialect(default.DefaultDialect):
dblink=dblink,
info_cache=kw.get('info_cache'))
uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
-
+
oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)
def upper_name_set(names):
@@ -983,7 +983,7 @@ class OracleDialect(default.DefaultDialect):
constraint_data = self._get_constraint_data(connection, table_name,
schema, dblink,
info_cache=kw.get('info_cache'))
-
+
for row in constraint_data:
#print "ROW:" , row
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
@@ -1038,7 +1038,7 @@ class OracleDialect(default.DefaultDialect):
}
fkeys = util.defaultdict(fkey_rec)
-
+
for row in constraint_data:
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
@@ -1067,12 +1067,12 @@ class OracleDialect(default.DefaultDialect):
if ref_synonym:
remote_table = self.normalize_name(ref_synonym)
remote_owner = self.normalize_name(ref_remote_owner)
-
+
rec['referred_table'] = remote_table
-
+
if requested_schema is not None or self.denormalize_name(remote_owner) != schema:
rec['referred_schema'] = remote_owner
-
+
local_cols.append(local_column)
remote_cols.append(remote_column)
@@ -1102,7 +1102,7 @@ class OracleDialect(default.DefaultDialect):
class _OuterJoinColumn(sql.ClauseElement):
__visit_name__ = 'outer_join_column'
-
+
def __init__(self, column):
self.column = column
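The NUMBER and INTERVAL types touched above are importable directly from the
dialect. A minimal usage sketch, not part of this diff (table and column names
are hypothetical)::

    from sqlalchemy import Table, Column, MetaData
    from sqlalchemy.dialects.oracle import INTERVAL, NUMBER

    metadata = MetaData()
    jobs = Table('jobs', metadata,
        # NUMBER(10) has Integer affinity, since no scale is given
        Column('id', NUMBER(10), primary_key=True),
        # renders INTERVAL DAY(2) TO SECOND(6) per visit_INTERVAL() above
        Column('duration', INTERVAL(day_precision=2, second_precision=6)),
    )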
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index f4f4912ef..04f3aab95 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -31,7 +31,7 @@ URL, or as keyword arguments to :func:`~sqlalchemy.create_engine()` are:
* *arraysize* - set the cx_oracle.arraysize value on cursors, in SQLAlchemy
it defaults to 50. See the section on "LOB Objects" below.
-
+
* *auto_convert_lobs* - defaults to True, see the section on LOB objects.
* *auto_setinputsizes* - the cx_oracle.setinputsizes() call is issued for all bind parameters.
@@ -62,7 +62,7 @@ these to strings so that the interface of the Binary type is consistent with tha
other backends, and so that the linkage to a live cursor is not needed in scenarios
like result.fetchmany() and result.fetchall(). This means that by default, LOB
objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
-cursor is broken.
+cursor is broken.
To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_engine()`.
@@ -144,7 +144,7 @@ class _OracleNumeric(sqltypes.Numeric):
# regardless of the scale given for the originating type.
# So we still need an old school isinstance() handler
# here for decimals.
-
+
if dialect.supports_native_decimal:
if self.asdecimal:
if self.scale is None:
@@ -190,7 +190,7 @@ class _LOBMixin(object):
if not dialect.auto_convert_lobs:
# return the cx_oracle.LOB directly.
return None
-
+
def process(value):
if value is not None:
return value.read()
@@ -213,11 +213,11 @@ class _NativeUnicodeMixin(object):
else:
return super(_NativeUnicodeMixin, self).bind_processor(dialect)
# end Py2K
-
+
# we apply a connection output handler that returns
# unicode in all cases, so the "native_unicode" flag
# will be set for the default String.result_processor.
-
+
class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR):
def get_dbapi_type(self, dbapi):
return dbapi.FIXED_CHAR
@@ -225,7 +225,7 @@ class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR):
class _OracleNVarChar(_NativeUnicodeMixin, sqltypes.NVARCHAR):
def get_dbapi_type(self, dbapi):
return getattr(dbapi, 'UNICODE', dbapi.STRING)
-
+
class _OracleText(_LOBMixin, sqltypes.Text):
def get_dbapi_type(self, dbapi):
return dbapi.CLOB
@@ -258,7 +258,7 @@ class _OracleInteger(sqltypes.Integer):
val = int(val)
return val
return to_int
-
+
class _OracleBinary(_LOBMixin, sqltypes.LargeBinary):
def get_dbapi_type(self, dbapi):
return dbapi.BLOB
@@ -269,14 +269,14 @@ class _OracleBinary(_LOBMixin, sqltypes.LargeBinary):
class _OracleInterval(oracle.INTERVAL):
def get_dbapi_type(self, dbapi):
return dbapi.INTERVAL
-
+
class _OracleRaw(oracle.RAW):
pass
class _OracleRowid(oracle.ROWID):
def get_dbapi_type(self, dbapi):
return dbapi.ROWID
-
+
class OracleCompiler_cx_oracle(OracleCompiler):
def bindparam_string(self, name):
if self.preparer._bindparam_requires_quotes(name):
@@ -286,9 +286,9 @@ class OracleCompiler_cx_oracle(OracleCompiler):
else:
return OracleCompiler.bindparam_string(self, name)
-
+
class OracleExecutionContext_cx_oracle(OracleExecutionContext):
-
+
def pre_exec(self):
quoted_bind_names = \
getattr(self.compiled, '_quoted_bind_names', None)
@@ -331,7 +331,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
self.out_parameters[name] = self.cursor.var(dbtype)
self.parameters[0][quoted_bind_names.get(name, name)] = \
self.out_parameters[name]
-
+
def create_cursor(self):
c = self._dbapi_connection.cursor()
if self.dialect.arraysize:
@@ -353,15 +353,15 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
type_code = column[1]
if type_code in self.dialect._cx_oracle_binary_types:
result = base.BufferedColumnResultProxy(self)
-
+
if result is None:
result = base.ResultProxy(self)
-
+
if hasattr(self, 'out_parameters'):
if self.compiled_parameters is not None and \
len(self.compiled_parameters) == 1:
result.out_parameters = out_parameters = {}
-
+
for bind, name in self.compiled.bind_names.items():
if name in self.out_parameters:
type = bind.type
@@ -385,16 +385,16 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext):
class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle):
"""Support WITH_UNICODE in Python 2.xx.
-
+
WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
behavior under Python 2.x. This mode in some cases disallows
and in other cases silently passes corrupted data when
non-Python-unicode strings (a.k.a. plain old Python strings)
are passed as arguments to connect(), the statement sent to execute(),
- or any of the bind parameter keys or values sent to execute().
+ or any of the bind parameter keys or values sent to execute().
This optional context therefore ensures that all statements are
passed as Python unicode objects.
-
+
"""
def __init__(self, *arg, **kw):
OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw)
@@ -403,17 +403,17 @@ class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_or
def _execute_scalar(self, stmt):
return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
_execute_scalar(unicode(stmt))
-
+
class ReturningResultProxy(base.FullyBufferedResultProxy):
"""Result proxy which stuffs the _returning clause + outparams into the fetch."""
-
+
def __init__(self, context, returning_params):
self._returning_params = returning_params
super(ReturningResultProxy, self).__init__(context)
-
+
def _cursor_description(self):
returning = self.context.compiled.returning
-
+
ret = []
for c in returning:
if hasattr(c, 'name'):
@@ -421,7 +421,7 @@ class ReturningResultProxy(base.FullyBufferedResultProxy):
else:
ret.append((c.anon_label, c.type))
return ret
-
+
def _buffer_rows(self):
return [tuple(self._returning_params["ret_%d" % i]
for i, c in enumerate(self._returning_params))]
@@ -431,7 +431,7 @@ class OracleDialect_cx_oracle(OracleDialect):
statement_compiler = OracleCompiler_cx_oracle
driver = "cx_oracle"
-
+
colspecs = colspecs = {
sqltypes.Numeric: _OracleNumeric,
sqltypes.Date : _OracleDate, # generic type, assume datetime.date is desired
@@ -452,9 +452,9 @@ class OracleDialect_cx_oracle(OracleDialect):
oracle.ROWID: _OracleRowid,
}
-
+
execute_sequence_format = list
-
+
def __init__(self,
auto_setinputsizes=True,
auto_convert_lobs=True,
@@ -468,12 +468,12 @@ class OracleDialect_cx_oracle(OracleDialect):
self.supports_timestamp = self.dbapi is None or hasattr(self.dbapi, 'TIMESTAMP' )
self.auto_setinputsizes = auto_setinputsizes
self.auto_convert_lobs = auto_convert_lobs
-
+
if hasattr(self.dbapi, 'version'):
self.cx_oracle_ver = tuple([int(x) for x in self.dbapi.version.split('.')])
- else:
+ else:
self.cx_oracle_ver = (0, 0, 0)
-
+
def types(*names):
return set([
getattr(self.dbapi, name, None) for name in names
@@ -536,28 +536,28 @@ class OracleDialect_cx_oracle(OracleDialect):
if self._is_oracle_8:
self.supports_unicode_binds = False
self._detect_decimal_char(connection)
-
+
def _detect_decimal_char(self, connection):
"""detect if the decimal separator character is not '.', as
is the case with European locale settings for NLS_LANG.
-
+
cx_oracle itself uses similar logic when it formats Python
Decimal objects to strings on the bind side (as of 5.0.3),
as Oracle sends/receives string numerics only in the
current locale.
-
+
"""
if self.cx_oracle_ver < (5,):
# no output type handlers before version 5
return
-
+
cx_Oracle = self.dbapi
conn = connection.connection
-
+
# override the output_type_handler that's
# on the cx_oracle connection with a plain
# one on the cursor
-
+
def output_type_handler(cursor, name, defaultType,
size, precision, scale):
return cursor.var(
@@ -576,20 +576,20 @@ class OracleDialect_cx_oracle(OracleDialect):
lambda value: _detect_decimal(value.replace(char, '.'))
self._to_decimal = \
lambda value: decimal.Decimal(value.replace(char, '.'))
-
+
def _detect_decimal(self, value):
if "." in value:
return decimal.Decimal(value)
else:
return int(value)
-
+
_to_decimal = decimal.Decimal
-
+
def on_connect(self):
if self.cx_oracle_ver < (5,):
# no output type handlers before version 5
return
-
+
cx_Oracle = self.dbapi
def output_type_handler(cursor, name, defaultType,
size, precision, scale):
@@ -616,12 +616,12 @@ class OracleDialect_cx_oracle(OracleDialect):
# allow all strings to come back natively as Unicode
elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
return cursor.var(unicode, size, cursor.arraysize)
-
+
def on_connect(conn):
conn.outputtypehandler = output_type_handler
-
+
return on_connect
-
+
def create_connect_args(self, url):
dialect_opts = dict(url.query)
for opt in ('use_ansi', 'auto_setinputsizes', 'auto_convert_lobs',
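The arraysize and auto_convert_lobs options documented at the top of this
module are accepted as plain :func:`create_engine` keyword arguments; a
hedged sketch (the DSN is hypothetical)::

    from sqlalchemy import create_engine

    # keep cx_oracle LOB objects unconverted and raise the fetch buffer
    e = create_engine('oracle+cx_oracle://scott:tiger@dsn',
                      arraysize=500, auto_convert_lobs=False)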
diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py
index fdf818a99..48d1a8c3b 100644
--- a/lib/sqlalchemy/dialects/postgres.py
+++ b/lib/sqlalchemy/dialects/postgres.py
@@ -11,6 +11,6 @@ warn_deprecated(
"The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'. "
"The new URL format is postgresql[+driver]://<user>:<pass>@<host>/<dbname>"
)
-
+
from sqlalchemy.dialects.postgresql import *
from sqlalchemy.dialects.postgresql import base
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 009f8fd94..31f699d2b 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -4,7 +4,7 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Support for the PostgreSQL database.
+"""Support for the PostgreSQL database.
For information on connecting using specific drivers, see the documentation
section regarding that driver.
@@ -64,7 +64,7 @@ use the :meth:`._UpdateBase.returning` method on a per-statement basis::
result = table.insert().returning(table.c.col1, table.c.col2).\\
values(name='foo')
print result.fetchall()
-
+
# UPDATE..RETURNING
result = table.update().returning(table.c.col1, table.c.col2).\\
where(table.c.name=='foo').values(name='bar')
@@ -113,7 +113,7 @@ class BYTEA(sqltypes.LargeBinary):
class DOUBLE_PRECISION(sqltypes.Float):
__visit_name__ = 'DOUBLE_PRECISION'
-
+
class INET(sqltypes.TypeEngine):
__visit_name__ = "INET"
PGInet = INET
@@ -131,7 +131,7 @@ class TIMESTAMP(sqltypes.TIMESTAMP):
super(TIMESTAMP, self).__init__(timezone=timezone)
self.precision = precision
-
+
class TIME(sqltypes.TIME):
def __init__(self, timezone=False, precision=None):
super(TIME, self).__init__(timezone=timezone)
@@ -139,15 +139,15 @@ class TIME(sqltypes.TIME):
class INTERVAL(sqltypes.TypeEngine):
"""Postgresql INTERVAL type.
-
+
The INTERVAL type may not be supported on all DBAPIs.
It is known to work on psycopg2 and not pg8000 or zxjdbc.
-
+
"""
__visit_name__ = 'INTERVAL'
def __init__(self, precision=None):
self.precision = precision
-
+
@classmethod
def _adapt_from_generic_interval(cls, interval):
return INTERVAL(precision=interval.second_precision)
@@ -155,44 +155,44 @@ class INTERVAL(sqltypes.TypeEngine):
@property
def _type_affinity(self):
return sqltypes.Interval
-
+
PGInterval = INTERVAL
class BIT(sqltypes.TypeEngine):
__visit_name__ = 'BIT'
def __init__(self, length=1):
self.length = length
-
+
PGBit = BIT
class UUID(sqltypes.TypeEngine):
"""Postgresql UUID type.
-
+
Represents the UUID column type, interpreting
data either as natively returned by the DBAPI
or as Python uuid objects.
The UUID type may not be supported on all DBAPIs.
It is known to work on psycopg2 and not pg8000.
-
+
"""
__visit_name__ = 'UUID'
-
+
def __init__(self, as_uuid=False):
"""Construct a UUID type.
-
-
+
+
:param as_uuid=False: if True, values will be interpreted
as Python uuid objects, converting to/from string via the
DBAPI.
-
+
"""
if as_uuid and _python_UUID is None:
raise NotImplementedError(
"This version of Python does not support the native UUID type."
)
self.as_uuid = as_uuid
-
+
def bind_processor(self, dialect):
if self.as_uuid:
def process(value):
@@ -202,7 +202,7 @@ class UUID(sqltypes.TypeEngine):
return process
else:
return None
-
+
def result_processor(self, dialect, coltype):
if self.as_uuid:
def process(value):
@@ -212,21 +212,21 @@ class UUID(sqltypes.TypeEngine):
return process
else:
return None
-
+
PGUuid = UUID
class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"""Postgresql ARRAY type.
-
+
Represents values as Python lists.
The ARRAY type may not be supported on all DBAPIs.
It is known to work on psycopg2 and not pg8000.
-
-
+
+
"""
__visit_name__ = 'ARRAY'
-
+
def __init__(self, item_type, mutable=False, as_tuple=False):
"""Construct an ARRAY.
@@ -248,18 +248,18 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
notes for :class:`.MutableType` regarding ORM
performance implications (default changed from ``True`` in
0.7.0).
-
+
.. note:: This functionality is now superseded by the
``sqlalchemy.ext.mutable`` extension described in
:ref:`mutable_toplevel`.
-
+
:param as_tuple=False: Specify whether return results
should be converted to tuples from lists. DBAPIs such
as psycopg2 return lists by default. When tuples are
returned, the results are hashable. This flag can only
be set to ``True`` when ``mutable`` is set to
``False``. (new in 0.6.5)
-
+
"""
if isinstance(item_type, ARRAY):
raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
@@ -273,7 +273,7 @@ class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
"mutable must be set to False if as_tuple is True."
)
self.as_tuple = as_tuple
-
+
def copy_value(self, value):
if value is None:
return None
@@ -343,7 +343,7 @@ class ENUM(sqltypes.Enum):
def create(self, bind=None, checkfirst=True):
if not bind.dialect.supports_native_enum:
return
-
+
if not checkfirst or \
not bind.dialect.has_type(bind, self.name, schema=self.schema):
bind.execute(CreateEnumType(self))
@@ -355,7 +355,7 @@ class ENUM(sqltypes.Enum):
if not checkfirst or \
bind.dialect.has_type(bind, self.name, schema=self.schema):
bind.execute(DropEnumType(self))
-
+
def _on_table_create(self, event, target, bind, **kw):
self.create(bind=bind, checkfirst=True)
@@ -406,7 +406,7 @@ ischema_names = {
class PGCompiler(compiler.SQLCompiler):
-
+
def visit_match_op(self, binary, **kw):
return "%s @@ to_tsquery(%s)" % (
self.process(binary.left),
@@ -472,7 +472,7 @@ class PGCompiler(compiler.SQLCompiler):
return super(PGCompiler, self).for_update_clause(select)
def returning_clause(self, stmt, returning_cols):
-
+
columns = [
self.process(
self.label_select_column(None, c, asfrom=False),
@@ -480,7 +480,7 @@ class PGCompiler(compiler.SQLCompiler):
result_map=self.result_map)
for c in expression._select_iterables(returning_cols)
]
-
+
return 'RETURNING ' + ', '.join(columns)
def visit_extract(self, extract, **kwargs):
@@ -489,7 +489,7 @@ class PGCompiler(compiler.SQLCompiler):
affinity = extract.expr.type._type_affinity
else:
affinity = None
-
+
casts = {
sqltypes.Date:'date',
sqltypes.DateTime:'timestamp',
@@ -530,7 +530,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
def visit_create_enum_type(self, create):
type_ = create.element
-
+
return "CREATE TYPE %s AS ENUM (%s)" % (
self.preparer.format_type(type_),
",".join("'%s'" % e for e in type_.enums)
@@ -542,7 +542,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
return "DROP TYPE %s" % (
self.preparer.format_type(type_)
)
-
+
def visit_create_index(self, create):
preparer = self.preparer
index = create.element
@@ -555,7 +555,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table),
', '.join([preparer.format_column(c)
for c in index.columns]))
-
+
if "postgres_where" in index.kwargs:
whereclause = index.kwargs['postgres_where']
util.warn_deprecated(
@@ -565,7 +565,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
whereclause = index.kwargs['postgresql_where']
else:
whereclause = None
-
+
if whereclause is not None:
whereclause = sql_util.expression_as_ddl(whereclause)
where_compiled = self.sql_compiler.process(whereclause)
@@ -588,25 +588,25 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
return "FLOAT"
else:
return "FLOAT(%(precision)s)" % {'precision': type_.precision}
-
+
def visit_DOUBLE_PRECISION(self, type_):
return "DOUBLE PRECISION"
-
+
def visit_BIGINT(self, type_):
return "BIGINT"
def visit_datetime(self, type_):
return self.visit_TIMESTAMP(type_)
-
+
def visit_enum(self, type_):
if not type_.native_enum or not self.dialect.supports_native_enum:
return super(PGTypeCompiler, self).visit_enum(type_)
else:
return self.visit_ENUM(type_)
-
+
def visit_ENUM(self, type_):
return self.dialect.identifier_preparer.format_type(type_)
-
+
def visit_TIMESTAMP(self, type_):
return "TIMESTAMP%s %s" % (
getattr(type_, 'precision', None) and "(%d)" %
@@ -635,7 +635,7 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
return self.visit_BYTEA(type_)
-
+
def visit_BYTEA(self, type_):
return "BYTEA"
@@ -656,12 +656,12 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
def format_type(self, type_, use_schema=True):
if not type_.name:
raise exc.ArgumentError("Postgresql ENUM type requires a name.")
-
+
name = self.quote(type_.name, type_.quote)
if not self.omit_schema and use_schema and type_.schema is not None:
name = self.quote_schema(type_.schema, type_.quote) + "." + name
return name
-
+
class PGInspector(reflection.Inspector):
def __init__(self, conn):
@@ -716,27 +716,27 @@ class PGExecutionContext(default.DefaultExecutionContext):
return self._execute_scalar(exc)
return super(PGExecutionContext, self).get_insert_default(column)
-
+
class PGDialect(default.DefaultDialect):
name = 'postgresql'
supports_alter = True
max_identifier_length = 63
supports_sane_rowcount = True
-
+
supports_native_enum = True
supports_native_boolean = True
-
+
supports_sequences = True
sequences_optional = True
preexecute_autoincrement_sequences = True
postfetch_lastrowid = False
-
+
supports_default_values = True
supports_empty_insert = False
default_paramstyle = 'pyformat'
ischema_names = ischema_names
colspecs = colspecs
-
+
statement_compiler = PGCompiler
ddl_compiler = PGDDLCompiler
type_compiler = PGTypeCompiler
@@ -776,7 +776,7 @@ class PGDialect(default.DefaultDialect):
return connect
else:
return None
-
+
def do_begin_twophase(self, connection, xid):
self.do_begin(connection.connection)
@@ -1056,24 +1056,24 @@ class PGDialect(default.DefaultDialect):
rows = c.fetchall()
domains = self._load_domains(connection)
enums = self._load_enums(connection)
-
+
# format columns
columns = []
for name, format_type, default, notnull, attnum, table_oid in rows:
## strip (5) from character varying(5), timestamp(5)
# with time zone, etc
attype = re.sub(r'\([\d,]+\)', '', format_type)
-
+
# strip '[]' from integer[], etc.
attype = re.sub(r'\[\]', '', attype)
-
+
nullable = not notnull
is_array = format_type.endswith('[]')
charlen = re.search('\(([\d,]+)\)', format_type)
if charlen:
charlen = charlen.group(1)
kwargs = {}
-
+
if attype == 'numeric':
if charlen:
prec, scale = charlen.split(',')
@@ -1105,7 +1105,7 @@ class PGDialect(default.DefaultDialect):
args = (int(charlen),)
else:
args = ()
-
+
while True:
if attype in self.ischema_names:
coltype = self.ischema_names[attype]
@@ -1132,7 +1132,7 @@ class PGDialect(default.DefaultDialect):
else:
coltype = None
break
-
+
if coltype:
coltype = coltype(*args, **kwargs)
if is_array:
@@ -1183,7 +1183,7 @@ class PGDialect(default.DefaultDialect):
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
cols = self.get_primary_keys(connection, table_name,
schema=schema, **kw)
-
+
table_oid = self.get_table_oid(connection, table_name, schema,
info_cache=kw.get('info_cache'))
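The UUID and ARRAY types whose docstrings are adjusted above can be exercised
as follows; a sketch under the 0.7 defaults described in those docstrings
(table and column names are hypothetical)::

    from sqlalchemy import Table, Column, Integer, MetaData
    from sqlalchemy.dialects.postgresql import ARRAY, UUID

    metadata = MetaData()
    items = Table('items', metadata,
        Column('id', UUID(as_uuid=True), primary_key=True),
        # as_tuple requires mutable=False, the 0.7 default noted above
        Column('tags', ARRAY(Integer, as_tuple=True)),
    )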
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 2187092e5..d3c2f1d50 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -76,27 +76,27 @@ class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace('%', '%%')
-
+
class PGDialect_pg8000(PGDialect):
driver = 'pg8000'
supports_unicode_statements = True
-
+
supports_unicode_binds = True
-
+
default_paramstyle = 'format'
supports_sane_multi_rowcount = False
execution_ctx_cls = PGExecutionContext_pg8000
statement_compiler = PGCompiler_pg8000
preparer = PGIdentifierPreparer_pg8000
-
+
colspecs = util.update_copy(
PGDialect.colspecs,
{
sqltypes.Numeric : _PGNumeric,
}
)
-
+
@classmethod
def dbapi(cls):
return __import__('pg8000').dbapi
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 75cf53fda..411bd42bd 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -157,7 +157,7 @@ SERVER_SIDE_CURSOR_RE = re.compile(
class PGExecutionContext_psycopg2(PGExecutionContext):
def create_cursor(self):
# TODO: coverage for server side cursors + select.for_update()
-
+
if self.dialect.server_side_cursors:
is_server_side = \
self.execution_options.get('stream_results', True) and (
@@ -185,7 +185,7 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
# TODO: ouch
if logger.isEnabledFor(logging.INFO):
self._log_notices(self.cursor)
-
+
if self.__is_server_side:
return base.BufferedRowResultProxy(self)
else:
@@ -203,7 +203,7 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
class PGCompiler_psycopg2(PGCompiler):
def visit_mod(self, binary, **kw):
return self.process(binary.left) + " %% " + self.process(binary.right)
-
+
def post_process_text(self, text):
return text.replace('%', '%%')
@@ -237,12 +237,12 @@ class PGDialect_psycopg2(PGDialect):
self.server_side_cursors = server_side_cursors
self.use_native_unicode = use_native_unicode
self.supports_unicode_binds = use_native_unicode
-
+
@classmethod
def dbapi(cls):
psycopg = __import__('psycopg2')
return psycopg
-
+
def on_connect(self):
if self.isolation_level is not None:
extensions = __import__('psycopg2.extensions').extensions
@@ -251,7 +251,7 @@ class PGDialect_psycopg2(PGDialect):
'READ_UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
'REPEATABLE_READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ,
'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE
-
+
}
def base_on_connect(conn):
try:
@@ -262,7 +262,7 @@ class PGDialect_psycopg2(PGDialect):
self.isolation_level)
else:
base_on_connect = None
-
+
if self.dbapi and self.use_native_unicode:
extensions = __import__('psycopg2.extensions').extensions
def connect(conn):
@@ -292,4 +292,4 @@ class PGDialect_psycopg2(PGDialect):
return False
dialect = PGDialect_psycopg2
-
+
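The isolation-level mapping in on_connect() above suggests the dialect accepts
an isolation_level argument at engine creation time; a hedged sketch, assuming
that keyword is passed through create_engine() (the URL is hypothetical)::

    from sqlalchemy import create_engine

    e = create_engine('postgresql+psycopg2://scott:tiger@localhost/test',
                      isolation_level='SERIALIZABLE')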
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 6e050304d..c52668762 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -65,22 +65,22 @@ from sqlalchemy import processors
from sqlalchemy.types import BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL,\
FLOAT, INTEGER, NUMERIC, SMALLINT, TEXT, TIME,\
TIMESTAMP, VARCHAR
-
+
class _DateTimeMixin(object):
_reg = None
_storage_format = None
-
+
def __init__(self, storage_format=None, regexp=None, **kw):
super(_DateTimeMixin, self).__init__(**kw)
if regexp is not None:
self._reg = re.compile(regexp)
if storage_format is not None:
self._storage_format = storage_format
-
+
class DATETIME(_DateTimeMixin, sqltypes.DateTime):
_storage_format = "%04d-%02d-%02d %02d:%02d:%02d.%06d"
-
+
def bind_processor(self, dialect):
datetime_datetime = datetime.datetime
datetime_date = datetime.date
@@ -122,7 +122,7 @@ class DATE(_DateTimeMixin, sqltypes.Date):
raise TypeError("SQLite Date type only accepts Python "
"date objects as input.")
return process
-
+
def result_processor(self, dialect, coltype):
if self._reg:
return processors.str_to_datetime_processor_factory(
@@ -146,7 +146,7 @@ class TIME(_DateTimeMixin, sqltypes.Time):
raise TypeError("SQLite Time type only accepts Python "
"time objects as input.")
return process
-
+
def result_processor(self, dialect, coltype):
if self._reg:
return processors.str_to_datetime_processor_factory(
@@ -200,10 +200,10 @@ class SQLiteCompiler(compiler.SQLCompiler):
def visit_now_func(self, fn, **kw):
return "CURRENT_TIMESTAMP"
-
+
def visit_char_length_func(self, fn, **kw):
return "length%s" % self.function_argspec(fn)
-
+
def visit_cast(self, cast, **kwargs):
if self.dialect.supports_cast:
return super(SQLiteCompiler, self).visit_cast(cast)
@@ -252,7 +252,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
isinstance(column.type, sqltypes.Integer) and \
not column.foreign_keys:
colspec += " PRIMARY KEY AUTOINCREMENT"
-
+
return colspec
def visit_primary_key_constraint(self, constraint):
@@ -269,12 +269,12 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
return super(SQLiteDDLCompiler, self).\
visit_primary_key_constraint(constraint)
-
+
def visit_foreign_key_constraint(self, constraint):
-
+
local_table = constraint._elements.values()[0].parent.table
remote_table = list(constraint._elements.values())[0].column.table
-
+
if local_table.schema != remote_table.schema:
return None
else:
@@ -282,7 +282,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
def define_constraint_remote_table(self, constraint, table, preparer):
"""Format the remote table clause of a CREATE CONSTRAINT clause."""
-
+
return preparer.format_table(table, use_schema=False)
def visit_create_index(self, create):
@@ -347,7 +347,7 @@ class SQLiteExecutionContext(default.DefaultExecutionContext):
trunc_col = colname.split(".")[1]
rp._metadata._set_keymap_synonym(trunc_col, colname)
return rp
-
+
class SQLiteDialect(default.DefaultDialect):
name = 'sqlite'
supports_alter = False
@@ -356,7 +356,7 @@ class SQLiteDialect(default.DefaultDialect):
supports_default_values = True
supports_empty_insert = False
supports_cast = True
-
+
default_paramstyle = 'qmark'
statement_compiler = SQLiteCompiler
ddl_compiler = SQLiteDDLCompiler
@@ -366,7 +366,7 @@ class SQLiteDialect(default.DefaultDialect):
colspecs = colspecs
isolation_level = None
execution_ctx_cls = SQLiteExecutionContext
-
+
supports_cast = True
supports_default_values = True
@@ -378,7 +378,7 @@ class SQLiteDialect(default.DefaultDialect):
"Valid isolation levels for sqlite are 'SERIALIZABLE' and "
"'READ UNCOMMITTED'.")
self.isolation_level = isolation_level
-
+
# this flag used by pysqlite dialect, and perhaps others in the
# future, to indicate the driver is handling date/timestamp
# conversions (and perhaps datetime/time as well on some
@@ -391,14 +391,14 @@ class SQLiteDialect(default.DefaultDialect):
self.supports_cast = \
self.dbapi.sqlite_version_info >= (3, 2, 3)
-
+
def on_connect(self):
if self.isolation_level is not None:
if self.isolation_level == 'READ UNCOMMITTED':
isolation_level = 1
else:
isolation_level = 0
-
+
def connect(conn):
cursor = conn.cursor()
cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level)
@@ -439,7 +439,7 @@ class SQLiteDialect(default.DefaultDialect):
qtable = quote(table_name)
cursor = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable)))
row = cursor.fetchone()
-
+
# consume remaining rows, to work around
# http://www.sqlite.org/cvstrac/tktview?tn=1884
while not cursor.closed and cursor.fetchone() is not None:
@@ -626,7 +626,7 @@ class SQLiteDialect(default.DefaultDialect):
def _pragma_cursor(cursor):
"""work around SQLite issue whereby cursor.description is blank when PRAGMA returns no rows."""
-
+
if cursor.closed:
cursor.fetchone = lambda: None
return cursor
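The _DateTimeMixin above accepts storage_format and regexp, so the stored
representation can be customized. A sketch that swaps the date separators
while keeping all seven fields the default format expects (the format itself
is a hypothetical variation on the default shown above)::

    from sqlalchemy.dialects.sqlite import DATETIME

    dt = DATETIME(
        storage_format="%04d/%02d/%02d %02d:%02d:%02d.%06d",
        regexp=r"(\d+)/(\d+)/(\d+) (\d+):(\d+):(\d+)\.(\d+)",
    )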
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index 261ddffe2..14cfa93d9 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -36,20 +36,20 @@ The file specification for the SQLite database is taken as the "database" portio
the URL. Note that the format of a url is::
driver://user:pass@host/database
-
+
This means that the actual filename to be used starts with the characters to the
**right** of the third slash. So connecting to a relative filepath looks like::
# relative path
e = create_engine('sqlite:///path/to/database.db')
-
+
An absolute path, which is denoted by starting with a slash, means you need **four**
slashes::
# absolute path
e = create_engine('sqlite:////path/to/database.db')
-To use a Windows path, regular drive specifications and backslashes can be used.
+To use a Windows path, regular drive specifications and backslashes can be used.
Double backslashes are probably needed::
# absolute path on Windows
@@ -117,7 +117,7 @@ implementation suitable:
SQLite file-based connections have extremely low overhead, so pooling is not necessary.
The scheme also prevents a connection from being used again in a different thread
and works best with SQLite's coarse-grained file locking.
-
+
.. note:: The default selection of :class:`.NullPool` for SQLite file-based databases
is new in SQLAlchemy 0.7. Previous versions
select :class:`.SingletonThreadPool` by
@@ -150,7 +150,7 @@ class _SQLite_pysqliteTimeStamp(DATETIME):
return None
else:
return DATETIME.bind_processor(self, dialect)
-
+
def result_processor(self, dialect, coltype):
if dialect.native_datetime:
return None
@@ -163,7 +163,7 @@ class _SQLite_pysqliteDate(DATE):
return None
else:
return DATE.bind_processor(self, dialect)
-
+
def result_processor(self, dialect, coltype):
if dialect.native_datetime:
return None
@@ -180,12 +180,12 @@ class SQLiteDialect_pysqlite(SQLiteDialect):
sqltypes.TIMESTAMP:_SQLite_pysqliteTimeStamp,
}
)
-
+
# Py3K
#description_encoding = None
-
+
driver = 'pysqlite'
-
+
def __init__(self, **kwargs):
SQLiteDialect.__init__(self, **kwargs)
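Per the pooling note above, file-based SQLite URLs default to
:class:`.NullPool` as of 0.7; the previous behavior can be restored
explicitly. A sketch (the database path is hypothetical)::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import SingletonThreadPool

    # opt back into the pre-0.7 pool selection
    e = create_engine('sqlite:////tmp/app.db',
                      poolclass=SingletonThreadPool)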
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 0668dcc56..3c4706043 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -88,10 +88,10 @@ RESERVED_WORDS = set([
"within", "work", "writetext",
])
-
+
class _SybaseUnitypeMixin(object):
"""these types appear to return a buffer object."""
-
+
def result_processor(self, dialect, coltype):
def process(value):
if value is not None:
@@ -99,7 +99,7 @@ class _SybaseUnitypeMixin(object):
else:
return None
return process
-
+
class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
__visit_name__ = 'UNICHAR'
@@ -114,7 +114,7 @@ class TINYINT(sqltypes.Integer):
class BIT(sqltypes.TypeEngine):
__visit_name__ = 'BIT'
-
+
class MONEY(sqltypes.TypeEngine):
__visit_name__ = "MONEY"
@@ -123,7 +123,7 @@ class SMALLMONEY(sqltypes.TypeEngine):
class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
__visit_name__ = "UNIQUEIDENTIFIER"
-
+
class IMAGE(sqltypes.LargeBinary):
__visit_name__ = 'IMAGE'
@@ -131,7 +131,7 @@ class IMAGE(sqltypes.LargeBinary):
class SybaseTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
return self.visit_IMAGE(type_)
-
+
def visit_boolean(self, type_):
return self.visit_BIT(type_)
@@ -149,7 +149,7 @@ class SybaseTypeCompiler(compiler.GenericTypeCompiler):
def visit_TINYINT(self, type_):
return "TINYINT"
-
+
def visit_IMAGE(self, type_):
return "IMAGE"
@@ -158,13 +158,13 @@ class SybaseTypeCompiler(compiler.GenericTypeCompiler):
def visit_MONEY(self, type_):
return "MONEY"
-
+
def visit_SMALLMONEY(self, type_):
return "SMALLMONEY"
-
+
def visit_UNIQUEIDENTIFIER(self, type_):
return "UNIQUEIDENTIFIER"
-
+
ischema_names = {
'integer' : INTEGER,
'unsigned int' : INTEGER, # TODO: unsigned flags
@@ -194,31 +194,31 @@ ischema_names = {
class SybaseExecutionContext(default.DefaultExecutionContext):
_enable_identity_insert = False
-
+
def set_ddl_autocommit(self, connection, value):
"""Must be implemented by subclasses to accommodate DDL executions.
-
+
"connection" is the raw unwrapped DBAPI connection. "value"
is True or False. When True, the connection should be configured
such that a DDL can take place subsequently. When False,
a DDL has taken place and the connection should be resumed
into non-autocommit mode.
-
+
"""
raise NotImplementedError()
-
+
def pre_exec(self):
if self.isinsert:
tbl = self.compiled.statement.table
seq_column = tbl._autoincrement_column
insert_has_sequence = seq_column is not None
-
+
if insert_has_sequence:
self._enable_identity_insert = \
seq_column.key in self.compiled_parameters[0]
else:
self._enable_identity_insert = False
-
+
if self._enable_identity_insert:
self.cursor.execute("SET IDENTITY_INSERT %s ON" %
self.dialect.identifier_preparer.format_table(tbl))
@@ -238,15 +238,15 @@ class SybaseExecutionContext(default.DefaultExecutionContext):
self.set_ddl_autocommit(
self.root_connection.connection.connection,
True)
-
+
def post_exec(self):
if self.isddl:
self.set_ddl_autocommit(self.root_connection, False)
-
+
if self._enable_identity_insert:
self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
+ "SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer.
format_table(self.compiled.statement.table)
)
@@ -395,7 +395,7 @@ class SybaseDialect(default.DefaultDialect):
self.max_identifier_length = 30
else:
self.max_identifier_length = 255
-
+
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
if schema is None:
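set_ddl_autocommit() above must be supplied by each DBAPI-specific subclass;
a minimal sketch, assuming a hypothetical DBAPI connection with an autocommit
attribute (real drivers differ)::

    from sqlalchemy.dialects.sybase.base import SybaseExecutionContext

    class SybaseExecutionContext_mydriver(SybaseExecutionContext):
        def set_ddl_autocommit(self, connection, value):
            # 'connection' is the raw, unwrapped DBAPI connection
            connection.autocommit = value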
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 025b33743..c8480cb43 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -31,7 +31,7 @@ Currently *not* supported are::
UNICHAR
UNITEXT
UNIVARCHAR
-
+
"""
from sqlalchemy.dialects.sybase.base import SybaseDialect,\
@@ -42,11 +42,11 @@ from sqlalchemy.util.compat import decimal
class _SybNumeric_pyodbc(sqltypes.Numeric):
"""Turns Decimals with adjusted() < -6 into floats.
-
+
It's not yet known how to get decimals with many
significant digits or very large adjusted() into Sybase
via pyodbc.
-
+
"""
def bind_processor(self, dialect):
diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py
index d6f5c3b6b..fed792817 100644
--- a/lib/sqlalchemy/dialects/sybase/pysybase.py
+++ b/lib/sqlalchemy/dialects/sybase/pysybase.py
@@ -54,7 +54,7 @@ class SybaseExecutionContext_pysybase(SybaseExecutionContext):
class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
def bindparam_string(self, name):
return "@" + name
-
+
class SybaseDialect_pysybase(SybaseDialect):
driver = 'pysybase'
execution_ctx_cls = SybaseExecutionContext_pysybase
diff --git a/lib/sqlalchemy/dialects/type_migration_guidelines.txt b/lib/sqlalchemy/dialects/type_migration_guidelines.txt
index c26b65e08..1ca15f7fb 100644
--- a/lib/sqlalchemy/dialects/type_migration_guidelines.txt
+++ b/lib/sqlalchemy/dialects/type_migration_guidelines.txt
@@ -5,20 +5,20 @@ Rules for Migrating TypeEngine classes to 0.6
a. Specifying behavior which needs to occur for bind parameters
or result row columns.
-
+
b. Specifying types that are entirely specific to the database
in use and have no analogue in the sqlalchemy.types package.
-
+
c. Specifying types where there is an analogue in sqlalchemy.types,
but the database in use takes vendor-specific flags for those
types.
d. If a TypeEngine class doesn't provide any of this, it should be
*removed* from the dialect.
-
+
2. the TypeEngine classes are *no longer* used for generating DDL. Dialects
now have a TypeCompiler subclass which uses the same visit_XXX model as
-other compilers.
+other compilers.
3. the "ischema_names" and "colspecs" dictionaries are now required members on
the Dialect class.
@@ -29,7 +29,7 @@ the current mixed case naming can remain, i.e. _PGNumeric for Numeric - in this
end users would never need to use _PGNumeric directly. However, if a dialect-specific
type is specifying a type *or* arguments that are not present generically, it should
match the real name of the type on that backend, in uppercase. E.g. postgresql.INET,
-mysql.ENUM, postgresql.ARRAY.
+mysql.ENUM, postgresql.ARRAY.
Or follow this handy flowchart:
@@ -61,8 +61,8 @@ Or follow this handy flowchart:
|
v
the type should
- subclass the
- UPPERCASE
+ subclass the
+ UPPERCASE
type in types.py
(i.e. class BLOB(types.BLOB))
@@ -86,14 +86,14 @@ MySQL names it SET in the dialect's base.py, and it subclasses types.String, sin
it ultimately deals with strings.
Example 5. Postgresql has a DATETIME type. The DBAPIs handle dates correctly,
-and no special arguments are used in PG's DDL beyond what types.py provides.
+and no special arguments are used in PG's DDL beyond what types.py provides.
Postgresql dialect therefore imports types.DATETIME into its base.py.
Ideally one should be able to specify a schema using names imported completely from a
dialect, all matching the real name on that backend:
from sqlalchemy.dialects.postgresql import base as pg
-
+
t = Table('mytable', metadata,
Column('id', pg.INTEGER, primary_key=True),
Column('name', pg.VARCHAR(300)),
@@ -110,7 +110,7 @@ indicate a special type only available in this database, it must be *removed* fr
module and from this dictionary.
6. "ischema_names" indicates string descriptions of types as returned from the database
-linked to TypeEngine classes.
+linked to TypeEngine classes.
a. The string name should be matched to the most specific type possible within
sqlalchemy.types, unless there is no matching type within sqlalchemy.types in which
@@ -118,28 +118,28 @@ linked to TypeEngine classes.
own subclass of that type with special bind/result behavior - reflect to the types.py
UPPERCASE type as much as possible. With very few exceptions, all types
should reflect to an UPPERCASE type.
-
+
b. If the dialect contains a matching dialect-specific type that takes extra arguments
which the generic one does not, then point to the dialect-specific type. E.g.
mssql.VARCHAR takes a "collation" parameter which should be preserved.
-
+
5. DDL, or what was formerly issued by "get_col_spec()", is now handled exclusively by
a subclass of compiler.GenericTypeCompiler.
a. your TypeCompiler class will receive generic and uppercase types from
sqlalchemy.types. Do not assume the presence of dialect-specific attributes on
these types.
-
+
b. the visit_UPPERCASE methods on GenericTypeCompiler should *not* be overridden with
methods that produce a different DDL name. Uppercase types don't do any kind of
"guessing" - if visit_TIMESTAMP is called, the DDL should render as TIMESTAMP in
all cases, regardless of whether or not that type is legal on the backend database.
-
+
c. the visit_UPPERCASE methods *should* be overridden with methods that add additional
- arguments and flags to those types.
-
+ arguments and flags to those types.
+
d. the visit_lowercase methods are overridden to provide an interpretation of a generic
type. E.g. visit_large_binary() might be overridden to say "return self.visit_BIT(type_)".
-
+
e. visit_lowercase methods should *never* render strings directly - it should always
be via calling a visit_UPPERCASE() method.
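A sketch of rules 5(b) and 5(c) above: a visit_UPPERCASE method may add
vendor-specific flags, but must keep the rendered DDL name; the collation
flag here is a hypothetical example::

    from sqlalchemy.sql import compiler

    class MyTypeCompiler(compiler.GenericTypeCompiler):
        def visit_VARCHAR(self, type_):
            # same DDL name in all cases; only a vendor flag is appended
            spec = "VARCHAR(%d)" % type_.length
            if getattr(type_, 'collation', None):
                spec += " COLLATE %s" % type_.collation
            return spec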
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 9dd7f0653..aebf35436 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -51,7 +51,7 @@ url.py
"""
# not sure what this was used for
-#import sqlalchemy.databases
+#import sqlalchemy.databases
from sqlalchemy.engine.base import (
BufferedColumnResultProxy,
@@ -174,17 +174,17 @@ def create_engine(*args, **kwargs):
:param execution_options: Dictionary execution options which will
be applied to all connections. See
:meth:`~sqlalchemy.engine.base.Connection.execution_options`
-
+
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
-
+
:param listeners: A list of one or more
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
receive connection pool events.
-
+
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.engine" logger. Defaults to a hexstring of the
@@ -246,7 +246,7 @@ def create_engine(*args, **kwargs):
:param strategy='plain': selects alternate engine implementations.
Currently available is the ``threadlocal``
strategy, which is described in :ref:`threadlocal_strategy`.
-
+
"""
strategy = kwargs.pop('strategy', default_strategy)
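The parameters documented above are ordinary keyword arguments to
:func:`create_engine`; a hedged sketch combining a few of them (the URL is
hypothetical)::

    from sqlalchemy import create_engine

    e = create_engine('postgresql://scott:tiger@localhost/test',
                      label_length=30,
                      strategy='threadlocal',
                      execution_options={'autocommit': True})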
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 49355bf65..eb48c29d6 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -71,19 +71,19 @@ class Dialect(object):
a tuple containing a version number for the DB backend in use.
This value is only available for supporting dialects, and is
typically populated during the initial connection to the database.
-
+
default_schema_name
the name of the default schema. This value is only available for
supporting dialects, and is typically populated during the
initial connection to the database.
-
+
execution_ctx_cls
a :class:`ExecutionContext` class used to handle statement execution
execute_sequence_format
either the 'tuple' or 'list' type, depending on what cursor.execute()
accepts for the second argument (they vary).
-
+
preparer
a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
quote identifiers.
@@ -115,7 +115,7 @@ class Dialect(object):
True if 'implicit' primary key functions must be executed separately
in order to get their value. This is currently oriented towards
Postgresql.
-
+
implicit_returning
use RETURNING or equivalent during INSERT execution in order to load
newly generated primary keys and other column defaults in one execution,
@@ -123,7 +123,7 @@ class Dialect(object):
If an insert statement has returning() specified explicitly,
the "implicit" functionality is not used and inserted_primary_key
will not be available.
-
+
dbapi_type_map
A mapping of DB-API type objects present in this Dialect's
DB-API implementation mapped to TypeEngine implementations used
@@ -143,17 +143,17 @@ class Dialect(object):
supports_default_values
Indicates if the construct ``INSERT INTO tablename DEFAULT
VALUES`` is supported
-
+
supports_sequences
Indicates if the dialect supports CREATE SEQUENCE or similar.
-
+
sequences_optional
If True, indicates if the "optional" flag on the Sequence() construct
should signal to not generate a CREATE SEQUENCE. Applies only to
dialects that support sequences. Currently used only to allow Postgresql
SERIAL to be used on a column that specifies Sequence() for usage on
other backends.
-
+
supports_native_enum
Indicates if the dialect supports a native ENUM construct.
This will prevent types.Enum from generating a CHECK
@@ -163,7 +163,7 @@ class Dialect(object):
Indicates if the dialect supports a native boolean construct.
This will prevent types.Boolean from generating a CHECK
constraint when that type is used.
-
+
"""
def create_connect_args(self, url):
@@ -172,7 +172,7 @@ class Dialect(object):
Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
consisting of a `*args`/`**kwargs` suitable to send directly
to the dbapi's connect function.
-
+
"""
raise NotImplementedError()
@@ -187,7 +187,7 @@ class Dialect(object):
The returned result is cached *per dialect class* so can
contain no dialect-instance state.
-
+
"""
raise NotImplementedError()
@@ -198,13 +198,13 @@ class Dialect(object):
Allows dialects to configure options based on server version info or
other properties.
-
+
The connection passed here is a SQLAlchemy Connection object,
with full capabilities.
-
+
The initialize() method of the base dialect should be called via
super().
-
+
"""
pass
@@ -217,12 +217,12 @@ class Dialect(object):
properties from the database. If include_columns (a list or
set) is specified, limit the autoload to the given column
names.
-
+
The default implementation uses the
:class:`~sqlalchemy.engine.reflection.Inspector` interface to
provide the output, building upon the granular table/column/
constraint etc. methods of :class:`Dialect`.
-
+
"""
raise NotImplementedError()
@@ -356,7 +356,7 @@ class Dialect(object):
def normalize_name(self, name):
"""convert the given name to lowercase if it is detected as
case insensitive.
-
+
this method is only used if the dialect defines
requires_name_normalize=True.
@@ -366,13 +366,13 @@ class Dialect(object):
def denormalize_name(self, name):
"""convert the given name to a case insensitive identifier
for the backend if it is an all-lowercase name.
-
+
this method is only used if the dialect defines
requires_name_normalize=True.
"""
raise NotImplementedError()
-
+
def has_table(self, connection, table_name, schema=None):
"""Check the existence of a particular table in the database.
@@ -396,15 +396,15 @@ class Dialect(object):
def _get_server_version_info(self, connection):
"""Retrieve the server version info from the given connection.
-
+
This is used by the default implementation to populate the
"server_version_info" attribute and is called exactly
once upon first connect.
-
+
"""
raise NotImplementedError()
-
+
def _get_default_schema_name(self, connection):
"""Return the string name of the currently selected schema from
the given connection.
@@ -412,7 +412,7 @@ class Dialect(object):
This is used by the default implementation to populate the
"default_schema_name" attribute and is called exactly
once upon first connect.
-
+
"""
raise NotImplementedError()
@@ -512,7 +512,7 @@ class Dialect(object):
The callable accepts a single argument "conn" which is the
DBAPI connection itself. It has no return value.
-
+
This is used to set dialect-wide per-connection options such as
isolation modes, unicode modes, etc.
@@ -645,7 +645,7 @@ class ExecutionContext(object):
in some dialects; this is indicated by the
``supports_sane_rowcount`` and ``supports_sane_multi_rowcount``
dialect attributes.
-
+
"""
raise NotImplementedError()
@@ -691,13 +691,13 @@ class Compiled(object):
@property
def sql_compiler(self):
"""Return a Compiled that is capable of processing SQL expressions.
-
+
If this compiler is one, it would likely just return 'self'.
-
+
"""
-
+
raise NotImplementedError()
-
+
def process(self, obj, **kwargs):
return obj._compiler_dispatch(self, **kwargs)
@@ -705,7 +705,7 @@ class Compiled(object):
"""Return the string text of the generated SQL or DDL."""
return self.string or ''
-
+
def construct_params(self, params=None):
"""Return the bind params for this compiled object.
@@ -793,7 +793,7 @@ class Connection(Connectable):
shared among threads using properly synchronized access, it is still
possible that the underlying DBAPI connection may not support shared
access between threads. Check the DBAPI documentation for details.
-
+
The Connection object represents a single dbapi connection checked out
from the connection pool. In this state, the connection pool has no effect
upon the connection, including its expiration or timeout state. For the
@@ -803,9 +803,9 @@ class Connection(Connectable):
.. index::
single: thread safety; Connection
-
+
"""
-
+
def __init__(self, engine, connection=None, close_with_result=False,
_branch=False, _execution_options=None):
"""Construct a new Connection.
@@ -813,7 +813,7 @@ class Connection(Connectable):
The constructor here is not public and is called only by an
:class:`.Engine`. See :meth:`.Engine.connect` and
:meth:`.Engine.contextual_connect` methods.
-
+
"""
self.engine = engine
self.dialect = engine.dialect
@@ -850,21 +850,21 @@ class Connection(Connectable):
c = self.__class__.__new__(self.__class__)
c.__dict__ = self.__dict__.copy()
return c
-
+
def execution_options(self, **opt):
""" Set non-SQL options for the connection which take effect
during execution.
-
+
The method returns a copy of this :class:`Connection` which references
the same underlying DBAPI connection, but also defines the given
execution options which will take effect for a call to
:meth:`execute`. As the new :class:`Connection` references the same
underlying resource, it is probably best to ensure that the copies
would be discarded immediately, which is implicit if used as in::
-
+
result = connection.execution_options(stream_results=True).\
execute(stmt)
-
+
The options are the same as those accepted by
:meth:`sqlalchemy.sql.expression.Executable.execution_options`.
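
For instance, a usage sketch assuming an existing Connection ``conn``::

    # apply the "autocommit" option to a single textual statement
    conn.execution_options(autocommit=True).\
        execute("update mytable set x=5")
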
@@ -872,7 +872,7 @@ class Connection(Connectable):
c = self._clone()
c._execution_options = c._execution_options.union(opt)
return c
-
+
@property
def closed(self):
"""Return True if this connection is closed."""
@@ -894,7 +894,7 @@ class Connection(Connectable):
return self.__connection
except AttributeError:
return self._revalidate_connection()
-
+
def _revalidate_connection(self):
if self.__invalid:
if self.__transaction is not None:
@@ -905,13 +905,13 @@ class Connection(Connectable):
self.__invalid = False
return self.__connection
raise exc.ResourceClosedError("This Connection is closed")
-
+
@property
def _connection_is_valid(self):
# use getattr() for is_valid to support exceptions raised in
# dialect initializer, where the connection is not wrapped in
# _ConnectionFairy
-
+
return getattr(self.__connection, 'is_valid', False)
@property
@@ -960,7 +960,7 @@ class Connection(Connectable):
"""
if self.invalidated:
return
-
+
if self.closed:
raise exc.ResourceClosedError("This Connection is closed")
@@ -968,8 +968,8 @@ class Connection(Connectable):
self.__connection.invalidate(exception)
del self.__connection
self.__invalid = True
-
-
+
+
def detach(self):
"""Detach the underlying DB-API connection from its connection pool.
@@ -1140,7 +1140,7 @@ class Connection(Connectable):
self.__invalid = False
del self.__connection
self.__transaction = None
-
+
def scalar(self, object, *multiparams, **params):
"""Executes and returns the first column of the first row.
@@ -1151,9 +1151,9 @@ class Connection(Connectable):
def execute(self, object, *multiparams, **params):
"""Executes the given construct and returns a :class:`.ResultProxy`.
-
+
The construct can be one of:
-
+
* a textual SQL string
* any :class:`.ClauseElement` construct that is also
a subclass of :class:`.Executable`, such as a
@@ -1164,7 +1164,7 @@ class Connection(Connectable):
* a :class:`.DDLElement` object
* a :class:`.DefaultGenerator` object
* a :class:`.Compiled` object
-
+
"""
for c in type(object).__mro__:
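
A usage sketch covering the common cases, assuming an open ``conn`` and a
``Table`` named ``users``::

    from sqlalchemy import text

    conn.execute("insert into users (name) values ('jack')")  # plain string
    conn.execute(users.insert(), name='jack')                 # Executable
    conn.execute(text("select * from users where name=:n"), n='jack')
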
@@ -1186,7 +1186,7 @@ class Connection(Connectable):
In the case of 'raw' execution which accepts positional parameters,
it may be a list of tuples or lists.
-
+
"""
if not multiparams:
@@ -1219,28 +1219,28 @@ class Connection(Connectable):
def _execute_default(self, default, multiparams, params):
"""Execute a schema.ColumnDefault object."""
-
+
try:
try:
conn = self.__connection
except AttributeError:
conn = self._revalidate_connection()
-
+
dialect = self.dialect
ctx = dialect.execution_ctx_cls._init_default(
dialect, self, conn)
except Exception, e:
self._handle_dbapi_exception(e, None, None, None, None)
raise
-
+
ret = ctx._exec_default(default)
if self.should_close_with_result:
self.close()
return ret
-
+
def _execute_ddl(self, ddl, params, multiparams):
"""Execute a schema.DDL object."""
-
+
dialect = self.dialect
return self._execute_context(
dialect,
@@ -1252,7 +1252,7 @@ class Connection(Connectable):
def _execute_clauseelement(self, elem, multiparams, params):
"""Execute a sql.ClauseElement object."""
-
+
params = self.__distill_params(multiparams, params)
if params:
keys = params[0].keys()
@@ -1298,7 +1298,7 @@ class Connection(Connectable):
def _execute_text(self, statement, multiparams, params):
"""Execute a string SQL statement."""
-
+
dialect = self.dialect
parameters = self.__distill_params(multiparams, params)
return self._execute_context(
@@ -1316,13 +1316,13 @@ class Connection(Connectable):
statement, parameters, *args):
"""Create an :class:`.ExecutionContext` and execute, returning
a :class:`.ResultProxy`."""
-
+
try:
try:
conn = self.__connection
except AttributeError:
conn = self._revalidate_connection()
-
+
context = constructor(dialect, self, conn, *args)
except Exception, e:
self._handle_dbapi_exception(e,
@@ -1332,14 +1332,14 @@ class Connection(Connectable):
if context.compiled:
context.pre_exec()
-
+
cursor, statement, parameters = context.cursor, \
context.statement, \
context.parameters
-
+
if not context.executemany:
parameters = parameters[0]
-
+
if self._before_cursor_execute:
statement, parameters = self._before_cursor_execute(
context,
@@ -1376,13 +1376,13 @@ class Connection(Connectable):
if self._after_cursor_execute:
self._after_cursor_execute(context, cursor,
statement, parameters)
-
+
if context.compiled:
context.post_exec()
-
+
if context.isinsert and not context.executemany:
context.post_insert()
-
+
# create a resultproxy, get rowcount/implicit RETURNING
# rows, close cursor if no further results pending
result = context.get_result_proxy()
@@ -1399,25 +1399,25 @@ class Connection(Connectable):
# such as kinterbasdb, mxodbc),
result.rowcount
result.close(_autoclose_connection=False)
-
+
if self.__transaction is None and context.should_autocommit:
self._commit_impl()
-
+
if result.closed and self.should_close_with_result:
self.close()
-
+
return result
def _cursor_execute(self, cursor, statement, parameters):
"""Execute a statement + params on the given cursor.
Adds appropriate logging and exception handling.
-
+
This method is used by DefaultDialect for special-case
- executions, such as for sequences and column defaults.
+ executions, such as for sequences and column defaults.
The path of statement execution in the majority of cases
terminates at _execute_context().
-
+
"""
if self._echo:
self.engine.logger.info(statement)
@@ -1439,7 +1439,7 @@ class Connection(Connectable):
def _safe_close_cursor(self, cursor):
"""Close the given cursor, catching exceptions
and turning into log warnings.
-
+
"""
try:
cursor.close()
@@ -1452,7 +1452,7 @@ class Connection(Connectable):
if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise
-
+
def _handle_dbapi_exception(self,
e,
statement,
@@ -1499,7 +1499,7 @@ class Connection(Connectable):
connection_invalidated=is_disconnect), \
None, sys.exc_info()[2]
# end Py2K
-
+
finally:
del self._reentrant_error
@@ -1538,9 +1538,9 @@ class Connection(Connectable):
This is a shortcut for explicitly calling `begin()` and `commit()`
and optionally `rollback()` when exceptions are raised. The
given `*args` and `**kwargs` will be passed to the function.
-
+
See also transaction() on engine.
-
+
"""
trans = self.begin()
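
A usage sketch, assuming a qmark-paramstyle DBAPI such as sqlite3::

    def do_insert(conn, value):
        # receives the Connection as its first argument
        conn.execute("insert into mytable (x) values (?)", value)

    conn.transaction(do_insert, 5)
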
@@ -1564,7 +1564,7 @@ class Transaction(object):
also implements a context manager interface so that
the Python ``with`` statement can be used with the
:meth:`.Connection.begin` method.
-
+
The Transaction object is **not** threadsafe.
.. index::
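
E.g., as a sketch::

    with conn.begin():
        conn.execute("insert into mytable (x) values (?)", 10)
    # committed on success, rolled back if an exception propagates
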
@@ -1575,7 +1575,7 @@ class Transaction(object):
"""The constructor for :class:`.Transaction` is private
and is called from within the :class:`.Connection.begin`
implementation.
-
+
"""
self.connection = connection
self._parent = parent or self
@@ -1590,7 +1590,7 @@ class Transaction(object):
This is used to cancel a Transaction without affecting the scope of
an enclosing transaction.
-
+
"""
if not self._parent.is_active:
return
@@ -1599,7 +1599,7 @@ class Transaction(object):
def rollback(self):
"""Roll back this :class:`.Transaction`.
-
+
"""
if not self._parent.is_active:
return
@@ -1611,7 +1611,7 @@ class Transaction(object):
def commit(self):
"""Commit this :class:`.Transaction`."""
-
+
if not self._parent.is_active:
raise exc.InvalidRequestError("This transaction is inactive")
self._do_commit()
@@ -1679,13 +1679,13 @@ class TwoPhaseTransaction(Transaction):
def _do_commit(self):
self.connection._commit_twophase_impl(self.xid, self._is_prepared)
-
+
class Engine(Connectable, log.Identified):
"""
Connects a :class:`~sqlalchemy.pool.Pool` and
:class:`~sqlalchemy.engine.base.Dialect` together to provide a source
of database connectivity and behavior.
-
+
An :class:`Engine` object is instantiated publicly using the
:func:`~sqlalchemy.create_engine` function.
@@ -1693,7 +1693,7 @@ class Engine(Connectable, log.Identified):
_execution_options = util.frozendict()
Connection = Connection
-
+
def __init__(self, pool, dialect, url,
logging_name=None, echo=None, proxy=None,
execution_options=None
@@ -1711,17 +1711,17 @@ class Engine(Connectable, log.Identified):
if execution_options:
self.update_execution_options(**execution_options)
-
+
dispatch = event.dispatcher(events.EngineEvents)
-
+
def update_execution_options(self, **opt):
"""update the execution_options dictionary of this :class:`Engine`.
-
+
For details on execution_options, see
:meth:`Connection.execution_options` as well as
:meth:`sqlalchemy.sql.expression.Executable.execution_options`.
-
-
+
+
"""
self._execution_options = \
self._execution_options.union(opt)
@@ -1751,23 +1751,23 @@ class Engine(Connectable, log.Identified):
A new connection pool is created immediately after the old one has
been disposed. This new pool, like all SQLAlchemy connection pools,
does not make any actual connections to the database until one is
- first requested.
-
+ first requested.
+
This method has two general use cases:
-
+
* When a dropped connection is detected, it is assumed that all
connections held by the pool are potentially dropped, and
the entire pool is replaced.
-
+
* An application may want to use :meth:`dispose` within a test
suite that is creating multiple engines.
-
+
It is critical to note that :meth:`dispose` does **not** guarantee
that the application will release all open database connections - only
- those connections that are checked into the pool are closed.
+ those connections that are checked into the pool are closed.
Connections which remain checked out or have been detached from
the engine are not affected.
-
+
"""
self.pool.dispose()
self.pool = self.pool.recreate()
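
Usage is a single call, e.g. in test-suite teardown::

    engine.dispose()  # pool replaced; only checked-in connections closed
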
@@ -1804,11 +1804,11 @@ class Engine(Connectable, log.Identified):
def text(self, text, *args, **kwargs):
"""Return a :func:`~sqlalchemy.sql.expression.text` construct,
bound to this engine.
-
+
This is equivalent to::
-
+
text("SELECT * FROM table", bind=engine)
-
+
"""
return expression.text(text, bind=self, *args, **kwargs)
@@ -1832,13 +1832,13 @@ class Engine(Connectable, log.Identified):
This is a shortcut for explicitly calling `begin()` and `commit()`
and optionally `rollback()` when exceptions are raised. The
given `*args` and `**kwargs` will be passed to the function.
-
+
The connection used is that of contextual_connect().
-
+
See also the similar method on Connection itself.
-
+
"""
-
+
conn = self.contextual_connect()
try:
return conn.transaction(callable_, *args, **kwargs)
@@ -1854,10 +1854,10 @@ class Engine(Connectable, log.Identified):
def execute(self, statement, *multiparams, **params):
"""Executes the given construct and returns a :class:`.ResultProxy`.
-
+
The arguments are the same as those used by
:meth:`.Connection.execute`.
-
+
Here, a :class:`.Connection` is acquired using the
:meth:`~.Engine.contextual_connect` method, and the statement executed
with that connection. The returned :class:`.ResultProxy` is flagged
@@ -1865,7 +1865,7 @@ class Engine(Connectable, log.Identified):
underlying cursor is closed, the :class:`.Connection` created here
will also be closed, which allows its associated DBAPI connection
resource to be returned to the connection pool.
-
+
"""
connection = self.contextual_connect(close_with_result=True)
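
A connectionless-execution sketch, assuming an existing ``engine``::

    result = engine.execute("select * from mytable")
    for row in result:
        print row
    # the implicit Connection is released once rows are exhausted
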
@@ -1884,29 +1884,29 @@ class Engine(Connectable, log.Identified):
def connect(self, **kwargs):
"""Return a new :class:`.Connection` object.
-
+
The :class:`.Connection`, upon construction, will procure a DBAPI connection
from the :class:`.Pool` referenced by this :class:`.Engine`,
returning it back to the :class:`.Pool` after the :meth:`.Connection.close`
method is called.
-
+
"""
return self.Connection(self, **kwargs)
def contextual_connect(self, close_with_result=False, **kwargs):
"""Return a :class:`.Connection` object which may be part of some ongoing context.
-
+
By default, this method does the same thing as :meth:`.Engine.connect`.
Subclasses of :class:`.Engine` may override this method
to provide contextual behavior.
:param close_with_result: When True, the first :class:`.ResultProxy` created
by the :class:`.Connection` will call the :meth:`.Connection.close` method
- of that connection as soon as any pending result rows are exhausted.
+ of that connection as soon as any pending result rows are exhausted.
This is used to supply the "connectionless execution" behavior provided
by the :meth:`.Engine.execute` method.
-
+
"""
return self.Connection(self,
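
Explicit connect/close, as a sketch::

    conn = engine.connect()
    try:
        conn.execute("select 1")
    finally:
        conn.close()  # DBAPI connection returned to the pool
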
@@ -1960,25 +1960,25 @@ class Engine(Connectable, log.Identified):
def _listener_connection_cls(cls, dispatch):
"""Produce a wrapper for :class:`.Connection` which will apply event
dispatch to each method.
-
+
:class:`.Connection` does not provide event dispatch built in so that
method call overhead is avoided in the absence of any listeners.
-
+
"""
class EventListenerConnection(cls):
def execute(self, clauseelement, *multiparams, **params):
for fn in dispatch.before_execute:
clauseelement, multiparams, params = \
fn(self, clauseelement, multiparams, params)
-
+
ret = super(EventListenerConnection, self).\
execute(clauseelement, *multiparams, **params)
for fn in dispatch.after_execute:
fn(self, clauseelement, multiparams, params, ret)
-
+
return ret
-
+
def _execute_clauseelement(self, clauseelement,
multiparams=None, params=None):
return self.execute(clauseelement,
@@ -1992,7 +1992,7 @@ def _listener_connection_cls(cls, dispatch):
fn(self, cursor, statement, parameters,
context, context.executemany)
return statement, parameters
-
+
def _after_cursor_execute(self, context, cursor,
statement, parameters):
dispatch.after_cursor_execute(self, cursor,
@@ -2000,12 +2000,12 @@ def _listener_connection_cls(cls, dispatch):
parameters,
context,
context.executemany)
-
+
def _begin_impl(self):
dispatch.begin(self)
return super(EventListenerConnection, self).\
_begin_impl()
-
+
def _rollback_impl(self):
dispatch.rollback(self)
return super(EventListenerConnection, self).\
@@ -2020,17 +2020,17 @@ def _listener_connection_cls(cls, dispatch):
dispatch.savepoint(self, name)
return super(EventListenerConnection, self).\
_savepoint_impl(name=name)
-
+
def _rollback_to_savepoint_impl(self, name, context):
dispatch.rollback_savepoint(self, name, context)
return super(EventListenerConnection, self).\
_rollback_to_savepoint_impl(name, context)
-
+
def _release_savepoint_impl(self, name, context):
dispatch.release_savepoint(self, name, context)
return super(EventListenerConnection, self).\
_release_savepoint_impl(name, context)
-
+
def _begin_twophase_impl(self, xid):
dispatch.begin_twophase(self, xid)
return super(EventListenerConnection, self).\
@@ -2203,12 +2203,12 @@ try:
Sequence.register(RowProxy)
except ImportError:
pass
-
+
class ResultMetaData(object):
"""Handle cursor.description, applying additional info from an execution
context."""
-
+
def __init__(self, parent, metadata):
self._processors = processors = []
@@ -2224,7 +2224,7 @@ class ResultMetaData(object):
for i, rec in enumerate(metadata):
colname = rec[0]
coltype = rec[1]
-
+
if dialect.description_encoding:
colname = dialect._description_decoder(colname)
@@ -2239,14 +2239,14 @@ class ResultMetaData(object):
colname, None, typemap.get(coltype, types.NULLTYPE)
processor = type_._cached_result_processor(dialect, coltype)
-
+
processors.append(processor)
rec = (processor, i)
# indexes as keys. This is only needed for the Python version of
# RowProxy (the C version uses a faster path for integer indexes).
keymap[i] = rec
-
+
# Column names as keys
if keymap.setdefault(name.lower(), rec) is not rec:
# We do not raise an exception directly because several
@@ -2257,7 +2257,7 @@ class ResultMetaData(object):
if dialect.requires_name_normalize:
colname = dialect.normalize_name(colname)
-
+
self.keys.append(colname)
if obj:
for o in obj:
@@ -2266,19 +2266,19 @@ class ResultMetaData(object):
if parent._echo:
context.engine.logger.debug(
"Col %r", tuple(x[0] for x in metadata))
-
+
def _set_keymap_synonym(self, name, origname):
"""Set a synonym for the given name.
-
+
Some dialects (SQLite at the moment) may use this to
adjust the column names that are significant within a
row.
-
+
"""
rec = (processor, i) = self._keymap[origname.lower()]
if self._keymap.setdefault(name, rec) is not rec:
self._keymap[name] = (processor, None)
-
+
def _key_fallback(self, key):
map = self._keymap
result = None
@@ -2318,7 +2318,7 @@ class ResultMetaData(object):
),
'keys': self.keys
}
-
+
def __setstate__(self, state):
# the row has been processed at pickling time so we don't need any
# processor anymore
@@ -2329,7 +2329,7 @@ class ResultMetaData(object):
self.keys = state['keys']
self._echo = False
-
+
class ResultProxy(object):
"""Wraps a DB-API cursor object to provide easier access to row columns.
@@ -2354,7 +2354,7 @@ class ResultProxy(object):
_process_row = RowProxy
out_parameters = None
_can_close_connection = False
-
+
def __init__(self, context):
self.context = context
self.dialect = context.dialect
@@ -2371,23 +2371,23 @@ class ResultProxy(object):
self._metadata = None
else:
self._metadata = ResultMetaData(self, metadata)
-
+
def keys(self):
"""Return the current set of string keys for rows."""
if self._metadata:
return self._metadata.keys
else:
return []
-
+
@util.memoized_property
def rowcount(self):
"""Return the 'rowcount' for this result.
-
+
The 'rowcount' reports the number of rows affected
by an UPDATE or DELETE statement. It has *no* other
uses and is not intended to provide the number of rows
present from a SELECT.
-
+
Note that this row count may not be properly implemented in some
dialects; this is indicated by
:meth:`~sqlalchemy.engine.base.ResultProxy.supports_sane_rowcount()`
@@ -2395,37 +2395,37 @@ class ResultProxy(object):
:meth:`~sqlalchemy.engine.base.ResultProxy.supports_sane_multi_rowcount()`.
``rowcount()`` also may not work at this time for a statement that
uses ``returning()``.
-
+
"""
return self.context.rowcount
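
E.g., a sketch against a hypothetical table::

    result = conn.execute("update mytable set y=5 where x=10")
    print result.rowcount  # rows matched by the UPDATE
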
@property
def lastrowid(self):
"""return the 'lastrowid' accessor on the DBAPI cursor.
-
+
This is a DBAPI specific method and is only functional
for those backends which support it, for statements
where it is appropriate. Its behavior is not
consistent across backends.
-
+
Usage of this method is normally unnecessary; the
:attr:`~ResultProxy.inserted_primary_key` attribute provides a
tuple of primary key values for a newly inserted row,
regardless of database backend.
-
+
"""
return self._saved_cursor.lastrowid
-
+
def _cursor_description(self):
"""May be overridden by subclasses."""
-
+
return self._saved_cursor.description
-
+
def close(self, _autoclose_connection=True):
"""Close this ResultProxy.
Closes the underlying DBAPI cursor corresponding to the execution.
-
+
Note that any data cached within this ResultProxy is still available.
For some types of results, this may include buffered rows.
@@ -2437,7 +2437,7 @@ class ResultProxy(object):
* all result rows are exhausted using the fetchXXX() methods.
* cursor.description is None.
-
+
"""
if not self.closed:
@@ -2448,7 +2448,7 @@ class ResultProxy(object):
self.connection.close()
# allow consistent errors
self.cursor = None
-
+
def __iter__(self):
while True:
row = self.fetchone()
@@ -2456,11 +2456,11 @@ class ResultProxy(object):
raise StopIteration
else:
yield row
-
+
@util.memoized_property
def inserted_primary_key(self):
"""Return the primary key for the row just inserted.
-
+
This only applies to single row insert() constructs which
did not explicitly specify returning().
@@ -2473,19 +2473,19 @@ class ResultProxy(object):
raise exc.InvalidRequestError(
"Can't call inserted_primary_key when returning() "
"is used.")
-
+
return self.context.inserted_primary_key
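
A sketch, assuming a ``Table`` named ``users`` with an integer primary key::

    result = conn.execute(users.insert(), name='jack')
    print result.inserted_primary_key  # e.g. [1]
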
@util.deprecated("0.6", "Use :attr:`.ResultProxy.inserted_primary_key`")
def last_inserted_ids(self):
"""Return the primary key for the row just inserted."""
-
+
return self.inserted_primary_key
-
+
def last_updated_params(self):
"""Return the collection of updated parameters from this
execution.
-
+
"""
if self.context.executemany:
return self.context.compiled_parameters
@@ -2495,7 +2495,7 @@ class ResultProxy(object):
def last_inserted_params(self):
"""Return the collection of inserted parameters from this
execution.
-
+
"""
if self.context.executemany:
return self.context.compiled_parameters
@@ -2549,7 +2549,7 @@ class ResultProxy(object):
return self.cursor.fetchall()
except AttributeError:
self._non_result()
-
+
def _non_result(self):
if self._metadata is None:
raise exc.ResourceClosedError(
@@ -2558,7 +2558,7 @@ class ResultProxy(object):
)
else:
raise exc.ResourceClosedError("This result object is closed.")
-
+
def process_rows(self, rows):
process_row = self._process_row
metadata = self._metadata
@@ -2591,10 +2591,10 @@ class ResultProxy(object):
def fetchmany(self, size=None):
"""Fetch many rows, just like DB-API
``cursor.fetchmany(size=cursor.arraysize)``.
-
+
If rows are present, the cursor remains open after this is called.
Else the cursor is automatically closed and an empty list is returned.
-
+
"""
try:
@@ -2610,10 +2610,10 @@ class ResultProxy(object):
def fetchone(self):
"""Fetch one row, just like DB-API ``cursor.fetchone()``.
-
+
If a row is present, the cursor remains open after this is called.
Else the cursor is automatically closed and None is returned.
-
+
"""
try:
row = self._fetchone_impl()
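
A chunked-fetch sketch; ``process`` is a hypothetical per-row function::

    while True:
        rows = result.fetchmany(100)
        if not rows:
            break
        for row in rows:
            process(row)
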
@@ -2630,9 +2630,9 @@ class ResultProxy(object):
def first(self):
"""Fetch the first row and then close the result set unconditionally.
-
+
Returns None if no row is present.
-
+
"""
if self._metadata is None:
self._non_result()
@@ -2652,12 +2652,12 @@ class ResultProxy(object):
return None
finally:
self.close()
-
+
def scalar(self):
"""Fetch the first column of the first row, and close the result set.
-
+
Returns None if no row is present.
-
+
"""
row = self.first()
if row is not None:
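
E.g., as a sketch::

    n = conn.execute("select count(*) from mytable").scalar()
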
@@ -2726,11 +2726,11 @@ class BufferedRowResultProxy(ResultProxy):
class FullyBufferedResultProxy(ResultProxy):
"""A result proxy that buffers rows fully upon creation.
-
+
Used for operations where a result is to be delivered
after the database conversation cannot be continued,
such as MSSQL INSERT...OUTPUT after an autocommit.
-
+
"""
def _init_metadata(self):
super(FullyBufferedResultProxy, self)._init_metadata()
@@ -2738,7 +2738,7 @@ class FullyBufferedResultProxy(ResultProxy):
def _buffer_rows(self):
return self.cursor.fetchall()
-
+
def _fetchone_impl(self):
if self.__rowbuffer:
return self.__rowbuffer.pop(0)
@@ -2772,7 +2772,7 @@ class BufferedColumnRow(RowProxy):
row = tuple(row)
super(BufferedColumnRow, self).__init__(parent, row,
processors, keymap)
-
+
class BufferedColumnResultProxy(ResultProxy):
"""A ResultProxy with column buffering behavior.
@@ -2782,7 +2782,7 @@ class BufferedColumnResultProxy(ResultProxy):
databases where result rows contain "live" results that fall out
of scope unless explicitly fetched. Currently this includes
cx_Oracle LOB objects.
-
+
"""
_process_row = BufferedColumnRow
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
index 10c7d55f2..6b5684f64 100644
--- a/lib/sqlalchemy/engine/ddl.py
+++ b/lib/sqlalchemy/engine/ddl.py
@@ -34,20 +34,20 @@ class SchemaGenerator(DDLBase):
else:
tables = metadata.tables.values()
collection = [t for t in sql_util.sort_tables(tables) if self._can_create(t)]
-
+
metadata.dispatch.before_create(metadata, self.connection,
tables=collection)
-
+
for table in collection:
self.traverse_single(table, create_ok=True)
-
+
metadata.dispatch.after_create(metadata, self.connection,
tables=collection)
def visit_table(self, table, create_ok=False):
if not create_ok and not self._can_create(table):
return
-
+
table.dispatch.before_create(table, self.connection)
for column in table.columns:
@@ -88,10 +88,10 @@ class SchemaDropper(DDLBase):
else:
tables = metadata.tables.values()
collection = [t for t in reversed(sql_util.sort_tables(tables)) if self._can_drop(t)]
-
+
metadata.dispatch.before_drop(metadata, self.connection,
tables=collection)
-
+
for table in collection:
self.traverse_single(table, drop_ok=True)
@@ -118,7 +118,7 @@ class SchemaDropper(DDLBase):
self.traverse_single(column.default)
self.connection.execute(schema.DropTable(table))
-
+
table.dispatch.after_drop(table, self.connection)
def visit_sequence(self, sequence):
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 8721b97aa..76077778c 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -34,23 +34,23 @@ class DefaultDialect(base.Dialect):
supports_alter = True
# most DBAPIs happy with this for execute().
- # not cx_oracle.
+ # not cx_oracle.
execute_sequence_format = tuple
-
+
supports_sequences = False
sequences_optional = False
preexecute_autoincrement_sequences = False
postfetch_lastrowid = True
implicit_returning = False
-
+
supports_native_enum = False
supports_native_boolean = False
-
+
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
supports_native_decimal = False
-
+
# Py3K
#supports_unicode_statements = True
#supports_unicode_binds = True
@@ -63,18 +63,18 @@ class DefaultDialect(base.Dialect):
name = 'default'
-
+
# length at which to truncate
# any identifier.
max_identifier_length = 9999
-
+
# length at which to truncate
# the name of an index.
# Usually None to indicate
# 'use max_identifier_length'.
# thanks to MySQL, sigh
max_index_name_length = None
-
+
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
dbapi_type_map = {}
@@ -82,28 +82,28 @@ class DefaultDialect(base.Dialect):
default_paramstyle = 'named'
supports_default_values = False
supports_empty_insert = True
-
+
server_version_info = None
-
+
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
# if this is True, the methods normalize_name()
# and denormalize_name() must be provided.
requires_name_normalize = False
-
+
reflection_options = ()
def __init__(self, convert_unicode=False, assert_unicode=False,
encoding='utf-8', paramstyle=None, dbapi=None,
implicit_returning=None,
label_length=None, **kwargs):
-
+
if not getattr(self, 'ported_sqla_06', True):
util.warn(
"The %s dialect is not yet ported to SQLAlchemy 0.6" %
self.name)
-
+
self.convert_unicode = convert_unicode
if assert_unicode:
util.warn_deprecated(
@@ -114,7 +114,7 @@ class DefaultDialect(base.Dialect):
"received. "
"This does *not* apply to DBAPIs that coerce Unicode "
"natively.")
-
+
self.encoding = encoding
self.positional = False
self._ischema = None
@@ -137,32 +137,32 @@ class DefaultDialect(base.Dialect):
" maximum identifier length of %d" %
(label_length, self.max_identifier_length))
self.label_length = label_length
-
+
if not hasattr(self, 'description_encoding'):
self.description_encoding = getattr(
self,
'description_encoding',
encoding)
-
+
if self.description_encoding:
self._description_decoder = processors.to_unicode_processor_factory(
self.description_encoding
)
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
-
+
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
-
+
@property
def dialect_description(self):
return self.name + "+" + self.driver
-
+
@classmethod
def get_pool_class(cls, url):
return getattr(cls, 'poolclass', pool.QueuePool)
-
+
def initialize(self, connection):
try:
self.server_version_info = \
@@ -176,23 +176,23 @@ class DefaultDialect(base.Dialect):
self.default_schema_name = None
self.returns_unicode_strings = self._check_unicode_returns(connection)
-
+
self.do_rollback(connection.connection)
def on_connect(self):
"""return a callable which sets up a newly created DBAPI connection.
-
+
This is used to set dialect-wide per-connection options such as
isolation modes, unicode modes, etc.
-
+
If a callable is returned, it will be assembled into a pool listener
that receives the direct DBAPI connection, with all wrappers removed.
-
+
If None is returned, no listener will be generated.
-
+
"""
return None
-
+
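
A dialect-level sketch, e.g. for a MySQL-style backend (names hypothetical)::

    from sqlalchemy.engine.default import DefaultDialect

    class MyDialect(DefaultDialect):
        def on_connect(self):
            def connect(dbapi_conn):
                # dbapi_conn is the raw DBAPI connection, no wrappers
                cursor = dbapi_conn.cursor()
                cursor.execute("SET NAMES utf8")
                cursor.close()
            return connect
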
def _check_unicode_returns(self, connection):
# Py2K
if self.supports_unicode_statements:
@@ -215,22 +215,22 @@ class DefaultDialect(base.Dialect):
)
)
row = cursor.fetchone()
-
+
return isinstance(row[0], unicode)
finally:
cursor.close()
-
+
# detect plain VARCHAR
unicode_for_varchar = check_unicode(sqltypes.VARCHAR(60))
-
+
# detect if there's an NVARCHAR type with different behavior available
unicode_for_unicode = check_unicode(sqltypes.Unicode(60))
-
+
if unicode_for_unicode and not unicode_for_varchar:
return "conditional"
else:
return unicode_for_varchar
-
+
def type_descriptor(self, typeobj):
"""Provide a database-specific ``TypeEngine`` object, given
the generic object which comes from the types module.
@@ -249,14 +249,14 @@ class DefaultDialect(base.Dialect):
def get_pk_constraint(self, conn, table_name, schema=None, **kw):
"""Compatiblity method, adapts the result of get_primary_keys()
for those dialects which don't implement get_pk_constraint().
-
+
"""
return {
'constrained_columns':
self.get_primary_keys(conn, table_name,
schema=schema, **kw)
}
-
+
def validate_identifier(self, ident):
if len(ident) > self.max_identifier_length:
raise exc.IdentifierError(
@@ -332,11 +332,11 @@ class DefaultExecutionContext(base.ExecutionContext):
statement = None
_is_implicit_returning = False
_is_explicit_returning = False
-
+
@classmethod
def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl):
"""Initialize execution context for a DDLElement construct."""
-
+
self = cls.__new__(cls)
self.dialect = dialect
self.root_connection = connection
@@ -356,7 +356,7 @@ class DefaultExecutionContext(base.ExecutionContext):
self.statement = dialect._encoder(self.unicode_statement)[0]
else:
self.statement = self.unicode_statement = unicode(compiled)
-
+
self.cursor = self.create_cursor()
self.compiled_parameters = []
@@ -366,7 +366,7 @@ class DefaultExecutionContext(base.ExecutionContext):
self.parameters = [{}]
return self
-
+
@classmethod
def _init_compiled(cls, dialect, connection, dbapi_connection, compiled, parameters):
"""Initialize execution context for a Compiled construct."""
@@ -401,7 +401,7 @@ class DefaultExecutionContext(base.ExecutionContext):
self.isinsert = compiled.isinsert
self.isupdate = compiled.isupdate
self.isdelete = compiled.isdelete
-
+
if self.isinsert or self.isupdate or self.isdelete:
self._is_explicit_returning = compiled.statement._returning
self._is_implicit_returning = compiled.returning and \
@@ -413,7 +413,7 @@ class DefaultExecutionContext(base.ExecutionContext):
self.compiled_parameters = \
[compiled.construct_params(m, _group_number=grp) for
grp,m in enumerate(parameters)]
-
+
self.executemany = len(parameters) > 1
self.cursor = self.create_cursor()
@@ -421,7 +421,7 @@ class DefaultExecutionContext(base.ExecutionContext):
self.__process_defaults()
self.postfetch_cols = self.compiled.postfetch
self.prefetch_cols = self.compiled.prefetch
-
+
processors = compiled._bind_processors
# Convert the dictionary of bind parameter values
@@ -456,9 +456,9 @@ class DefaultExecutionContext(base.ExecutionContext):
param[key] = compiled_params[key]
parameters.append(param)
self.parameters = dialect.execute_sequence_format(parameters)
-
+
return self
-
+
@classmethod
def _init_statement(cls, dialect, connection, dbapi_connection, statement, parameters):
"""Initialize execution context for a string SQL statement."""
@@ -490,18 +490,18 @@ class DefaultExecutionContext(base.ExecutionContext):
else:
self.parameters = [dialect.execute_sequence_format(p)
for p in parameters]
-
+
self.executemany = len(parameters) > 1
-
+
if not dialect.supports_unicode_statements and isinstance(statement, unicode):
self.unicode_statement = statement
self.statement = dialect._encoder(statement)[0]
else:
self.statement = self.unicode_statement = statement
-
+
self.cursor = self.create_cursor()
return self
-
+
@classmethod
def _init_default(cls, dialect, connection, dbapi_connection):
"""Initialize execution context for a ColumnDefault construct."""
@@ -514,11 +514,11 @@ class DefaultExecutionContext(base.ExecutionContext):
self.execution_options = connection._execution_options
self.cursor = self.create_cursor()
return self
-
+
@util.memoized_property
def is_crud(self):
return self.isinsert or self.isupdate or self.isdelete
-
+
@util.memoized_property
def should_autocommit(self):
autocommit = self.execution_options.get('autocommit',
@@ -526,20 +526,20 @@ class DefaultExecutionContext(base.ExecutionContext):
self.statement and
expression.PARSE_AUTOCOMMIT
or False)
-
+
if autocommit is expression.PARSE_AUTOCOMMIT:
return self.should_autocommit_text(self.unicode_statement)
else:
return autocommit
-
+
def _execute_scalar(self, stmt):
"""Execute a string statement on the current cursor, returning a
scalar result.
-
+
Used to fire off sequences, default phrases, and "select lastrowid"
types of statements individually or in the context of a parent INSERT
or UPDATE statement.
-
+
"""
conn = self.root_connection
@@ -551,10 +551,10 @@ class DefaultExecutionContext(base.ExecutionContext):
default_params = self.dialect.execute_sequence_format()
else:
default_params = {}
-
+
conn._cursor_execute(self.cursor, stmt, default_params)
return self.cursor.fetchone()[0]
-
+
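
E.g., a sketch of firing a Postgresql-style sequence::

    next_id = self._execute_scalar("select nextval('user_id_seq')")
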
@property
def connection(self):
return self.root_connection._branch()
@@ -570,32 +570,32 @@ class DefaultExecutionContext(base.ExecutionContext):
def post_exec(self):
pass
-
+
def get_lastrowid(self):
"""return self.cursor.lastrowid, or equivalent, after an INSERT.
-
+
This may involve calling special cursor functions,
issuing a new SELECT on the cursor (or a new one),
or returning a stored value that was
calculated within post_exec().
-
+
This function will only be called for dialects
which support "implicit" primary key generation,
keep preexecute_autoincrement_sequences set to False,
and when no explicit id value was bound to the
statement.
-
+
The function is called once, directly after
post_exec() and before the transaction is committed
or ResultProxy is generated. If the post_exec()
method assigns a value to `self._lastrowid`, the
value is used in place of calling get_lastrowid().
-
+
Note that this method is *not* equivalent to the
``lastrowid`` method on ``ResultProxy``, which is a
direct proxy to the DBAPI ``lastrowid`` accessor
in all cases.
-
+
"""
return self.cursor.lastrowid
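
A hypothetical override, in the spirit of the MySQL-style contexts::

    from sqlalchemy.engine.default import DefaultExecutionContext

    class MyExecutionContext(DefaultExecutionContext):
        def get_lastrowid(self):
            # ask the backend directly for the last inserted id
            return self._execute_scalar("SELECT LAST_INSERT_ID()")
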
@@ -604,7 +604,7 @@ class DefaultExecutionContext(base.ExecutionContext):
def get_result_proxy(self):
return base.ResultProxy(self)
-
+
@property
def rowcount(self):
return self.cursor.rowcount
@@ -614,19 +614,19 @@ class DefaultExecutionContext(base.ExecutionContext):
def supports_sane_multi_rowcount(self):
return self.dialect.supports_sane_multi_rowcount
-
+
def post_insert(self):
if self.dialect.postfetch_lastrowid and \
(not self.inserted_primary_key or \
None in self.inserted_primary_key):
-
+
table = self.compiled.statement.table
lastrowid = self.get_lastrowid()
self.inserted_primary_key = [
c is table._autoincrement_column and lastrowid or v
for c, v in zip(table.primary_key, self.inserted_primary_key)
]
-
+
def _fetch_implicit_returning(self, resultproxy):
table = self.compiled.statement.table
row = resultproxy.fetchone()
@@ -637,9 +637,9 @@ class DefaultExecutionContext(base.ExecutionContext):
ipk.append(v)
else:
ipk.append(row[c])
-
+
self.inserted_primary_key = ipk
-
+
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
bool(self.postfetch_cols)
@@ -648,10 +648,10 @@ class DefaultExecutionContext(base.ExecutionContext):
"""Given a cursor and ClauseParameters, call the appropriate
style of ``setinputsizes()`` on the cursor, using DB-API types
from the bind parameter's ``TypeEngine`` objects.
-
+
This method is only called by those dialects which require it,
currently cx_oracle.
-
+
"""
if not hasattr(self.compiled, 'bind_names'):
@@ -696,12 +696,12 @@ class DefaultExecutionContext(base.ExecutionContext):
elif default.is_clause_element:
# TODO: expensive branching here should be
# pulled into _exec_scalar()
- conn = self.connection
+ conn = self.connection
c = expression.select([default.arg]).compile(bind=conn)
return conn._execute_compiled(c, (), {}).scalar()
else:
return default.arg
-
+
def get_insert_default(self, column):
if column.default is None:
return None
@@ -713,7 +713,7 @@ class DefaultExecutionContext(base.ExecutionContext):
return None
else:
return self._exec_default(column.onupdate)
-
+
def __process_defaults(self):
"""Generate default values for compiled insert/update statements,
and generate inserted_primary_key collection.
@@ -722,7 +722,7 @@ class DefaultExecutionContext(base.ExecutionContext):
if self.executemany:
if len(self.compiled.prefetch):
scalar_defaults = {}
-
+
# pre-determine scalar Python-side defaults
# to avoid many calls of get_insert_default()/
# get_update_default()
@@ -731,7 +731,7 @@ class DefaultExecutionContext(base.ExecutionContext):
scalar_defaults[c] = c.default.arg
elif self.isupdate and c.onupdate and c.onupdate.is_scalar:
scalar_defaults[c] = c.onupdate.arg
-
+
for param in self.compiled_parameters:
self.current_parameters = param
for c in self.compiled.prefetch:
@@ -757,7 +757,7 @@ class DefaultExecutionContext(base.ExecutionContext):
if val is not None:
compiled_parameters[c.key] = val
del self.current_parameters
-
+
if self.isinsert:
self.inserted_primary_key = [
self.compiled_parameters[0].get(c.key, None)
@@ -765,5 +765,5 @@ class DefaultExecutionContext(base.ExecutionContext):
statement.table.primary_key
]
-
+
DefaultDialect.execution_ctx_cls = DefaultExecutionContext
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 964e9fbee..cf254cba6 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -55,17 +55,17 @@ class Inspector(object):
:class:`~sqlalchemy.engine.base.Dialect`, providing a
consistent interface as well as caching support for previously
fetched metadata.
-
+
The preferred method to construct an :class:`.Inspector` is via the
:meth:`Inspector.from_engine` method. I.e.::
-
+
engine = create_engine('...')
insp = Inspector.from_engine(engine)
-
+
Where above, the :class:`~sqlalchemy.engine.base.Dialect` may opt
to return an :class:`.Inspector` subclass that provides additional
methods specific to the dialect's target database.
-
+
"""
def __init__(self, bind):
@@ -75,7 +75,7 @@ class Inspector(object):
which is typically an instance of
:class:`~sqlalchemy.engine.base.Engine` or
:class:`~sqlalchemy.engine.base.Connection`.
-
+
For a dialect-specific instance of :class:`.Inspector`, see
:meth:`Inspector.from_engine`
@@ -83,10 +83,10 @@ class Inspector(object):
# ensure initialized
bind.connect()
-
+
# this might not be a connection, it could be an engine.
self.bind = bind
-
+
# set the engine
if hasattr(bind, 'engine'):
self.engine = bind.engine
@@ -103,14 +103,14 @@ class Inspector(object):
which is typically an instance of
:class:`~sqlalchemy.engine.base.Engine` or
:class:`~sqlalchemy.engine.base.Connection`.
-
+
This method differs from a direct constructor call of :class:`.Inspector`
in that the :class:`~sqlalchemy.engine.base.Dialect` is given a chance to provide
a dialect-specific :class:`.Inspector` instance, which may provide additional
methods.
-
+
See the example at :class:`.Inspector`.
-
+
"""
if hasattr(bind.dialect, 'inspector'):
return bind.dialect.inspector(bind)
@@ -120,10 +120,10 @@ class Inspector(object):
def default_schema_name(self):
"""Return the default schema name presented by the dialect
for the current engine's database user.
-
+
E.g. this is typically ``public`` for Postgresql and ``dbo``
for SQL Server.
-
+
"""
return self.dialect.default_schema_name
@@ -174,9 +174,9 @@ class Inspector(object):
def get_table_options(self, table_name, schema=None, **kw):
"""Return a dictionary of options specified when the table of the given name was created.
-
+
This currently includes some options that apply to MySQL tables.
-
+
"""
if hasattr(self.dialect, 'get_table_options'):
return self.dialect.get_table_options(self.bind, table_name, schema,
@@ -252,10 +252,10 @@ class Inspector(object):
Given a string `table_name`, and an optional string `schema`, return
primary key information as a dictionary with these keys:
-
+
constrained_columns
a list of column names that make up the primary key
-
+
name
optional name of the primary key constraint.
@@ -265,7 +265,7 @@ class Inspector(object):
**kw)
return pkeys
-
+
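
Usage sketch, assuming an existing ``engine`` and a table named ``user``::

    insp = Inspector.from_engine(engine)
    pk = insp.get_pk_constraint('user')
    # e.g. {'constrained_columns': ['id'], 'name': u'user_pkey'}
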
def get_foreign_keys(self, table_name, schema=None, **kw):
"""Return information about foreign_keys in `table_name`.
@@ -288,7 +288,7 @@ class Inspector(object):
name
optional name of the foreign key constraint.
-
+
\**kw
other options passed to the dialect's get_foreign_keys() method.
@@ -313,7 +313,7 @@ class Inspector(object):
unique
boolean
-
+
\**kw
other options passed to the dialect's get_indexes() method.
"""
@@ -325,23 +325,23 @@ class Inspector(object):
def reflecttable(self, table, include_columns):
"""Given a Table object, load its internal constructs based on introspection.
-
+
This is the underlying method used by most dialects to produce
table reflection. Direct usage is like::
-
+
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.engine import reflection
-
+
engine = create_engine('...')
meta = MetaData()
user_table = Table('user', meta)
insp = Inspector.from_engine(engine)
insp.reflecttable(user_table, None)
-
+
:param table: a :class:`~sqlalchemy.schema.Table` instance.
:param include_columns: a list of string column names to include
in the reflection process. If ``None``, all columns are reflected.
-
+
"""
dialect = self.bind.dialect
@@ -393,13 +393,13 @@ class Inspector(object):
col_kw['autoincrement'] = col_d['autoincrement']
if 'quote' in col_d:
col_kw['quote'] = col_d['quote']
-
+
colargs = []
if col_d.get('default') is not None:
# the "default" value is assumed to be a literal SQL expression,
# so is wrapped in text() so that no quoting occurs on re-issuance.
colargs.append(sa_schema.DefaultClause(sql.text(col_d['default'])))
-
+
if 'sequence' in col_d:
# TODO: mssql, maxdb and sybase are using this.
seq = col_d['sequence']
@@ -409,7 +409,7 @@ class Inspector(object):
if 'increment' in seq:
sequence.increment = seq['increment']
colargs.append(sequence)
-
+
col = sa_schema.Column(name, coltype, *colargs, **col_kw)
table.append_column(col)
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index df3d5b64a..e49d0e99e 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -28,7 +28,7 @@ class EngineStrategy(object):
Provides a ``create`` method that receives input arguments and
produces an instance of base.Engine or a subclass.
-
+
"""
def __init__(self):
@@ -85,7 +85,7 @@ class DefaultEngineStrategy(EngineStrategy):
import sys
raise exc.DBAPIError.instance(None, None, e), None, sys.exc_info()[2]
# end Py2K
-
+
creator = kwargs.pop('creator', connect)
poolclass = kwargs.pop('poolclass', None)
@@ -120,7 +120,7 @@ class DefaultEngineStrategy(EngineStrategy):
engine_args[k] = kwargs.pop(k)
_initialize = kwargs.pop('_initialize', True)
-
+
# all kwargs should be consumed
if kwargs:
raise TypeError(
@@ -131,7 +131,7 @@ class DefaultEngineStrategy(EngineStrategy):
dialect.__class__.__name__,
pool.__class__.__name__,
engineclass.__name__))
-
+
engine = engineclass(pool, dialect, u, **engine_args)
if _initialize:
@@ -142,10 +142,10 @@ class DefaultEngineStrategy(EngineStrategy):
if conn is None:
return
do_on_connect(conn)
-
+
event.listen(pool, 'first_connect', on_connect)
event.listen(pool, 'connect', on_connect)
-
+
def first_connect(dbapi_connection, connection_record):
c = base.Connection(engine, connection=dbapi_connection)
dialect.initialize(c)
@@ -159,13 +159,13 @@ class PlainEngineStrategy(DefaultEngineStrategy):
name = 'plain'
engine_cls = base.Engine
-
+
PlainEngineStrategy()
class ThreadLocalEngineStrategy(DefaultEngineStrategy):
"""Strategy for configuring an Engine with thredlocal behavior."""
-
+
name = 'threadlocal'
engine_cls = threadlocal.TLEngine
@@ -177,11 +177,11 @@ class MockEngineStrategy(EngineStrategy):
Produces a single mock Connectable object which dispatches
statement execution to a passed-in function.
-
+
"""
name = 'mock'
-
+
def create(self, name_or_url, executor, **kwargs):
# create url.URL object
u = url.make_url(name_or_url)
@@ -218,7 +218,7 @@ class MockEngineStrategy(EngineStrategy):
def create(self, entity, **kwargs):
kwargs['checkfirst'] = False
from sqlalchemy.engine import ddl
-
+
ddl.SchemaGenerator(self.dialect, self, **kwargs).traverse(entity)
def drop(self, entity, **kwargs):
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index a083d0113..2ce0922bf 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -19,11 +19,11 @@ class TLConnection(base.Connection):
def __init__(self, *arg, **kw):
super(TLConnection, self).__init__(*arg, **kw)
self.__opencount = 0
-
+
def _increment_connect(self):
self.__opencount += 1
return self
-
+
def close(self):
if self.__opencount == 1:
base.Connection.close(self)
@@ -52,21 +52,21 @@ class TLEngine(base.Engine):
self._connections = util.threading.local()
dispatch = event.dispatcher(TLEvents)
-
+
def contextual_connect(self, **kw):
if not hasattr(self._connections, 'conn'):
connection = None
else:
connection = self._connections.conn()
-
+
if connection is None or connection.closed:
# guards against pool-level reapers, if desired.
# or not connection.connection.is_valid:
connection = self.TLConnection(self, self.pool.connect(), **kw)
self._connections.conn = conn = weakref.ref(connection)
-
+
return connection._increment_connect()
-
+
def begin_twophase(self, xid=None):
if not hasattr(self._connections, 'trans'):
self._connections.trans = []
@@ -76,42 +76,42 @@ class TLEngine(base.Engine):
if not hasattr(self._connections, 'trans'):
self._connections.trans = []
self._connections.trans.append(self.contextual_connect().begin_nested())
-
+
def begin(self):
if not hasattr(self._connections, 'trans'):
self._connections.trans = []
self._connections.trans.append(self.contextual_connect().begin())
-
+
def prepare(self):
if not hasattr(self._connections, 'trans') or \
not self._connections.trans:
return
self._connections.trans[-1].prepare()
-
+
def commit(self):
if not hasattr(self._connections, 'trans') or \
not self._connections.trans:
return
trans = self._connections.trans.pop(-1)
trans.commit()
-
+
def rollback(self):
if not hasattr(self._connections, 'trans') or \
not self._connections.trans:
return
trans = self._connections.trans.pop(-1)
trans.rollback()
-
+
def dispose(self):
self._connections = util.threading.local()
super(TLEngine, self).dispose()
-
+
@property
def closed(self):
return not hasattr(self._connections, 'conn') or \
self._connections.conn() is None or \
self._connections.conn().closed
-
+
def close(self):
if not self.closed:
self.contextual_connect().close()
@@ -119,6 +119,6 @@ class TLEngine(base.Engine):
connection._force_close()
del self._connections.conn
self._connections.trans = []
-
+
def __repr__(self):
return 'TLEngine(%s)' % str(self.url)
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index 199b37c46..9f4c2dbdf 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -113,25 +113,25 @@ class URL(object):
return module
else:
raise
-
+
def _load_entry_point(self):
"""attempt to load this url's dialect from entry points, or return None
if pkg_resources is not installed or there is no matching entry point.
-
+
Raise ImportError if the actual load fails.
-
+
"""
try:
import pkg_resources
except ImportError:
return None
-
+
for res in pkg_resources.iter_entry_points('sqlalchemy.dialects'):
if res.name == self.drivername:
return res.load()
else:
return None
-
+
def translate_connect_args(self, names=[], **kw):
"""Translate url attributes into a dictionary of connection arguments.
diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py
index 3637234ee..6c74c101d 100644
--- a/lib/sqlalchemy/event.py
+++ b/lib/sqlalchemy/event.py
@@ -13,9 +13,9 @@ NO_RETVAL = util.symbol('NO_RETVAL')
def listen(target, identifier, fn, *args, **kw):
"""Register a listener function for the given target.
-
+
"""
-
+
for evt_cls in _registrars[identifier]:
tgt = evt_cls._accept_with(target)
if tgt is not None:
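
E.g., a sketch attaching a pool-level listener to an existing ``engine``::

    from sqlalchemy import event

    def on_connect(dbapi_conn, connection_record):
        print "received new DBAPI connection"

    event.listen(engine.pool, 'connect', on_connect)
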
@@ -26,11 +26,11 @@ def listen(target, identifier, fn, *args, **kw):
def remove(target, identifier, fn):
"""Remove an event listener.
-
+
Note that some event removals, particularly for those event dispatchers
which create wrapper functions and secondary event listeners, may not yet
be supported.
-
+
"""
for evt_cls in _registrars[identifier]:
for tgt in evt_cls._accept_with(target):
@@ -41,42 +41,42 @@ _registrars = util.defaultdict(list)
def _is_event_name(name):
return not name.startswith('_') and name != 'dispatch'
-
+
class _UnpickleDispatch(object):
"""Serializable callable that re-generates an instance of :class:`_Dispatch`
given a particular :class:`.Events` subclass.
-
+
"""
def __call__(self, _parent_cls):
return _parent_cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
-
+
class _Dispatch(object):
"""Mirror the event listening definitions of an Events class with
listener collections.
-
+
Classes which define a "dispatch" member will return a
non-instantiated :class:`._Dispatch` subclass when the member
is accessed at the class level. When the "dispatch" member is
accessed at the instance level of its owner, an instance
of the :class:`._Dispatch` class is returned.
-
+
A :class:`._Dispatch` class is generated for each :class:`.Events`
- class defined, by the :func:`._create_dispatcher_class` function.
- The original :class:`.Events` classes remain untouched.
+ class defined, by the :func:`._create_dispatcher_class` function.
+ The original :class:`.Events` classes remain untouched.
This decouples the construction of :class:`.Events` subclasses from
the implementation used by the event internals, and allows
inspecting tools like Sphinx to work in an unsurprising
way against the public API.
-
+
"""
-
+
def __init__(self, _parent_cls):
self._parent_cls = _parent_cls
-
+
def __reduce__(self):
-
+
return _UnpickleDispatch(), (self._parent_cls, )
-
+
@property
def _descriptors(self):
return (getattr(self, k) for k in dir(self) if _is_event_name(k))
@@ -87,20 +87,20 @@ class _Dispatch(object):
for ls in other._descriptors:
getattr(self, ls.name)._update(ls, only_propagate=only_propagate)
-
-
+
+
class _EventMeta(type):
"""Intercept new Event subclasses and create
associated _Dispatch classes."""
-
+
def __init__(cls, classname, bases, dict_):
_create_dispatcher_class(cls, classname, bases, dict_)
return type.__init__(cls, classname, bases, dict_)
-
+
def _create_dispatcher_class(cls, classname, bases, dict_):
"""Create a :class:`._Dispatch` class corresponding to an
:class:`.Events` class."""
-
+
# there's all kinds of ways to do this,
# i.e. make a Dispatch class that shares the '_listen' method
# of the Event class, this is the straight monkeypatch.
@@ -109,7 +109,7 @@ def _create_dispatcher_class(cls, classname, bases, dict_):
(dispatch_base, ), {})
dispatch_cls._listen = cls._listen
dispatch_cls._clear = cls._clear
-
+
for k in dict_:
if _is_event_name(k):
setattr(dispatch_cls, k, _DispatchDescriptor(dict_[k]))
@@ -121,13 +121,13 @@ def _remove_dispatcher(cls):
_registrars[k].remove(cls)
if not _registrars[k]:
del _registrars[k]
-
+
class Events(object):
"""Define event listening functions for a particular target type."""
-
-
+
+
__metaclass__ = _EventMeta
-
+
@classmethod
def _accept_with(cls, target):
# Mapper, ClassManager, Session override this to
@@ -144,42 +144,42 @@ class Events(object):
@classmethod
def _listen(cls, target, identifier, fn, propagate=False):
getattr(target.dispatch, identifier).append(fn, target, propagate)
-
+
@classmethod
def _remove(cls, target, identifier, fn):
getattr(target.dispatch, identifier).remove(fn, target)
-
+
@classmethod
def _clear(cls):
for attr in dir(cls.dispatch):
if _is_event_name(attr):
getattr(cls.dispatch, attr).clear()
-
+
class _DispatchDescriptor(object):
"""Class-level attributes on :class:`._Dispatch` classes."""
-
+
def __init__(self, fn):
self.__name__ = fn.__name__
self.__doc__ = fn.__doc__
self._clslevel = util.defaultdict(list)
-
+
def append(self, obj, target, propagate):
assert isinstance(target, type), \
"Class-level Event targets must be classes."
-
+
for cls in [target] + target.__subclasses__():
self._clslevel[cls].append(obj)
-
+
def remove(self, obj, target):
for cls in [target] + target.__subclasses__():
self._clslevel[cls].remove(obj)
-
+
def clear(self):
"""Clear all class level listeners"""
-
+
for dispatcher in self._clslevel.values():
dispatcher[:] = []
-
+
def __get__(self, obj, cls):
if obj is None:
return self
@@ -189,19 +189,19 @@ class _DispatchDescriptor(object):
class _ListenerCollection(object):
"""Instance-level attributes on instances of :class:`._Dispatch`.
-
+
Represents a collection of listeners.
-
+
"""
_exec_once = False
-
+
def __init__(self, parent, target_cls):
self.parent_listeners = parent._clslevel[target_cls]
self.name = parent.__name__
self.listeners = []
self.propagate = set()
-
+
def exec_once(self, *args, **kw):
"""Execute this event, but only if it has not been
executed already for this collection."""
@@ -209,7 +209,7 @@ class _ListenerCollection(object):
if not self._exec_once:
self(*args, **kw)
self._exec_once = True
-
+
def __call__(self, *args, **kw):
"""Execute this event."""
@@ -217,7 +217,7 @@ class _ListenerCollection(object):
fn(*args, **kw)
for fn in self.listeners:
fn(*args, **kw)
-
+
# I'm not entirely thrilled about the overhead here,
# but this allows class-level listeners to be added
# at any point.
@@ -227,23 +227,23 @@ class _ListenerCollection(object):
# to a higher memory model, i.e.weakrefs to all _ListenerCollection
# objects, the _DispatchDescriptor collection repeated
# for all instances.
-
+
def __len__(self):
return len(self.parent_listeners + self.listeners)
-
+
def __iter__(self):
return iter(self.parent_listeners + self.listeners)
-
+
def __getitem__(self, index):
return (self.parent_listeners + self.listeners)[index]
-
+
def __nonzero__(self):
return bool(self.listeners or self.parent_listeners)
-
+
def _update(self, other, only_propagate=True):
"""Populate from the listeners in another :class:`_Dispatch`
object."""
-
+
existing_listeners = self.listeners
existing_listener_set = set(existing_listeners)
self.propagate.update(other.propagate)
@@ -258,27 +258,27 @@ class _ListenerCollection(object):
self.listeners.append(obj)
if propagate:
self.propagate.add(obj)
-
+
def remove(self, obj, target):
if obj in self.listeners:
self.listeners.remove(obj)
self.propagate.discard(obj)
-
+
def clear(self):
self.listeners[:] = []
self.propagate.clear()
-
+
class dispatcher(object):
"""Descriptor used by target classes to
deliver the _Dispatch class at the class level
and produce new _Dispatch instances for target
instances.
-
+
"""
def __init__(self, events):
self.dispatch_cls = events.dispatch
self.events = events
-
+
def __get__(self, obj, cls):
if obj is None:
return self.dispatch_cls
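
The hunks above are whitespace-only; as a reminder of how this dispatch
machinery is consumed, here is a minimal sketch of the public
``event.listen()`` path (the in-memory URL and listener body are
illustrative only)::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    def on_connect(dbapi_con, con_record):
        # invoked for each new DB-API connection the pool creates
        pass

    # Events._accept_with() resolves the Engine target to its pool;
    # Events._listen() then appends the function through the
    # _DispatchDescriptor / _ListenerCollection path shown above.
    event.listen(engine, 'connect', on_connect)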
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 7b77831df..c1f10977d 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -11,55 +11,55 @@ from sqlalchemy import event, exc
class DDLEvents(event.Events):
"""
    Define create/drop event listeners for schema objects.
-
+
These events currently apply to :class:`.Table`
and :class:`.MetaData` objects as targets.
-
+
e.g.::
-
+
from sqlalchemy import event
        from sqlalchemy import Table, Column, MetaData, Integer
-
+
m = MetaData()
some_table = Table('some_table', m, Column('data', Integer))
-
+
def after_create(target, connection, **kw):
connection.execute("ALTER TABLE %s SET name=foo_%s" %
(target.name, target.name))
-
+
event.listen(some_table, "after_create", after_create)
-
+
DDL events integrate closely with the
:class:`.DDL` class and the :class:`.DDLElement` hierarchy
of DDL clause constructs, which are themselves appropriate
as listener callables::
-
+
from sqlalchemy import DDL
event.listen(
some_table,
"after_create",
DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
)
-
+
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
-
+
See also:
:ref:`event_toplevel`
-
+
:class:`.DDLElement`
-
+
:class:`.DDL`
-
+
:ref:`schema_ddl_sequences`
-
+
"""
-
+
def before_create(self, target, connection, **kw):
"""Called before CREATE statments are emitted.
-
+
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
@@ -69,12 +69,12 @@ class DDLEvents(event.Events):
argument in the case of a :class:`.MetaData` object,
which is the list of :class:`.Table` objects for which
CREATE will be emitted.
-
+
"""
def after_create(self, target, connection, **kw):
"""Called after CREATE statments are emitted.
-
+
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
@@ -84,12 +84,12 @@ class DDLEvents(event.Events):
argument in the case of a :class:`.MetaData` object,
which is the list of :class:`.Table` objects for which
CREATE has been emitted.
-
+
"""
def before_drop(self, target, connection, **kw):
"""Called before DROP statments are emitted.
-
+
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
@@ -99,12 +99,12 @@ class DDLEvents(event.Events):
argument in the case of a :class:`.MetaData` object,
which is the list of :class:`.Table` objects for which
DROP will be emitted.
-
+
"""
-
+
def after_drop(self, target, connection, **kw):
"""Called after DROP statments are emitted.
-
+
:param target: the :class:`.MetaData` or :class:`.Table`
object which is the target of the event.
:param connection: the :class:`.Connection` where the
@@ -114,24 +114,24 @@ class DDLEvents(event.Events):
argument in the case of a :class:`.MetaData` object,
which is the list of :class:`.Table` objects for which
DROP has been emitted.
-
+
"""
-
+
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
-
+
The methods here define the name of an event as well
as the names of members that are passed to listener
functions.
-
+
e.g.::
-
+
from sqlalchemy import event
-
+
def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
"handle an on checkout event"
-
+
        event.listen(Pool, 'checkout', my_on_checkout)
In addition to accepting the :class:`.Pool` class and :class:`.Pool` instances,
@@ -139,19 +139,19 @@ class PoolEvents(event.Events):
the :class:`.Engine` class as targets, which will be resolved
to the ``.pool`` attribute of the given engine or the :class:`.Pool`
class::
-
+
engine = create_engine("postgresql://scott:tiger@localhost/test")
-
+
# will associate with engine.pool
        event.listen(engine, 'checkout', my_on_checkout)
"""
-
+
@classmethod
def _accept_with(cls, target):
from sqlalchemy.engine import Engine
from sqlalchemy.pool import Pool
-
+
if isinstance(target, type):
if issubclass(target, Engine):
return Pool
@@ -161,7 +161,7 @@ class PoolEvents(event.Events):
return target.pool
else:
return target
-
+
def connect(self, dbapi_connection, connection_record):
"""Called once for each new DB-API connection or Pool's ``creator()``.
@@ -222,30 +222,30 @@ class PoolEvents(event.Events):
class EngineEvents(event.Events):
"""Available events for :class:`.Engine`.
-
+
The methods here define the name of an event as well as the names of members that are passed to listener functions.
-
+
e.g.::
-
+
from sqlalchemy import event, create_engine
-
+
def before_execute(conn, clauseelement, multiparams, params):
log.info("Received statement: %s" % clauseelement)
-
+
engine = create_engine('postgresql://scott:tiger@localhost/test')
event.listen(engine, "before_execute", before_execute)
-
+
Some events allow modifiers to the listen() function.
-
+
:param retval=False: Applies to the :meth:`.before_execute` and
:meth:`.before_cursor_execute` events only. When True, the
user-defined event function must have a return value, which
is a tuple of parameters that replace the given statement
and parameters. See those methods for a description of
specific return arguments.
-
+
"""
-
+
@classmethod
def _listen(cls, target, identifier, fn, retval=False):
from sqlalchemy.engine.base import Connection, \
@@ -254,7 +254,7 @@ class EngineEvents(event.Events):
target.Connection = _listener_connection_cls(
Connection,
target.dispatch)
-
+
if not retval:
if identifier == 'before_execute':
orig_fn = fn
@@ -270,7 +270,7 @@ class EngineEvents(event.Events):
parameters, context, executemany)
return statement, parameters
fn = wrap
-
+
elif retval and identifier not in ('before_execute', 'before_cursor_execute'):
raise exc.ArgumentError(
"Only the 'before_execute' and "
@@ -284,7 +284,7 @@ class EngineEvents(event.Events):
def after_execute(self, conn, clauseelement, multiparams, params, result):
"""Intercept high level execute() events."""
-
+
def before_cursor_execute(self, conn, cursor, statement,
parameters, context, executemany):
"""Intercept low-level cursor execute() events."""
@@ -295,31 +295,31 @@ class EngineEvents(event.Events):
def begin(self, conn):
"""Intercept begin() events."""
-
+
def rollback(self, conn):
"""Intercept rollback() events."""
-
+
def commit(self, conn):
"""Intercept commit() events."""
-
+
def savepoint(self, conn, name=None):
"""Intercept savepoint() events."""
-
+
def rollback_savepoint(self, conn, name, context):
"""Intercept rollback_savepoint() events."""
-
+
def release_savepoint(self, conn, name, context):
"""Intercept release_savepoint() events."""
-
+
def begin_twophase(self, conn, xid):
"""Intercept begin_twophase() events."""
-
+
def prepare_twophase(self, conn, xid):
"""Intercept prepare_twophase() events."""
-
+
def rollback_twophase(self, conn, xid, is_prepared):
"""Intercept rollback_twophase() events."""
-
+
def commit_twophase(self, conn, xid, is_prepared):
"""Intercept commit_twophase() events."""
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index 1eb5bf916..b50e000a2 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -68,13 +68,13 @@ class InvalidRequestError(SQLAlchemyError):
class ResourceClosedError(InvalidRequestError):
"""An operation was requested from a connection, cursor, or other
object that's in a closed state."""
-
+
class NoSuchColumnError(KeyError, InvalidRequestError):
"""A nonexistent column is requested from a ``RowProxy``."""
class NoReferenceError(InvalidRequestError):
"""Raised by ``ForeignKey`` to indicate a reference cannot be resolved."""
-
+
class NoReferencedTableError(NoReferenceError):
"""Raised by ``ForeignKey`` when the referred ``Table`` cannot be located."""
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index bc62c6efa..969f60326 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -179,7 +179,7 @@ class AssociationProxy(object):
proxy = self._new(_lazy_collection(obj, self.target_collection))
setattr(obj, self.key, (id(obj), proxy))
return proxy
-
+
def __set__(self, obj, values):
if self.owning_class is None:
self.owning_class = type(obj)
@@ -233,7 +233,7 @@ class AssociationProxy(object):
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
-
+
if self.collection_class is list:
return _AssociationList(lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
@@ -254,7 +254,7 @@ class AssociationProxy(object):
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
-
+
proxy.creator = creator
proxy.getter = getter
proxy.setter = setter
@@ -279,7 +279,7 @@ class AssociationProxy(object):
def any(self, criterion=None, **kwargs):
return self._comparator.any(getattr(self.target_class, self.value_attr).has(criterion, **kwargs))
-
+
def has(self, criterion=None, **kwargs):
return self._comparator.has(getattr(self.target_class, self.value_attr).has(criterion, **kwargs))
@@ -308,15 +308,15 @@ class _lazy_collection(object):
def __getstate__(self):
return {'obj':self.ref(), 'target':self.target}
-
+
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
- """Constructs an _AssociationCollection.
-
+ """Constructs an _AssociationCollection.
+
This will always be a subclass of either _AssociationList,
_AssociationSet, or _AssociationDict.
@@ -360,7 +360,7 @@ class _AssociationCollection(object):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
-
+
class _AssociationList(_AssociationCollection):
"""Generic, converting, list-to-list proxy."""
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index e6a6ca744..0b96ce25d 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -14,24 +14,24 @@ subclasses and one or more callables defining its compilation::
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import ColumnClause
-
+
class MyColumn(ColumnClause):
pass
-
+
@compiles(MyColumn)
def compile_mycolumn(element, compiler, **kw):
return "[%s]" % element.name
-
+
Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`,
the base expression element for named column objects. The ``compiles``
decorator registers itself with the ``MyColumn`` class so that it is invoked
when the object is compiled to a string::
from sqlalchemy import select
-
+
s = select([MyColumn('x'), MyColumn('y')])
print str(s)
-
+
Produces::
SELECT [x], [y]
@@ -71,7 +71,7 @@ and :class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()``
method which can be used for compilation of embedded attributes::
from sqlalchemy.sql.expression import Executable, ClauseElement
-
+
class InsertFromSelect(Executable, ClauseElement):
def __init__(self, table, select):
self.table = table
@@ -86,7 +86,7 @@ method which can be used for compilation of embedded attributes::
insert = InsertFromSelect(t1, select([t1]).where(t1.c.x>5))
print insert
-
+
Produces::
"INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)"
@@ -139,7 +139,7 @@ Changing Compilation of Types
return "VARCHAR('max')"
else:
return compiler.visit_VARCHAR(element, **kw)
-
+
foo = Table('foo', metadata,
Column('data', VARCHAR('max'))
)
@@ -158,12 +158,12 @@ A big part of using the compiler extension is subclassing SQLAlchemy expression
"column-like" elements. Anything that you'd place in the "columns" clause of
a SELECT statement (as well as order by and group by) can derive from this -
the object will automatically have Python "comparison" behavior.
-
+
:class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a
``type`` member which is the expression's return type.  This can be
established at the instance level in the constructor, or at the class
level if it is generally constant::
-
+
class timestamp(ColumnElement):
type = TIMESTAMP()
@@ -173,7 +173,7 @@ A big part of using the compiler extension is subclassing SQLAlchemy expression
statements along the line of "SELECT FROM <some function>"
``FunctionElement`` adds in the ability to be used in the FROM clause of a
``select()`` construct::
-
+
from sqlalchemy.sql.expression import FunctionElement
class coalesce(FunctionElement):
@@ -209,14 +209,14 @@ def compiles(class_, *specs):
existing_dispatch = class_.__dict__.get('_compiler_dispatch')
if not existing:
existing = _dispatcher()
-
+
if existing_dispatch:
existing.specs['default'] = existing_dispatch
-
+
# TODO: why is the lambda needed ?
setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw))
setattr(class_, '_compiler_dispatcher', existing)
-
+
if specs:
for s in specs:
existing.specs[s] = fn
@@ -225,15 +225,15 @@ def compiles(class_, *specs):
existing.specs['default'] = fn
return fn
return decorate
-
+
class _dispatcher(object):
def __init__(self):
self.specs = {}
-
+
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
fn = self.specs['default']
return fn(element, compiler, **kw)
-
+
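
Since ``_dispatcher`` above switches on ``compiler.dialect.name``, a short
sketch of registering a default plus a dialect-specific compilation (the
``'mssql'`` spec is just an example)::

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import ColumnClause

    class MyColumn(ColumnClause):
        pass

    @compiles(MyColumn)
    def visit_default(element, compiler, **kw):
        # stored under specs['default']
        return element.name

    @compiles(MyColumn, 'mssql')
    def visit_mssql(element, compiler, **kw):
        # chosen when compiler.dialect.name == 'mssql'
        return "[%s]" % element.name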
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index 1199e69f3..feee435ed 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -39,7 +39,7 @@ The resulting table and mapper are accessible via
# access the mapped Table
SomeClass.__table__
-
+
# access the Mapper
SomeClass.__mapper__
@@ -57,7 +57,7 @@ just give the column a name. Below, column "some_table_id" is mapped to the
class SomeClass(Base):
__tablename__ = 'some_table'
id = Column("some_table_id", Integer, primary_key=True)
-
+
Attributes may be added to the class after its construction, and they will be
added to the underlying :class:`.Table` and
:func:`.mapper()` definitions as appropriate::
@@ -66,7 +66,7 @@ added to the underlying :class:`.Table` and
SomeClass.related = relationship(RelatedInfo)
Classes which are constructed using declarative can interact freely
-with classes that are mapped explicitly with :func:`mapper`.
+with classes that are mapped explicitly with :func:`mapper`.
It is recommended, though not required, that all tables
share the same underlying :class:`~sqlalchemy.schema.MetaData` object,
@@ -179,7 +179,7 @@ the :class:`.MetaData` object used by the declarative base::
Column('author_id', Integer, ForeignKey('authors.id')),
Column('keyword_id', Integer, ForeignKey('keywords.id'))
)
-
+
class Author(Base):
__tablename__ = 'authors'
id = Column(Integer, primary_key=True)
@@ -211,11 +211,11 @@ using Python 2.6 style properties::
@property
def attr(self):
return self._attr
-
+
@attr.setter
def attr(self, attr):
self._attr = attr
-
+
attr = synonym('_attr', descriptor=attr)
The above synonym is then usable as an instance attribute as well as a
@@ -230,7 +230,7 @@ conjunction with ``@property``::
class MyClass(Base):
__tablename__ = 'sometable'
-
+
id = Column(Integer, primary_key=True)
_attr = Column('attr', String)
@@ -277,19 +277,19 @@ need either from the local class definition or from remote
classes::
from sqlalchemy.sql import func
-
+
class Address(Base):
__tablename__ = 'address'
id = Column('id', Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'))
-
+
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
name = Column(String)
-
+
address_count = column_property(
select([func.count(Address.id)]).\\
where(Address.user_id==id)
@@ -357,15 +357,15 @@ to a table::
table metadata, while still getting most of the benefits of using declarative.
An application that uses reflection might want to load table metadata elsewhere
and simply pass it to declarative classes::
-
+
from sqlalchemy.ext.declarative import declarative_base
-
+
Base = declarative_base()
Base.metadata.reflect(some_engine)
-
+
class User(Base):
        __table__ = Base.metadata.tables['user']
-
+
class Address(Base):
        __table__ = Base.metadata.tables['address']
@@ -386,13 +386,13 @@ mapped columns can reference them directly from within the
class declaration::
from datetime import datetime
-
+
class Widget(Base):
__tablename__ = 'widgets'
-
+
id = Column(Integer, primary_key=True)
timestamp = Column(DateTime, nullable=False)
-
+
__mapper_args__ = {
'version_id_col': timestamp,
'version_id_generator': lambda v:datetime.now()
@@ -488,7 +488,7 @@ Concrete is defined as a subclass which has its own table and sets the
__tablename__ = 'people'
id = Column(Integer, primary_key=True)
name = Column(String(50))
-
+
class Engineer(Person):
__tablename__ = 'engineers'
__mapper_args__ = {'concrete':True}
@@ -509,16 +509,16 @@ requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`::
Column('name', String(50)),
Column('golf_swing', String(50))
)
-
+
punion = polymorphic_union({
'engineer':engineers,
'manager':managers
}, 'type', 'punion')
-
+
class Person(Base):
__table__ = punion
__mapper_args__ = {'polymorphic_on':punion.c.type}
-
+
class Engineer(Person):
__table__ = engineers
__mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True}
@@ -526,7 +526,7 @@ requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`::
class Manager(Person):
__table__ = managers
__mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True}
-
+
Mixin Classes
==============
@@ -541,10 +541,10 @@ using a "mixin class". A mixin class is one that isn't mapped to a
table and doesn't subclass the declarative :class:`Base`. For example::
class MyMixin(object):
-
+
__table_args__ = {'mysql_engine': 'InnoDB'}
__mapper_args__= {'always_refresh': True}
-
+
id = Column(Integer, primary_key=True)
@@ -600,16 +600,16 @@ is provided so that
patterns common to many classes can be defined as callables::
from sqlalchemy.ext.declarative import declared_attr
-
+
class ReferenceAddressMixin(object):
@declared_attr
def address_id(cls):
return Column(Integer, ForeignKey('address.id'))
-
+
class User(Base, ReferenceAddressMixin):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
-
+
Where above, the ``address_id`` class-level callable is executed at the
point at which the ``User`` class is constructed, and the declarative
extension can use the resulting :class:`Column` object as returned by
@@ -631,7 +631,7 @@ will resolve them at class construction time::
class MyModel(Base,MyMixin):
__tablename__='test'
id = Column(Integer, primary_key=True)
-
+
Mixing in Relationships
~~~~~~~~~~~~~~~~~~~~~~~
@@ -647,26 +647,26 @@ reference a common target class via many-to-one::
@declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
-
+
@declared_attr
def target(cls):
return relationship("Target")
-
+
class Foo(Base, RefTargetMixin):
__tablename__ = 'foo'
id = Column(Integer, primary_key=True)
-
+
class Bar(Base, RefTargetMixin):
__tablename__ = 'bar'
id = Column(Integer, primary_key=True)
-
+
class Target(Base):
__tablename__ = 'target'
id = Column(Integer, primary_key=True)
:func:`~sqlalchemy.orm.relationship` definitions which require explicit
primaryjoin, order_by etc. expressions should use the string forms
-for these arguments, so that they are evaluated as late as possible.
+for these arguments, so that they are evaluated as late as possible.
To reference the mixin class in these expressions, use the given ``cls``
to get its name::
@@ -674,7 +674,7 @@ to get it's name::
@declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
-
+
@declared_attr
def target(cls):
return relationship("Target",
@@ -810,7 +810,7 @@ from multiple collections::
from sqlalchemy.ext.declarative import declared_attr
class MySQLSettings:
- __table_args__ = {'mysql_engine':'InnoDB'}
+ __table_args__ = {'mysql_engine':'InnoDB'}
class MyOtherMixin:
__table_args__ = {'info':'foo'}
@@ -892,7 +892,7 @@ correctly combines the actions of the other metaclasses. For example::
# This is needed to successfully combine
# two mixins which both have metaclasses
pass
-
+
class MyModel(Base,MyMixin1,MyMixin2):
__tablename__ = 'awooooga'
__metaclass__ = CombinedMeta
@@ -901,7 +901,7 @@ correctly combines the actions of the other metaclasses. For example::
For this reason, if a mixin requires a custom metaclass, this should
be mentioned in any documentation of that mixin to avoid confusion
later down the line.
-
+
Class Constructor
=================
@@ -917,7 +917,7 @@ Sessions
Note that ``declarative`` does nothing special with sessions, and is
only intended as an easier way to configure mappers and
:class:`~sqlalchemy.schema.Table` objects. A typical application
-setup using :func:`~sqlalchemy.orm.scoped_session` might look like::
+setup using :func:`~sqlalchemy.orm.scoped_session` might look like::
engine = create_engine('postgresql://scott:tiger@localhost/test')
Session = scoped_session(sessionmaker(autocommit=False,
@@ -947,7 +947,7 @@ def instrument_declarative(cls, registry, metadata):
"""Given a class, configure the class declaratively,
using the given registry, which can be any dictionary, and
MetaData object.
-
+
"""
if '_decl_class_registry' in cls.__dict__:
raise exceptions.InvalidRequestError(
@@ -973,19 +973,19 @@ def _as_declarative(cls, classname, dict_):
column_copies = {}
potential_columns = {}
-
+
mapper_args = {}
table_args = inherited_table_args = None
tablename = None
parent_columns = ()
-
+
declarative_props = (declared_attr, util.classproperty)
-
+
for base in cls.__mro__:
class_mapped = _is_mapped_class(base)
if class_mapped:
parent_columns = base.__table__.c.keys()
-
+
for name,obj in vars(base).items():
if name == '__mapper_args__':
if not mapper_args and (
@@ -1015,7 +1015,7 @@ def _as_declarative(cls, classname, dict_):
continue
elif base is not cls:
# we're a mixin.
-
+
if isinstance(obj, Column):
if obj.foreign_keys:
raise exceptions.InvalidRequestError(
@@ -1048,7 +1048,7 @@ def _as_declarative(cls, classname, dict_):
for k, v in potential_columns.items():
if tablename or (v.name or k) not in parent_columns:
dict_[k] = v
-
+
if inherited_table_args and not tablename:
table_args = None
@@ -1056,7 +1056,7 @@ def _as_declarative(cls, classname, dict_):
# than the original columns from any mixins
for k, v in mapper_args.iteritems():
mapper_args[k] = column_copies.get(v,v)
-
+
if classname in cls._decl_class_registry:
util.warn("The classname %r is already in the registry of this"
@@ -1071,7 +1071,7 @@ def _as_declarative(cls, classname, dict_):
value = dict_[k]
if isinstance(value, declarative_props):
value = getattr(cls, k)
-
+
if (isinstance(value, tuple) and len(value) == 1 and
isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
@@ -1108,7 +1108,7 @@ def _as_declarative(cls, classname, dict_):
table = None
if '__table__' not in dict_:
if tablename is not None:
-
+
if isinstance(table_args, dict):
args, table_kw = (), table_args
elif isinstance(table_args, tuple):
@@ -1139,7 +1139,7 @@ def _as_declarative(cls, classname, dict_):
"Can't add additional column %r when "
"specifying __table__" % c.key
)
-
+
if 'inherits' not in mapper_args:
for c in cls.__bases__:
if _is_mapped_class(c):
@@ -1180,7 +1180,7 @@ def _as_declarative(cls, classname, dict_):
"Can't place __table_args__ on an inherited class "
"with no table."
)
-
+
# add any columns declared here to the inherited table.
for c in cols:
if c.primary_key:
@@ -1195,7 +1195,7 @@ def _as_declarative(cls, classname, dict_):
(c, cls, inherited_table.c[c.name])
)
inherited_table.append_column(c)
-
+
# single or joined inheritance
# exclude any cols on the inherited table which are not mapped on the
# parent class, to avoid
@@ -1203,19 +1203,19 @@ def _as_declarative(cls, classname, dict_):
inherited_mapper = class_mapper(mapper_args['inherits'],
compile=False)
inherited_table = inherited_mapper.local_table
-
+
if 'exclude_properties' not in mapper_args:
mapper_args['exclude_properties'] = exclude_properties = \
set([c.key for c in inherited_table.c
if c not in inherited_mapper._columntoproperty])
exclude_properties.difference_update([c.key for c in cols])
-
+
# look through columns in the current mapper that
# are keyed to a propname different than the colname
# (if names were the same, we'd have popped it out above,
# in which case the mapper makes this combination).
- # See if the superclass has a similar column property.
- # If so, join them together.
+ # See if the superclass has a similar column property.
+ # If so, join them together.
for k, col in our_stuff.items():
if not isinstance(col, expression.ColumnElement):
continue
@@ -1227,7 +1227,7 @@ def _as_declarative(cls, classname, dict_):
# append() in mapper._configure_property().
# change this ordering when we do [ticket:1892]
our_stuff[k] = p.columns + [col]
-
+
cls.__mapper__ = mapper_cls(cls,
table,
properties=our_stuff,
@@ -1267,7 +1267,7 @@ class DeclarativeMeta(type):
class _GetColumns(object):
def __init__(self, cls):
self.cls = cls
-
+
def __getattr__(self, key):
mapper = class_mapper(self.cls, compile=False)
if mapper:
@@ -1275,7 +1275,7 @@ class _GetColumns(object):
raise exceptions.InvalidRequestError(
"Class %r does not have a mapped column named %r"
% (self.cls, key))
-
+
prop = mapper.get_property(key)
if not isinstance(prop, ColumnProperty):
raise exceptions.InvalidRequestError(
@@ -1288,16 +1288,16 @@ class _GetTable(object):
def __init__(self, key, metadata):
self.key = key
self.metadata = metadata
-
+
def __getattr__(self, key):
return self.metadata.tables[
_get_table_key(key, self.key)
]
-
+
def _deferred_relationship(cls, prop):
def resolve_arg(arg):
import sqlalchemy
-
+
def access_cls(key):
if key in cls._decl_class_registry:
return _GetColumns(cls._decl_class_registry[key])
@@ -1312,7 +1312,7 @@ def _deferred_relationship(cls, prop):
def return_cls():
try:
x = eval(arg, globals(), d)
-
+
if isinstance(x, _GetColumns):
return x.cls
else:
@@ -1395,7 +1395,7 @@ class declared_attr(property):
.. note:: @declared_attr is available as
``sqlalchemy.util.classproperty`` for SQLAlchemy versions
0.6.2, 0.6.3, 0.6.4.
-
+
@declared_attr turns the attribute into a scalar-like
property that can be invoked from the uninstantiated class.
Declarative treats attributes specifically marked with
@@ -1403,29 +1403,29 @@ class declared_attr(property):
to mapping or declarative table configuration. The name
of the attribute is that of what the non-dynamic version
of the attribute would be.
-
+
@declared_attr is more often than not applicable to mixins,
to define relationships that are to be applied to different
implementors of the class::
-
+
class ProvidesUser(object):
"A mixin that adds a 'user' relationship to classes."
-
+
@declared_attr
def user(self):
return relationship("User")
-
+
It also can be applied to mapped classes, such as to provide
a "polymorphic" scheme for inheritance::
-
+
class Employee(Base):
id = Column(Integer, primary_key=True)
type = Column(String(50), nullable=False)
-
+
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
-
+
@declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Employee':
@@ -1435,13 +1435,13 @@ class declared_attr(property):
}
else:
return {"polymorphic_identity":cls.__name__}
-
+
"""
-
+
def __init__(self, fget, *arg, **kw):
super(declared_attr, self).__init__(fget, *arg, **kw)
self.__doc__ = fget.__doc__
-
+
def __get__(desc, self, cls):
return desc.fget(cls)
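
``instrument_declarative()`` above carries no inline example; a minimal
sketch (the ``Plain`` class and table name are hypothetical)::

    from sqlalchemy import Column, Integer, MetaData
    from sqlalchemy.ext.declarative import instrument_declarative

    class Plain(object):
        __tablename__ = 'plain'
        id = Column(Integer, primary_key=True)

    registry = {}
    metadata = MetaData()

    # builds the Table and mapper for an ordinary class, without
    # requiring it to inherit from a declarative Base
    instrument_declarative(Plain, registry, metadata)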
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 880dfb743..41fae8e7b 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -40,10 +40,10 @@ class ShardedSession(Session):
:param query_chooser: For a given Query, returns the list of shard_ids where the query
should be issued. Results from all shards returned will be combined
together into a single listing.
-
+
:param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.base.Engine`
- objects.
-
+ objects.
+
"""
super(ShardedSession, self).__init__(**kwargs)
self.shard_chooser = shard_chooser
@@ -55,7 +55,7 @@ class ShardedSession(Session):
if shards is not None:
for k in shards:
self.bind_shard(k, shards[k])
-
+
def connection(self, mapper=None, instance=None, shard_id=None, **kwargs):
if shard_id is None:
shard_id = self.shard_chooser(mapper, instance)
@@ -66,7 +66,7 @@ class ShardedSession(Session):
return self.get_bind(mapper,
shard_id=shard_id,
instance=instance).contextual_connect(**kwargs)
-
+
def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
if shard_id is None:
shard_id = self.shard_chooser(mapper, instance, clause=clause)
@@ -81,18 +81,18 @@ class ShardedQuery(Query):
self.id_chooser = self.session.id_chooser
self.query_chooser = self.session.query_chooser
self._shard_id = None
-
+
def set_shard(self, shard_id):
"""return a new query, limited to a single shard ID.
-
+
        All subsequent operations with the returned query will
be against the single shard regardless of other state.
"""
-
+
q = self._clone()
q._shard_id = shard_id
return q
-
+
def _execute_and_instances(self, context):
if self._shard_id is not None:
result = self.session.connection(
@@ -106,7 +106,7 @@ class ShardedQuery(Query):
mapper=self._mapper_zero(),
shard_id=shard_id).execute(context.statement, self._params)
partial = partial + list(self.instances(result, context))
-
+
# if some kind of in memory 'sorting'
# were done, this is where it would happen
return iter(partial)
@@ -122,4 +122,4 @@ class ShardedQuery(Query):
return o
else:
return None
-
+
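
A configuration sketch for the three chooser callables documented above;
shard names, URLs and routing logic are all illustrative::

    from sqlalchemy import create_engine
    from sqlalchemy.ext.horizontal_shard import ShardedSession

    shards = {
        'east': create_engine('sqlite:///east.db'),
        'west': create_engine('sqlite:///west.db'),
    }

    def shard_chooser(mapper, instance, clause=None):
        # pick the shard a new instance is written to
        return 'east'

    def id_chooser(query, ident):
        # shards that may contain the given primary key
        return ['east', 'west']

    def query_chooser(query):
        # shards a query should be issued against
        return ['east', 'west']

    session = ShardedSession(
        shard_chooser=shard_chooser,
        id_chooser=id_chooser,
        query_chooser=query_chooser,
        shards=shards)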
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 153eccce2..f3989a84d 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -13,7 +13,7 @@ Consider a table `interval` as below::
    from sqlalchemy import MetaData, Table, Column, Integer, create_engine
from sqlalchemy.orm import mapper, create_session
-
+
engine = create_engine('sqlite://')
metadata = MetaData()
@@ -22,22 +22,22 @@ Consider a table `interval` as below::
Column('start', Integer, nullable=False),
Column('end', Integer, nullable=False))
metadata.create_all(engine)
-
+
We can define higher level functions on mapped classes that produce SQL
expressions at the class level, and Python expression evaluation at the
instance level. Below, each function decorated with :func:`hybrid.method`
or :func:`hybrid.property` may receive ``self`` as an instance of the class,
or as the class itself::
-
+
# A base class for intervals
    from sqlalchemy.ext import hybrid
-
+
class Interval(object):
def __init__(self, start, end):
self.start = start
self.end = end
-
+
@hybrid.property
def length(self):
return self.end - self.start
@@ -45,13 +45,13 @@ or as the class itself::
@hybrid.method
def contains(self,point):
return (self.start <= point) & (point < self.end)
-
+
@hybrid.method
def intersects(self, other):
return self.contains(other.start) | self.contains(other.end)
-
+
"""
from sqlalchemy import util
from sqlalchemy.orm import attributes, interfaces
@@ -60,7 +60,7 @@ class method(object):
def __init__(self, func, expr=None):
self.func = func
self.expr = expr or func
-
+
def __get__(self, instance, owner):
if instance is None:
return new.instancemethod(self.expr, owner, owner.__class__)
@@ -84,13 +84,13 @@ class property_(object):
return self.expr(owner)
else:
return self.fget(instance)
-
+
def __set__(self, instance, value):
self.fset(instance, value)
-
+
def __delete__(self, instance):
self.fdel(instance)
-
+
def setter(self, fset):
self.fset = fset
return self
@@ -98,11 +98,11 @@ class property_(object):
def deleter(self, fdel):
self.fdel = fdel
return self
-
+
def expression(self, expr):
self.expr = expr
return self
-
+
def comparator(self, comparator):
proxy_attr = attributes.\
create_proxied_attribute(self)
@@ -115,15 +115,15 @@ class property_(object):
class Comparator(interfaces.PropComparator):
def __init__(self, expression):
self.expression = expression
-
+
def __clause_element__(self):
expr = self.expression
while hasattr(expr, '__clause_element__'):
expr = expr.__clause_element__()
return expr
-
+
def adapted(self, adapter):
# interesting....
return self
-
+
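
Assuming ``Interval`` is mapped to the ``interval`` table from the
docstring above (table variable name and engine follow that example),
the decorated functions serve both levels::

    from sqlalchemy.orm import mapper, create_session

    mapper(Interval, interval)

    i = Interval(5, 10)
    assert i.length == 5      # instance level: plain Python arithmetic
    assert i.contains(7)

    session = create_session(bind=engine)
    # class level: contains() emits a SQL expression usable in filter()
    query = session.query(Interval).filter(Interval.contains(7))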
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index 2bb879322..11a7977f6 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -22,47 +22,47 @@ import weakref
class Mutable(object):
"""Mixin that defines transparent propagation of change
events to a parent object.
-
+
"""
-
+
@memoized_property
def _parents(self):
"""Dictionary of parent object->attribute name on the parent."""
-
+
return weakref.WeakKeyDictionary()
-
+
def change(self):
"""Subclasses should call this method whenever change events occur."""
-
+
for parent, key in self._parents.items():
flag_modified(parent, key)
-
+
@classmethod
def coerce(cls, key, value):
"""Given a value, coerce it into this type.
-
+
By default raises ValueError.
"""
if value is None:
return None
raise ValueError("Attribute '%s' accepts objects of type %s" % (key, cls))
-
-
+
+
@classmethod
def associate_with_attribute(cls, attribute):
"""Establish this type as a mutation listener for the given
mapped descriptor.
-
+
"""
key = attribute.key
parent_cls = attribute.class_
-
+
def load(state, *args):
- """Listen for objects loaded or refreshed.
-
+ """Listen for objects loaded or refreshed.
+
Wrap the target data member's value with
``Mutable``.
-
+
"""
val = state.dict.get(key, None)
if val is not None:
@@ -73,20 +73,20 @@ class Mutable(object):
def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
-
+
Establish a weak reference to the parent object
on the incoming value, remove it for the one
outgoing.
-
+
"""
-
+
if not isinstance(value, cls):
value = cls.coerce(key, value)
value._parents[target.obj()] = key
if isinstance(oldvalue, cls):
oldvalue._parents.pop(state.obj(), None)
return value
-
+
event.listen(parent_cls, 'load', load, raw=True)
event.listen(parent_cls, 'refresh', load, raw=True)
event.listen(attribute, 'set', set, raw=True, retval=True)
@@ -97,7 +97,7 @@ class Mutable(object):
def associate_with(cls, sqltype):
"""Associate this wrapper with all future mapped columns
of the given type.
-
+
This is a convenience method that calls ``associate_with_attribute`` automatically.
.. warning:: The listeners established by this method are *global*
@@ -105,7 +105,7 @@ class Mutable(object):
:meth:`.associate_with` for types that are permanent to an application,
        not with ad-hoc types, else this will cause unbounded growth
in memory usage.
-
+
"""
def listen_for_type(mapper, class_):
@@ -114,39 +114,39 @@ class Mutable(object):
if isinstance(prop.columns[0].type, sqltype):
cls.associate_with_attribute(getattr(class_, prop.key))
break
-
+
event.listen(mapper, 'mapper_configured', listen_for_type)
-
+
@classmethod
def as_mutable(cls, sqltype):
"""Associate a SQL type with this mutable Python type.
-
+
This establishes listeners that will detect ORM mappings against
the given type, adding mutation event trackers to those mappings.
-
+
The type is returned, unconditionally as an instance, so that
:meth:`.as_mutable` can be used inline::
-
+
Table('mytable', metadata,
Column('id', Integer, primary_key=True),
Column('data', MyMutableType.as_mutable(PickleType))
)
-
+
Note that the returned type is always an instance, even if a class
is given, and that only columns which are declared specifically with that
type instance receive additional instrumentation.
-
+
        To associate a particular mutable type with all occurrences of a
        particular type, use the :meth:`.Mutable.associate_with` classmethod
        of the particular :class:`.Mutable` subclass to establish a global
        association.
-
+
.. warning:: The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
:meth:`.as_mutable` for types that are permanent to an application,
        not with ad-hoc types, else this will cause unbounded growth
in memory usage.
-
+
"""
sqltype = types.to_instance(sqltype)
@@ -156,9 +156,9 @@ class Mutable(object):
if prop.columns[0].type is sqltype:
cls.associate_with_attribute(getattr(class_, prop.key))
break
-
+
event.listen(mapper, 'mapper_configured', listen_for_type)
-
+
return sqltype
@@ -171,14 +171,14 @@ class MutableComposite(object):
"""Mixin that defines transparent propagation of change
events on a SQLAlchemy "composite" object to its
owning parent or parents.
-
+
Composite classes, in addition to meeting the usage contract
defined in :ref:`mapper_composite`, also define some system
of relaying change events to the given :meth:`.change`
method, which will notify all parents of the change. Below
the special Python method ``__setattr__`` is used to intercept
all changes::
-
+
class Point(MutableComposite):
def __init__(self, x, y):
self.x = x
@@ -187,10 +187,10 @@ class MutableComposite(object):
def __setattr__(self, key, value):
object.__setattr__(self, key, value)
self.change()
-
+
def __composite_values__(self):
return self.x, self.y
-
+
def __eq__(self, other):
return isinstance(other, Point) and \
other.x == self.x and \
@@ -206,44 +206,44 @@ class MutableComposite(object):
:class:`.MutableComposite` for types that are permanent to an application,
    not with ad-hoc types, else this will cause unbounded growth
in memory usage.
-
+
"""
__metaclass__ = _MutableCompositeMeta
@memoized_property
def _parents(self):
"""Dictionary of parent object->attribute name on the parent."""
-
+
return weakref.WeakKeyDictionary()
def change(self):
"""Subclasses should call this method whenever change events occur."""
-
+
for parent, key in self._parents.items():
-
+
prop = object_mapper(parent).get_property(key)
for value, attr_name in zip(
self.__composite_values__(),
prop._attribute_keys):
setattr(parent, attr_name, value)
-
+
@classmethod
def _listen_on_attribute(cls, attribute):
"""Establish this type as a mutation listener for the given
mapped descriptor.
-
+
"""
key = attribute.key
parent_cls = attribute.class_
-
+
def load(state, *args):
- """Listen for objects loaded or refreshed.
-
+ """Listen for objects loaded or refreshed.
+
Wrap the target data member's value with
``Mutable``.
-
+
"""
-
+
val = state.dict.get(key, None)
if val is not None:
val._parents[state.obj()] = key
@@ -251,37 +251,37 @@ class MutableComposite(object):
def set(target, value, oldvalue, initiator):
"""Listen for set/replace events on the target
data member.
-
+
Establish a weak reference to the parent object
on the incoming value, remove it for the one
outgoing.
-
+
"""
-
+
value._parents[target.obj()] = key
if isinstance(oldvalue, cls):
oldvalue._parents.pop(state.obj(), None)
return value
-
+
event.listen(parent_cls, 'load', load, raw=True)
event.listen(parent_cls, 'refresh', load, raw=True)
event.listen(attribute, 'set', set, raw=True, retval=True)
# TODO: need a deserialize hook here
-
+
@classmethod
def _setup_listeners(cls):
"""Associate this wrapper with all future mapped compoistes
of the given type.
-
+
        This is a convenience method that calls ``_listen_on_attribute`` automatically.
-
+
"""
-
+
def listen_for_type(mapper, class_):
for prop in mapper.iterate_properties:
if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
cls._listen_on_attribute(getattr(class_, prop.key))
-
+
event.listen(mapper, 'mapper_configured', listen_for_type)
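
A sketch of a concrete ``Mutable`` subclass wired up through
``as_mutable()``; the ``MutableDict`` name is hypothetical, not part of
the library::

    from sqlalchemy import PickleType
    from sqlalchemy.ext.mutable import Mutable

    class MutableDict(Mutable, dict):
        @classmethod
        def coerce(cls, key, value):
            # coerce plain dicts into this wrapper type
            if not isinstance(value, MutableDict):
                if isinstance(value, dict):
                    return MutableDict(value)
                return Mutable.coerce(key, value)
            return value

        def __setitem__(self, key, value):
            dict.__setitem__(self, key, value)
            self.change()    # notify parents, per Mutable.change()

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            self.change()

    # columns declared with this returned type instance are tracked
    MutableDictType = MutableDict.as_mutable(PickleType)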
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 062172bcc..ce63b88ea 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -52,7 +52,7 @@ An ``orderinglist`` can automate this and manage the 'position' attribute on all
related bullets for you.
.. sourcecode:: python+sql
-
+
mapper(Slide, slides_table, properties={
'bullets': relationship(Bullet,
collection_class=ordering_list('position'),
@@ -71,7 +71,7 @@ related bullets for you.
Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
+starting in the proper order, so be SURE to put an order_by on your relationship.
.. warning:: ``ordering_list`` only provides limited functionality when a primary
key column or unique column is the target of the sort. Since changing the order of
@@ -89,7 +89,7 @@ or some other integer, provide ``count_from=1``.
Ordering values are not limited to incrementing integers. Almost any scheme
can be implemented by supplying a custom ``ordering_func`` that maps a Python list
-index to any value you require.
+index to any value you require.
@@ -292,7 +292,7 @@ class OrderingList(list):
stop = index.stop or len(self)
if stop < 0:
stop += len(self)
-
+
for i in xrange(start, stop, step):
self.__setitem__(i, entity[i])
else:
@@ -312,7 +312,7 @@ class OrderingList(list):
super(OrderingList, self).__delslice__(start, end)
self._reorder()
# end Py2K
-
+
for func_name, func in locals().items():
if (util.callable(func) and func.func_name == func_name and
not func.__doc__ and hasattr(list, func_name)):
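
A sketch of the custom ``ordering_func`` mentioned above, numbering rows
in steps of ten; the function name is illustrative::

    from sqlalchemy.ext.orderinglist import ordering_list

    def stepped(index, collection):
        # map list index 0, 1, 2, ... to stored values 0, 10, 20, ...
        return index * 10

    factory = ordering_list('position', ordering_func=stepped)
    # use as: relationship(Bullet, collection_class=factory,
    #                      order_by=[bullets_table.c.position])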
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 8c098c3df..077a0fd9e 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -18,17 +18,17 @@ Usage is nearly the same as that of the standard Python pickle module::
from sqlalchemy.ext.serializer import loads, dumps
metadata = MetaData(bind=some_engine)
Session = scoped_session(sessionmaker())
-
+
# ... define mappers
-
+
query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
-
+
# pickle the query
serialized = dumps(query)
-
+
# unpickle. Pass in metadata + scoped_session
query2 = loads(serialized, metadata, Session)
-
+
print query2.all()
Similar restrictions as when using raw pickle apply; mapped classes must be
@@ -81,7 +81,7 @@ __all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
def Serializer(*args, **kw):
pickler = pickle.Pickler(*args, **kw)
-
+
def persistent_id(obj):
#print "serializing:", repr(obj)
if isinstance(obj, QueryableAttribute):
@@ -101,15 +101,15 @@ def Serializer(*args, **kw):
else:
return None
return id
-
+
pickler.persistent_id = persistent_id
return pickler
-
+
our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
def Deserializer(file, metadata=None, scoped_session=None, engine=None):
unpickler = pickle.Unpickler(file)
-
+
def get_engine():
if engine:
return engine
@@ -119,7 +119,7 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
return metadata.bind
else:
return None
-
+
def persistent_load(id):
m = our_ids.match(id)
if not m:
@@ -152,10 +152,10 @@ def dumps(obj, protocol=0):
pickler = Serializer(buf, protocol)
pickler.dump(obj)
return buf.getvalue()
-
+
def loads(data, metadata=None, scoped_session=None, engine=None):
buf = byte_buffer(data)
unpickler = Deserializer(buf, metadata, scoped_session, engine)
return unpickler.load()
-
-
+
+
diff --git a/lib/sqlalchemy/ext/sqlsoup.py b/lib/sqlalchemy/ext/sqlsoup.py
index ebe2feb7f..9e6f63aca 100644
--- a/lib/sqlalchemy/ext/sqlsoup.py
+++ b/lib/sqlalchemy/ext/sqlsoup.py
@@ -257,7 +257,7 @@ The default session is available at the module level in SQLSoup,
via::
>>> from sqlalchemy.ext.sqlsoup import Session
-
+
The configuration of this session is ``autoflush=True``,
``autocommit=False``. This means when you work with the SqlSoup
object, you need to call ``db.commit()`` in order to have
@@ -460,7 +460,7 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
engine_encoding = engine.dialect.encoding
mapname = mapname.encode(engine_encoding)
# end Py2K
-
+
if isinstance(selectable, Table):
klass = TableClassType(mapname, (base_cls,), {})
else:
@@ -475,10 +475,10 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
except AttributeError:
raise TypeError('unable to compare with %s' % o.__class__)
return t1, t2
-
+
# python2/python3 compatible system of
# __cmp__ - __lt__ + __eq__
-
+
def __lt__(self, o):
t1, t2 = _compare(self, o)
return t1 < t2
@@ -486,12 +486,12 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
def __eq__(self, o):
t1, t2 = _compare(self, o)
return t1 == t2
-
+
def __repr__(self):
L = ["%s=%r" % (key, getattr(self, key, ''))
for key in self.__class__.c.keys()]
return '%s(%s)' % (self.__class__.__name__, ','.join(L))
-
+
for m in ['__eq__', '__repr__', '__lt__']:
setattr(klass, m, eval(m))
klass._table = selectable
@@ -500,16 +500,16 @@ def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
selectable,
extension=AutoAdd(session),
**mapper_kwargs)
-
+
for k in mappr.iterate_properties:
klass.c[k.key] = k.columns[0]
-
+
klass._query = session.query_property()
return klass
class SqlSoup(object):
"""Represent an ORM-wrapped database resource."""
-
+
def __init__(self, engine_or_metadata, base=object, session=None):
"""Initialize a new :class:`.SqlSoup`.
@@ -525,10 +525,10 @@ class SqlSoup(object):
module is used.
"""
-
+
self.session = session or Session
self.base=base
-
+
if isinstance(engine_or_metadata, MetaData):
self._metadata = engine_or_metadata
elif isinstance(engine_or_metadata, (basestring, Engine)):
@@ -536,10 +536,10 @@ class SqlSoup(object):
else:
raise ArgumentError("invalid engine or metadata argument %r" %
engine_or_metadata)
-
+
self._cache = {}
self.schema = None
-
+
@property
def bind(self):
"""The :class:`.Engine` associated with this :class:`.SqlSoup`."""
@@ -551,83 +551,83 @@ class SqlSoup(object):
"""Mark an instance as deleted."""
self.session.delete(instance)
-
+
def execute(self, stmt, **params):
"""Execute a SQL statement.
-
+
The statement may be a string SQL string,
an :func:`.expression.select` construct, or an :func:`.expression.text`
construct.
-
+
"""
return self.session.execute(sql.text(stmt, bind=self.bind), **params)
-
+
@property
def _underlying_session(self):
if isinstance(self.session, session.Session):
return self.session
else:
return self.session()
-
+
def connection(self):
"""Return the current :class:`.Connection` in use by the current transaction."""
-
+
return self._underlying_session._connection_for_bind(self.bind)
-
+
def flush(self):
"""Flush pending changes to the database.
-
+
See :meth:`.Session.flush`.
-
+
"""
self.session.flush()
-
+
def rollback(self):
"""Rollback the current transction.
-
+
See :meth:`.Session.rollback`.
-
+
"""
self.session.rollback()
-
+
def commit(self):
"""Commit the current transaction.
-
+
See :meth:`.Session.commit`.
-
+
"""
self.session.commit()
-
+
def clear(self):
"""Synonym for :meth:`.SqlSoup.expunge_all`."""
-
+
self.session.expunge_all()
-
+
def expunge(self, instance):
"""Remove an instance from the :class:`.Session`.
-
+
See :meth:`.Session.expunge`.
-
+
"""
self.session.expunge(instance)
-
+
def expunge_all(self):
"""Clear all objects from the current :class:`.Session`.
-
+
See :meth:`.Session.expunge_all`.
-
+
"""
self.session.expunge_all()
def map_to(self, attrname, tablename=None, selectable=None,
schema=None, base=None, mapper_args=util.frozendict()):
"""Configure a mapping to the given attrname.
-
+
This is the "master" method that can be used to create any
configuration.
-
+
(new in 0.6.6)
-
+
:param attrname: String attribute name which will be
          established as an attribute on this :class:`.SqlSoup`
instance.
@@ -648,8 +648,8 @@ class SqlSoup(object):
argument.
:param schema: String schema name to use if the
``tablename`` argument is present.
-
-
+
+
"""
if attrname in self._cache:
raise InvalidRequestError(
@@ -657,7 +657,7 @@ class SqlSoup(object):
attrname,
class_mapper(self._cache[attrname]).mapped_table
))
-
+
if tablename is not None:
if not isinstance(tablename, basestring):
raise ArgumentError("'tablename' argument must be a string."
@@ -692,7 +692,7 @@ class SqlSoup(object):
raise PKNotFoundError(
"selectable '%s' does not have a primary "
"key defined" % selectable)
-
+
mapped_cls = _class_for_table(
self.session,
self.engine,
@@ -702,14 +702,14 @@ class SqlSoup(object):
)
self._cache[attrname] = mapped_cls
return mapped_cls
-
+
def map(self, selectable, base=None, **mapper_args):
"""Map a selectable directly.
-
+
The class and its mapping are not cached and will
be discarded once dereferenced (as of 0.6.6).
-
+
:param selectable: an :func:`.expression.select` construct.
:param base: a Python class which will be used as the
base for the mapped class. If ``None``, the "base"
@@ -718,7 +718,7 @@ class SqlSoup(object):
``object``.
:param mapper_args: Dictionary of arguments which will
be passed directly to :func:`.orm.mapper`.
-
+
"""
return _class_for_table(
@@ -735,7 +735,7 @@ class SqlSoup(object):
The class and its mapping are not cached and will
be discarded once dereferenced (as of 0.6.6).
-
+
:param selectable: an :func:`.expression.select` construct.
:param base: a Python class which will be used as the
base for the mapped class. If ``None``, the "base"
@@ -744,9 +744,9 @@ class SqlSoup(object):
``object``.
:param mapper_args: Dictionary of arguments which will
be passed directly to :func:`.orm.mapper`.
-
+
"""
-
+
# TODO give meaningful aliases
return self.map(
expression._clause_element_as_expr(selectable).
@@ -759,7 +759,7 @@ class SqlSoup(object):
The class and its mapping are not cached and will
be discarded once dereferenced (as of 0.6.6).
-
+
:param left: a mapped class or table object.
:param right: a mapped class or table object.
        :param onclause: optional "ON" clause construct.
@@ -771,24 +771,24 @@ class SqlSoup(object):
``object``.
:param mapper_args: Dictionary of arguments which will
be passed directly to :func:`.orm.mapper`.
-
+
"""
-
+
j = join(left, right, onclause=onclause, isouter=isouter)
return self.map(j, base=base, **mapper_args)
def entity(self, attr, schema=None):
"""Return the named entity from this :class:`.SqlSoup`, or
create if not present.
-
+
For more generalized mapping, see :meth:`.map_to`.
-
+
"""
try:
return self._cache[attr]
except KeyError, ke:
return self.map_to(attr, tablename=attr, schema=schema)
-
+
def __getattr__(self, attr):
return self.entity(attr)
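
A short sketch of the ``map_to()`` "master" method described above; the
database URL and table name are illustrative::

    from sqlalchemy.ext.sqlsoup import SqlSoup

    db = SqlSoup('sqlite:///existing.db')

    # expose the 'users' table under a different attribute name
    db.map_to('people', tablename='users')
    rows = db.people.all()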
diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py
index 3acdcd102..0a1eec75d 100644
--- a/lib/sqlalchemy/interfaces.py
+++ b/lib/sqlalchemy/interfaces.py
@@ -19,23 +19,23 @@ class PoolListener(object):
.. note:: :class:`PoolListener` is deprecated. Please
refer to :class:`.PoolEvents`.
-
+
Usage::
-
+
class MyListener(PoolListener):
def connect(self, dbapi_con, con_record):
'''perform connect operations'''
# etc.
-
+
# create a new pool with a listener
p = QueuePool(..., listeners=[MyListener()])
-
+
# add a listener after the fact
p.add_listener(MyListener())
-
+
# usage with create_engine()
e = create_engine("url://", listeners=[MyListener()])
-
+
All of the standard connection :class:`~sqlalchemy.pool.Pool` types can
accept event listeners for key connection lifecycle events:
creation, pool check-out and check-in. There are no events fired
@@ -66,14 +66,14 @@ class PoolListener(object):
internal event queues based on its capabilities. In terms of
efficiency and function call overhead, you're much better off only
providing implementations for the hooks you'll be using.
-
+
"""
-
+
@classmethod
def _adapt_listener(cls, self, listener):
"""Adapt a :class:`PoolListener` to individual
:class:`event.Dispatch` events.
-
+
"""
listener = util.as_interface(listener, methods=('connect',
@@ -86,8 +86,8 @@ class PoolListener(object):
event.listen(self, 'checkout', listener.checkout)
if hasattr(listener, 'checkin'):
event.listen(self, 'checkin', listener.checkin)
-
-
+
+
def connect(self, dbapi_con, con_record):
"""Called once for each new DB-API connection or Pool's ``creator()``.
@@ -151,16 +151,16 @@ class ConnectionProxy(object):
.. note:: :class:`ConnectionProxy` is deprecated. Please
refer to :class:`.EngineEvents`.
-
+
Either or both of the ``execute()`` and ``cursor_execute()``
may be implemented to intercept compiled statement and
cursor level executions, e.g.::
-
+
class MyProxy(ConnectionProxy):
def execute(self, conn, execute, clauseelement, *multiparams, **params):
print "compiled statement:", clauseelement
return execute(clauseelement, *multiparams, **params)
-
+
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
print "raw statement:", statement
return execute(cursor, statement, parameters, context)
@@ -168,14 +168,14 @@ class ConnectionProxy(object):
The ``execute`` argument is a function that will fulfill the default
execution behavior for the operation. The signature illustrated
in the example should be used.
-
+
The proxy is installed into an :class:`~sqlalchemy.engine.Engine` via
the ``proxy`` argument::
-
+
e = create_engine('someurl://', proxy=MyProxy())
-
+
"""
-
+
@classmethod
def _adapt_listener(cls, self, listener):
@@ -240,66 +240,66 @@ class ConnectionProxy(object):
adapt_listener(listener.rollback_twophase))
event.listen(self, 'commit_twophase',
adapt_listener(listener.commit_twophase))
-
-
+
+
def execute(self, conn, execute, clauseelement, *multiparams, **params):
"""Intercept high level execute() events."""
-
-
+
+
return execute(clauseelement, *multiparams, **params)
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
"""Intercept low-level cursor execute() events."""
-
+
return execute(cursor, statement, parameters, context)
-
+
def begin(self, conn, begin):
"""Intercept begin() events."""
-
+
return begin()
-
+
def rollback(self, conn, rollback):
"""Intercept rollback() events."""
-
+
return rollback()
-
+
def commit(self, conn, commit):
"""Intercept commit() events."""
-
+
return commit()
-
+
def savepoint(self, conn, savepoint, name=None):
"""Intercept savepoint() events."""
-
+
return savepoint(name=name)
-
+
def rollback_savepoint(self, conn, rollback_savepoint, name, context):
"""Intercept rollback_savepoint() events."""
-
+
return rollback_savepoint(name, context)
-
+
def release_savepoint(self, conn, release_savepoint, name, context):
"""Intercept release_savepoint() events."""
-
+
return release_savepoint(name, context)
-
+
def begin_twophase(self, conn, begin_twophase, xid):
"""Intercept begin_twophase() events."""
-
+
return begin_twophase(xid)
-
+
def prepare_twophase(self, conn, prepare_twophase, xid):
"""Intercept prepare_twophase() events."""
-
+
return prepare_twophase(xid)
-
+
def rollback_twophase(self, conn, rollback_twophase, xid, is_prepared):
"""Intercept rollback_twophase() events."""
-
+
return rollback_twophase(xid, is_prepared)
-
+
def commit_twophase(self, conn, commit_twophase, xid, is_prepared):
"""Intercept commit_twophase() events."""
-
+
return commit_twophase(xid, is_prepared)
-
+
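
Both interfaces above are deprecated in favor of the event system, so a
migration sketch may help; the listener body is illustrative::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    def on_checkout(dbapi_con, con_record, con_proxy):
        # replaces PoolListener.checkout()
        pass

    # the Engine target resolves to engine.pool, as in PoolEvents
    event.listen(engine, 'checkout', on_checkout)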
diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py
index e749ec031..adfede75f 100644
--- a/lib/sqlalchemy/log.py
+++ b/lib/sqlalchemy/log.py
@@ -45,29 +45,29 @@ def class_logger(cls, enable=False):
cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO)
cls.logger = logger
_logged_classes.add(cls)
-
+
class Identified(object):
logging_name = None
-
+
def _should_log_debug(self):
return self.logger.isEnabledFor(logging.DEBUG)
-
+
def _should_log_info(self):
return self.logger.isEnabledFor(logging.INFO)
class InstanceLogger(object):
"""A logger adapter (wrapper) for :class:`.Identified` subclasses.
-
+
This allows multiple instances (e.g. Engine or Pool instances)
to share a logger, while having their verbosity controlled on a
per-instance basis.
The basic functionality is to return a logging level
which is based on an instance's echo setting.
-
+
Default implementation is:
-
+
'debug' -> logging.DEBUG
True -> logging.INFO
False -> Effective level of underlying logger
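A simplified sketch of that mapping and its fallback behavior (an
illustration only; the real lookup table is internal to this module)::

    import logging

    _echo_map = {
        None: logging.NOTSET,
        False: logging.NOTSET,
        True: logging.INFO,
        'debug': logging.DEBUG,
    }

    def effective_level(echo, logger):
        # NOTSET defers to the underlying logger's configuration
        level = _echo_map[echo]
        if level == logging.NOTSET:
            level = logger.getEffectiveLevel()
        return level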
@@ -86,7 +86,7 @@ class InstanceLogger(object):
def __init__(self, echo, name):
self.echo = echo
self.logger = logging.getLogger(name)
-
+
# if echo flag is enabled and no handlers,
# add a handler to the list
if self._echo_map[echo] <= logging.INFO \
@@ -98,17 +98,17 @@ class InstanceLogger(object):
#
def debug(self, msg, *args, **kwargs):
"""Delegate a debug call to the underlying logger."""
-
+
self.log(logging.DEBUG, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
"""Delegate an info call to the underlying logger."""
-
+
self.log(logging.INFO, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
"""Delegate a warning call to the underlying logger."""
-
+
self.log(logging.WARNING, msg, *args, **kwargs)
warn = warning
@@ -121,27 +121,27 @@ class InstanceLogger(object):
def exception(self, msg, *args, **kwargs):
"""Delegate an exception call to the underlying logger."""
-
+
kwargs["exc_info"] = 1
self.log(logging.ERROR, msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
"""Delegate a critical call to the underlying logger."""
-
+
self.log(logging.CRITICAL, msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
"""Delegate a log call to the underlying logger.
-
+
The level here is determined by the echo
flag as well as that of the underlying logger, and
logger._log() is called directly.
-
+
"""
# inline the logic from isEnabledFor(),
# getEffectiveLevel(), to avoid overhead.
-
+
if self.logger.manager.disable >= level:
return
@@ -154,14 +154,14 @@ class InstanceLogger(object):
def isEnabledFor(self, level):
"""Is this logger enabled for level 'level'?"""
-
+
if self.logger.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def getEffectiveLevel(self):
"""What's the effective level for this logger?"""
-
+
level = self._echo_map[self.echo]
if level == logging.NOTSET:
level = self.logger.getEffectiveLevel()
@@ -176,9 +176,9 @@ def instance_logger(instance, echoflag=None):
else:
name = "%s.%s" % (instance.__class__.__module__,
instance.__class__.__name__)
-
+
instance._echo = echoflag
-
+
if echoflag in (False, None):
# if no echo setting or False, return a Logger directly,
# avoiding overhead of filtering
@@ -188,9 +188,9 @@ def instance_logger(instance, echoflag=None):
# which checks the flag, overrides normal log
# levels by calling logger._log()
logger = InstanceLogger(echoflag, name)
-
+
instance.logger = logger
-
+
class echo_property(object):
__doc__ = """\
When ``True``, enable log output for this element.
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index e9f4f14f6..0b77b0239 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -143,7 +143,7 @@ def scoped_session(session_factory, scopefunc=None):
def create_session(bind=None, **kwargs):
"""Create a new :class:`.Session`
with no automation enabled by default.
-
+
This function is used primarily for testing. The usual
route to :class:`.Session` creation is via its constructor
or the :func:`.sessionmaker` function.
@@ -178,10 +178,10 @@ def create_session(bind=None, **kwargs):
def relationship(argument, secondary=None, **kwargs):
"""Provide a relationship of a primary Mapper to a secondary Mapper.
-
+
.. note:: :func:`relationship` is historically known as
:func:`relation` prior to version 0.6.
-
+
This corresponds to a parent-child or associative table relationship. The
constructed class is an instance of :class:`RelationshipProperty`.
@@ -212,7 +212,7 @@ def relationship(argument, secondary=None, **kwargs):
for applications that make use of
:func:`.attributes.get_history` which also need to know
the "previous" value of the attribute. (New in 0.6.6)
-
+
:param backref:
indicates the string name of a property to be placed on the related
mapper's class that will handle this relationship in the other
@@ -220,7 +220,7 @@ def relationship(argument, secondary=None, **kwargs):
when the mappers are configured. Can also be passed as a
:func:`backref` object to control the configuration of the
new relationship.
-
+
:param back_populates:
Takes a string name and has the same meaning as ``backref``,
except the complementing property is **not** created automatically,
@@ -263,7 +263,7 @@ def relationship(argument, secondary=None, **kwargs):
* ``all`` - shorthand for "save-update, merge, refresh-expire,
expunge, delete"
-
+
:param cascade_backrefs=True:
a boolean value indicating if the ``save-update`` cascade should
operate along a backref event. When set to ``False`` on a
@@ -273,9 +273,9 @@ def relationship(argument, secondary=None, **kwargs):
set to ``False`` on a many-to-one relationship that has a one-to-many
backref, appending a persistent object to the one-to-many collection
on a transient object will not add the transient to the session.
-
+
``cascade_backrefs`` is new in 0.6.5.
-
+
:param collection_class:
a class or callable that returns a new list-holding object. will
be used in place of a plain list for storing elements.
@@ -288,11 +288,11 @@ def relationship(argument, secondary=None, **kwargs):
:param doc:
docstring which will be applied to the resulting descriptor.
-
+
:param extension:
an :class:`.AttributeExtension` instance, or list of extensions,
which will be prepended to the list of attribute listeners for
- the resulting descriptor placed on the class.
+ the resulting descriptor placed on the class.
**Deprecated.** Please see :class:`.AttributeEvents`.
:param foreign_keys:
@@ -307,7 +307,7 @@ def relationship(argument, secondary=None, **kwargs):
"foreign" in the table metadata, allowing the specification
of a list of :class:`.Column` objects that should be considered
part of the foreign key.
-
+
There are only two use cases for ``foreign_keys`` - one, when it is not
convenient for :class:`.Table` metadata to contain its own foreign key
metadata (which should be almost never, unless reflecting a large amount of
@@ -325,7 +325,7 @@ def relationship(argument, secondary=None, **kwargs):
via many-to-one using local foreign keys that are not nullable,
or when the reference is one-to-one or a collection that is
guaranteed to have one or at least one entry.
-
+
:param join_depth:
when non-``None``, an integer value indicating how many levels
deep "eager" loaders should join on a self-referring or cyclical
@@ -343,7 +343,7 @@ def relationship(argument, secondary=None, **kwargs):
* ``select`` - items should be loaded lazily when the property is first
accessed, using a separate SELECT statement, or identity map
fetch for simple many-to-one references.
-
+
* ``immediate`` - items should be loaded as the parents are loaded,
using a separate SELECT statement, or identity map fetch for
simple many-to-one references. (new as of 0.6.5)
@@ -352,7 +352,7 @@ def relationship(argument, secondary=None, **kwargs):
that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
the join is "outer" or not is determined by the ``innerjoin``
parameter.
-
+
* ``subquery`` - items should be loaded "eagerly" within the same
query as that of the parent, using a second SQL statement
which issues a JOIN to a subquery of the original
@@ -370,18 +370,18 @@ def relationship(argument, secondary=None, **kwargs):
allowing ``append()`` and ``remove()``. Changes to the
collection will not be visible until flushed
to the database, where it is then refetched upon iteration.
-
+
* True - a synonym for 'select'
-
+
* False - a synonym for 'joined'
-
+
* None - a synonym for 'noload'
-
+
Detailed discussion of loader strategies is at :ref:`loading_toplevel`.
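For example, a hypothetical ``User``/``Address`` mapping that
selects the joined strategy for its collection (any of the names
above may be passed)::

    from sqlalchemy.orm import mapper, relationship

    mapper(User, users_table, properties={
        # 'lazy' picks one of the loader strategies described above
        'addresses': relationship(Address, lazy='joined')
    })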
-
+
:param load_on_pending=False:
Indicates loading behavior for transient or pending parent objects.
-
+
When set to ``True``, causes the lazy-loader to
issue a query for a parent object that is not persistent, meaning it has
never been flushed. This may take effect for a pending object when
@@ -389,21 +389,21 @@ def relationship(argument, secondary=None, **kwargs):
"attached" to a :class:`.Session` but is not part of its pending
collection. Attachment of transient objects to the session without
moving to the "pending" state is not a supported behavior at this time.
-
+
Note that the load of related objects on a pending or transient object
also does not trigger any attribute change events - no user-defined
events will be emitted for these attributes, and if and when the
object is ultimately flushed, only the user-specific foreign key
attributes will be part of the modified state.
-
+
The load_on_pending flag does not improve behavior
when the ORM is used normally - object references should be constructed
at the object level, not at the foreign key level, so that they
are present in an ordinary way before flush() proceeds. This flag
is not intended for general use.
-
+
New in 0.6.5.
-
+
:param order_by:
indicates the ordering that should be applied when loading these
items.
@@ -456,7 +456,7 @@ def relationship(argument, secondary=None, **kwargs):
(i.e. SQLite, MySQL MyISAM tables).
Also see the passive_updates flag on ``mapper()``.
-
+
A future SQLAlchemy release will provide a "detect" feature for
this flag.
@@ -503,7 +503,7 @@ def relationship(argument, secondary=None, **kwargs):
should be treated either as one-to-one or one-to-many. Its
usage is optional unless delete-orphan cascade is also
set on this relationship(), in which case it's required (new in 0.5.2).
-
+
:param uselist=(True|False):
a boolean that indicates if this property should be loaded as a
list or a scalar. In most cases, this value is determined
@@ -528,9 +528,9 @@ def relationship(argument, secondary=None, **kwargs):
def relation(*arg, **kw):
"""A synonym for :func:`relationship`."""
-
+
return relationship(*arg, **kw)
-
+
def dynamic_loader(argument, secondary=None, primaryjoin=None,
secondaryjoin=None, foreign_keys=None, backref=None,
post_update=False, cascade=False, remote_side=None,
@@ -614,11 +614,11 @@ def column_property(*args, **kwargs):
it does not load immediately, and is instead loaded when the
attribute is first accessed on an instance. See also
:func:`~sqlalchemy.orm.deferred`.
-
+
:param doc:
optional string that will be applied as the doc on the
class-bound descriptor.
-
+
:param extension:
an
:class:`.AttributeExtension`
@@ -634,10 +634,10 @@ def column_property(*args, **kwargs):
def composite(class_, *cols, **kwargs):
"""Return a composite column-based property for use with a Mapper.
-
+
See the mapping documentation section :ref:`mapper_composite` for a full
usage example.
-
+
:param class\_:
The "composite type" class.
@@ -788,7 +788,7 @@ def mapper(class_, local_table=None, *args, **params):
:param passive_updates: Indicates UPDATE behavior of foreign keys
when a primary key changes on a joined-table inheritance or other
joined table mapping.
-
+
When True, it is assumed that ON UPDATE CASCADE is configured on
the foreign key in the database, and that the database will handle
propagation of an UPDATE from a source column to dependent rows.
@@ -797,20 +797,20 @@ def mapper(class_, local_table=None, *args, **params):
required for this operation. The relationship() will update the
value of the attribute on related items which are locally present
in the session during a flush.
-
+
When False, it is assumed that the database does not enforce
referential integrity and will not be issuing its own CASCADE
operation for an update. The relationship() will issue the
appropriate UPDATE statements to the database in response to the
change of a referenced key, and items locally present in the
session during a flush will also be refreshed.
-
+
This flag should probably be set to False if primary key changes
are expected and the database in use doesn't support CASCADE (i.e.
SQLite, MySQL MyISAM tables).
-
+
Also see the passive_updates flag on :func:`relationship()`.
-
+
A future SQLAlchemy release will provide a "detect" feature for
this flag.
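For example, a minimal sketch with a hypothetical ``User`` mapping,
showing where the flag is passed::

    from sqlalchemy.orm import mapper

    # database can't cascade primary key changes itself (e.g. MyISAM),
    # so have the ORM issue the dependent UPDATE statements
    mapper(User, users_table, passive_updates=False)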
@@ -939,7 +939,7 @@ def comparable_property(comparator_factory, descriptor=None):
from sqlalchemy.orm import mapper, comparable_property
from sqlalchemy.orm.interfaces import PropComparator
from sqlalchemy.sql import func
-
+
class MyClass(object):
@property
def myprop(self):
@@ -954,12 +954,12 @@ def comparable_property(comparator_factory, descriptor=None):
Used with the ``properties`` dictionary sent to
:func:`~sqlalchemy.orm.mapper`.
-
+
Note that :func:`comparable_property` is usually not needed in basic
cases. The recipe at :mod:`.derived_attributes` offers a simpler
pure-Python method of achieving a similar result using class-bound
attributes with SQLAlchemy expression constructs.
-
+
:param comparator_factory:
A PropComparator subclass or factory that defines operator behavior
for this property.
@@ -973,21 +973,21 @@ def comparable_property(comparator_factory, descriptor=None):
"""
return ComparableProperty(comparator_factory, descriptor)
-
+
@sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
"is renamed to :func:`.configure_mappers`")
def compile_mappers():
"""Initialize the inter-mapper relationships of all mappers that have been defined."""
-
+
configure_mappers()
def clear_mappers():
"""Remove all mappers from all classes.
-
+
This function removes all instrumentation from classes and disposes
of their associated mappers. Once called, the classes are unmapped
and can be later re-mapped with new mappers.
-
+
:func:`.clear_mappers` is *not* for normal use, as there is literally no
valid usage for it outside of very specific testing scenarios. Normally,
mappers are permanent structural components of user-defined classes, and
@@ -999,7 +999,7 @@ def clear_mappers():
and possibly the test suites of other ORM extension libraries which
intend to test various combinations of mapper construction upon a fixed
set of classes.
-
+
"""
mapperlib._COMPILE_MUTEX.acquire()
try:
@@ -1025,10 +1025,10 @@ def joinedload(*keys, **kw):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
examples::
-
+
# joined-load the "orders" colleciton on "User"
query(User).options(joinedload(User.orders))
-
+
# joined-load the "keywords" collection on each "Item",
# but not the "items" collection on "Order" - those
# remain lazily loaded.
@@ -1039,17 +1039,17 @@ def joinedload(*keys, **kw):
:func:`joinedload` also accepts a keyword argument `innerjoin=True` which
indicates using an inner join instead of an outer::
-
+
query(Order).options(joinedload(Order.user, innerjoin=True))
-
+
Note that the join created by :func:`joinedload` is aliased such that no
other aspects of the query will affect what it loads. To use joined eager
loading with a join that is constructed manually using
:meth:`~sqlalchemy.orm.query.Query.join` or :func:`~sqlalchemy.orm.join`,
see :func:`contains_eager`.
-
+
See also: :func:`subqueryload`, :func:`lazyload`
-
+
"""
innerjoin = kw.pop('innerjoin', None)
if innerjoin is not None:
@@ -1080,7 +1080,7 @@ def joinedload_all(*keys, **kw):
load in one joined eager load.
Individual descriptors are accepted as arguments as well::
-
+
query.options(joinedload_all(User.orders, Order.items, Item.keywords))
The keyword arguments accept a flag `innerjoin=True|False` which will
@@ -1102,11 +1102,11 @@ def joinedload_all(*keys, **kw):
def eagerload(*args, **kwargs):
"""A synonym for :func:`joinedload()`."""
return joinedload(*args, **kwargs)
-
+
def eagerload_all(*args, **kwargs):
"""A synonym for :func:`joinedload_all()`"""
return joinedload_all(*args, **kwargs)
-
+
def subqueryload(*keys):
"""Return a ``MapperOption`` that will convert the property
of the given name or series of mapped attributes
@@ -1115,10 +1115,10 @@ def subqueryload(*keys):
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
examples::
-
+
# subquery-load the "orders" colleciton on "User"
query(User).options(subqueryload(User.orders))
-
+
# subquery-load the "keywords" collection on each "Item",
# but not the "items" collection on "Order" - those
# remain lazily loaded.
@@ -1128,7 +1128,7 @@ def subqueryload(*keys):
query(Order).options(subqueryload_all(Order.items, Item.keywords))
See also: :func:`joinedload`, :func:`lazyload`
-
+
"""
return strategies.EagerLazyOption(keys, lazy="subquery")
@@ -1147,7 +1147,7 @@ def subqueryload_all(*keys):
load in one subquery eager load.
Individual descriptors are accepted as arguments as well::
-
+
query.options(subqueryload_all(User.orders, Order.items,
Item.keywords))
@@ -1155,7 +1155,7 @@ def subqueryload_all(*keys):
"""
return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
-
+
def lazyload(*keys):
"""Return a ``MapperOption`` that will convert the property of the given
name or series of mapped attributes into a lazy load.
@@ -1193,16 +1193,16 @@ def noload(*keys):
def immediateload(*keys):
"""Return a ``MapperOption`` that will convert the property of the given
name or series of mapped attributes into an immediate load.
-
+
Used with :meth:`~sqlalchemy.orm.query.Query.options`.
See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
-
+
New as of version 0.6.5.
-
+
"""
return strategies.EagerLazyOption(keys, lazy='immediate')
-
+
def contains_alias(alias):
"""Return a ``MapperOption`` that will indicate to the query that
the main table has been aliased.
@@ -1222,11 +1222,11 @@ def contains_eager(*keys, **kwargs):
The option is used in conjunction with an explicit join that loads
the desired rows, i.e.::
-
+
sess.query(Order).\\
join(Order.user).\\
options(contains_eager(Order.user))
-
+
The above query would join from the ``Order`` entity to its related
``User`` entity, and the returned ``Order`` objects would have the
``Order.user`` attribute pre-populated.
@@ -1235,7 +1235,7 @@ def contains_eager(*keys, **kwargs):
string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
the eagerly-loaded rows are to come from an aliased table::
-
+
user_alias = aliased(User)
sess.query(Order).\\
join((user_alias, Order.user)).\\
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 56cae6a18..6b57d33f5 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -53,7 +53,7 @@ PASSIVE_OFF = False #util.symbol('PASSIVE_OFF')
class QueryableAttribute(interfaces.PropComparator):
"""Base class for class-bound attributes. """
-
+
def __init__(self, class_, key, impl=None,
comparator=None, parententity=None):
self.class_ = class_
@@ -73,15 +73,15 @@ class QueryableAttribute(interfaces.PropComparator):
dispatch = event.dispatcher(events.AttributeEvents)
dispatch.dispatch_cls._active_history = False
-
+
@util.memoized_property
def _supports_population(self):
return self.impl.supports_population
-
+
def get_history(self, instance, **kwargs):
return self.impl.get_history(instance_state(instance),
instance_dict(instance), **kwargs)
-
+
def __selectable__(self):
# TODO: conditionally attach this method based on clause_element ?
return self
@@ -100,7 +100,7 @@ class QueryableAttribute(interfaces.PropComparator):
def hasparent(self, state, optimistic=False):
return self.impl.hasparent(state, optimistic=optimistic)
-
+
def __getattr__(self, key):
try:
return getattr(self.comparator, key)
@@ -111,7 +111,7 @@ class QueryableAttribute(interfaces.PropComparator):
type(self.comparator).__name__,
key)
)
-
+
def __str__(self):
return repr(self.parententity) + "." + self.property.key
@@ -146,15 +146,15 @@ def create_proxied_attribute(descriptor):
Returns a new QueryableAttribute type that delegates descriptor
behavior and getattr() to the given descriptor.
"""
-
+
# TODO: can move this to descriptor_props if the need for this
# function is removed from ext/hybrid.py
-
+
class Proxy(QueryableAttribute):
"""Presents the :class:`.QueryableAttribute` interface as a
proxy on top of a Python descriptor / :class:`.PropComparator`
combination.
-
+
"""
def __init__(self, class_, key, descriptor, comparator,
@@ -165,7 +165,7 @@ def create_proxied_attribute(descriptor):
self._comparator = comparator
self.adapter = adapter
self.__doc__ = doc
-
+
@util.memoized_property
def comparator(self):
if util.callable(self._comparator):
@@ -173,20 +173,20 @@ def create_proxied_attribute(descriptor):
if self.adapter:
self._comparator = self._comparator.adapted(self.adapter)
return self._comparator
-
+
def __get__(self, instance, owner):
if instance is None:
return self
else:
return self.descriptor.__get__(instance, owner)
-
+
def __str__(self):
return self.key
-
+
def __getattr__(self, attribute):
"""Delegate __getattr__ to the original descriptor and/or
comparator."""
-
+
try:
return getattr(descriptor, attribute)
except AttributeError:
@@ -219,7 +219,7 @@ class AttributeImpl(object):
\class_
associated class
-
+
key
string name of the attribute
@@ -251,12 +251,12 @@ class AttributeImpl(object):
the hasparent() function to identify an "owning" attribute.
Allows multiple AttributeImpls to all match a single
owner attribute.
-
+
expire_missing
if False, don't add an "expiry" callable to this attribute
during state.expire_attributes(None), if no value is present
for this key.
-
+
"""
self.class_ = class_
self.key = key
@@ -268,30 +268,30 @@ class AttributeImpl(object):
self.is_equal = operator.eq
else:
self.is_equal = compare_function
-
+
# TODO: pass in the manager here
# instead of doing a lookup
attr = manager_of_class(class_)[key]
-
+
for ext in util.to_list(extension or []):
ext._adapt_listener(attr, ext)
-
+
if active_history:
self.dispatch._active_history = True
self.expire_missing = expire_missing
-
+
def _get_active_history(self):
"""Backwards compat for impl.active_history"""
-
+
return self.dispatch._active_history
-
+
def _set_active_history(self, value):
self.dispatch._active_history = value
-
+
active_history = property(_get_active_history, _set_active_history)
-
-
+
+
def hasparent(self, state, optimistic=False):
"""Return the boolean value of a `hasparent` flag attached to
the given state.
@@ -337,17 +337,17 @@ class AttributeImpl(object):
def get_history(self, state, dict_, passive=PASSIVE_OFF):
raise NotImplementedError()
-
+
def get_all_pending(self, state, dict_):
"""Return a list of tuples of (state, obj)
for all objects in this attribute's current state
+ history.
-
+
Only applies to object-based attributes.
This is an inlining of existing functionality
which roughly corresponds to:
-
+
get_state_history(
state,
key,
@@ -355,7 +355,7 @@ class AttributeImpl(object):
"""
raise NotImplementedError()
-
+
def initialize(self, state, dict_):
"""Initialize the given state's attribute with an empty value."""
@@ -379,7 +379,7 @@ class AttributeImpl(object):
state.committed_state[key] is NEVER_SET:
if passive is PASSIVE_NO_INITIALIZE:
return PASSIVE_NO_RESULT
-
+
if key in state.callables:
callable_ = state.callables[key]
value = callable_(passive)
@@ -404,7 +404,7 @@ class AttributeImpl(object):
# Return a new, empty value
return self.initialize(state, dict_)
-
+
def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
self.set(state, dict_, value, initiator, passive=passive)
@@ -515,7 +515,7 @@ class MutableScalarAttributeImpl(ScalarAttributeImpl):
v = state.committed_state.get(self.key, NO_VALUE)
else:
v = dict_.get(self.key, NO_VALUE)
-
+
return History.from_scalar_attribute(self, state, v)
def check_mutable_modified(self, state, dict_):
@@ -545,7 +545,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
where the target object is also instrumented.
Adds events to delete/set operations.
-
+
"""
accepts_scalar_loader = False
@@ -585,12 +585,12 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
ret = [(instance_state(current), current)]
else:
ret = []
-
+
if self.key in state.committed_state:
original = state.committed_state[self.key]
if original not in (NEVER_SET, PASSIVE_NO_RESULT, None) and \
original is not current:
-
+
ret.append((instance_state(original), original))
return ret
else:
@@ -611,14 +611,14 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
else:
old = self.get(state, dict_, passive=PASSIVE_NO_FETCH)
-
+
value = self.fire_replace_event(state, dict_, value, old, initiator)
dict_[self.key] = value
def fire_remove_event(self, state, dict_, value, initiator):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), False)
-
+
for fn in self.dispatch.remove:
fn(state, value, initiator or self)
@@ -630,7 +630,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
previous is not None and
previous is not PASSIVE_NO_RESULT):
self.sethasparent(instance_state(previous), False)
-
+
for fn in self.dispatch.set:
value = fn(state, value, previous, initiator or self)
@@ -691,24 +691,24 @@ class CollectionAttributeImpl(AttributeImpl):
current = dict_[self.key]
current = getattr(current, '_sa_adapter')
-
+
if self.key in state.committed_state:
original = state.committed_state[self.key]
if original is not NO_VALUE:
current_states = [(instance_state(c), c) for c in current]
original_states = [(instance_state(c), c) for c in original]
-
+
current_set = dict(current_states)
original_set = dict(original_states)
-
+
return \
[(s, o) for s, o in current_states if s not in original_set] + \
[(s, o) for s, o in current_states if s in original_set] + \
[(s, o) for s, o in original_states if s not in current_set]
-
+
return [(instance_state(o), o) for o in current]
-
+
def fire_append_event(self, state, dict_, value, initiator):
for fn in self.dispatch.append:
value = fn(state, value, initiator or self)
@@ -844,7 +844,7 @@ class CollectionAttributeImpl(AttributeImpl):
state.commit(dict_, [self.key])
if self.key in state.pending:
-
+
# pending items exist. issue a modified event,
# add/remove new items.
state.modified_event(dict_, self, user_data, True)
@@ -893,7 +893,7 @@ def backref_listeners(attribute, key, uselist):
initiator, passive=PASSIVE_NO_FETCH)
except (ValueError, KeyError, IndexError):
pass
-
+
if child is not None:
child_state, child_dict = instance_state(child),\
instance_dict(child)
@@ -926,19 +926,19 @@ def backref_listeners(attribute, key, uselist):
state.obj(),
initiator,
passive=PASSIVE_NO_FETCH)
-
+
if uselist:
event.listen(attribute, "append", append, retval=True, raw=True)
else:
event.listen(attribute, "set", set_, retval=True, raw=True)
# TODO: need coverage in test/orm/ of remove event
event.listen(attribute, "remove", remove, retval=True, raw=True)
-
+
class History(tuple):
"""A 3-tuple of added, unchanged and deleted values,
representing the changes which have occurred on an instrumented
attribute.
-
+
Each tuple member is an iterable sequence.
"""
@@ -948,57 +948,57 @@ class History(tuple):
added = property(itemgetter(0))
"""Return the collection of items added to the attribute (the first tuple
element)."""
-
+
unchanged = property(itemgetter(1))
"""Return the collection of items that have not changed on the attribute
(the second tuple element)."""
-
-
+
+
deleted = property(itemgetter(2))
"""Return the collection of items that have been removed from the
attribute (the third tuple element)."""
-
+
def __new__(cls, added, unchanged, deleted):
return tuple.__new__(cls, (added, unchanged, deleted))
-
+
def __nonzero__(self):
return self != HISTORY_BLANK
-
+
def empty(self):
"""Return True if this :class:`History` has no changes
and no existing, unchanged state.
-
+
"""
-
+
return not bool(
(self.added or self.deleted)
or self.unchanged and self.unchanged != [None]
)
-
+
def sum(self):
"""Return a collection of added + unchanged + deleted."""
-
+
return (self.added or []) +\
(self.unchanged or []) +\
(self.deleted or [])
-
+
def non_deleted(self):
"""Return a collection of added + unchanged."""
-
+
return (self.added or []) +\
(self.unchanged or [])
-
+
def non_added(self):
"""Return a collection of unchanged + deleted."""
-
+
return (self.unchanged or []) +\
(self.deleted or [])
-
+
def has_changes(self):
"""Return True if this :class:`History` has changes."""
-
+
return bool(self.added or self.deleted)
-
+
def as_state(self):
return History(
[(c is not None and c is not PASSIVE_NO_RESULT)
@@ -1039,7 +1039,7 @@ class History(tuple):
@classmethod
def from_object_attribute(cls, attribute, state, current):
original = state.committed_state.get(attribute.key, NEVER_SET)
-
+
if current is NO_VALUE:
if (original is not None and
original is not NEVER_SET and
@@ -1064,7 +1064,7 @@ class History(tuple):
def from_collection(cls, attribute, state, current):
original = state.committed_state.get(attribute.key, NEVER_SET)
current = getattr(current, '_sa_adapter')
-
+
if original is NO_VALUE:
return cls(list(current), (), ())
elif original is NEVER_SET:
@@ -1072,10 +1072,10 @@ class History(tuple):
else:
current_states = [(instance_state(c), c) for c in current]
original_states = [(instance_state(c), c) for c in original]
-
+
current_set = dict(current_states)
original_set = dict(original_states)
-
+
return cls(
[o for s, o in current_states if s not in original_set],
[o for s, o in current_states if s in original_set],
@@ -1087,25 +1087,25 @@ HISTORY_BLANK = History(None, None, None)
def get_history(obj, key, **kwargs):
"""Return a :class:`.History` record for the given object
and attribute key.
-
+
:param obj: an object whose class is instrumented by the
- attributes package.
-
+ attributes package.
+
:param key: string attribute name.
-
+
:param kwargs: Optional keyword arguments currently
include the ``passive`` flag, which indicates if the attribute should be
loaded from the database if not already present (:attr:`PASSIVE_NO_FETCH`), and
if the attribute should not be initialized to a blank value otherwise
(:attr:`PASSIVE_NO_INITIALIZE`). Default is :attr:`PASSIVE_OFF`.
-
+
"""
return get_state_history(instance_state(obj), key, **kwargs)
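A minimal usage sketch, assuming a mapped instance ``someuser`` with
a ``name`` attribute::

    from sqlalchemy.orm.attributes import get_history

    someuser.name = "newname"
    hist = get_history(someuser, 'name')

    # History is a 3-tuple of (added, unchanged, deleted)
    print hist.added, hist.unchanged, hist.deleted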
def get_state_history(state, key, **kwargs):
return state.get_history(key, **kwargs)
-
+
def has_parent(cls, obj, key, optimistic=False):
"""TODO"""
manager = manager_of_class(cls)
@@ -1120,12 +1120,12 @@ def register_attribute(class_, key, **kw):
comparator, parententity, doc=doc)
register_attribute_impl(class_, key, **kw)
return desc
-
-def register_attribute_impl(class_, key,
+
+def register_attribute_impl(class_, key,
uselist=False, callable_=None,
useobject=False, mutable_scalars=False,
impl_class=None, backref=None, **kw):
-
+
manager = manager_of_class(class_)
if uselist:
factory = kw.pop('typecallable', None)
@@ -1135,7 +1135,7 @@ def register_attribute_impl(class_, key,
typecallable = kw.pop('typecallable', None)
dispatch = manager[key].dispatch
-
+
if impl_class:
impl = impl_class(class_, key, typecallable, dispatch, **kw)
elif uselist:
@@ -1151,22 +1151,22 @@ def register_attribute_impl(class_, key,
impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
manager[key].impl = impl
-
+
if backref:
backref_listeners(manager[key], backref, uselist)
manager.post_configure_attribute(key)
return manager[key]
-
+
def register_descriptor(class_, key, comparator=None,
parententity=None, property_=None, doc=None):
manager = manager_of_class(class_)
descriptor = InstrumentedAttribute(class_, key, comparator=comparator,
parententity=parententity)
-
+
descriptor.__doc__ = doc
-
+
manager.instrument_attribute(key, descriptor)
return descriptor
@@ -1175,36 +1175,36 @@ def unregister_attribute(class_, key):
def init_collection(obj, key):
"""Initialize a collection attribute and return the collection adapter.
-
+
This function is used to provide direct access to collection internals
for a previously unloaded attribute. e.g.::
-
+
collection_adapter = init_collection(someobject, 'elements')
for elem in values:
collection_adapter.append_without_event(elem)
-
+
For an easier way to do the above, see
:func:`~sqlalchemy.orm.attributes.set_committed_value`.
-
+
obj is an instrumented object instance. An InstanceState
is accepted directly for backwards compatibility but
this usage is deprecated.
-
+
"""
state = instance_state(obj)
dict_ = state.dict
return init_state_collection(state, dict_, key)
-
+
def init_state_collection(state, dict_, key):
"""Initialize a collection attribute and return the collection adapter."""
-
+
attr = state.manager[key].impl
user_data = attr.initialize(state, dict_)
return attr.get_collection(state, dict_, user_data)
def set_committed_value(instance, key, value):
"""Set the value of an attribute with no history events.
-
+
Cancels any previous history present. The value should be
a scalar value for scalar-holding attributes, or
an iterable for any collection-holding attribute.
@@ -1215,20 +1215,20 @@ def set_committed_value(instance, key, value):
which has loaded additional attributes or collections through
separate queries, which can then be attached to an instance
as though it were part of its original loaded state.
-
+
"""
state, dict_ = instance_state(instance), instance_dict(instance)
state.manager[key].impl.set_committed_value(state, dict_, value)
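For example, attaching the results of a separate query to an instance
as though they were loaded with it (hypothetical ``addresses``
collection and query)::

    from sqlalchemy.orm.attributes import set_committed_value

    addresses = session.query(Address).\
        filter_by(user_id=someuser.id).all()

    # populate the collection; no history events are recorded
    set_committed_value(someuser, 'addresses', addresses)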
-
+
def set_attribute(instance, key, value):
"""Set the value of an attribute, firing history events.
-
+
This function may be used regardless of instrumentation
applied directly to the class, i.e. no descriptors are required.
Custom attribute management schemes will need to make usage
of this method to establish attribute state as understood
by SQLAlchemy.
-
+
"""
state, dict_ = instance_state(instance), instance_dict(instance)
state.manager[key].impl.set(state, dict_, value, None)
@@ -1241,7 +1241,7 @@ def get_attribute(instance, key):
Custom attribute management schemes will need to make usage
of this method to make usage of attribute state as understood
by SQLAlchemy.
-
+
"""
state, dict_ = instance_state(instance), instance_dict(instance)
return state.manager[key].impl.get(state, dict_)
@@ -1254,20 +1254,19 @@ def del_attribute(instance, key):
Custom attribute management schemes will need to make usage
of this method to establish attribute state as understood
by SQLAlchemy.
-
+
"""
state, dict_ = instance_state(instance), instance_dict(instance)
state.manager[key].impl.delete(state, dict_)
def flag_modified(instance, key):
"""Mark an attribute on an instance as 'modified'.
-
+
This sets the 'modified' flag on the instance and
establishes an unconditional change event for the given attribute.
-
+
"""
state, dict_ = instance_state(instance), instance_dict(instance)
impl = state.manager[key].impl
state.modified_event(dict_, impl, NO_VALUE)
-
- \ No newline at end of file
+
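A typical use is with mutable values whose in-place changes aren't
detected automatically (hypothetical ``data`` attribute holding a
dict)::

    from sqlalchemy.orm.attributes import flag_modified

    someobject.data['counter'] = 5     # in-place change, no event fired
    flag_modified(someobject, 'data')  # force the attribute to 'modified'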
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index b0fab36c0..4b03a50db 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -449,7 +449,7 @@ class collection(object):
# implementations
def collection_adapter(collection):
"""Fetch the :class:`.CollectionAdapter` for a collection."""
-
+
return getattr(collection, '_sa_adapter', None)
def collection_iter(collection):
@@ -479,14 +479,14 @@ class CollectionAdapter(object):
The usage of getattr()/setattr() is currently to allow injection
of custom methods, such as to unwrap Zope security proxies.
-
+
"""
def __init__(self, attr, owner_state, data):
self._key = attr.key
self._data = weakref.ref(data)
self.owner_state = owner_state
self.link_to_self(data)
-
+
@property
def data(self):
"The entity collection being adapted."
@@ -495,7 +495,7 @@ class CollectionAdapter(object):
@util.memoized_property
def attr(self):
return self.owner_state.manager[self._key].impl
-
+
def link_to_self(self, data):
"""Link a collection to this adapter, and fire a link event."""
setattr(data, '_sa_adapter', self)
@@ -555,7 +555,7 @@ class CollectionAdapter(object):
def append_with_event(self, item, initiator=None):
"""Add an entity to the collection, firing mutation events."""
-
+
getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)
def append_without_event(self, item):
@@ -578,7 +578,7 @@ class CollectionAdapter(object):
def clear_with_event(self, initiator=None):
"""Empty the collection, firing a mutation event for each entity."""
-
+
remover = getattr(self._data(), '_sa_remover')
for item in list(self):
remover(item, _sa_initiator=initiator)
@@ -592,7 +592,7 @@ class CollectionAdapter(object):
def __iter__(self):
"""Iterate over entities in the collection."""
-
+
# Py3K requires iter() here
return iter(getattr(self._data(), '_sa_iterator')())
@@ -926,7 +926,7 @@ def __set(collection, item, _sa_initiator=None):
if executor:
item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
return item
-
+
def __del(collection, item, _sa_initiator=None):
"""Run del events, may eventually be inlined into decorators."""
if _sa_initiator is not False and item is not None:
@@ -987,12 +987,12 @@ def _list_decorators():
stop = index.stop or len(self)
if stop < 0:
stop += len(self)
-
+
if step == 1:
for i in xrange(start, stop, step):
if len(self) > start:
del self[start]
-
+
for i, item in enumerate(value):
self.insert(i + start, item)
else:
@@ -1041,7 +1041,7 @@ def _list_decorators():
_tidy(__delslice__)
return __delslice__
# end Py2K
-
+
def extend(fn):
def extend(self, iterable):
for value in iterable:
@@ -1371,7 +1371,7 @@ class InstrumentedDict(dict):
__instrumentation__ = {
'iterator': 'itervalues', }
# end Py2K
-
+
__canned_instrumentation = {
list: InstrumentedList,
set: InstrumentedSet,
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 57c6d6e9e..8acf77ad8 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -33,29 +33,29 @@ class DependencyProcessor(object):
"No target attributes to populate between parent and "
"child are present" %
self.prop)
-
+
@classmethod
def from_relationship(cls, prop):
return _direction_to_processor[prop.direction](prop)
-
+
def hasparent(self, state):
"""return True if the given object instance has a parent,
according to the ``InstrumentedAttribute`` handled by this
``DependencyProcessor``.
-
+
"""
return self.parent.class_manager.get_impl(self.key).hasparent(state)
def per_property_preprocessors(self, uow):
"""establish actions and dependencies related to a flush.
-
+
These actions will operate on all relevant states in
the aggregate.
-
+
"""
uow.register_preprocessor(self, True)
-
-
+
+
def per_property_flush_actions(self, uow):
after_save = unitofwork.ProcessAll(uow, self, False, True)
before_delete = unitofwork.ProcessAll(uow, self, True, True)
@@ -77,7 +77,7 @@ class DependencyProcessor(object):
uow,
self.mapper.primary_base_mapper
)
-
+
self.per_property_dependencies(uow,
parent_saves,
child_saves,
@@ -86,15 +86,15 @@ class DependencyProcessor(object):
after_save,
before_delete
)
-
+
def per_state_flush_actions(self, uow, states, isdelete):
"""establish actions and dependencies related to a flush.
-
+
These actions will operate on all relevant states
individually. This occurs only if there are cycles
in the 'aggregated' version of events.
-
+
"""
parent_base_mapper = self.parent.primary_base_mapper
@@ -104,7 +104,7 @@ class DependencyProcessor(object):
# locate and disable the aggregate processors
# for this dependency
-
+
if isdelete:
before_delete = unitofwork.ProcessAll(uow, self, True, True)
before_delete.disabled = True
@@ -113,14 +113,14 @@ class DependencyProcessor(object):
after_save.disabled = True
# check if the "child" side is part of the cycle
-
+
if child_saves not in uow.cycles:
# based on the current dependencies we use, the saves/
# deletes should always be in the 'cycles' collection
# together. if this changes, we will have to break up
# this method a bit more.
assert child_deletes not in uow.cycles
-
+
# child side is not part of the cycle, so we will link per-state
# actions to the aggregate "saves", "deletes" actions
child_actions = [
@@ -129,7 +129,7 @@ class DependencyProcessor(object):
child_in_cycles = False
else:
child_in_cycles = True
-
+
# check if the "parent" side is part of the cycle
if not isdelete:
parent_saves = unitofwork.SaveUpdateAll(
@@ -145,14 +145,14 @@ class DependencyProcessor(object):
parent_saves = after_save = None
if parent_deletes in uow.cycles:
parent_in_cycles = True
-
+
# now create actions / dependencies for each state.
for state in states:
# detect if there's anything changed or loaded
# by a preprocessor on this state/attribute. if not,
# we should be able to skip it entirely.
sum_ = state.manager[self.key].impl.get_all_pending(state, state.dict)
-
+
if not sum_:
continue
@@ -171,7 +171,7 @@ class DependencyProcessor(object):
uow,
state,
parent_base_mapper)
-
+
if child_in_cycles:
child_actions = []
for child_state, child in sum_:
@@ -192,7 +192,7 @@ class DependencyProcessor(object):
child_base_mapper),
False)
child_actions.append(child_action)
-
+
# establish dependencies between our possibly per-state
# parent action and our possibly per-state child action.
for child_action, childisdelete in child_actions:
@@ -201,23 +201,23 @@ class DependencyProcessor(object):
child_action,
after_save, before_delete,
isdelete, childisdelete)
-
-
+
+
def presort_deletes(self, uowcommit, states):
return False
-
+
def presort_saves(self, uowcommit, states):
return False
-
+
def process_deletes(self, uowcommit, states):
pass
-
+
def process_saves(self, uowcommit, states):
pass
def prop_has_changes(self, uowcommit, states, isdelete):
passive = not isdelete or self.passive_deletes
-
+
for s in states:
# TODO: add a high speed method
# to InstanceState which returns: attribute
@@ -230,7 +230,7 @@ class DependencyProcessor(object):
return True
else:
return False
-
+
def _verify_canload(self, state):
if state is not None and \
not self.mapper._canload(state,
@@ -249,7 +249,7 @@ class DependencyProcessor(object):
"Attempting to flush an item of type %s on collection '%s', "
"whose mapper does not inherit from that of %s." %
(state.class_, self.prop, self.mapper.class_))
-
+
def _synchronize(self, state, child, associationrow,
clearkeys, uowcommit):
raise NotImplementedError()
@@ -275,7 +275,7 @@ class DependencyProcessor(object):
[r for l, r in self.prop.synchronize_pairs]
)
break
-
+
def _pks_changed(self, uowcommit, state):
raise NotImplementedError()
@@ -283,7 +283,7 @@ class DependencyProcessor(object):
return "%s(%s)" % (self.__class__.__name__, self.prop)
class OneToManyDP(DependencyProcessor):
-
+
def per_property_dependencies(self, uow, parent_saves,
child_saves,
parent_deletes,
@@ -300,37 +300,37 @@ class OneToManyDP(DependencyProcessor):
uow,
self.mapper.primary_base_mapper,
True)
-
+
uow.dependencies.update([
(child_saves, after_save),
(parent_saves, after_save),
(after_save, child_post_updates),
-
+
(before_delete, child_pre_updates),
(child_pre_updates, parent_deletes),
(child_pre_updates, child_deletes),
-
+
])
else:
uow.dependencies.update([
(parent_saves, after_save),
(after_save, child_saves),
(after_save, child_deletes),
-
+
(child_saves, parent_deletes),
(child_deletes, parent_deletes),
(before_delete, child_saves),
(before_delete, child_deletes),
])
-
+
def per_state_dependencies(self, uow,
save_parent,
delete_parent,
child_action,
after_save, before_delete,
isdelete, childisdelete):
-
+
if self.post_update:
child_post_updates = unitofwork.IssuePostUpdate(
@@ -341,7 +341,7 @@ class OneToManyDP(DependencyProcessor):
uow,
self.mapper.primary_base_mapper,
True)
-
+
# TODO: this whole block is not covered
# by any tests
if not isdelete:
@@ -378,7 +378,7 @@ class OneToManyDP(DependencyProcessor):
(before_delete, child_action),
(child_action, delete_parent)
])
-
+
def presort_deletes(self, uowcommit, states):
# head object is being deleted, and we manage its list of
# child objects; the child objects have to have their
@@ -398,21 +398,21 @@ class OneToManyDP(DependencyProcessor):
uowcommit.register_object(child, isdelete=True)
else:
uowcommit.register_object(child)
-
+
if should_null_fks:
for child in history.unchanged:
if child is not None:
uowcommit.register_object(child,
operation="delete", prop=self.prop)
-
-
+
+
def presort_saves(self, uowcommit, states):
children_added = uowcommit.memo(('children_added', self), set)
-
+
for state in states:
pks_changed = self._pks_changed(uowcommit, state)
-
+
history = uowcommit.get_attribute_history(
state,
self.key,
@@ -451,14 +451,14 @@ class OneToManyDP(DependencyProcessor):
self.passive_updates,
operation="pk change",
prop=self.prop)
-
+
def process_deletes(self, uowcommit, states):
# head object is being deleted, and we manage its list of
# child objects; the child objects have to have their foreign
# key to the parent set to NULL. this phase can be called
# safely for any cascade, but is unnecessary if delete cascade
# is on.
-
+
if self.post_update or not self.passive_deletes == 'all':
children_added = uowcommit.memo(('children_added', self), set)
@@ -478,7 +478,7 @@ class OneToManyDP(DependencyProcessor):
uowcommit, False)
if self.post_update and child:
self._post_update(child, uowcommit, [state])
-
+
if self.post_update or not self.cascade.delete:
for child in set(history.unchanged).\
difference(children_added):
@@ -492,12 +492,12 @@ class OneToManyDP(DependencyProcessor):
self._post_update(child,
uowcommit,
[state])
-
+
# technically, we can even remove each child from the
# collection here too. but this would be a somewhat
# inconsistent behavior since it wouldn't happen
# if the old parent wasn't deleted but the child was moved.
-
+
def process_saves(self, uowcommit, states):
for state in states:
history = uowcommit.get_attribute_history(state,
@@ -520,7 +520,7 @@ class OneToManyDP(DependencyProcessor):
for child in history.unchanged:
self._synchronize(state, child, None,
False, uowcommit, True)
-
+
def _synchronize(self, state, child,
associationrow, clearkeys, uowcommit,
pks_changed):
@@ -593,7 +593,7 @@ class ManyToOneDP(DependencyProcessor):
isdelete, childisdelete):
if self.post_update:
-
+
if not isdelete:
parent_post_updates = unitofwork.IssuePostUpdate(
uow,
@@ -608,7 +608,7 @@ class ManyToOneDP(DependencyProcessor):
uow.dependencies.update([
(save_parent, after_save),
(child_action, after_save),
-
+
(after_save, parent_post_updates)
])
else:
@@ -622,7 +622,7 @@ class ManyToOneDP(DependencyProcessor):
(parent_pre_updates, delete_parent),
(parent_pre_updates, child_action)
])
-
+
elif not isdelete:
if not childisdelete:
uow.dependencies.update([
@@ -633,7 +633,7 @@ class ManyToOneDP(DependencyProcessor):
uow.dependencies.update([
(after_save, save_parent),
])
-
+
else:
if childisdelete:
uow.dependencies.update([
@@ -661,7 +661,7 @@ class ManyToOneDP(DependencyProcessor):
'delete', child):
uowcommit.register_object(
st_, isdelete=True)
-
+
def presort_saves(self, uowcommit, states):
for state in states:
uowcommit.register_object(state, operation="add", prop=self.prop)
@@ -676,7 +676,7 @@ class ManyToOneDP(DependencyProcessor):
if self.hasparent(child) is False:
uowcommit.register_object(child, isdelete=True,
operation="delete", prop=self.prop)
-
+
for c, m, st_, dct_ in self.mapper.cascade_iterator(
'delete', child):
uowcommit.register_object(
@@ -687,7 +687,7 @@ class ManyToOneDP(DependencyProcessor):
if self.post_update and \
not self.cascade.delete_orphan and \
not self.passive_deletes == 'all':
-
+
# post_update means we have to update our
# row to not reference the child object
# before we can DELETE the row
@@ -710,7 +710,7 @@ class ManyToOneDP(DependencyProcessor):
for child in history.added:
self._synchronize(state, child, None, False,
uowcommit, "add")
-
+
if self.post_update:
self._post_update(state, uowcommit, history.sum())
@@ -728,7 +728,7 @@ class ManyToOneDP(DependencyProcessor):
"operation along '%s' won't proceed" %
(mapperutil.state_class_str(child), operation, self.prop))
return
-
+
if clearkeys or child is None:
sync.clear(state, self.parent, self.prop.synchronize_pairs)
else:
@@ -743,12 +743,12 @@ class DetectKeySwitch(DependencyProcessor):
"""For many-to-one relationships with no one-to-many backref,
searches for parents through the unit of work when a primary
key has changed and updates them.
-
+
Theoretically, this approach could be expanded to support transparent
deletion of objects referenced via many-to-one as well, although
the current attribute system doesn't do enough bookkeeping for this
to be efficient.
-
+
"""
def per_property_preprocessors(self, uow):
@@ -759,7 +759,7 @@ class DetectKeySwitch(DependencyProcessor):
if False in (prop.passive_updates for \
prop in self.prop._reverse_property):
return
-
+
uow.register_preprocessor(self, False)
def per_property_flush_actions(self, uow):
@@ -770,10 +770,10 @@ class DetectKeySwitch(DependencyProcessor):
uow.dependencies.update([
(parent_saves, after_save)
])
-
+
def per_state_flush_actions(self, uow, states, isdelete):
pass
-
+
def presort_deletes(self, uowcommit, states):
pass
@@ -787,9 +787,9 @@ class DetectKeySwitch(DependencyProcessor):
if not isdelete and self.passive_updates:
d = self._key_switchers(uow, states)
return bool(d)
-
+
return False
-
+
def process_deletes(self, uowcommit, states):
assert False
@@ -800,13 +800,13 @@ class DetectKeySwitch(DependencyProcessor):
# statements being emitted
assert self.passive_updates
self._process_key_switches(states, uowcommit)
-
+
def _key_switchers(self, uow, states):
switched, notswitched = uow.memo(
('pk_switchers', self),
lambda: (set(), set())
)
-
+
allstates = switched.union(notswitched)
for s in states:
if s not in allstates:
@@ -815,7 +815,7 @@ class DetectKeySwitch(DependencyProcessor):
else:
notswitched.add(s)
return switched
-
+
def _process_key_switches(self, deplist, uowcommit):
switchers = self._key_switchers(uowcommit, deplist)
if switchers:
@@ -848,7 +848,7 @@ class DetectKeySwitch(DependencyProcessor):
class ManyToManyDP(DependencyProcessor):
-
+
def per_property_dependencies(self, uow, parent_saves,
child_saves,
parent_deletes,
@@ -861,14 +861,14 @@ class ManyToManyDP(DependencyProcessor):
(parent_saves, after_save),
(child_saves, after_save),
(after_save, child_deletes),
-
+
# a rowswitch on the parent from deleted to saved
# can make this one occur, as the "save" may remove
# an element from the
# "deleted" list before we have a chance to
# process its child rows
(before_delete, parent_saves),
-
+
(before_delete, parent_deletes),
(before_delete, child_deletes),
(before_delete, child_saves),
@@ -896,7 +896,7 @@ class ManyToManyDP(DependencyProcessor):
(before_delete, child_action),
(before_delete, delete_parent)
])
-
+
def presort_deletes(self, uowcommit, states):
if not self.passive_deletes:
# if no passive deletes, load history on
@@ -907,7 +907,7 @@ class ManyToManyDP(DependencyProcessor):
state,
self.key,
passive=self.passive_deletes)
-
+
def presort_saves(self, uowcommit, states):
if not self.passive_updates:
# if no passive updates, load history on
@@ -922,7 +922,7 @@ class ManyToManyDP(DependencyProcessor):
if not self.cascade.delete_orphan:
return
-
+
# check for child items removed from the collection
# if delete_orphan check is turned on.
for state in states:
@@ -940,12 +940,12 @@ class ManyToManyDP(DependencyProcessor):
child):
uowcommit.register_object(
st_, isdelete=True)
-
+
def process_deletes(self, uowcommit, states):
secondary_delete = []
secondary_insert = []
secondary_update = []
-
+
processed = self._get_reversed_processed_set(uowcommit)
tmp = set()
for state in states:
@@ -969,12 +969,12 @@ class ManyToManyDP(DependencyProcessor):
False, uowcommit, "delete"):
continue
secondary_delete.append(associationrow)
-
+
tmp.update((c, state) for c in history.non_added())
if processed is not None:
processed.update(tmp)
-
+
self._run_crud(uowcommit, secondary_insert,
secondary_update, secondary_delete)
@@ -1016,12 +1016,12 @@ class ManyToManyDP(DependencyProcessor):
False, uowcommit, "delete"):
continue
secondary_delete.append(associationrow)
-
+
tmp.update((c, state)
for c in history.added + history.deleted)
-
+
if need_cascade_pks:
-
+
for child in history.unchanged:
associationrow = {}
sync.update(state,
@@ -1036,17 +1036,17 @@ class ManyToManyDP(DependencyProcessor):
self.prop.secondary_synchronize_pairs)
secondary_update.append(associationrow)
-
+
if processed is not None:
processed.update(tmp)
-
+
self._run_crud(uowcommit, secondary_insert,
secondary_update, secondary_delete)
-
+
def _run_crud(self, uowcommit, secondary_insert,
secondary_update, secondary_delete):
connection = uowcommit.transaction.connection(self.mapper)
-
+
if secondary_delete:
associationrow = secondary_delete[0]
statement = self.secondary.delete(sql.and_(*[
@@ -1055,7 +1055,7 @@ class ManyToManyDP(DependencyProcessor):
if c.key in associationrow
]))
result = connection.execute(statement, secondary_delete)
-
+
if result.supports_sane_multi_rowcount() and \
result.rowcount != len(secondary_delete):
raise exc.StaleDataError(
@@ -1085,7 +1085,7 @@ class ManyToManyDP(DependencyProcessor):
if secondary_insert:
statement = self.secondary.insert()
connection.execute(statement, secondary_insert)
-
+
def _synchronize(self, state, child, associationrow,
clearkeys, uowcommit, operation):
if associationrow is None:
@@ -1098,16 +1098,16 @@ class ManyToManyDP(DependencyProcessor):
"operation along '%s' won't proceed" %
(mapperutil.state_class_str(child), operation, self.prop))
return False
-
+
self._verify_canload(child)
-
+
sync.populate_dict(state, self.parent, associationrow,
self.prop.synchronize_pairs)
sync.populate_dict(child, self.mapper, associationrow,
self.prop.secondary_synchronize_pairs)
-
+
return True
-
+
def _pks_changed(self, uowcommit, state):
return sync.source_modified(
uowcommit,
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index 341594578..8cdde2282 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -14,27 +14,27 @@ class MapperExtension(object):
.. note:: :class:`.MapperExtension` is deprecated. Please
refer to :func:`.event.listen` as well as
:class:`.MapperEvents`.
-
+
New extension classes subclass :class:`.MapperExtension` and are specified
using the ``extension`` mapper() argument, which is a single
:class:`.MapperExtension` or a list of such::
-
+
from sqlalchemy.orm.interfaces import MapperExtension
-
+
class MyExtension(MapperExtension):
def before_insert(self, mapper, connection, instance):
print "instance %s before insert !" % instance
-
+
m = mapper(User, users_table, extension=MyExtension())
-
+
A single mapper can maintain a chain of ``MapperExtension``
objects. When a particular mapping event occurs, the
corresponding method on each ``MapperExtension`` is invoked
serially, and each method has the ability to halt the chain
from proceeding further::
-
+
m = mapper(User, users_table, extension=[ext1, ext2, ext3])
-
+
Each ``MapperExtension`` method returns the symbol
EXT_CONTINUE by default. This symbol generally means "move
to the next ``MapperExtension`` for processing". For methods
@@ -43,13 +43,13 @@ class MapperExtension(object):
should be ignored. In some cases it's required for a
default mapper activity to be performed, such as adding a
new instance to a result list.
-
+
The symbol EXT_STOP signifies that the chain of
``MapperExtension`` objects should stop processing when this
symbol is returned. Like EXT_CONTINUE, it also carries
additional significance in some cases, meaning that a default
mapper activity will not be performed.
-
+
"""
@classmethod
@@ -75,17 +75,17 @@ class MapperExtension(object):
'before_delete',
'after_delete'
))
-
+
@classmethod
def _adapt_listener_methods(cls, self, listener, methods):
-
+
for meth in methods:
me_meth = getattr(MapperExtension, meth)
ls_meth = getattr(listener, meth)
-
+
# TODO: comparing self.methods to cls.method,
# this comparison is probably moot
-
+
if me_meth is not ls_meth:
if meth == 'reconstruct_instance':
def go(ls_meth):
@@ -109,7 +109,7 @@ class MapperExtension(object):
util.warn_exception(ls_meth, self, self.class_,
self.class_manager.original_init,
instance, args, kwargs)
-
+
return init_failed
event.listen(self.class_manager, 'init_failure',
go(ls_meth), raw=False, propagate=True)
@@ -121,20 +121,20 @@ class MapperExtension(object):
def instrument_class(self, mapper, class_):
"""Receive a class when the mapper is first constructed, and has
applied instrumentation to the mapped class.
-
+
The return value is only significant within the ``MapperExtension``
chain; the parent mapper's behavior isn't modified by this method.
-
+
"""
return EXT_CONTINUE
def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
"""Receive an instance when it's constructor is called.
-
+
This method is only called during a userland construction of
an object. It is not called when an object is loaded from the
database.
-
+
The return value is only significant within the ``MapperExtension``
chain; the parent mapper's behavior isn't modified by this method.
@@ -144,11 +144,11 @@ class MapperExtension(object):
def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
"""Receive an instance when it's constructor has been called,
and raised an exception.
-
+
This method is only called during a userland construction of
an object. It is not called when an object is loaded from the
database.
-
+
The return value is only significant within the ``MapperExtension``
chain; the parent mapper's behavior isn't modified by this method.
@@ -166,10 +166,10 @@ class MapperExtension(object):
object which contains mapped columns as keys. The
returned object should also be a dictionary-like object
which recognizes mapped columns as keys.
-
+
If the ultimate return value is EXT_CONTINUE, the row
is not translated.
-
+
"""
return EXT_CONTINUE
@@ -302,7 +302,7 @@ class MapperExtension(object):
The return value is only significant within the ``MapperExtension``
chain; the parent mapper's behavior isn't modified by this method.
-
+
"""
return EXT_CONTINUE
@@ -319,7 +319,7 @@ class MapperExtension(object):
This means that an instance being sent to before_update is *not* a
guarantee that an UPDATE statement will be issued (although you can
affect the outcome here).
-
+
To detect if the column-based attributes on the object have net
changes, and will therefore generate an UPDATE statement, use
``object_session(instance).is_modified(instance,
@@ -344,7 +344,7 @@ class MapperExtension(object):
The return value is only significant within the ``MapperExtension``
chain; the parent mapper's behavior isn't modified by this method.
-
+
"""
return EXT_CONTINUE
@@ -377,17 +377,17 @@ class MapperExtension(object):
class SessionExtension(object):
"""Base implementation for :class:`.Session` event hooks.
-
+
.. note:: :class:`.SessionExtension` is deprecated. Please
refer to :func:`.event.listen` as well as
:class:`.SessionEvents`.
-
+
Subclasses may be installed into a :class:`.Session` (or
:func:`.sessionmaker`) using the ``extension`` keyword
argument::
-
+
from sqlalchemy.orm.interfaces import SessionExtension
-
+
class MySessionExtension(SessionExtension):
def before_commit(self, session):
print "before commit!"
@@ -414,32 +414,32 @@ class SessionExtension(object):
def before_commit(self, session):
"""Execute right before commit is called.
-
+
Note that this may not be per-flush if a longer running
transaction is ongoing."""
def after_commit(self, session):
"""Execute after a commit has occured.
-
+
Note that this may not be per-flush if a longer running
transaction is ongoing."""
def after_rollback(self, session):
"""Execute after a rollback has occured.
-
+
Note that this may not be per-flush if a longer running
transaction is ongoing."""
def before_flush( self, session, flush_context, instances):
"""Execute before flush process has started.
-
+
`instances` is an optional list of objects which were passed to
the ``flush()`` method. """
def after_flush(self, session, flush_context):
"""Execute after flush has completed, but before commit has been
called.
-
+
Note that the session's state is still in pre-flush, i.e. 'new',
'dirty', and 'deleted' lists still show pre-flush state as well
as the history settings on instance attributes."""
@@ -447,7 +447,7 @@ class SessionExtension(object):
def after_flush_postexec(self, session, flush_context):
"""Execute after flush has completed, and after the post-exec
state occurs.
-
+
This will be when the 'new', 'dirty', and 'deleted' lists are in
their final state. An actual commit() may or may not have
occurred, depending on whether or not the flush started its own
@@ -455,20 +455,20 @@ class SessionExtension(object):
def after_begin( self, session, transaction, connection):
"""Execute after a transaction is begun on a connection
-
+
`transaction` is the SessionTransaction. This method is called
after an engine level transaction is begun on a connection. """
def after_attach(self, session, instance):
"""Execute after an instance is attached to a session.
-
+
This is called after an add, delete or merge. """
def after_bulk_update( self, session, query, query_context, result):
"""Execute after a bulk update operation to the session.
-
+
This is called after a session.query(...).update()
-
+
`query` is the query object that this update operation was
called on. `query_context` was the query context object.
`result` is the result object returned from the bulk operation.
@@ -476,9 +476,9 @@ class SessionExtension(object):
def after_bulk_delete( self, session, query, query_context, result):
"""Execute after a bulk delete operation to the session.
-
+
This is called after a session.query(...).delete()
-
+
`query` is the query object that this delete operation was
called on. `query_context` was the query context object.
`result` is the result object returned from the bulk operation.
@@ -492,7 +492,7 @@ class AttributeExtension(object):
.. note:: :class:`.AttributeExtension` is deprecated. Please
refer to :func:`.event.listen` as well as
:class:`.AttributeEvents`.
-
+
:class:`.AttributeExtension` is used to listen for set,
remove, and append events on individual mapped attributes.
It is established on an individual mapped attribute using
@@ -502,16 +502,16 @@ class AttributeExtension(object):
from sqlalchemy.orm.interfaces import AttributeExtension
from sqlalchemy.orm import mapper, relationship, column_property
-
+
class MyAttrExt(AttributeExtension):
def append(self, state, value, initiator):
print "append event !"
return value
-
+
def set(self, state, value, oldvalue, initiator):
print "set event !"
return value
-
+
mapper(SomeClass, sometable, properties={
'foo':column_property(sometable.c.foo, extension=MyAttrExt()),
'bar':relationship(Bar, extension=MyAttrExt())
@@ -523,10 +523,10 @@ class AttributeExtension(object):
``value`` parameter. The returned value is used as the
effective value, and allows the extension to change what is
ultimately persisted.
-
+
AttributeExtension is assembled within the descriptors associated
with a mapped class.
-
+
"""
active_history = True
@@ -535,7 +535,7 @@ class AttributeExtension(object):
Note that ``active_history`` can also be set directly via
:func:`.column_property` and :func:`.relationship`.
-
+
"""
@classmethod
@@ -549,7 +549,7 @@ class AttributeExtension(object):
event.listen(self, 'set', listener.set,
active_history=listener.active_history,
raw=True, retval=True)
-
+
def append(self, state, value, initiator):
"""Receive a collection append event.
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index 06da99e07..e6166aa9e 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -6,7 +6,7 @@
"""Descriptor proprerties are more "auxilliary" properties
that exist as configurational elements, but don't participate
-as actively in the load/persist ORM loop.
+as actively in the load/persist ORM loop.
"""
@@ -20,23 +20,23 @@ properties = util.importlater('sqlalchemy.orm', 'properties')
class DescriptorProperty(MapperProperty):
""":class:`MapperProperty` which proxies access to a
user-defined descriptor."""
-
+
doc = None
-
+
def instrument_class(self, mapper):
prop = self
-
+
class _ProxyImpl(object):
accepts_scalar_loader = False
expire_missing = True
def __init__(self, key):
self.key = key
-
+
if hasattr(prop, 'get_history'):
def get_history(self, state, dict_, **kw):
return prop.get_history(state, dict_, **kw)
-
+
if self.descriptor is None:
desc = getattr(mapper.class_, self.key, None)
if mapper._is_userland_descriptor(desc):
@@ -55,7 +55,7 @@ class DescriptorProperty(MapperProperty):
fset=fset,
fdel=fdel,
)
-
+
proxy_attr = attributes.\
create_proxied_attribute(self.descriptor)\
(
@@ -68,10 +68,10 @@ class DescriptorProperty(MapperProperty):
proxy_attr.property = self
proxy_attr.impl = _ProxyImpl(self.key)
mapper.class_manager.instrument_attribute(self.key, proxy_attr)
-
+
class CompositeProperty(DescriptorProperty):
-
+
def __init__(self, class_, *columns, **kwargs):
self.columns = columns
self.composite_class = class_
@@ -84,32 +84,32 @@ class CompositeProperty(DescriptorProperty):
def instrument_class(self, mapper):
super(CompositeProperty, self).instrument_class(mapper)
self._setup_event_handlers()
-
+
def do_init(self):
"""Initialization which occurs after the :class:`.CompositeProperty`
has been associated with its parent mapper.
-
+
"""
self._setup_arguments_on_columns()
-
+
def _create_descriptor(self):
"""Create the Python descriptor that will serve as
the access point on instances of the mapped class.
-
+
"""
def fget(instance):
dict_ = attributes.instance_dict(instance)
-
+
# key not present, assume the columns aren't
# loaded. The load events will establish
# the item.
if self.key not in dict_:
for key in self._attribute_keys:
getattr(instance, key)
-
+
return dict_.get(self.key, None)
-
+
def fset(instance, value):
dict_ = attributes.instance_dict(instance)
state = attributes.instance_state(instance)
@@ -126,7 +126,7 @@ class CompositeProperty(DescriptorProperty):
self._attribute_keys,
value.__composite_values__()):
setattr(instance, key, value)
-
+
def fdel(instance):
state = attributes.instance_state(instance)
dict_ = attributes.instance_dict(instance)
@@ -135,13 +135,13 @@ class CompositeProperty(DescriptorProperty):
attr.dispatch.remove(state, previous, attr.impl)
for key in self._attribute_keys:
setattr(instance, key, None)
-
+
self.descriptor = property(fget, fset, fdel)
-
+
def _setup_arguments_on_columns(self):
"""Propagate configuration arguments made on this composite
to the target columns, for those that apply.
-
+
"""
for col in self.columns:
prop = self.parent._columntoproperty[col]
@@ -153,35 +153,35 @@ class CompositeProperty(DescriptorProperty):
def _setup_event_handlers(self):
"""Establish events that populate/expire the composite attribute."""
-
+
def load_handler(state, *args):
dict_ = state.dict
-
+
if self.key in dict_:
return
-
+
# if column elements aren't loaded, skip.
# __get__() will initiate a load for those
# columns
for k in self._attribute_keys:
if k not in dict_:
return
-
+
dict_[self.key] = self.composite_class(
*[state.dict[key] for key in
self._attribute_keys]
)
-
+
def expire_handler(state, keys):
if keys is None or set(self._attribute_keys).intersection(keys):
state.dict.pop(self.key, None)
-
+
def insert_update_handler(mapper, connection, state):
state.dict[self.key] = self.composite_class(
*[state.dict.get(key, None) for key in
self._attribute_keys]
)
-
+
event.listen(self.parent, 'after_insert',
insert_update_handler, raw=True)
event.listen(self.parent, 'after_update',
@@ -189,35 +189,35 @@ class CompositeProperty(DescriptorProperty):
event.listen(self.parent, 'load', load_handler, raw=True)
event.listen(self.parent, 'refresh', load_handler, raw=True)
event.listen(self.parent, "expire", expire_handler, raw=True)
-
+
# TODO: need a deserialize hook here
-
+
@util.memoized_property
def _attribute_keys(self):
return [
self.parent._columntoproperty[col].key
for col in self.columns
]
-
+
def get_history(self, state, dict_, **kw):
"""Provided for userland code that uses attributes.get_history()."""
-
+
added = []
deleted = []
-
+
has_history = False
for col in self.columns:
key = self.parent._columntoproperty[col].key
hist = state.manager[key].impl.get_history(state, dict_)
if hist.has_changes():
has_history = True
-
+
added.extend(hist.non_deleted())
if hist.deleted:
deleted.extend(hist.deleted)
else:
deleted.append(None)
-
+
if has_history:
return attributes.History(
[self.composite_class(*added)],
@@ -236,7 +236,7 @@ class CompositeProperty(DescriptorProperty):
def __init__(self, prop, adapter=None):
self.prop = prop
self.adapter = adapter
-
+
def __clause_element__(self):
if self.adapter:
# TODO: test coverage for adapted composite comparison
@@ -244,9 +244,9 @@ class CompositeProperty(DescriptorProperty):
*[self.adapter(x) for x in self.prop.columns])
else:
return expression.ClauseList(*self.prop.columns)
-
+
__hash__ = None
-
+
def __eq__(self, other):
if other is None:
values = [None] * len(self.prop.columns)
@@ -254,7 +254,7 @@ class CompositeProperty(DescriptorProperty):
values = other.__composite_values__()
return sql.and_(
*[a==b for a, b in zip(self.prop.columns, values)])
-
+
def __ne__(self, other):
return sql.not_(self.__eq__(other))
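The comparator above expands a composite equality test into an AND of the
underlying column comparisons; a hedged sketch, assuming a hypothetical
``Vertex.start`` composite built from ``Point`` over columns x1/y1::

    session.query(Vertex).filter(Vertex.start == Point(3, 4))
    # renders roughly: WHERE vertices.x1 = :x1_1 AND vertices.y1 = :y1_1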
@@ -280,14 +280,14 @@ class ConcreteInheritedProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
comparator_callable = None
-
+
for m in self.parent.iterate_to_root():
p = m._props[self.key]
if not isinstance(p, ConcreteInheritedProperty):
comparator_callable = p.comparator_factory
break
return comparator_callable
-
+
def __init__(self):
def warn():
raise AttributeError("Concrete %s does not implement "
@@ -305,8 +305,8 @@ class ConcreteInheritedProperty(DescriptorProperty):
return self.descriptor
warn()
self.descriptor = NoninheritedConcreteProp()
-
-
+
+
class SynonymProperty(DescriptorProperty):
def __init__(self, name, map_column=None,
@@ -317,16 +317,16 @@ class SynonymProperty(DescriptorProperty):
self.descriptor = descriptor
self.comparator_factory = comparator_factory
self.doc = doc or (descriptor and descriptor.__doc__) or None
-
+
util.set_creation_order(self)
-
+
# TODO: when initialized, check _proxied_property,
# emit a warning if its not a column-based property
-
+
@util.memoized_property
def _proxied_property(self):
return getattr(self.parent.class_, self.name).property
-
+
def _comparator_factory(self, mapper):
prop = self._proxied_property
@@ -361,9 +361,9 @@ class SynonymProperty(DescriptorProperty):
init=init,
setparent=True)
p._mapped_by_synonym = self.key
-
+
self.parent = parent
-
+
class ComparableProperty(DescriptorProperty):
"""Instruments a Python property for use in query expressions."""
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 7d12900cc..8dbdd8ffe 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -41,7 +41,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
supports_population = False
-
+
def __init__(self, class_, key, typecallable,
dispatch,
target_mapper, order_by, query_class=None, **kw):
@@ -131,12 +131,12 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
def set_committed_value(self, state, dict_, value):
raise NotImplementedError("Dynamic attributes don't support "
"collection population.")
-
+
def get_history(self, state, dict_, passive=False):
c = self._get_collection_history(state, passive)
return attributes.History(c.added_items, c.unchanged_items,
c.deleted_items)
-
+
def get_all_pending(self, state, dict_):
c = self._get_collection_history(state, True)
return [
@@ -144,7 +144,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
for x in
c.added_items + c.unchanged_items + c.deleted_items
]
-
+
def _get_collection_history(self, state, passive=False):
if self.key in state.committed_state:
c = state.committed_state[self.key]
@@ -265,10 +265,10 @@ class AppenderMixin(object):
query = self.query_class(self.attr.target_mapper, session=sess)
else:
query = sess.query(self.attr.target_mapper)
-
+
query._criterion = self._criterion
query._order_by = self._order_by
-
+
return query
def append(self, item):
@@ -307,4 +307,4 @@ class CollectionHistory(object):
self.deleted_items = []
self.added_items = []
self.unchanged_items = []
-
+
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 5fe795db2..761ba315d 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -12,20 +12,20 @@ import inspect
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
-
+
The listeners here support being established against
any new style class, that is, any object that is a subclass
of 'type'. Events will then be fired off
- against that class as well as all subclasses.
+ against that class as well as all subclasses.
'type' itself is also accepted as a target
in which case the events fire for all classes.
-
+
"""
-
+
@classmethod
def _accept_with(cls, target):
from sqlalchemy.orm.instrumentation import instrumentation_registry
-
+
if isinstance(target, type):
return instrumentation_registry
else:
@@ -41,36 +41,36 @@ class InstrumentationEvents(event.Events):
def class_instrument(self, cls):
"""Called after the given class is instrumented.
-
+
To get at the :class:`.ClassManager`, use
:func:`.manager_of_class`.
-
+
"""
def class_uninstrument(self, cls):
"""Called before the given class is uninstrumented.
-
+
To get at the :class:`.ClassManager`, use
:func:`.manager_of_class`.
-
+
"""
-
-
+
+
def attribute_instrument(self, cls, key, inst):
"""Called when an attribute is instrumented."""
class InstanceEvents(event.Events):
"""Define events specific to object lifecycle.
-
+
Instance-level events don't automatically propagate their associations
to subclasses.
-
+
"""
@classmethod
def _accept_with(cls, target):
from sqlalchemy.orm.instrumentation import ClassManager, manager_of_class
from sqlalchemy.orm import Mapper, mapper
-
+
if isinstance(target, ClassManager):
return target
elif isinstance(target, Mapper):
@@ -85,7 +85,7 @@ class InstanceEvents(event.Events):
if manager:
return manager
return None
-
+
@classmethod
def _listen(cls, target, identifier, fn, raw=False, propagate=False):
if not raw:
@@ -98,7 +98,7 @@ class InstanceEvents(event.Events):
if propagate:
for mgr in target.subclass_managers(True):
event.Events._listen(mgr, identifier, fn, True)
-
+
@classmethod
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of instance events not yet implemented")
@@ -107,26 +107,26 @@ class InstanceEvents(event.Events):
"""Called when the first instance of a particular mapping is called.
"""
-
+
def init(self, target, args, kwargs):
"""Receive an instance when it's constructor is called.
-
+
This method is only called during a userland construction of
an object. It is not called when an object is loaded from the
database.
"""
-
+
def init_failure(self, target, args, kwargs):
"""Receive an instance when it's constructor has been called,
and raised an exception.
-
+
This method is only called during a userland construction of
an object. It is not called when an object is loaded from the
database.
"""
-
+
def load(self, target, context):
"""Receive an object instance after it has been created via
``__new__``, and after initial attribute population has
@@ -153,7 +153,7 @@ class InstanceEvents(event.Events):
def refresh(self, target, context, attrs):
"""Receive an object instance after one or more attributes have
been refreshed from a query.
-
+
:param target: the mapped instance. If
the event is configured with ``raw=True``, this will
instead be the :class:`.InstanceState` state-management
@@ -163,13 +163,13 @@ class InstanceEvents(event.Events):
:param attrs: iterable collection of attribute names which
were populated, or None if all column-mapped, non-deferred
attributes were populated.
-
+
"""
-
+
def expire(self, target, attrs):
"""Receive an object instance after its attributes or some subset
have been expired.
-
+
'attrs' is a list of attribute names. If None, the entire
state was expired.
@@ -180,27 +180,27 @@ class InstanceEvents(event.Events):
:param attrs: iterable collection of attribute
names which were expired, or None if all attributes were
expired.
-
+
"""
-
+
def resurrect(self, target):
"""Receive an object instance as it is 'resurrected' from
garbage collection, which occurs when a "dirty" state falls
out of scope.
-
+
:param target: the mapped instance. If
the event is configured with ``raw=True``, this will
instead be the :class:`.InstanceState` state-management
object associated with the instance.
-
+
"""
-
+
class MapperEvents(event.Events):
"""Define events specific to mappings.
e.g.::
-
+
from sqlalchemy import event
def my_before_insert_listener(mapper, connection, target):
@@ -209,7 +209,7 @@ class MapperEvents(event.Events):
target.calculated_value = connection.scalar(
"select my_special_function(%d)"
% target.special_number)
-
+
# associate the listener function with SomeMappedClass,
# to execute during the "before_insert" hook
event.listen(SomeMappedClass, 'before_insert', my_before_insert_listener)
@@ -221,13 +221,13 @@ class MapperEvents(event.Events):
for global event reception::
from sqlalchemy.orm import mapper
-
+
def some_listener(mapper, connection, target):
log.debug("Instance %s being inserted" % target)
-
+
# attach to all mappers
event.listen(mapper, 'before_insert', some_listener)
-
+
Mapper events provide hooks into critical sections of the
mapper, including those related to object instrumentation,
object loading, and object persistence. In particular, the
@@ -240,10 +240,10 @@ class MapperEvents(event.Events):
:meth:`.SessionEvents.after_flush` methods as more
flexible and user-friendly hooks in which to apply
additional database state during a flush.
-
+
When using :class:`.MapperEvents`, several modifiers are
available to the :func:`.event.listen` function.
-
+
:param propagate=False: When True, the event listener should
be applied to all inheriting mappers as well as the
mapper which is the target of this listener.
@@ -256,7 +256,7 @@ class MapperEvents(event.Events):
control subsequent event propagation, or to otherwise alter
the operation in progress by the mapper. Possible return
values are:
-
+
* ``sqlalchemy.orm.interfaces.EXT_CONTINUE`` - continue event
processing normally.
* ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent
@@ -264,7 +264,7 @@ class MapperEvents(event.Events):
* other values - the return value specified by specific listeners,
such as :meth:`~.MapperEvents.translate_row` or
:meth:`~.MapperEvents.create_instance`.
-
+
"""
@classmethod
@@ -279,7 +279,7 @@ class MapperEvents(event.Events):
return class_mapper(target)
else:
return target
-
+
@classmethod
def _listen(cls, target, identifier, fn,
raw=False, retval=False, propagate=False):
@@ -292,7 +292,7 @@ class MapperEvents(event.Events):
target_index = inspect.getargspec(meth)[0].index('target') - 1
except ValueError:
target_index = None
-
+
wrapped_fn = fn
def wrap(*arg, **kw):
if not raw and target_index is not None:
@@ -304,42 +304,42 @@ class MapperEvents(event.Events):
else:
return wrapped_fn(*arg, **kw)
fn = wrap
-
+
if propagate:
for mapper in target.self_and_descendants:
event.Events._listen(mapper, identifier, fn, propagate=True)
else:
event.Events._listen(target, identifier, fn)
-
+
def instrument_class(self, mapper, class_):
"""Receive a class when the mapper is first constructed,
before instrumentation is applied to the mapped class.
-
+
This event is the earliest phase of mapper construction.
Most attributes of the mapper are not yet initialized.
-
+
This listener can generally only be applied to the :class:`.Mapper`
class overall.
-
+
:param mapper: the :class:`.Mapper` which is the target
of this event.
:param class\_: the mapped class.
-
+
"""
-
+
def mapper_configured(self, mapper, class_):
"""Called when the mapper for the class is fully configured.
This event is the latest phase of mapper construction.
The mapper should be in its final state.
-
+
:param mapper: the :class:`.Mapper` which is the target
of this event.
:param class\_: the mapped class.
-
+
"""
# TODO: need coverage for this event
-
+
def translate_row(self, mapper, context, row):
"""Perform pre-processing on the given result row and return a
new row instance.
@@ -352,7 +352,7 @@ class MapperEvents(event.Events):
object which contains mapped columns as keys. The
returned object should also be a dictionary-like object
which recognizes mapped columns as keys.
-
+
:param mapper: the :class:`.Mapper` which is the target
of this event.
:param context: the :class:`.QueryContext`, which includes
@@ -364,8 +364,8 @@ class MapperEvents(event.Events):
:return: When configured with ``retval=True``, the function
should return a dictionary-like row object, or ``EXT_CONTINUE``,
indicating the original row should be used.
-
-
+
+
"""
def create_instance(self, mapper, context, row, class_):
@@ -396,10 +396,10 @@ class MapperEvents(event.Events):
result, **flags):
"""Receive an object instance before that instance is appended
to a result list.
-
+
This is a rarely used hook which can be used to alter
the construction of a result list returned by :class:`.Query`.
-
+
:param mapper: the :class:`.Mapper` which is the target
of this event.
:param context: the :class:`.QueryContext`, which includes
@@ -435,7 +435,7 @@ class MapperEvents(event.Events):
unloaded attributes to be populated. The method may be called
many times for a single instance, as multiple result rows are
used to populate eagerly loaded collections.
-
+
Most usages of this hook are obsolete. For a
generic "object has been newly created from a row" hook, use
:meth:`.InstanceEvents.load`.
@@ -462,12 +462,12 @@ class MapperEvents(event.Events):
def before_insert(self, mapper, connection, target):
"""Receive an object instance before an INSERT statement
is emitted corresponding to that instance.
-
+
This event is used to modify local, non-object related
attributes on the instance before an INSERT occurs, as well
as to emit additional SQL statements on the given
- connection.
-
+ connection.
+
The event is often called for a batch of objects of the
same class before their INSERT statements are emitted at
once in a later step. In the extremely rare case that
@@ -476,7 +476,7 @@ class MapperEvents(event.Events):
batches of instances to be broken up into individual
(and more poorly performing) event->persist->event
steps.
-
+
Handlers should **not** modify any attributes which are
mapped by :func:`.relationship`, nor should they attempt
to make any modifications to the :class:`.Session` in
@@ -502,11 +502,11 @@ class MapperEvents(event.Events):
def after_insert(self, mapper, connection, target):
"""Receive an object instance after an INSERT statement
is emitted corresponding to that instance.
-
+
This event is used to modify in-Python-only
state on the instance after an INSERT occurs, as well
as to emit additional SQL statements on the given
- connection.
+ connection.
The event is often called for a batch of objects of the
same class after their INSERT statements have been
@@ -528,7 +528,7 @@ class MapperEvents(event.Events):
instead be the :class:`.InstanceState` state-management
object associated with the instance.
:return: No return value is supported by this event.
-
+
"""
def before_update(self, mapper, connection, target):
@@ -538,7 +538,7 @@ class MapperEvents(event.Events):
This event is used to modify local, non-object related
attributes on the instance before an UPDATE occurs, as well
as to emit additional SQL statements on the given
- connection.
+ connection.
This method is called for all instances that are
marked as "dirty", *even those which have no net changes
@@ -553,7 +553,7 @@ class MapperEvents(event.Events):
issued, although you can affect the outcome here by
modifying attributes so that a net change in value does
exist.
-
+
To detect if the column-based attributes on the object have net
changes, and will therefore generate an UPDATE statement, use
``object_session(instance).is_modified(instance,
@@ -567,7 +567,7 @@ class MapperEvents(event.Events):
batches of instances to be broken up into individual
(and more poorly performing) event->persist->event
steps.
-
+
Handlers should **not** modify any attributes which are
mapped by :func:`.relationship`, nor should they attempt
to make any modifications to the :class:`.Session` in
@@ -596,7 +596,7 @@ class MapperEvents(event.Events):
This event is used to modify in-Python-only
state on the instance after an UPDATE occurs, as well
as to emit additional SQL statements on the given
- connection.
+ connection.
This method is called for all instances that are
marked as "dirty", *even those which have no net changes
@@ -610,7 +610,7 @@ class MapperEvents(event.Events):
being sent to :meth:`~.MapperEvents.after_update` is
*not* a guarantee that an UPDATE statement has been
issued.
-
+
To detect if the column-based attributes on the object have net
changes, and therefore resulted in an UPDATE statement, use
``object_session(instance).is_modified(instance,
@@ -624,7 +624,7 @@ class MapperEvents(event.Events):
batches of instances to be broken up into individual
(and more poorly performing) event->persist->event
steps.
-
+
:param mapper: the :class:`.Mapper` which is the target
of this event.
:param connection: the :class:`.Connection` being used to
@@ -636,21 +636,21 @@ class MapperEvents(event.Events):
instead be the :class:`.InstanceState` state-management
object associated with the instance.
:return: No return value is supported by this event.
-
+
"""
def before_delete(self, mapper, connection, target):
"""Receive an object instance before a DELETE statement
is emitted corresponding to that instance.
-
+
This event is used to emit additional SQL statements on
the given connection as well as to perform application
specific bookkeeping related to a deletion event.
-
+
The event is often called for a batch of objects of the
same class before their DELETE statements are emitted at
once in a later step.
-
+
Handlers should **not** modify any attributes which are
mapped by :func:`.relationship`, nor should they attempt
to make any modifications to the :class:`.Session` in
@@ -670,17 +670,17 @@ class MapperEvents(event.Events):
instead be the :class:`.InstanceState` state-management
object associated with the instance.
:return: No return value is supported by this event.
-
+
"""
def after_delete(self, mapper, connection, target):
"""Receive an object instance after a DELETE statement
has been emitted corresponding to that instance.
-
+
This event is used to emit additional SQL statements on
the given connection as well as to perform application
specific bookkeeping related to a deletion event.
-
+
The event is often called for a batch of objects of the
same class after their DELETE statements have been emitted at
once in a previous step.
@@ -696,36 +696,36 @@ class MapperEvents(event.Events):
instead be the :class:`.InstanceState` state-management
object associated with the instance.
:return: No return value is supported by this event.
-
+
"""
@classmethod
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of mapper events not yet implemented")
-
+
class SessionEvents(event.Events):
"""Define events specific to :class:`.Session` lifecycle.
-
+
e.g.::
-
+
from sqlalchemy import event
from sqlalchemy.orm import sessionmaker
-
+
def my_before_commit(session):
print "before commit!"
-
+
Session = sessionmaker()
-
+
event.listen(Session, "before_commit", my_before_commit)
-
+
The :func:`~.event.listen` function will accept
:class:`.Session` objects as well as the return result
of :func:`.sessionmaker` and :func:`.scoped_session`.
-
+
Additionally, it accepts the :class:`.Session` class which
will apply listeners to all :class:`.Session` instances
globally.
-
+
"""
@classmethod
@@ -748,39 +748,39 @@ class SessionEvents(event.Events):
return target
else:
return None
-
+
@classmethod
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of session events not yet implemented")
def before_commit(self, session):
"""Execute before commit is called.
-
+
Note that this may not be per-flush if a longer running
transaction is ongoing."""
def after_commit(self, session):
"""Execute after a commit has occured.
-
+
Note that this may not be per-flush if a longer running
transaction is ongoing."""
def after_rollback(self, session):
"""Execute after a rollback has occured.
-
+
Note that this may not be per-flush if a longer running
transaction is ongoing."""
def before_flush( self, session, flush_context, instances):
"""Execute before flush process has started.
-
+
`instances` is an optional list of objects which were passed to
the ``flush()`` method. """
def after_flush(self, session, flush_context):
"""Execute after flush has completed, but before commit has been
called.
-
+
Note that the session's state is still in pre-flush, i.e. 'new',
'dirty', and 'deleted' lists still show pre-flush state as well
as the history settings on instance attributes."""
@@ -788,7 +788,7 @@ class SessionEvents(event.Events):
def after_flush_postexec(self, session, flush_context):
"""Execute after flush has completed, and after the post-exec
state occurs.
-
+
This will be when the 'new', 'dirty', and 'deleted' lists are in
their final state. An actual commit() may or may not have
occurred, depending on whether or not the flush started its own
@@ -796,20 +796,20 @@ class SessionEvents(event.Events):
def after_begin( self, session, transaction, connection):
"""Execute after a transaction is begun on a connection
-
+
`transaction` is the SessionTransaction. This method is called
after an engine level transaction is begun on a connection. """
def after_attach(self, session, instance):
"""Execute after an instance is attached to a session.
-
+
This is called after an add, delete or merge. """
def after_bulk_update( self, session, query, query_context, result):
"""Execute after a bulk update operation to the session.
-
+
This is called after a session.query(...).update()
-
+
`query` is the query object that this update operation was
called on. `query_context` was the query context object.
`result` is the result object returned from the bulk operation.
@@ -817,9 +817,9 @@ class SessionEvents(event.Events):
def after_bulk_delete( self, session, query, query_context, result):
"""Execute after a bulk delete operation to the session.
-
+
This is called after a session.query(...).delete()
-
+
`query` is the query object that this delete operation was
called on. `query_context` was the query context object.
`result` is the result object returned from the bulk operation.
@@ -828,37 +828,37 @@ class SessionEvents(event.Events):
class AttributeEvents(event.Events):
"""Define events for object attributes.
-
+
These are typically defined on the class-bound descriptor for the
target class.
e.g.::
-
+
from sqlalchemy import event
-
+
def my_append_listener(target, value, initiator):
print "received append event for target: %s" % target
-
+
event.listen(MyClass.collection, 'append', my_append_listener)
-
+
Listeners have the option to return a possibly modified version
of the value, when the ``retval=True`` flag is passed
to :func:`~.event.listen`::
-
+
def validate_phone(target, value, oldvalue, initiator):
"Strip non-numeric characters from a phone number"
-
+
return re.sub(r'[^0-9]', '', value)
-
+
# setup listener on UserContact.phone attribute, instructing
# it to use the return value
listen(UserContact.phone, 'set', validate_phone, retval=True)
-
+
A validation function like the above can also raise an exception
such as :class:`ValueError` to halt the operation.
-
+
Several modifiers are available to the :func:`~.event.listen` function.
-
+
:param active_history=False: When True, indicates that the
"set" event would like to receive the "old" value being
replaced unconditionally, even if this requires firing off
@@ -879,8 +879,8 @@ class AttributeEvents(event.Events):
listening must return the "value" argument from the
function. This gives the listening function the opportunity
to change the value that is ultimately used for a "set"
- or "append" event.
-
+ or "append" event.
+
"""
@classmethod
@@ -891,17 +891,17 @@ class AttributeEvents(event.Events):
return getattr(target.parent.class_, target.key)
else:
return target
-
+
@classmethod
def _listen(cls, target, identifier, fn, active_history=False,
raw=False, retval=False,
propagate=False):
if active_history:
target.dispatch._active_history = True
-
+
# TODO: for removal, need to package the identity
# of the wrapper with the original function.
-
+
if not raw or not retval:
orig_fn = fn
def wrap(target, value, *arg):
@@ -913,21 +913,21 @@ class AttributeEvents(event.Events):
else:
return orig_fn(target, value, *arg)
fn = wrap
-
+
event.Events._listen(target, identifier, fn, propagate)
-
+
if propagate:
from sqlalchemy.orm.instrumentation import manager_of_class
-
+
manager = manager_of_class(target.class_)
-
+
for mgr in manager.subclass_managers(True):
event.Events._listen(mgr[target.key], identifier, fn, True)
-
+
@classmethod
def _remove(cls, identifier, target, fn):
raise NotImplementedError("Removal of attribute events not yet implemented")
-
+
def append(self, target, value, initiator):
"""Receive a collection append event.
@@ -942,7 +942,7 @@ class AttributeEvents(event.Events):
which initiated this event.
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
-
+
"""
def remove(self, target, value, initiator):
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index a180f3725..b86e5c7c3 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -14,22 +14,22 @@ NO_STATE = (AttributeError, KeyError)
class StaleDataError(sa.exc.SQLAlchemyError):
"""An operation encountered database state that is unaccounted for.
-
+
Two conditions cause this to happen:
-
+
* A flush may have attempted to update or delete rows
and an unexpected number of rows were matched during
the UPDATE or DELETE statement. Note that when
version_id_col is used, rows in UPDATE or DELETE statements
are also matched against the current known version
identifier.
-
+
* A mapped object with version_id_col was refreshed,
and the version number coming back from the database does
not match that of the object itself.
-
+
"""
-
+
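A hedged sketch of the ``version_id_col`` configuration referenced above,
assuming ``users_table`` carries a ``version_id`` column::

    mapper(User, users_table,
            version_id_col=users_table.c.version_id)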
ConcurrentModificationError = StaleDataError
@@ -43,7 +43,7 @@ class UnmappedError(sa.exc.InvalidRequestError):
class DetachedInstanceError(sa.exc.SQLAlchemyError):
"""An attempt to access unloaded attributes on a
mapped instance that is detached."""
-
+
class UnmappedInstanceError(UnmappedError):
"""An mapping operation was requested for an unknown instance."""
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 83687d682..b3a7f8bc3 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -15,30 +15,30 @@ class IdentityMap(dict):
self._mutable_attrs = set()
self._modified = set()
self._wr = weakref.ref(self)
-
+
def replace(self, state):
raise NotImplementedError()
-
+
def add(self, state):
raise NotImplementedError()
-
+
def remove(self, state):
raise NotImplementedError()
-
+
def update(self, dict):
raise NotImplementedError("IdentityMap uses add() to insert data")
-
+
def clear(self):
raise NotImplementedError("IdentityMap uses remove() to remove data")
-
+
def _manage_incoming_state(self, state):
state._instance_dict = self._wr
-
+
if state.modified:
- self._modified.add(state)
+ self._modified.add(state)
if state.manager.mutable_attributes:
self._mutable_attrs.add(state)
-
+
def _manage_removed_state(self, state):
del state._instance_dict
self._mutable_attrs.discard(state)
@@ -50,7 +50,7 @@ class IdentityMap(dict):
def check_modified(self):
"""return True if any InstanceStates present have been marked as 'modified'."""
-
+
if self._modified:
return True
else:
@@ -58,10 +58,10 @@ class IdentityMap(dict):
if state.modified:
return True
return False
-
+
def has_key(self, key):
return key in self
-
+
def popitem(self):
raise NotImplementedError("IdentityMap uses remove() to remove data")
@@ -79,7 +79,7 @@ class IdentityMap(dict):
def __delitem__(self, key):
raise NotImplementedError("IdentityMap uses remove() to remove data")
-
+
class WeakInstanceDict(IdentityMap):
def __init__(self):
IdentityMap.__init__(self)
@@ -107,10 +107,10 @@ class WeakInstanceDict(IdentityMap):
return False
else:
return o is not None
-
+
def contains_state(self, state):
return dict.get(self, state.key) is state
-
+
def replace(self, state):
if dict.__contains__(self, state.key):
existing = dict.__getitem__(self, state.key)
@@ -118,7 +118,7 @@ class WeakInstanceDict(IdentityMap):
self._manage_removed_state(existing)
else:
return
-
+
dict.__setitem__(self, state.key, state)
self._manage_incoming_state(state)
@@ -146,7 +146,7 @@ class WeakInstanceDict(IdentityMap):
def remove_key(self, key):
state = dict.__getitem__(self, key)
self.remove(state)
-
+
def remove(self, state):
self._remove_mutex.acquire()
try:
@@ -156,14 +156,14 @@ class WeakInstanceDict(IdentityMap):
"identity map" % state)
finally:
self._remove_mutex.release()
-
+
self._manage_removed_state(state)
-
+
def discard(self, state):
if self.contains_state(state):
dict.__delitem__(self, state.key)
self._manage_removed_state(state)
-
+
def get(self, key, default=None):
if not dict.__contains__(self, key):
return default
@@ -178,7 +178,7 @@ class WeakInstanceDict(IdentityMap):
def items(self):
# Py2K
return list(self.iteritems())
-
+
def iteritems(self):
# end Py2K
self._remove_mutex.acquire()
@@ -192,7 +192,7 @@ class WeakInstanceDict(IdentityMap):
return iter(result)
finally:
self._remove_mutex.release()
-
+
def values(self):
# Py2K
return list(self.itervalues())
@@ -210,29 +210,29 @@ class WeakInstanceDict(IdentityMap):
return iter(result)
finally:
self._remove_mutex.release()
-
+
def all_states(self):
self._remove_mutex.acquire()
try:
# Py3K
# return list(dict.values(self))
-
+
# Py2K
return dict.values(self)
# end Py2K
finally:
self._remove_mutex.release()
-
+
def prune(self):
return 0
-
+
class StrongInstanceDict(IdentityMap):
def all_states(self):
return [attributes.instance_state(o) for o in self.itervalues()]
-
+
def contains_state(self, state):
return state.key in self and attributes.instance_state(self[state.key]) is state
-
+
def replace(self, state):
if dict.__contains__(self, state.key):
existing = dict.__getitem__(self, state.key)
@@ -255,26 +255,26 @@ class StrongInstanceDict(IdentityMap):
else:
dict.__setitem__(self, state.key, state.obj())
self._manage_incoming_state(state)
-
+
def remove(self, state):
if attributes.instance_state(dict.pop(self, state.key)) \
is not state:
raise AssertionError('State %s is not present in this '
'identity map' % state)
self._manage_removed_state(state)
-
+
def discard(self, state):
if self.contains_state(state):
dict.__delitem__(self, state.key)
self._manage_removed_state(state)
-
+
def remove_key(self, key):
state = attributes.instance_state(dict.__getitem__(self, key))
self.remove(state)
def prune(self):
"""prune unreferenced, non-dirty states."""
-
+
ref_count = len(self)
dirty = [s.obj() for s in self.all_states() if s.modified]
@@ -286,4 +286,4 @@ class StrongInstanceDict(IdentityMap):
dict.update(self, keepers)
self.modified = bool(dirty)
return ref_count - len(self)
-
+
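A hedged usage sketch; pruning applies to the strong identity map only,
i.e. a :class:`.Session` created with ``weak_identity_map=False``::

    session = Session(weak_identity_map=False)
    # ... load a large number of objects ...
    removed = session.prune()   # drops clean, otherwise-unreferenced objects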
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 3ba9190c0..aa051490c 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -78,9 +78,9 @@ class ClassManager(dict):
STATE_ATTR = '_sa_instance_state'
deferred_scalar_loader = None
-
+
original_init = object.__init__
-
+
def __init__(self, class_):
self.class_ = class_
self.factory = None # where we came from, for inheritance bookkeeping
@@ -101,30 +101,30 @@ class ClassManager(dict):
self.manage()
self._instrument_init()
-
+
dispatch = event.dispatcher(events.InstanceEvents)
-
+
@property
def is_mapped(self):
return 'mapper' in self.__dict__
-
+
@util.memoized_property
def mapper(self):
raise exc.UnmappedClassError(self.class_)
-
+
def _attr_has_impl(self, key):
"""Return True if the given attribute is fully initialized.
-
+
i.e. has an impl.
"""
-
+
return key in self and self[key].impl is not None
-
+
def _configure_create_arguments(self,
_source=None,
deferred_scalar_loader=None):
"""Accept extra **kw arguments passed to create_manager_for_cls.
-
+
The current contract of ClassManager and other managers is that they
take a single "cls" argument in their constructor (as per
test/orm/instrumentation.py InstrumentationCollisionTest). This
@@ -133,30 +133,30 @@ class ClassManager(dict):
ClassManager-like instances. So create_manager_for_cls sends
in ClassManager-specific arguments via this method once the
non-proxied ClassManager is available.
-
+
"""
if _source:
deferred_scalar_loader = _source.deferred_scalar_loader
if deferred_scalar_loader:
self.deferred_scalar_loader = deferred_scalar_loader
-
+
def _subclass_manager(self, cls):
"""Create a new ClassManager for a subclass of this ClassManager's
class.
-
+
This is called automatically when attributes are instrumented so that
the attributes can be propagated to subclasses against their own
class-local manager, without the need for mappers etc. to have already
pre-configured managers for the full class hierarchy. Mappers
can post-configure the auto-generated ClassManager when needed.
-
+
"""
manager = manager_of_class(cls)
if manager is None:
manager = _create_manager_for_cls(cls, _source=self)
return manager
-
+
def _instrument_init(self):
# TODO: self.class_.__init__ is often the already-instrumented
# __init__ from an instrumented superclass. We still need to make
@@ -166,12 +166,12 @@ class ClassManager(dict):
self.original_init = self.class_.__init__
self.new_init = _generate_init(self.class_, self)
self.install_member('__init__', self.new_init)
-
+
def _uninstrument_init(self):
if self.new_init:
self.uninstall_member('__init__')
self.new_init = None
-
+
@util.memoized_property
def _state_constructor(self):
self.dispatch.first_init(self, self.class_)
@@ -179,15 +179,15 @@ class ClassManager(dict):
return state.MutableAttrInstanceState
else:
return state.InstanceState
-
+
def manage(self):
"""Mark this instance as the manager for its class."""
-
+
setattr(self.class_, self.MANAGER_ATTR, self)
def dispose(self):
"""Dissasociate this manager from its class."""
-
+
delattr(self.class_, self.MANAGER_ATTR)
def manager_getter(self):
@@ -201,7 +201,7 @@ class ClassManager(dict):
self.local_attrs[key] = inst
self.install_descriptor(key, inst)
self[key] = inst
-
+
for cls in self.class_.__subclasses__():
manager = self._subclass_manager(cls)
manager.instrument_attribute(key, inst, True)
@@ -214,11 +214,11 @@ class ClassManager(dict):
if recursive:
for m in mgr.subclass_managers(True):
yield m
-
+
def post_configure_attribute(self, key):
instrumentation_registry.dispatch.\
attribute_instrument(self.class_, key, self[key])
-
+
def uninstrument_attribute(self, key, propagated=False):
if key not in self:
return
@@ -238,12 +238,12 @@ class ClassManager(dict):
def unregister(self):
"""remove all instrumentation established by this ClassManager."""
-
+
self._uninstrument_init()
self.mapper = self.dispatch = None
self.info.clear()
-
+
for key in list(self):
if key in self.local_attrs:
self.uninstrument_attribute(key)
@@ -304,15 +304,15 @@ class ClassManager(dict):
def setup_instance(self, instance, state=None):
setattr(instance, self.STATE_ATTR,
state or self._state_constructor(instance, self))
-
+
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
-
+
def _new_state_if_none(self, instance):
"""Install a default InstanceState if none is present.
A private convenience method used by the __init__ decorator.
-
+
"""
if hasattr(instance, self.STATE_ATTR):
return False
@@ -329,7 +329,7 @@ class ClassManager(dict):
state = self._state_constructor(instance, self)
setattr(instance, self.STATE_ATTR, state)
return state
-
+
def state_getter(self):
"""Return a (instance) -> InstanceState callable.
@@ -339,13 +339,13 @@ class ClassManager(dict):
"""
return attrgetter(self.STATE_ATTR)
-
+
def dict_getter(self):
return attrgetter('__dict__')
-
+
def has_state(self, instance):
return hasattr(instance, self.STATE_ATTR)
-
+
def has_parent(self, state, key, optimistic=False):
"""TODO"""
return self.get_impl(key).hasparent(state, optimistic=optimistic)
@@ -365,7 +365,7 @@ class _ClassInstrumentationAdapter(ClassManager):
self._adapted = override
self._get_state = self._adapted.state_getter(class_)
self._get_dict = self._adapted.dict_getter(class_)
-
+
ClassManager.__init__(self, class_, **kw)
def manage(self):
@@ -427,10 +427,10 @@ class _ClassInstrumentationAdapter(ClassManager):
def setup_instance(self, instance, state=None):
self._adapted.initialize_instance_dict(self.class_, instance)
-
+
if state is None:
state = self._state_constructor(instance, self)
-
+
# the given instance is assumed to have no state
self._adapted.install_state(self.class_, instance, state)
return state
@@ -445,7 +445,7 @@ class _ClassInstrumentationAdapter(ClassManager):
return False
else:
return True
-
+
def state_getter(self):
return self._get_state
@@ -454,7 +454,7 @@ class _ClassInstrumentationAdapter(ClassManager):
def register_class(class_, **kw):
"""Register class instrumentation.
-
+
Returns the existing or newly created class manager.
"""
@@ -462,31 +462,31 @@ def register_class(class_, **kw):
if manager is None:
manager = _create_manager_for_cls(class_, **kw)
return manager
-
+
def unregister_class(class_):
"""Unregister class instrumentation."""
-
+
instrumentation_registry.unregister(class_)
def is_instrumented(instance, key):
"""Return True if the given attribute on the given instance is
instrumented by the attributes package.
-
+
This function may be used regardless of instrumentation
applied directly to the class, i.e. no descriptors are required.
-
+
"""
return manager_of_class(instance.__class__).\
is_instrumented(key, search=True)
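A hedged usage sketch, assuming ``some_user`` is an instance of a mapped
class with a ``name`` attribute::

    from sqlalchemy.orm.instrumentation import is_instrumented

    if is_instrumented(some_user, 'name'):
        print "'name' is instrumented"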
class InstrumentationRegistry(object):
"""Private instrumentation registration singleton.
-
+
All classes are routed through this registry
when first instrumented; however, the InstrumentationRegistry
is not actually needed unless custom ClassManagers are in use.
-
+
"""
_manager_finders = weakref.WeakKeyDictionary()
@@ -518,23 +518,23 @@ class InstrumentationRegistry(object):
manager = factory(class_)
if not isinstance(manager, ClassManager):
manager = _ClassInstrumentationAdapter(class_, manager)
-
+
if factory != ClassManager and not self._extended:
# somebody invoked a custom ClassManager.
# reinstall global "getter" functions with the more
# expensive ones.
self._extended = True
_install_lookup_strategy(self)
-
+
manager._configure_create_arguments(**kw)
manager.factory = factory
self._manager_finders[class_] = manager.manager_getter()
self._state_finders[class_] = manager.state_getter()
self._dict_finders[class_] = manager.dict_getter()
-
+
self.dispatch.class_instrument(class_)
-
+
return manager
def _collect_management_factories_for(self, cls):
@@ -597,7 +597,7 @@ class InstrumentationRegistry(object):
except KeyError:
raise AttributeError("%r is not instrumented" %
instance.__class__)
-
+
def unregister(self, class_):
if class_ in self._manager_finders:
manager = self.manager_of_class(class_)
@@ -609,7 +609,7 @@ class InstrumentationRegistry(object):
del self._dict_finders[class_]
if ClassManager.MANAGER_ATTR in class_.__dict__:
delattr(class_, ClassManager.MANAGER_ATTR)
-
+
instrumentation_registry = InstrumentationRegistry()
@@ -618,10 +618,10 @@ def _install_lookup_strategy(implementation):
with either faster or more comprehensive implementations,
based on whether or not extended class instrumentation
has been detected.
-
+
This function is called only by InstrumentationRegistry()
and unit tests specific to this behavior.
-
+
"""
global instance_state, instance_dict, manager_of_class
if implementation is util.symbol('native'):
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 367344f5a..8cece65cc 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -65,9 +65,9 @@ class MapperProperty(object):
cascade = ()
"""The set of 'cascade' attribute names.
-
+
This collection is checked before the 'cascade_iterator' method is called.
-
+
"""
def setup(self, context, entity, path, reduced_path, adapter, **kwargs):
@@ -83,7 +83,7 @@ class MapperProperty(object):
def create_row_processor(self, selectcontext, path, reduced_path,
mapper, row, adapter):
"""Return a 3-tuple consisting of three row processing functions.
-
+
"""
return None, None, None
@@ -91,9 +91,9 @@ class MapperProperty(object):
halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
-
+
Return an iterator of 3-tuples (instance, mapper, state).
-
+
Note that the 'cascade' collection on this MapperProperty is
checked first for the given type before cascade_iterator is called.
@@ -110,7 +110,7 @@ class MapperProperty(object):
_compile_started = False
_compile_finished = False
-
+
def init(self):
"""Called after all mappers are created to assemble
relationships between mappers and perform other post-mapper-creation
@@ -131,10 +131,10 @@ class MapperProperty(object):
def do_init(self):
"""Perform subclass-specific initialization post-mapper-creation
steps.
-
+
This is a template method called by the ``MapperProperty``
object's init() method.
-
+
"""
pass
@@ -188,7 +188,7 @@ class PropComparator(expression.ColumnOperators):
new operator behavior. The custom :class:`.PropComparator` is passed to
the mapper property via the ``comparator_factory`` argument. In each case,
the appropriate subclass of :class:`.PropComparator` should be used::
-
+
from sqlalchemy.orm.properties import \\
ColumnProperty,\\
CompositeProperty,\\
@@ -196,13 +196,13 @@ class PropComparator(expression.ColumnOperators):
class MyColumnComparator(ColumnProperty.Comparator):
pass
-
+
class MyCompositeComparator(CompositeProperty.Comparator):
pass
-
+
class MyRelationshipComparator(RelationshipProperty.Comparator):
pass
-
+
"""
def __init__(self, prop, mapper, adapter=None):
@@ -216,7 +216,7 @@ class PropComparator(expression.ColumnOperators):
def adapted(self, adapter):
"""Return a copy of this PropComparator which will use the given
adaption function on the local side of generated expressions.
-
+
"""
return self.__class__(self.prop, self.mapper, adapter)
@@ -291,9 +291,9 @@ class StrategizedProperty(MapperProperty):
There is a single strategy selected by default. Alternate
strategies can be selected at Query time through the usage of
``StrategizedOption`` objects via the Query.options() method.
-
+
"""
-
+
def _get_context_strategy(self, context, reduced_path):
key = ('loaderstrategy', reduced_path)
if key in context.attributes:
@@ -334,7 +334,7 @@ class StrategizedProperty(MapperProperty):
if self.is_primary() and \
not mapper.class_manager._attr_has_impl(self.key):
self.strategy.init_class_attribute(mapper)
-
+
def build_path(entity, key, prev=None):
if prev:
return prev + (entity, key)
@@ -344,7 +344,7 @@ def build_path(entity, key, prev=None):
def serialize_path(path):
if path is None:
return None
-
+
return zip(
[m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
[path[i] for i in range(1, len(path), 2)] + [None]
@@ -366,14 +366,14 @@ class MapperOption(object):
"""if True, indicate this option should be carried along
Query object generated by scalar or object lazy loaders.
"""
-
+
def process_query(self, query):
pass
def process_query_conditionally(self, query):
"""same as process_query(), except that this option may not
apply to the given query.
-
+
Used when secondary loaders resend existing options to a new
Query."""
@@ -440,7 +440,7 @@ class PropertyOption(MapperOption):
[str(m.path_entity) for m in query._entities]))
else:
return None
-
+
def _get_paths(self, query, raiseerr):
path = None
entity = None
@@ -451,7 +451,7 @@ class PropertyOption(MapperOption):
# existing path
current_path = list(query._current_path)
-
+
tokens = deque(self.key)
while tokens:
token = tokens.popleft()
@@ -459,7 +459,7 @@ class PropertyOption(MapperOption):
sub_tokens = token.split(".", 1)
token = sub_tokens[0]
tokens.extendleft(sub_tokens[1:])
-
+
if not entity:
if current_path:
if current_path[1] == token:
@@ -540,11 +540,11 @@ class StrategizedOption(PropertyOption):
def _reduce_path(path):
"""Convert a (mapper, path) path to use base mappers.
-
+
This is used to allow more open ended selection of loader strategies, i.e.
Mapper -> prop1 -> Subclass -> prop2, where Subclass is a sub-mapper
of the mapper referenced by Mapper.prop1.
-
+
"""
return tuple([i % 2 != 0 and
element or
@@ -595,7 +595,7 @@ class LoaderStrategy(object):
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
-
+
StrategizedProperty delegates its create_row_processor method
directly to this method. """
@@ -617,7 +617,7 @@ class LoaderStrategy(object):
class InstrumentationManager(object):
"""User-defined class instrumentation extension.
-
+
:class:`.InstrumentationManager` can be subclassed in order
to change
how class instrumentation proceeds. This class exists for
@@ -626,13 +626,13 @@ class InstrumentationManager(object):
instrumentation methodology of the ORM, and is not intended
for regular usage. For interception of class instrumentation
events, see :class:`.InstrumentationEvents`.
-
+
For an example of :class:`.InstrumentationManager`, see the
example :ref:`examples_instrumentation`.
-
+
The API for this class should be considered as semi-stable,
and may change slightly with new releases.
-
+
"""
# r4361 added a mandatory (cls) constructor to this interface.
@@ -694,4 +694,3 @@ class InstrumentationManager(object):
def dict_getter(self, class_):
return lambda inst: self.get_instance_dict(class_, inst)
- \ No newline at end of file
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 5dc2fd83d..8fe68fb8c 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -26,7 +26,7 @@ from sqlalchemy.orm import instrumentation, attributes, sync, \
exc as orm_exc, unitofwork, events
from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
PropComparator
-
+
from sqlalchemy.orm.util import _INSTRUMENTOR, _class_to_mapper, \
_state_mapper, class_mapper, instance_str, state_str
@@ -111,7 +111,7 @@ class Mapper(object):
self.order_by = util.to_list(order_by)
else:
self.order_by = order_by
-
+
self.always_refresh = always_refresh
self.version_id_col = version_id_col
self.version_id_generator = version_id_generator or \
@@ -138,16 +138,16 @@ class Mapper(object):
self._compiled_cache_size = _compiled_cache_size
self._reconstructor = None
self._deprecated_extensions = util.to_list(extension or [])
-
+
if allow_null_pks:
util.warn_deprecated(
"the allow_null_pks option to Mapper() is "
"deprecated. It is now allow_partial_pks=False|True, "
"defaults to True.")
allow_partial_pks = allow_null_pks
-
+
self.allow_partial_pks = allow_partial_pks
-
+
if with_polymorphic == '*':
self.with_polymorphic = ('*', None)
elif isinstance(with_polymorphic, (tuple, list)):
@@ -197,7 +197,7 @@ class Mapper(object):
self.exclude_properties = None
self.configured = False
-
+
# prevent this mapper from being constructed
# while a configure_mappers() is occurring (and defer a configure_mappers()
# until construction succeeds)
@@ -218,7 +218,7 @@ class Mapper(object):
_COMPILE_MUTEX.release()
dispatch = event.dispatcher(events.MapperEvents)
-
+
def _configure_inheritance(self):
"""Configure settings related to inherting and/or inherited mappers
being present."""
@@ -322,12 +322,12 @@ class Mapper(object):
if self.polymorphic_identity is not None:
self.polymorphic_map[self.polymorphic_identity] = self
self._identity_class = self.class_
-
+
if self.mapped_table is None:
raise sa_exc.ArgumentError(
"Mapper '%s' does not have a mapped_table specified."
% self)
-
+
def _configure_legacy_instrument_class(self):
if self.inherits:
@@ -336,7 +336,7 @@ class Mapper(object):
for m in self.inherits.iterate_to_root()]))
else:
super_extensions = set()
-
+
for ext in self._deprecated_extensions:
if ext not in super_extensions:
ext._adapt_instrument_class(self, ext)
@@ -351,7 +351,7 @@ class Mapper(object):
for ext in self._deprecated_extensions:
if ext not in super_extensions:
ext._adapt_listener(self, ext)
-
+
if self.inherits:
self.class_manager.dispatch._update(
self.inherits.class_manager.dispatch)
@@ -368,7 +368,7 @@ class Mapper(object):
"""
manager = attributes.manager_of_class(self.class_)
-
+
if self.non_primary:
if not manager or not manager.is_mapped:
raise sa_exc.InvalidRequestError(
@@ -392,7 +392,7 @@ class Mapper(object):
# a ClassManager may already exist as
# ClassManager.instrument_attribute() creates
# new managers for each subclass if they don't yet exist.
-
+
_mapper_registry[self] = True
self.dispatch.instrument_class(self, self.class_)
@@ -414,7 +414,7 @@ class Mapper(object):
event.listen(manager, 'first_init', _event_on_first_init, raw=True)
event.listen(manager, 'init', _event_on_init, raw=True)
event.listen(manager, 'resurrect', _event_on_resurrect, raw=True)
-
+
for key, method in util.iterate_attributes(self.class_):
if isinstance(method, types.FunctionType):
if hasattr(method, '__sa_reconstructor__'):
@@ -425,31 +425,31 @@ class Mapper(object):
self._validators[name] = method
manager.info[_INSTRUMENTOR] = self
-
+
@util.deprecated("0.7", message=":meth:`.Mapper.compile` "
"is replaced by :func:`.configure_mappers`")
def compile(self):
"""Initialize the inter-mapper relationships of all mappers that
have been constructed thus far.
-
+
"""
configure_mappers()
return self
-
-
+
+
@property
@util.deprecated("0.7", message=":attr:`.Mapper.compiled` "
"is replaced by :attr:`.Mapper.configured`")
def compiled(self):
return self.configured
-
+
def dispose(self):
# Disable any attribute-based compilation.
self.configured = True
-
+
if hasattr(self, '_configure_failed'):
del self._configure_failed
-
+
if not self.non_primary and \
self.class_manager.is_mapped and \
self.class_manager.mapper is self:
@@ -465,7 +465,7 @@ class Mapper(object):
all_cols = util.column_set(chain(*[
col.proxy_set for col in
self._columntoproperty]))
-
+
pk_cols = util.column_set(c for c in all_cols if c.primary_key)
# identify primary key columns which are also mapped by this mapper.
@@ -489,7 +489,7 @@ class Mapper(object):
for col in self._columntoproperty
if not hasattr(col, 'table') or
col.table not in self._cols_by_table)
-
+
# if explicit PK argument sent, add those columns to the
# primary key mappings
if self.primary_key_argument:
@@ -497,7 +497,7 @@ class Mapper(object):
if k.table not in self._pks_by_table:
self._pks_by_table[k.table] = util.OrderedSet()
self._pks_by_table[k.table].add(k)
-
+
# otherwise, see that we got a full PK for the mapped table
elif self.mapped_table not in self._pks_by_table or \
len(self._pks_by_table[self.mapped_table]) == 0:
@@ -535,7 +535,7 @@ class Mapper(object):
self._log("Identified primary key columns: %s", primary_key)
def _configure_properties(self):
-
+
# Column and other ClauseElement objects which are mapped
self.columns = self.c = util.OrderedProperties()
@@ -589,18 +589,18 @@ class Mapper(object):
"""Configure an attribute on the mapper representing the
'polymorphic_on' column, if applicable, and not
already generated by _configure_properties (which is typical).
-
+
Also create a setter function which will assign this
attribute to the value of the 'polymorphic_identity'
upon instance construction, also if applicable. This
routine will run when an instance is created.
-
+
"""
# do a special check for the "discriminator" column, as it
# may only be present in the 'with_polymorphic' selectable
# but we need it for the base mapper
setter = False
-
+
if self.polymorphic_on is not None:
setter = True
@@ -624,7 +624,7 @@ class Mapper(object):
raise sa_exc.InvalidRequestError(
"Cannot exclude or override the discriminator column %r" %
col.key)
-
+
self._configure_property(
col.key,
properties.ColumnProperty(col, _instrument=instrument),
@@ -642,7 +642,7 @@ class Mapper(object):
self._set_polymorphic_identity = _set_polymorphic_identity
else:
self._set_polymorphic_identity = None
-
+
def _adapt_inherited_property(self, key, prop, init):
@@ -653,7 +653,7 @@ class Mapper(object):
key,
properties.ConcreteInheritedProperty(),
init=init, setparent=True)
-
+
def _configure_property(self, key, prop, init=True, setparent=True):
self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
@@ -684,7 +684,7 @@ class Mapper(object):
prop.columns.insert(0, column)
self._log("inserting column to existing list "
"in properties.ColumnProperty %s" % (key))
-
+
elif prop is None or isinstance(prop, properties.ConcreteInheritedProperty):
mapped_column = []
for c in columns:
@@ -722,7 +722,7 @@ class Mapper(object):
if isinstance(prop, properties.ColumnProperty):
col = self.mapped_table.corresponding_column(prop.columns[0])
-
+
# if the column is not present in the mapped table,
# test if a column has been added after the fact to the
# parent table (or their parent, etc.) [ticket:1570]
@@ -737,7 +737,7 @@ class Mapper(object):
prop.columns[0])
break
path.append(m)
-
+
# otherwise, col might not be present! the selectable given
# to the mapper need not include "deferred"
# columns (included in zblog tests)
@@ -758,7 +758,7 @@ class Mapper(object):
col.table in self._cols_by_table and \
col not in self._cols_by_table[col.table]:
self._cols_by_table[col.table].add(col)
-
+
# if this properties.ColumnProperty represents the "polymorphic
# discriminator" column, mark it. We'll need this when rendering
# columns in SELECT statements.
@@ -766,7 +766,7 @@ class Mapper(object):
prop._is_polymorphic_discriminator = \
(col is self.polymorphic_on or
prop.columns[0] is self.polymorphic_on)
-
+
self.columns[key] = col
for col in prop.columns:
for col in col.proxy_set:
@@ -785,7 +785,7 @@ class Mapper(object):
"a ColumnProperty already exists keyed to the name "
"%r for column %r" % (syn, key, key, syn)
)
-
+
self._props[key] = prop
if not self.non_primary:
@@ -805,23 +805,23 @@ class Mapper(object):
This is a deferred configuration step which is intended
to execute once all mappers have been constructed.
-
+
"""
self._log("_post_configure_properties() started")
l = [(key, prop) for key, prop in self._props.iteritems()]
for key, prop in l:
self._log("initialize prop %s", key)
-
+
if prop.parent is self and not prop._compile_started:
prop.init()
-
+
if prop._compile_finished:
prop.post_instrument_class(self)
-
+
self._log("_post_configure_properties() complete")
self.configured = True
-
+
def add_properties(self, dict_of_properties):
"""Add the given dictionary of properties to this mapper,
using `add_property`.
@@ -904,19 +904,19 @@ class Mapper(object):
except KeyError:
raise sa_exc.InvalidRequestError(
"Mapper '%s' has no property '%s'" % (self, key))
-
+
@util.deprecated('0.6.4',
'Call to deprecated function mapper._get_col_to_pr'
'op(). Use mapper.get_property_by_column()')
def _get_col_to_prop(self, col):
return self._columntoproperty[col]
-
+
def get_property_by_column(self, column):
"""Given a :class:`.Column` object, return the
:class:`.MapperProperty` which maps this column."""
return self._columntoproperty[column]
-
+
@property
def iterate_properties(self):
"""return an iterator of all MapperProperty objects."""
@@ -957,7 +957,7 @@ class Mapper(object):
mapped tables.
"""
-
+
from_obj = self.mapped_table
for m in mappers:
if m is self:
@@ -1023,7 +1023,7 @@ class Mapper(object):
def _iterate_polymorphic_properties(self, mappers=None):
"""Return an iterator of MapperProperty objects which will render into
a SELECT."""
-
+
if mappers is None:
mappers = self._with_polymorphic_mappers
@@ -1043,7 +1043,7 @@ class Mapper(object):
c.columns[0] is not self.polymorphic_on):
continue
yield c
-
+
@property
def properties(self):
raise NotImplementedError(
@@ -1109,7 +1109,7 @@ class Mapper(object):
hasattr(obj, '__get__') and not \
isinstance(obj.__get__(None, obj),
attributes.QueryableAttribute)
-
+
def _should_exclude(self, name, assigned_name, local, column):
"""determine whether a particular property should be implicitly
@@ -1208,13 +1208,13 @@ class Mapper(object):
def primary_mapper(self):
"""Return the primary mapper corresponding to this mapper's class key
(class)."""
-
+
return self.class_manager.mapper
@property
def primary_base_mapper(self):
return self.class_manager.mapper.base_mapper
-
+
def identity_key_from_row(self, row, adapter=None):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
@@ -1261,7 +1261,7 @@ class Mapper(object):
impl.get(state, dict_, False)
for col in self.primary_key
])
-
+
def primary_key_from_instance(self, instance):
"""Return the list of primary key values for the given
instance.
@@ -1294,7 +1294,7 @@ class Mapper(object):
def _get_committed_state_attr_by_column(self, state, dict_, column,
passive=False):
-
+
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.\
get_committed_value(state, dict_, passive=passive)
@@ -1302,21 +1302,21 @@ class Mapper(object):
def _optimized_get_statement(self, state, attribute_names):
"""assemble a WHERE clause which retrieves a given state by primary
key, using a minimized set of tables.
-
+
Applies to a joined-table inheritance mapper where the
requested attribute names are only present on joined tables,
not the base table. The WHERE clause attempts to include
only those tables to minimize joins.
-
+
"""
props = self._props
-
+
tables = set(chain(
*[sqlutil.find_tables(c, check_columns=True)
for key in attribute_names
for c in props[key].columns]
))
-
+
if self.base_mapper.local_table in tables:
return None
@@ -1396,7 +1396,7 @@ class Mapper(object):
if not iterator:
visitables.pop()
continue
-
+
if item_type is prp:
prop = iterator.popleft()
if type_ not in prop.cascade:
@@ -1422,7 +1422,7 @@ class Mapper(object):
for mapper in self.base_mapper.self_and_descendants:
for t in mapper.tables:
table_to_mapper[t] = mapper
-
+
sorted_ = sqlutil.sort_tables(table_to_mapper.iterkeys())
ret = util.OrderedDict()
for t in sorted_:
@@ -1433,15 +1433,15 @@ class Mapper(object):
saves = unitofwork.SaveUpdateAll(uow, self.base_mapper)
deletes = unitofwork.DeleteAll(uow, self.base_mapper)
uow.dependencies.add((saves, deletes))
-
+
for dep in self._dependency_processors:
dep.per_property_preprocessors(uow)
-
+
for prop in self._props.values():
prop.per_property_preprocessors(uow)
-
+
def _per_state_flush_actions(self, uow, states, isdelete):
-
+
base_mapper = self.base_mapper
save_all = unitofwork.SaveUpdateAll(uow, base_mapper)
delete_all = unitofwork.DeleteAll(uow, base_mapper)
@@ -1454,20 +1454,20 @@ class Mapper(object):
else:
action = unitofwork.SaveUpdateState(uow, state, base_mapper)
uow.dependencies.add((action, delete_all))
-
+
yield action
-
+
def _memo(self, key, callable_):
if key in self._memoized_values:
return self._memoized_values[key]
else:
self._memoized_values[key] = value = callable_()
return value
-
+
def _post_update(self, states, uowtransaction, post_update_cols):
"""Issue UPDATE statements on behalf of a relationship() which
specifies post_update.
-
+
"""
cached_connections = util.PopulateDict(
lambda conn:conn.execution_options(
@@ -1492,7 +1492,7 @@ class Mapper(object):
conn = connection
mapper = _state_mapper(state)
-
+
tups.append((state, state.dict, mapper, conn))
table_to_mapper = self._sorted_tables
@@ -1503,7 +1503,7 @@ class Mapper(object):
for state, state_dict, mapper, connection in tups:
if table not in mapper._pks_by_table:
continue
-
+
pks = mapper._pks_by_table[table]
params = {}
hasdata = False
@@ -1525,7 +1525,7 @@ class Mapper(object):
if hasdata:
update.append((state, state_dict, params, mapper,
connection))
-
+
if update:
mapper = table_to_mapper[table]
@@ -1551,7 +1551,7 @@ class Mapper(object):
params, mapper, conn in grouper]
cached_connections[connection].\
execute(statement, multiparams)
-
+
def _save_obj(self, states, uowtransaction, single=False):
"""Issue ``INSERT`` and/or ``UPDATE`` statements for a list
of objects.
@@ -1562,7 +1562,7 @@ class Mapper(object):
updates for all descendant mappers.
"""
-
+
# if batch=false, call _save_obj separately for each object
if not single and not self.batch:
for state in _sort_states(states):
@@ -1582,19 +1582,19 @@ class Mapper(object):
connection_callable = None
tups = []
-
+
for state in _sort_states(states):
if connection_callable:
conn = connection_callable(self, state.obj())
else:
conn = connection
-
+
has_identity = bool(state.key)
mapper = _state_mapper(state)
instance_key = state.key or mapper._identity_key_from_state(state)
row_switch = None
-
+
# call before_XXX extensions
if not has_identity:
mapper.dispatch.before_insert(mapper, conn, state)
@@ -1648,14 +1648,14 @@ class Mapper(object):
instance_key, row_switch in tups:
if table not in mapper._pks_by_table:
continue
-
+
pks = mapper._pks_by_table[table]
-
+
isinsert = not has_identity and not row_switch
-
+
params = {}
value_params = {}
-
+
if isinsert:
has_all_pks = True
for col in mapper._cols_by_table[table]:
@@ -1667,7 +1667,7 @@ class Mapper(object):
# pending objects
prop = mapper._columntoproperty[col]
value = state_dict.get(prop.key, None)
-
+
if value is None:
if col in pks:
has_all_pks = False
@@ -1772,7 +1772,7 @@ class Mapper(object):
def update_stmt():
clause = sql.and_()
-
+
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
type_=col.type))
@@ -1783,13 +1783,13 @@ class Mapper(object):
type_=col.type))
return table.update(clause)
-
+
statement = self._memo(('update', table), update_stmt)
-
+
rows = 0
for state, state_dict, params, mapper, \
connection, value_params in update:
-
+
if value_params:
c = connection.execute(
statement.values(value_params),
@@ -1797,7 +1797,7 @@ class Mapper(object):
else:
c = cached_connections[connection].\
execute(statement, params)
-
+
mapper._postfetch(
uowtransaction,
table,
@@ -1821,7 +1821,7 @@ class Mapper(object):
"- versioning cannot be verified." %
c.dialect.dialect_description,
stacklevel=12)
-
+
if insert:
statement = self._memo(('insert', table), table.insert)
@@ -1837,7 +1837,7 @@ class Mapper(object):
multiparams = [rec[2] for rec in records]
c = cached_connections[connection].\
execute(statement, multiparams)
-
+
for (state, state_dict, params, mapper,
conn, value_params, has_all_pks), \
last_inserted_params in \
@@ -1851,7 +1851,7 @@ class Mapper(object):
c.context.postfetch_cols,
last_inserted_params,
value_params)
-
+
else:
for state, state_dict, params, mapper, \
connection, value_params, \
@@ -1864,7 +1864,7 @@ class Mapper(object):
else:
result = cached_connections[connection].\
execute(statement, params)
-
+
primary_key = result.context.inserted_primary_key
if primary_key is not None:
@@ -1942,14 +1942,14 @@ class Mapper(object):
equated_pairs,
uowtransaction,
self.passive_updates)
-
+
@util.memoized_property
def _table_to_equated(self):
"""memoized map of tables to collections of columns to be
synchronized upwards to the base mapper."""
-
+
result = util.defaultdict(list)
-
+
for table in self._sorted_tables:
cols = set(table.c)
for m in self.iterate_to_root():
@@ -1957,9 +1957,9 @@ class Mapper(object):
cols.intersection(
[l for l, r in m._inherits_equated_pairs]):
result[table].append((m, m._inherits_equated_pairs))
-
+
return result
-
+
def _delete_obj(self, states, uowtransaction):
"""Issue ``DELETE`` statements for a list of objects.
@@ -1973,13 +1973,13 @@ class Mapper(object):
else:
connection = uowtransaction.transaction.connection(self)
connection_callable = None
-
+
tups = []
cached_connections = util.PopulateDict(
lambda conn:conn.execution_options(
compiled_cache=self._compiled_cache
))
-
+
for state in _sort_states(states):
mapper = _state_mapper(state)
@@ -1987,9 +1987,9 @@ class Mapper(object):
conn = connection_callable(self, state.obj())
else:
conn = connection
-
+
mapper.dispatch.before_delete(mapper, conn, state)
-
+
tups.append((state,
state.dict,
_state_mapper(state),
@@ -1997,7 +1997,7 @@ class Mapper(object):
conn))
table_to_mapper = self._sorted_tables
-
+
for table in reversed(table_to_mapper.keys()):
delete = util.defaultdict(list)
for state, state_dict, mapper, has_identity, connection in tups:
@@ -2080,10 +2080,10 @@ class Mapper(object):
polymorphic_from=None,
only_load_props=None, refresh_state=None,
polymorphic_discriminator=None):
-
+
"""Produce a mapper level row processor callable
which processes rows into mapped instances."""
-
+
pk_cols = self.primary_key
if polymorphic_from or refresh_state:
@@ -2112,7 +2112,7 @@ class Mapper(object):
new_populators = []
existing_populators = []
load_path = context.query._current_path + path
-
+
def populate_state(state, dict_, row, isnew, only_load_props):
if isnew:
if context.propagate_options:
@@ -2125,7 +2125,7 @@ class Mapper(object):
new_populators,
existing_populators
)
-
+
if isnew:
populators = new_populators
else:
@@ -2142,7 +2142,7 @@ class Mapper(object):
session_identity_map = context.session.identity_map
listeners = self.dispatch
-
+
translate_row = listeners.translate_row or None
create_instance = listeners.create_instance or None
populate_instance = listeners.populate_instance or None
@@ -2152,7 +2152,7 @@ class Mapper(object):
is_not_primary_key = _none_set.issuperset
else:
is_not_primary_key = _none_set.issubset
-
+
def _instance(row, result):
if translate_row:
for fn in translate_row:
@@ -2160,7 +2160,7 @@ class Mapper(object):
if ret is not EXT_CONTINUE:
row = ret
break
-
+
if polymorphic_on is not None:
discriminator = row[polymorphic_on]
if discriminator is not None:
@@ -2196,7 +2196,7 @@ class Mapper(object):
dict_,
self.version_id_col) != \
row[version_id_col]:
-
+
raise orm_exc.StaleDataError(
"Instance '%s' has version id '%s' which "
"does not match database-loaded version id '%s'."
@@ -2268,7 +2268,7 @@ class Mapper(object):
populate_state(state, dict_, row, isnew, only_load_props)
else:
populate_state(state, dict_, row, isnew, only_load_props)
-
+
if loaded_instance:
state.manager.dispatch.load(state, context)
elif isnew:
@@ -2286,8 +2286,8 @@ class Mapper(object):
isnew = True
attrs = state.unloaded
# allow query.instances to commit the subset of attrs
- context.partials[state] = (dict_, attrs)
-
+ context.partials[state] = (dict_, attrs)
+
if populate_instance:
for fn in populate_instance:
ret = fn(self, context, row, state,
@@ -2299,11 +2299,11 @@ class Mapper(object):
populate_state(state, dict_, row, isnew, attrs)
else:
populate_state(state, dict_, row, isnew, attrs)
-
+
if isnew:
state.manager.dispatch.refresh(state, context, attrs)
-
+
if result is not None:
if append_result:
for fn in append_result:
@@ -2322,7 +2322,7 @@ class Mapper(object):
def _populators(self, context, path, reduced_path, row, adapter,
new_populators, existing_populators):
"""Produce a collection of attribute level row processor callables."""
-
+
delayed_populators = []
for prop in self._props.itervalues():
newpop, existingpop, delayedpop = prop.create_row_processor(
@@ -2337,11 +2337,11 @@ class Mapper(object):
delayed_populators.append((prop.key, delayedpop))
if delayed_populators:
new_populators.extend(delayed_populators)
-
+
def _configure_subclass_mapper(self, context, path, reduced_path, adapter):
"""Produce a mapper level row processor callable factory for mappers
inheriting this one."""
-
+
def configure_subclass_mapper(discriminator):
try:
mapper = self.polymorphic_map[discriminator]
@@ -2351,16 +2351,16 @@ class Mapper(object):
discriminator)
if mapper is self:
return None
-
+
# replace the tip of the path info with the subclass mapper
# being used. that way accurate "load_path" info is available
# for options invoked during deferred loads.
# we lose AliasedClass path elements this way, but currently,
# those are not needed at this stage.
-
+
# this asserts to true
#assert mapper.isa(_class_to_mapper(path[-1]))
-
+
return mapper._instance_processor(context, path[0:-1] + (mapper,),
reduced_path[0:-1] + (mapper.base_mapper,),
adapter,
@@ -2375,7 +2375,7 @@ def configure_mappers():
This function can be called any number of times, but in
most cases is handled internally.
-
+
"""
global _new_mappers
@@ -2453,7 +2453,7 @@ def validates(*names):
can then raise validation exceptions to halt the process,
or can modify or replace the value before proceeding. The function
should otherwise return the given value.
-
+
Note that a validator for a collection **cannot** issue a load of that
collection within the validation routine - this usage raises
an assertion to avoid recursion overflows. This is a reentrant
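A minimal, self-contained sketch of a validator as described above (the table and attribute names are assumed for illustration):

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import validates

    Base = declarative_base()

    class EmailAddress(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        email = Column(String(100))

        @validates('email')
        def validate_email(self, key, address):
            # raise to halt the set operation, or return a (possibly
            # modified) value to proceed
            assert '@' in address
            return address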
@@ -2477,7 +2477,7 @@ def _event_on_first_init(manager, cls):
if instrumenting_mapper:
if _new_mappers:
configure_mappers()
-
+
def _event_on_init(state, args, kwargs):
"""Run init_instance hooks."""
@@ -2494,14 +2494,14 @@ def _event_on_resurrect(state):
for col, val in zip(instrumenting_mapper.primary_key, state.key[1]):
instrumenting_mapper._set_state_attr_by_column(
state, state.dict, col, val)
-
-
+
+
def _sort_states(states):
return sorted(states, key=operator.attrgetter('sort_key'))
def _load_scalar_attributes(state, attribute_names):
"""initiate a column-based attribute refresh operation."""
-
+
mapper = _state_mapper(state)
session = sessionlib._state_session(state)
if not session:
@@ -2511,7 +2511,7 @@ def _load_scalar_attributes(state, attribute_names):
(state_str(state)))
has_key = bool(state.key)
-
+
result = False
if mapper.inherits and not mapper.concrete:
@@ -2536,7 +2536,7 @@ def _load_scalar_attributes(state, attribute_names):
" persistent and does not "
"contain a full primary key." % state_str(state))
identity_key = mapper._identity_key_from_state(state)
-
+
if (_none_set.issubset(identity_key) and \
not mapper.allow_partial_pks) or \
_none_set.issuperset(identity_key):
@@ -2545,7 +2545,7 @@ def _load_scalar_attributes(state, attribute_names):
"(and shouldn't be expired, either)."
% state_str(state))
return
-
+
result = session.query(mapper)._load_on_ident(
identity_key,
refresh_state=state,
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 9f2d63364..813be60ce 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -62,7 +62,7 @@ class ColumnProperty(StrategizedProperty):
self.descriptor = kwargs.pop('descriptor', None)
self.extension = kwargs.pop('extension', None)
self.active_history = kwargs.pop('active_history', False)
-
+
if 'doc' in kwargs:
self.doc = kwargs.pop('doc')
else:
@@ -73,7 +73,7 @@ class ColumnProperty(StrategizedProperty):
break
else:
self.doc = None
-
+
if kwargs:
raise TypeError(
"%s received unexpected keyword argument(s): %s" % (
@@ -87,11 +87,11 @@ class ColumnProperty(StrategizedProperty):
self.strategy_class = strategies.DeferredColumnLoader
else:
self.strategy_class = strategies.ColumnLoader
-
+
def instrument_class(self, mapper):
if not self.instrument:
return
-
+
attributes.register_descriptor(
mapper.class_,
self.key,
@@ -100,7 +100,7 @@ class ColumnProperty(StrategizedProperty):
property_=self,
doc=self.doc
)
-
+
def do_init(self):
super(ColumnProperty, self).do_init()
if len(self.columns) > 1 and \
@@ -127,7 +127,7 @@ class ColumnProperty(StrategizedProperty):
dest_dict, load, _recursive):
if self.key in source_dict:
value = source_dict[self.key]
-
+
if not load:
dest_dict[self.key] = value
else:
@@ -136,7 +136,7 @@ class ColumnProperty(StrategizedProperty):
else:
if dest_state.has_identity and self.key not in dest_dict:
dest_state.expire_attributes(dest_dict, [self.key])
-
+
class Comparator(PropComparator):
@util.memoized_instancemethod
def __clause_element__(self):
@@ -146,17 +146,17 @@ class ColumnProperty(StrategizedProperty):
return self.prop.columns[0]._annotate({
"parententity": self.mapper,
"parentmapper":self.mapper})
-
+
def operate(self, op, *other, **kwargs):
return op(self.__clause_element__(), *other, **kwargs)
def reverse_operate(self, op, other, **kwargs):
col = self.__clause_element__()
return op(col._bind_param(op, other), col, **kwargs)
-
+
# TODO: legacy..do we need this ? (0.5)
ColumnComparator = Comparator
-
+
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
@@ -222,7 +222,7 @@ class RelationshipProperty(StrategizedProperty):
RelationshipProperty.Comparator
self.comparator = self.comparator_factory(self, None)
util.set_creation_order(self)
-
+
if strategy_class:
self.strategy_class = strategy_class
elif self.lazy== 'dynamic':
@@ -230,7 +230,7 @@ class RelationshipProperty(StrategizedProperty):
self.strategy_class = dynamic.DynaLoader
else:
self.strategy_class = strategies.factory(self.lazy)
-
+
self._reverse_property = set()
if cascade is not False:
@@ -280,13 +280,13 @@ class RelationshipProperty(StrategizedProperty):
"""Return a copy of this PropComparator which will use the
given adaption function on the local side of generated
expressions.
-
+
"""
return self.__class__(self.property, self.mapper,
getattr(self, '_of_type', None),
adapter)
-
+
@property
def parententity(self):
return self.property.parent
@@ -314,9 +314,9 @@ class RelationshipProperty(StrategizedProperty):
raise NotImplementedError('in_() not yet supported for '
'relationships. For a simple many-to-one, use '
'in_() against the set of foreign key values.')
-
+
__hash__ = None
-
+
def __eq__(self, other):
if isinstance(other, (NoneType, expression._Null)):
if self.property.direction in [ONETOMANY, MANYTOMANY]:
@@ -352,7 +352,7 @@ class RelationshipProperty(StrategizedProperty):
source_selectable = self.__clause_element__()
else:
source_selectable = None
-
+
pj, sj, source, dest, secondary, target_adapter = \
self.property._create_joins(dest_polymorphic=True,
dest_selectable=to_selectable,
@@ -364,7 +364,7 @@ class RelationshipProperty(StrategizedProperty):
criterion = crit
else:
criterion = criterion & crit
-
+
# annotate the *local* side of the join condition, in the case
# of pj + sj this is the full primaryjoin, in the case of just
# pj its the local side of the primaryjoin.
@@ -372,7 +372,7 @@ class RelationshipProperty(StrategizedProperty):
j = _orm_annotate(pj) & sj
else:
j = _orm_annotate(pj, exclude=self.property.remote_side)
-
+
if criterion is not None and target_adapter:
# limit this adapter to annotated only?
criterion = target_adapter.traverse(criterion)
@@ -384,9 +384,9 @@ class RelationshipProperty(StrategizedProperty):
# to anything in the enclosing query.
if criterion is not None:
criterion = criterion._annotate({'_halt_adapt': True})
-
+
crit = j & criterion
-
+
return sql.exists([1], crit, from_obj=dest).correlate(source)
def any(self, criterion=None, **kwargs):
@@ -422,26 +422,26 @@ class RelationshipProperty(StrategizedProperty):
def __negated_contains_or_equals(self, other):
if self.property.direction == MANYTOONE:
state = attributes.instance_state(other)
-
+
def state_bindparam(x, state, col):
o = state.obj() # strong ref
return sql.bindparam(x, unique=True, callable_=lambda : \
self.property.mapper._get_committed_attr_by_column(o,
col))
-
+
def adapt(col):
if self.adapter:
return self.adapter(col)
else:
return col
-
+
if self.property._use_get:
return sql.and_(*[
sql.or_(
adapt(x) != state_bindparam(adapt(x), state, y),
adapt(x) == None)
for (x, y) in self.property.local_remote_pairs])
-
+
criterion = sql.and_(*[x==y for (x, y) in
zip(
self.property.mapper.primary_key,
@@ -531,11 +531,11 @@ class RelationshipProperty(StrategizedProperty):
if load:
# for a full merge, pre-load the destination collection,
# so that individual _merge of each item pulls from identity
- # map for those already present.
+ # map for those already present.
# also assumes CollectionAttributeImpl behavior of loading
# "old" list in any case
dest_state.get_impl(self.key).get(dest_state, dest_dict)
-
+
dest_list = []
for current in instances:
current_state = attributes.instance_state(current)
@@ -545,7 +545,7 @@ class RelationshipProperty(StrategizedProperty):
load=load, _recursive=_recursive)
if obj is not None:
dest_list.append(obj)
-
+
if not load:
coll = attributes.init_state_collection(dest_state,
dest_dict, self.key)
@@ -582,28 +582,28 @@ class RelationshipProperty(StrategizedProperty):
if type_ == 'save-update':
tuples = state.manager[self.key].impl.\
get_all_pending(state, dict_)
-
+
else:
tuples = state.value_as_iterable(dict_, self.key,
passive=passive)
-
+
skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
not in self.cascade
-
+
for instance_state, c in tuples:
if instance_state in visited_states:
continue
-
+
instance_dict = attributes.instance_dict(c)
-
+
if halt_on and halt_on(instance_state):
continue
-
+
if skip_pending and not instance_state.key:
continue
-
+
instance_mapper = instance_state.manager.mapper
-
+
if not instance_mapper.isa(self.mapper.class_manager.mapper):
raise AssertionError("Attribute '%s' on class '%s' "
"doesn't handle objects "
@@ -616,13 +616,13 @@ class RelationshipProperty(StrategizedProperty):
visited_states.add(instance_state)
yield c, instance_mapper, instance_state, instance_dict
-
+
def _add_reverse_property(self, key):
other = self.mapper.get_property(key, _compile_mappers=False)
self._reverse_property.add(other)
other._reverse_property.add(self)
-
+
if not other._get_target().common_parent(self.parent):
raise sa_exc.ArgumentError('reverse_property %r on '
'relationship %s references relationship %s, which '
@@ -634,7 +634,7 @@ class RelationshipProperty(StrategizedProperty):
'both of the same direction %r. Did you mean to '
'set remote_side on the many-to-one side ?'
% (other, self, self.direction))
-
+
def do_init(self):
self._get_target()
self._assert_is_primary()
@@ -667,7 +667,7 @@ class RelationshipProperty(StrategizedProperty):
% (self.key, type(self.argument)))
assert isinstance(self.mapper, mapper.Mapper), self.mapper
return self.mapper
-
+
def _process_dependent_arguments(self):
# accept callables for other attributes which may require
@@ -783,23 +783,23 @@ class RelationshipProperty(StrategizedProperty):
"""Given a join condition, figure out what columns are foreign
and are part of a binary "equated" condition to their referenced
columns, and convert into a list of tuples of (primary col->foreign col).
-
+
Make several attempts to determine whether columns are compared using
"=" or other comparators (in which case suggest viewonly), whether
columns are present but not part of the expected mappings, whether
columns don't have any :class:`ForeignKey` information on them, or
whether the ``foreign_keys`` attribute is being used incorrectly.
-
+
"""
eq_pairs = criterion_as_pairs(join_condition,
consider_as_foreign_keys=self._user_defined_foreign_keys,
any_operator=self.viewonly)
-
+
eq_pairs = [(l, r) for (l, r) in eq_pairs
if self._col_is_part_of_mappings(l)
and self._col_is_part_of_mappings(r)
or self.viewonly and r in self._user_defined_foreign_keys]
-
+
if not eq_pairs and \
self.secondary is not None and \
not self._user_defined_foreign_keys:
@@ -822,12 +822,12 @@ class RelationshipProperty(StrategizedProperty):
join_condition,
self
))
-
+
if not eq_pairs:
if not self.viewonly and criterion_as_pairs(join_condition,
consider_as_foreign_keys=self._user_defined_foreign_keys,
any_operator=True):
-
+
err = "Could not locate any "\
"foreign-key-equated, locally mapped column "\
"pairs for %s "\
@@ -836,7 +836,7 @@ class RelationshipProperty(StrategizedProperty):
join_condition,
self
)
-
+
if not self._user_defined_foreign_keys:
err += " Ensure that the "\
"referencing Column objects have a "\
@@ -844,7 +844,7 @@ class RelationshipProperty(StrategizedProperty):
"of a ForeignKeyConstraint on their parent "\
"Table, or specify the foreign_keys parameter "\
"to this relationship."
-
+
err += " For more "\
"relaxed rules on join conditions, the "\
"relationship may be marked as viewonly=True."
@@ -981,7 +981,7 @@ class RelationshipProperty(StrategizedProperty):
util.warn("On %s, 'passive_deletes' is normally configured "
"on one-to-many, one-to-one, many-to-many relationships only."
% self)
-
+
def _determine_local_remote_pairs(self):
if not self.local_remote_pairs:
if self.remote_side:
@@ -1054,7 +1054,7 @@ class RelationshipProperty(StrategizedProperty):
"created for class '%s' " % (self.key,
self.parent.class_.__name__,
self.parent.class_.__name__))
-
+
def _generate_backref(self):
if not self.is_primary():
return
@@ -1099,7 +1099,7 @@ class RelationshipProperty(StrategizedProperty):
mapper._configure_property(backref_key, relationship)
if self.back_populates:
self._add_reverse_property(self.back_populates)
-
+
def _post_init(self):
self.logger.info('%s setup primary join %s', self,
self.primaryjoin)
@@ -1121,7 +1121,7 @@ class RelationshipProperty(StrategizedProperty):
if not self.viewonly:
self._dependency_processor = \
dependency.DependencyProcessor.from_relationship(self)
-
+
@util.memoized_property
def _use_get(self):
"""memoize the 'use_get' attribute of this RelationshipLoader's
@@ -1129,7 +1129,7 @@ class RelationshipProperty(StrategizedProperty):
strategy = self._get_strategy(strategies.LazyLoader)
return strategy.use_get
-
+
def _refers_to_parent_table(self):
for c, f in self.synchronize_pairs:
if c.table is f.table:
@@ -1169,21 +1169,21 @@ class RelationshipProperty(StrategizedProperty):
primaryjoin, secondaryjoin, secondary = self.primaryjoin, \
self.secondaryjoin, self.secondary
-
+
# adjust the join condition for single table inheritance,
# in the case that the join is to a subclass
# this is analogous to the "_adjust_for_single_table_inheritance()"
# method in Query.
dest_mapper = of_type or self.mapper
-
+
single_crit = dest_mapper._single_table_criterion
if single_crit is not None:
if secondaryjoin is not None:
secondaryjoin = secondaryjoin & single_crit
else:
primaryjoin = primaryjoin & single_crit
-
+
if aliased:
if secondary is not None:
secondary = secondary.alias()
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 53c777ae8..d5f0ef0ca 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -55,20 +55,20 @@ def _generative(*assertions):
class Query(object):
"""ORM-level SQL construction object.
-
+
:class:`.Query` is the source of all SELECT statements generated by the
ORM, both those formulated by end-user query operations as well as by
high level internal operations such as related collection loading. It
features a generative interface whereby successive calls return a new
:class:`.Query` object, a copy of the former with additional
criteria and options associated with it.
-
+
:class:`.Query` objects are normally initially generated using the
:meth:`~.Session.query` method of :class:`.Session`. For a full walkthrough
of :class:`.Query` usage, see the :ref:`ormtutorial_toplevel`.
-
+
"""
-
+
_enable_eagerloads = True
_enable_assertions = True
_with_labels = False
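A brief sketch of that generative pattern (assuming a mapped ``User`` class and a ``session``); each call returns a new ``Query`` and leaves the original untouched:

    q1 = session.query(User)
    q2 = q1.filter(User.name == 'ed')    # new Query; q1 is unchanged
    q3 = q2.order_by(User.id).limit(5)   # criteria and options accumulate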
@@ -100,7 +100,7 @@ class Query(object):
_with_options = ()
_with_hints = ()
_enable_single_crit = True
-
+
def __init__(self, entities, session=None):
self.session = session
self._polymorphic_adapters = {}
@@ -173,7 +173,7 @@ class Query(object):
equivs = self.__all_equivs()
self._from_obj_alias = sql_util.ColumnAdapter(
self._from_obj[0], equivs)
-
+
def _get_polymorphic_adapter(self, entity, selectable):
self.__mapper_loads_polymorphically_with(entity.mapper,
sql_util.ColumnAdapter(selectable,
@@ -226,7 +226,7 @@ class Query(object):
@_generative()
def _adapt_all_clauses(self):
self._disable_orm_filtering = True
-
+
def _adapt_col_list(self, cols):
return [
self._adapt_clause(
@@ -234,7 +234,7 @@ class Query(object):
True, True)
for o in cols
]
-
+
def _adapt_clause(self, clause, as_filter, orm_only):
adapters = []
if as_filter and self._filter_aliases:
@@ -323,7 +323,7 @@ class Query(object):
def _get_condition(self):
self._order_by = self._distinct = False
return self._no_criterion_condition("get")
-
+
def _no_criterion_condition(self, meth):
if not self._enable_assertions:
return
@@ -407,11 +407,11 @@ class Query(object):
@property
def statement(self):
"""The full SELECT statement represented by this Query.
-
+
The statement by default will not have disambiguating labels
applied to the construct unless with_labels(True) is called
first.
-
+
"""
stmt = self._compile_context(labels=self._with_labels).\
@@ -432,33 +432,33 @@ class Query(object):
"""
return self.enable_eagerloads(False).statement.alias()
-
+
def label(self, name):
"""Return the full SELECT statement represented by this :class:`.Query`, converted
to a scalar subquery with a label of the given name.
-
+
Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`.
-
+
New in 0.6.5.
"""
-
+
return self.enable_eagerloads(False).statement.label(name)
def as_scalar(self):
"""Return the full SELECT statement represented by this :class:`.Query`, converted
to a scalar subquery.
-
+
Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.as_scalar`.
New in 0.6.5.
-
+
"""
-
+
return self.enable_eagerloads(False).statement.as_scalar()
-
-
+
+
def __clause_element__(self):
return self.enable_eagerloads(False).with_labels().statement
@@ -495,11 +495,11 @@ class Query(object):
"""
self._with_labels = True
-
+
@_generative()
def enable_assertions(self, value):
"""Control whether assertions are generated.
-
+
When set to False, the returned Query will
not assert its state before certain operations,
including that LIMIT/OFFSET has not been applied
@@ -509,22 +509,22 @@ class Query(object):
is called. This more permissive mode is used by
custom Query subclasses to specify criterion or
other modifiers outside of the usual usage patterns.
-
+
Care should be taken to ensure that the usage
pattern is even possible. A statement applied
by from_statement() will override any criterion
set by filter() or order_by(), for example.
-
+
"""
self._enable_assertions = value
-
+
@property
def whereclause(self):
"""A readonly attribute which returns the current WHERE criterion for this Query.
-
+
This returned value is a SQL expression construct, or ``None`` if no
criterion has been established.
-
+
"""
return self._criterion
@@ -601,12 +601,12 @@ class Query(object):
set the ``stream_results`` execution
option to ``True``, which currently is only understood by psycopg2
and causes server side cursors to be used.
-
+
"""
self._yield_per = count
self._execution_options = self._execution_options.copy()
self._execution_options['stream_results'] = True
-
+
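A short sketch of ``yield_per()`` as described above (``User``, ``session`` and the ``handle()`` consumer are assumed):

    # fetch rows in batches of 100; with psycopg2, the 'stream_results'
    # option additionally selects a server-side cursor
    for user in session.query(User).yield_per(100):
        handle(user)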
def get(self, ident):
"""Return an instance of the object based on the
given identifier, or None if not found.
@@ -621,9 +621,9 @@ class Query(object):
# convert composite types to individual args
if hasattr(ident, '__composite_values__'):
ident = ident.__composite_values__()
-
+
ident = util.to_list(ident)
-
+
mapper = self._only_mapper_zero(
"get() can only be used against a single mapped class."
)
@@ -633,13 +633,13 @@ class Query(object):
"Incorrect number of values in identifier to formulate "
"primary key for query.get(); primary key columns are %s" %
','.join("'%s'" % c for c in mapper.primary_key))
-
+
key = mapper.identity_key_from_primary_key(ident)
if not self._populate_existing and \
not mapper.always_refresh and \
self._lockmode is None:
-
+
instance = self._get_from_identity(self.session, key, False)
if instance is not None:
# reject calls for id in identity map but class
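Typical invocations of ``get()`` per the above (entity names assumed):

    user = session.query(User).get(5)              # scalar primary key
    assoc = session.query(Assoc).get((3, 'red'))   # composite primary key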
@@ -655,22 +655,22 @@ class Query(object):
"""Return a :class:`.Query` construct which will correlate the given
FROM clauses to that of an enclosing :class:`.Query` or
:func:`~.expression.select`.
-
+
The method here accepts mapped classes, :func:`.aliased` constructs,
and :func:`.mapper` constructs as arguments, which are resolved into
expression constructs; plain expression constructs are accepted
as well.
-
+
The correlation arguments are ultimately passed to
:meth:`.Select.correlate` after coercion to expression constructs.
-
+
The correlation arguments take effect in such cases
as when :meth:`.Query.from_self` is used, or when
a subquery as returned by :meth:`.Query.subquery` is
embedded in another :func:`~.expression.select` construct.
-
+
"""
-
+
self._correlate = self._correlate.union(
_orm_selectable(s)
for s in args)
@@ -691,7 +691,7 @@ class Query(object):
def populate_existing(self):
"""Return a :class:`Query` that will expire and refresh all instances
as they are loaded, or reused from the current :class:`.Session`.
-
+
:meth:`.populate_existing` does not improve behavior when
the ORM is used normally - the :class:`.Session` object's usual
behavior of maintaining a transaction and expiring all attributes
@@ -706,16 +706,16 @@ class Query(object):
to a child object or collection, using its attribute state
as well as an established :func:`.relationship()`
configuration.
-
+
The method uses the :func:`.with_parent` function to generate
the clause, the result of which is passed to :meth:`.Query.filter`.
-
+
Parameters are the same as :func:`.with_parent`, with the exception
that the given property can be None, in which case a search is
performed against this :class:`.Query` object's target mapper.
-
+
"""
-
+
if property is None:
from sqlalchemy.orm import properties
mapper = object_mapper(instance)
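A sketch of the ``with_parent()`` method described above (assumes ``User.addresses`` is a configured relationship):

    someuser = session.query(User).get(5)
    addresses = session.query(Address).\
        with_parent(someuser, 'addresses').all()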
@@ -767,7 +767,7 @@ class Query(object):
@_generative()
def _enable_single_crit(self, val):
self._enable_single_crit = val
-
+
@_generative()
def _from_selectable(self, fromclause):
for attr in ('_statement', '_criterion', '_order_by', '_group_by',
@@ -805,12 +805,12 @@ class Query(object):
# end Py2K
except StopIteration:
return None
-
+
@_generative()
def with_entities(self, *entities):
"""Return a new :class:`.Query` replacing the SELECT list with the given
entities.
-
+
e.g.::
# Users, filtered on some arbitrary criterion
@@ -830,11 +830,11 @@ class Query(object):
limit(1)
New in 0.6.5.
-
+
"""
self._set_entities(entities)
-
-
+
+
@_generative()
def add_columns(self, *column):
"""Add one or more column expressions to the list
@@ -853,23 +853,23 @@ class Query(object):
False)
def add_column(self, column):
"""Add a column expression to the list of result columns to be returned.
-
+
Pending deprecation: :meth:`.add_column` will be superseded by
:meth:`.add_columns`.
-
+
"""
-
+
return self.add_columns(column)
def options(self, *args):
"""Return a new Query object, applying the given list of
mapper options.
-
+
Most supplied options regard changing how column- and
relationship-mapped attributes are loaded. See the sections
:ref:`deferred` and :ref:`loading_toplevel` for reference
documentation.
-
+
"""
return self._options(False, *args)
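A sketch of common loader options passed to ``options()`` (attribute names assumed):

    from sqlalchemy.orm import joinedload, defer

    q = session.query(User).options(
        joinedload('addresses'),   # eager-load a collection via JOIN
        defer('bio'))              # defer loading of a large column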
@@ -894,7 +894,7 @@ class Query(object):
def with_hint(self, selectable, text, dialect_name='*'):
"""Add an indexing hint for the given entity or selectable to
this :class:`Query`.
-
+
Functionality is passed straight through to
:meth:`~sqlalchemy.sql.expression.Select.with_hint`,
with the addition that ``selectable`` can be a
@@ -902,16 +902,16 @@ class Query(object):
/etc.
"""
mapper, selectable, is_aliased_class = _entity_info(selectable)
-
+
self._with_hints += ((selectable, text, dialect_name),)
-
+
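A sketch of ``with_hint()`` (the index name is assumed); dialects typically expand ``%(name)s`` to the name of the entity's table or alias:

    q = session.query(User).with_hint(
        User, 'index(%(name)s ix_user_name)', 'oracle')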
@_generative()
def execution_options(self, **kwargs):
""" Set non-SQL options which take effect during execution.
-
+
The options are the same as those accepted by
:meth:`sqlalchemy.sql.expression.Executable.execution_options`.
-
+
Note that the ``stream_results`` execution option is enabled
automatically if the :meth:`~sqlalchemy.orm.query.Query.yield_per()`
method is used.
@@ -982,16 +982,16 @@ class Query(object):
def order_by(self, *criterion):
"""apply one or more ORDER BY criterion to the query and return
the newly resulting ``Query``
-
+
All existing ORDER BY settings can be suppressed by
passing ``None`` - this will suppress any ORDER BY configured
on mappers as well.
-
+
Alternatively, an existing ORDER BY setting on the Query
object can be entirely cancelled by passing ``False``
as the value - use this before calling methods where
an ORDER BY is invalid.
-
+
"""
if len(criterion) == 1:
@@ -1002,7 +1002,7 @@ class Query(object):
if criterion[0] is None:
self._order_by = None
return
-
+
criterion = self._adapt_col_list(criterion)
if self._order_by is False or self._order_by is None:
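The suppression behaviors described in the docstring above, in brief (assumes a mapped ``User``):

    q = session.query(User).order_by(User.name)
    q = q.order_by(None)    # suppress all ORDER BY, mapper-configured included
    q = q.order_by(False)   # cancel ordering on the Query only, e.g. before a union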
@@ -1075,8 +1075,8 @@ class Query(object):
SELECT * FROM Z)
"""
-
-
+
+
return self._from_selectable(
expression.union(*([self]+ list(q))))
@@ -1200,15 +1200,15 @@ class Query(object):
to join from the right endpoint of the most recent join(),
instead of from the query's root entity. I.e. any chain
of joins, such as::
-
+
query.join(a, b, c)
-
+
is equivalent to::
-
+
query.join(a).\\
join(b, from_joinpoint=True).\\
join(c, from_joinpoint=True)
-
+
"""
aliased, from_joinpoint = kwargs.pop('aliased', False),\
kwargs.pop('from_joinpoint', False)
@@ -1239,13 +1239,13 @@ class Query(object):
def _join(self, keys, outerjoin, create_aliases, from_joinpoint):
"""consumes arguments from join() or outerjoin(), places them into a
consistent format with which to form the actual JOIN constructs.
-
+
"""
self._polymorphic_adapters = self._polymorphic_adapters.copy()
if not from_joinpoint:
self._reset_joinpoint()
-
+
if len(keys) == 2 and \
isinstance(keys[0], (expression.FromClause,
type, AliasedClass)) and \
@@ -1264,7 +1264,7 @@ class Query(object):
arg1, arg2 = arg1
else:
arg2 = None
-
+
# determine onclause/right_entity. there
# is a little bit of legacy behavior still at work here
# which means they might be in either order. may possibly
@@ -1275,19 +1275,19 @@ class Query(object):
right_entity, onclause = arg1, arg2
left_entity = prop = None
-
+
if isinstance(onclause, basestring):
left_entity = self._joinpoint_zero()
descriptor = _entity_descriptor(left_entity, onclause)
onclause = descriptor
-
+
# check for q.join(Class.propname, from_joinpoint=True)
# and Class is that of the current joinpoint
elif from_joinpoint and \
isinstance(onclause, interfaces.PropComparator):
left_entity = onclause.parententity
-
+
left_mapper, left_selectable, left_is_aliased = \
_entity_info(self._joinpoint_zero())
if left_mapper is left_entity:
@@ -1304,9 +1304,9 @@ class Query(object):
right_entity = of_type
else:
right_entity = onclause.property.mapper
-
+
left_entity = onclause.parententity
-
+
prop = onclause.property
if not isinstance(onclause, attributes.QueryableAttribute):
onclause = prop
@@ -1324,7 +1324,7 @@ class Query(object):
elif onclause is not None and right_entity is None:
# TODO: no coverage here
raise NotImplementedError("query.join(a==b) not supported.")
-
+
self._join_left_to_right(
left_entity,
right_entity, onclause,
@@ -1333,7 +1333,7 @@ class Query(object):
def _join_left_to_right(self, left, right,
onclause, outerjoin, create_aliases, prop):
"""append a JOIN to the query's from clause."""
-
+
if left is None:
left = self._joinpoint_zero()
@@ -1343,7 +1343,7 @@ class Query(object):
"Can't construct a join from %s to %s, they "
"are the same entity" %
(left, right))
-
+
left_mapper, left_selectable, left_is_aliased = _entity_info(left)
right_mapper, right_selectable, right_is_aliased = _entity_info(right)
@@ -1410,7 +1410,7 @@ class Query(object):
self._joinpoint = {
'_joinpoint_entity':right
}
-
+
# if an alias() of the right side was generated here,
# apply an adapter to all subsequent filter() calls
# until reset_joinpoint() is called.
@@ -1423,7 +1423,7 @@ class Query(object):
# adapters that are in place right now
if isinstance(onclause, expression.ClauseElement):
onclause = self._adapt_clause(onclause, True, True)
-
+
# if an alias() on the right side was generated,
# which is intended to wrap the right side in a subquery,
# ensure that columns retrieved from this target in the result
@@ -1436,7 +1436,7 @@ class Query(object):
equivalents=right_mapper._equivalent_columns
)
)
-
+
# this is an overly broad assumption here, but there's a
# very wide variety of situations where we rely upon orm.join's
# adaption to glue clauses together, with joined-table inheritance's
@@ -1446,7 +1446,7 @@ class Query(object):
# adaption should be enabled (or perhaps that we're even doing the
# whole thing the way we are here).
join_to_left = not right_is_aliased and not left_is_aliased
-
+
if self._from_obj and left_selectable is not None:
replace_clause_index, clause = sql_util.find_join_source(
self._from_obj,
@@ -1457,13 +1457,13 @@ class Query(object):
# ensure it adapts to the left side.
if self._from_obj_alias and clause is self._from_obj[0]:
join_to_left = True
-
+
# An exception case where adaption to the left edge is not
# desirable. See above note on join_to_left.
if join_to_left and isinstance(clause, expression.Join) and \
sql_util.clause_is_present(left_selectable, clause):
join_to_left = False
-
+
clause = orm_join(clause,
right,
onclause, isouter=outerjoin,
@@ -1491,7 +1491,7 @@ class Query(object):
clause = orm_join(clause, right, onclause,
isouter=outerjoin, join_to_left=join_to_left)
-
+
self._from_obj = self._from_obj + (clause,)
def _reset_joinpoint(self):
@@ -1513,16 +1513,16 @@ class Query(object):
@_generative(_no_clauseelement_condition)
def select_from(self, *from_obj):
"""Set the FROM clause of this :class:`.Query` explicitly.
-
+
Sending a mapped class or entity here effectively replaces the
"left edge" of any calls to :meth:`.Query.join`, when no
joinpoint is otherwise established - usually, the default "join
point" is the leftmost entity in the :class:`.Query` object's
list of entities to be selected.
-
+
Mapped entities or plain :class:`.Table` or other selectables
can be sent here which will form the default FROM clause.
-
+
"""
obj = []
for fo in from_obj:
@@ -1534,10 +1534,10 @@ class Query(object):
raise sa_exc.ArgumentError(
"select_from() accepts FromClause objects only.")
else:
- obj.append(fo)
-
+ obj.append(fo)
+
self._set_select_from(*obj)
-
+
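A sketch of ``select_from()`` replacing the default join point (entities assumed):

    q = session.query(User).select_from(Address).\
        join(Address.user).\
        filter(Address.email == 'ed@example.com')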
def __getitem__(self, item):
if isinstance(item, slice):
start, stop, step = util.decode_slice(item)
@@ -1568,7 +1568,7 @@ class Query(object):
def slice(self, start, stop):
"""apply LIMIT/OFFSET to the ``Query`` based on a "
"range and return the newly resulting ``Query``."""
-
+
if start is not None and stop is not None:
self._offset = (self._offset or 0) + start
self._limit = stop - start
@@ -1637,7 +1637,7 @@ class Query(object):
def first(self):
"""Return the first result of this ``Query`` or
None if the result doesn't contain any row.
-
+
first() applies a limit of one within the generated SQL, so that
only one primary entity row is generated on the server side
(note this may consist of multiple result rows if join-loaded
@@ -1663,7 +1663,7 @@ class Query(object):
if multiple object identities are returned, or if multiple
rows are returned for a query that does not return object
identities.
-
+
Note that an entity query, that is, one which selects one or
more mapped classes as opposed to individual column attributes,
may ultimately represent many rows but only one row of
@@ -1676,7 +1676,7 @@ class Query(object):
"""
ret = list(self)
-
+
l = len(ret)
if l == 1:
return ret[0]
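The distinction drawn above, in brief:

    user = session.query(User).filter_by(name='ed').first()  # row or None; applies LIMIT 1
    user = session.query(User).filter_by(name='ed').one()    # exactly one row, else an error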
@@ -1726,20 +1726,20 @@ class Query(object):
clause = querycontext.statement,
close_with_result=True).execute(querycontext.statement, self._params)
return self.instances(result, querycontext)
-
+
@property
def column_descriptions(self):
"""Return metadata about the columns which would be
returned by this :class:`Query`.
-
+
Format is a list of dictionaries::
-
+
user_alias = aliased(User, name='user2')
q = sess.query(User, User.id, user_alias)
-
+
# this expression:
q.column_descriptions
-
+
# would return:
[
{
@@ -1761,7 +1761,7 @@ class Query(object):
'expr':user_alias
}
]
-
+
"""
return [
{
@@ -1772,7 +1772,7 @@ class Query(object):
}
for ent in self._entities
]
-
+
def instances(self, cursor, __context=None):
"""Given a ResultProxy cursor as returned by connection.execute(),
return an ORM result as an iterator.
@@ -1810,8 +1810,8 @@ class Query(object):
query_entity.row_processor(self, context, custom_rows)
for query_entity in self._entities
])
-
-
+
+
while True:
context.progress = {}
context.partials = {}
@@ -1855,7 +1855,7 @@ class Query(object):
def merge_result(self, iterator, load=True):
"""Merge a result into this Query's Session.
-
+
Given an iterator returned by a Query of the same structure as this
one, return an identical iterator of results, with all mapped
instances merged into the session using Session.merge(). This is an
@@ -1863,19 +1863,19 @@ class Query(object):
structure of the result rows and unmapped columns with less method
overhead than that of calling Session.merge() explicitly for each
value.
-
+
The structure of the results is determined based on the column list of
this Query - if these do not correspond, unchecked errors will occur.
-
+
The 'load' argument is the same as that of Session.merge().
-
+
"""
-
+
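A sketch of the caching-style usage ``merge_result()`` is aimed at (the cache itself is hypothetical):

    q = session.query(User).filter(User.name.like('e%'))
    rows = some_cache.get('users_e')           # hypothetical external result cache
    merged = q.merge_result(rows, load=False)  # merge, assuming unmodified state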
session = self.session
if load:
# flush current contents if we expect to load data
session._autoflush()
-
+
autoflush = session.autoflush
try:
session.autoflush = False
@@ -1900,23 +1900,23 @@ class Query(object):
attributes.instance_state(newrow[i]),
attributes.instance_dict(newrow[i]),
load=load, _recursive={})
- result.append(util.NamedTuple(newrow, row._labels))
-
+ result.append(util.NamedTuple(newrow, row._labels))
+
return iter(result)
finally:
session.autoflush = autoflush
-
+
@classmethod
def _get_from_identity(cls, session, key, passive):
"""Look up the given key in the given session's identity map,
check the object for expired state if found.
-
+
"""
instance = session.identity_map.get(key)
if instance:
-
+
state = attributes.instance_state(instance)
-
+
# expired - ensure it still exists
if state.expired:
if passive is attributes.PASSIVE_NO_FETCH:
@@ -1930,18 +1930,18 @@ class Query(object):
return instance
else:
return None
-
+
def _load_on_ident(self, key, refresh_state=None, lockmode=None,
only_load_props=None):
"""Load the given identity key from the database."""
-
+
lockmode = lockmode or self._lockmode
if key is not None:
ident = key[1]
else:
ident = None
-
+
if refresh_state is None:
q = self._clone()
q._get_condition()
@@ -1952,7 +1952,7 @@ class Query(object):
mapper = self._mapper_zero()
(_get_clause, _get_params) = mapper._get_clause
-
+
# None present in ident - turn those comparisons
# into "IS NULL"
if None in ident:
@@ -1962,7 +1962,7 @@ class Query(object):
])
_get_clause = sql_util.adapt_criterion_to_null(
_get_clause, nones)
-
+
_get_clause = q._adapt_clause(_get_clause, True, False)
q._criterion = _get_clause
@@ -2006,7 +2006,7 @@ class Query(object):
def count(self):
"""Return a count of rows this Query would return.
-
+
For simple entity queries, count() issues
a SELECT COUNT, and will specifically count the primary
key column of the first entity only. If the query uses
@@ -2014,11 +2014,11 @@ class Query(object):
generated by this Query in a subquery, from which a SELECT COUNT
is issued, so that the contract of "how many rows
would be returned?" is honored.
-
+
For queries that request specific columns or expressions,
count() again makes no assumptions about those expressions
and will wrap everything in a subquery. Therefore,
- ``Query.count()`` is usually not what you want in this case.
+ ``Query.count()`` is usually not what you want in this case.
To count specific columns, often in conjunction with
GROUP BY, use ``func.count()`` as an individual column expression
instead of ``Query.count()``. See the ORM tutorial
@@ -2081,7 +2081,7 @@ class Query(object):
:param synchronize_session: chooses the strategy for the removal of
matched objects from the session. Valid values are:
-
+
False - don't synchronize the session. This option is the most
efficient and is reliable once the session is expired, which
typically occurs after a commit(), or explicitly using
@@ -2099,7 +2099,7 @@ class Query(object):
the objects in the session. If evaluation of the criteria isn't
implemented, an error is raised. In that case you probably
want to use the 'fetch' strategy as a fallback.
-
+
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
@@ -2149,7 +2149,7 @@ class Query(object):
else:
def eval_condition(obj):
return True
-
+
except evaluator.UnevaluatableError:
raise sa_exc.InvalidRequestError(
"Could not evaluate current criteria in Python. "
@@ -2214,7 +2214,7 @@ class Query(object):
expire_all(). Before the expiration, updated objects may still
remain in the session with stale values on their attributes, which
can lead to confusing results.
-
+
'fetch' - performs a select query before the update to find
objects that are matched by the update query. The updated
attributes are expired on matched objects.
@@ -2254,7 +2254,7 @@ class Query(object):
"the synchronize_session argument of "
"query.update() is now called 'fetch'")
synchronize_session = 'fetch'
-
+
if synchronize_session not in [False, 'evaluate', 'fetch']:
raise sa_exc.ArgumentError(
"Valid strategies for session synchronization "
@@ -2342,7 +2342,7 @@ class Query(object):
session.identity_map[identity_key],
[_attr_as_key(k) for k in values]
)
-
+
session.dispatch.after_bulk_update(session, self, context, result)
return result.rowcount
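The ``synchronize_session`` strategies documented above look like this in use; a sketch only, again assuming a mapped ``User`` class and an active ``session``::

    # 'evaluate' re-applies the criterion to in-session objects
    # using the Python expression evaluator
    session.query(User).filter(User.name == 'squidward').\
        update({'name': 'patrick'}, synchronize_session='evaluate')

    # 'fetch' first SELECTs the matched primary keys so the
    # affected in-session objects can be synchronized
    session.query(User).filter(User.id > 100).\
        delete(synchronize_session='fetch')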
@@ -2367,21 +2367,21 @@ class Query(object):
for entity in self._entities:
entity.setup_context(self, context)
-
+
for rec in context.create_eager_joins:
strategy = rec[0]
strategy(*rec[1:])
-
+
eager_joins = context.eager_joins.values()
if context.from_clause:
# "load from explicit FROMs" mode,
# i.e. when select_from() or join() is used
- froms = list(context.from_clause)
+ froms = list(context.from_clause)
else:
# "load from discrete FROMs" mode,
# i.e. when each _MappedEntity has its own FROM
- froms = context.froms
+ froms = context.froms
if self._enable_single_crit:
self._adjust_for_single_inheritance(context)
@@ -2422,10 +2422,10 @@ class Query(object):
order_by=context.order_by,
**self._select_args
)
-
+
for hint in self._with_hints:
inner = inner.with_hint(*hint)
-
+
if self._correlate:
inner = inner.correlate(*self._correlate)
@@ -2439,7 +2439,7 @@ class Query(object):
[inner] + context.secondary_columns,
for_update=for_update,
use_labels=labels)
-
+
if self._execution_options:
statement = statement.execution_options(
**self._execution_options)
@@ -2492,7 +2492,7 @@ class Query(object):
for hint in self._with_hints:
statement = statement.with_hint(*hint)
-
+
if self._execution_options:
statement = statement.execution_options(
**self._execution_options)
@@ -2516,7 +2516,7 @@ class Query(object):
selected from the total results.
"""
-
+
for entity, (mapper, adapter, s, i, w) in \
self._mapper_adapter_map.iteritems():
single_crit = mapper._single_table_criterion
@@ -2558,7 +2558,7 @@ class _MapperEntity(_QueryEntity):
self.entities = [entity]
self.entity_zero = self.expr = entity
-
+
def setup_entity(self, entity, mapper, adapter,
from_obj, is_aliased_class, with_polymorphic):
self.mapper = mapper
@@ -2578,8 +2578,8 @@ class _MapperEntity(_QueryEntity):
self._reduced_path = (mapper.base_mapper, )
self.entity_zero = mapper
self._label_name = self.mapper.class_.__name__
-
-
+
+
def set_with_polymorphic(self, query, cls_or_mappers,
selectable, discriminator):
if cls_or_mappers is None:
@@ -2611,14 +2611,14 @@ class _MapperEntity(_QueryEntity):
query._entities.append(self)
def _get_entity_clauses(self, query, context):
-
+
adapter = None
if not self.is_aliased_class and query._polymorphic_adapters:
adapter = query._polymorphic_adapters.get(self.mapper, None)
if not adapter and self.adapter:
adapter = self.adapter
-
+
if adapter:
if query._from_obj_alias:
ret = adapter.wrap(query._from_obj_alias)
@@ -2666,7 +2666,7 @@ class _MapperEntity(_QueryEntity):
self._polymorphic_discriminator)
return _instance, self._label_name
-
+
def setup_context(self, query, context):
adapter = self._get_entity_clauses(query, context)
@@ -2688,7 +2688,7 @@ class _MapperEntity(_QueryEntity):
self._with_polymorphic)
else:
poly_properties = self.mapper._polymorphic_properties
-
+
for value in poly_properties:
if query._only_load_props and \
value.key not in query._only_load_props:
@@ -2718,7 +2718,7 @@ class _ColumnEntity(_QueryEntity):
def __init__(self, query, column):
self.expr = column
-
+
if isinstance(column, basestring):
column = sql.literal_column(column)
self._label_name = column.name
@@ -2779,17 +2779,17 @@ class _ColumnEntity(_QueryEntity):
self.entity_zero = list(self.entities)[0]
else:
self.entity_zero = None
-
+
@property
def type(self):
return self.column.type
-
+
def adapt_to_selectable(self, query, sel):
c = _ColumnEntity(query, sel.corresponding_column(self.column))
c._label_name = self._label_name
c.entity_zero = self.entity_zero
c.entities = self.entities
-
+
def setup_entity(self, entity, mapper, adapter, from_obj,
is_aliased_class, with_polymorphic):
self.selectable = from_obj
@@ -2834,7 +2834,7 @@ class QueryContext(object):
multi_row_eager_loaders = False
adapter = None
froms = ()
-
+
def __init__(self, query):
if query._statement is not None:
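The ``Query.count()`` docstring above recommends ``func.count()`` for column-oriented counts; a minimal sketch, assuming the same hypothetical ``User`` mapping::

    from sqlalchemy import func

    # entity query: counts primary entity rows, via the primary key
    # column or a wrapping subquery per the rules described above
    total = session.query(User).count()

    # explicit column count, typically combined with GROUP BY
    per_name = session.query(User.name, func.count(User.id)).\
        group_by(User.name).all()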
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index 431377da0..1068f6704 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -22,10 +22,10 @@ class ScopedSession(object):
Session = scoped_session(sessionmaker())
... use Session normally.
-
+
The internal registry is accessible as well,
and by default is an instance of :class:`.ThreadLocalRegistry`.
-
+
"""
@@ -54,14 +54,14 @@ class ScopedSession(object):
def remove(self):
"""Dispose of the current contextual session."""
-
+
if self.registry.has():
self.registry().close()
self.registry.clear()
def configure(self, **kwargs):
"""reconfigure the sessionmaker used by this ScopedSession."""
-
+
if self.registry.has():
warn('At least one scoped session is already present. '
' configure() can not affect sessions that have '
@@ -74,7 +74,7 @@ class ScopedSession(object):
class when called.
e.g.::
-
+
Session = scoped_session(sessionmaker())
class MyClass(object):
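A runnable sketch of the ``ScopedSession`` pattern shown in the docstrings above (the in-memory SQLite URL is a placeholder)::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    Session = scoped_session(
        sessionmaker(bind=create_engine('sqlite://')))

    session = Session()        # thread-local Session from the registry
    assert Session() is session

    Session.remove()           # dispose of the current thread's Session
    assert Session() is not session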
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 942b4d684..47420e207 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -138,7 +138,7 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
:param extension: An optional
:class:`~.SessionExtension` instance, or a list
of such instances, which will receive pre- and post- commit and flush
- events, as well as a post-rollback event. **Deprecated.**
+ events, as well as a post-rollback event. **Deprecated.**
Please see :class:`.SessionEvents`.
:param query_cls: Class which should be used to create new Query objects,
@@ -190,8 +190,8 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
Session.configure(bind=create_engine('sqlite://'))
"""
kwargs.update(new_kwargs)
-
-
+
+
return type("Session", (Sess, class_), {})
@@ -211,9 +211,9 @@ class SessionTransaction(object):
single: thread safety; SessionTransaction
"""
-
+
_rollback_exception = None
-
+
def __init__(self, session, parent=None, nested=False):
self.session = session
self._connections = {}
@@ -297,7 +297,7 @@ class SessionTransaction(object):
for s in set(self._new).union(self.session._new):
self.session._expunge_state(s)
-
+
for s in set(self._deleted).union(self.session._deleted):
if s.deleted:
# assert s in self._deleted
@@ -465,7 +465,7 @@ class Session(object):
"""Manages persistence operations for ORM-mapped objects.
The Session's usage paradigm is described at :ref:`session_toplevel`.
-
+
"""
public_methods = (
@@ -475,8 +475,8 @@ class Session(object):
'is_modified',
'merge', 'query', 'refresh', 'rollback',
'scalar')
-
-
+
+
def __init__(self, bind=None, autoflush=True, expire_on_commit=True,
_enable_transaction_accounting=True,
autocommit=False, twophase=False,
@@ -489,7 +489,7 @@ class Session(object):
typical point of entry.
"""
-
+
if weak_identity_map:
self._identity_cls = identity.WeakInstanceDict
else:
@@ -509,11 +509,11 @@ class Session(object):
self._enable_transaction_accounting = _enable_transaction_accounting
self.twophase = twophase
self._query_cls = query_cls
-
+
if extension:
for ext in util.to_list(extension):
SessionExtension._adapt_listener(self, ext)
-
+
if binds is not None:
for mapperortable, bind in binds.iteritems():
if isinstance(mapperortable, (type, Mapper)):
@@ -528,7 +528,7 @@ class Session(object):
dispatch = event.dispatcher(SessionEvents)
connection_callable = None
-
+
def begin(self, subtransactions=False, nested=False):
"""Begin a transaction on this Session.
@@ -537,9 +537,9 @@ class Session(object):
``subtransactions=True`` or ``nested=True`` is specified.
The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin`
- can create a subtransaction if a transaction is already in progress.
+ can create a subtransaction if a transaction is already in progress.
For documentation on subtransactions, please see :ref:`session_subtransactions`.
-
+
The ``nested`` flag begins a SAVEPOINT transaction and is equivalent
to calling :meth:`~.Session.begin_nested`. For documentation on SAVEPOINT
transactions, please see :ref:`session_begin_nested`.
@@ -588,12 +588,12 @@ class Session(object):
def commit(self):
"""Flush pending changes and commit the current transaction.
-
+
If no transaction is in progress, this method raises an
InvalidRequestError.
-
+
By default, the :class:`.Session` also expires all database
- loaded state on all ORM-managed attributes after transaction commit.
+ loaded state on all ORM-managed attributes after transaction commit.
This is so that subsequent operations load the most recent
data from the database. This behavior can be disabled using
the ``expire_on_commit=False`` option to :func:`.sessionmaker` or
@@ -692,7 +692,7 @@ class Session(object):
will be created for the life of the result (i.e., a connection is
checked out from the connection pool, which is returned when the
result object is closed).
-
+
If the :class:`Session` is not bound to an
:class:`~sqlalchemy.engine.base.Engine` or
:class:`~sqlalchemy.engine.base.Connection`, the given clause will be
@@ -702,7 +702,7 @@ class Session(object):
(since the :class:`Session` keys multiple bind sources to a series of
:func:`mapper` objects). See :meth:`get_bind` for further details on
bind resolution.
-
+
:param clause:
A ClauseElement (i.e. select(), text(), etc.) or
string SQL statement to be executed
@@ -716,7 +716,7 @@ class Session(object):
:param \**kw:
Additional keyword arguments are sent to :meth:`get_bind()`
which locates a connectable to use for the execution.
-
+
"""
clause = expression._literal_as_text(clause)
@@ -727,7 +727,7 @@ class Session(object):
def scalar(self, clause, params=None, mapper=None, **kw):
"""Like execute() but return a scalar result."""
-
+
return self.execute(clause, params=params, mapper=mapper, **kw).scalar()
def close(self):
@@ -826,7 +826,7 @@ class Session(object):
"a binding.")
c_mapper = mapper is not None and _class_to_mapper(mapper) or None
-
+
# manually bound?
if self.__binds:
if c_mapper:
@@ -853,7 +853,7 @@ class Session(object):
context.append('mapper %s' % c_mapper)
if clause is not None:
context.append('SQL expression')
-
+
raise sa_exc.UnboundExecutionError(
"Could not locate a bind configured on %s or this Session" % (
', '.join(context)))
@@ -890,14 +890,14 @@ class Session(object):
:meth:`~Session.refresh` usually only makes sense if non-ORM SQL
statements were emitted in the ongoing transaction, or if autocommit
mode is turned on.
-
+
:param attribute_names: optional. An iterable collection of
string attribute names indicating a subset of attributes to
be refreshed.
-
+
:param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query`
as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`.
-
+
"""
try:
state = attributes.instance_state(instance)
@@ -916,7 +916,7 @@ class Session(object):
def expire_all(self):
"""Expires all persistent instances within this Session.
-
+
When any attribute on a persistent instance is next accessed,
a query will be issued using the
:class:`.Session` object's current transactional context in order to
@@ -927,7 +927,7 @@ class Session(object):
To expire individual objects and individual attributes
on those objects, use :meth:`Session.expire`.
-
+
The :class:`Session` object's default behavior is to
expire all state whenever the :meth:`Session.rollback`
or :meth:`Session.commit` methods are called, so that new
@@ -949,10 +949,10 @@ class Session(object):
a highly isolated transaction will return the same values as were
previously read in that same transaction, regardless of changes
in database state outside of that transaction.
-
+
To expire all objects in the :class:`.Session` simultaneously,
use :meth:`Session.expire_all`.
-
+
The :class:`Session` object's default behavior is to
expire all state whenever the :meth:`Session.rollback`
or :meth:`Session.commit` methods are called, so that new
@@ -971,7 +971,7 @@ class Session(object):
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
self._expire_state(state, attribute_names)
-
+
def _expire_state(self, state, attribute_names):
self._validate_persistent(state)
if attribute_names:
@@ -984,16 +984,16 @@ class Session(object):
self._conditional_expire(state)
for o, m, st_, dct_ in cascaded:
self._conditional_expire(st_)
-
+
def _conditional_expire(self, state):
"""Expire a state if persistent, else expunge if pending"""
-
+
if state.key:
state.expire(state.dict, self.identity_map._modified)
elif state in self._new:
self._new.pop(state)
state.detach()
-
+
def prune(self):
"""Remove unreferenced instances cached in the identity map.
@@ -1046,7 +1046,7 @@ class Session(object):
if obj is not None:
instance_key = mapper._identity_key_from_state(state)
-
+
if _none_set.issubset(instance_key[1]) and \
not mapper.allow_partial_pks or \
_none_set.issuperset(instance_key[1]):
@@ -1063,10 +1063,10 @@ class Session(object):
# map (see test/orm/test_naturalpks.py ReversePKsTest)
self.identity_map.discard(state)
state.key = instance_key
-
+
self.identity_map.replace(state)
state.commit_all(state.dict, self.identity_map)
-
+
# remove from new last, might be the last strong ref
if state in self._new:
if self._enable_transaction_accounting and self.transaction:
@@ -1132,7 +1132,7 @@ class Session(object):
if state in self._deleted:
return
-
+
# ensure object is attached to allow the
# cascade operation to load deferred attributes
# and collections
@@ -1164,19 +1164,19 @@ class Session(object):
mapped with ``cascade="merge"``.
See :ref:`unitofwork_merging` for a detailed discussion of merging.
-
+
"""
if 'dont_load' in kw:
load = not kw['dont_load']
util.warn_deprecated('dont_load=True has been renamed to '
'load=False.')
-
+
_recursive = {}
-
+
if load:
# flush current contents if we expect to load data
self._autoflush()
-
+
_object_mapper(instance) # verify mapped
autoflush = self.autoflush
try:
@@ -1187,7 +1187,7 @@ class Session(object):
load=load, _recursive=_recursive)
finally:
self.autoflush = autoflush
-
+
def _merge(self, state, state_dict, load=True, _recursive=None):
mapper = _state_mapper(state)
if state in _recursive:
@@ -1195,7 +1195,7 @@ class Session(object):
new_instance = False
key = state.key
-
+
if key is None:
if not load:
raise sa_exc.InvalidRequestError(
@@ -1207,7 +1207,7 @@ class Session(object):
if key in self.identity_map:
merged = self.identity_map[key]
-
+
elif not load:
if state.modified:
raise sa_exc.InvalidRequestError(
@@ -1219,14 +1219,14 @@ class Session(object):
merged_state.key = key
self._update_impl(merged_state)
new_instance = True
-
+
elif not _none_set.issubset(key[1]) or \
(mapper.allow_partial_pks and
not _none_set.issuperset(key[1])):
merged = self.query(mapper.class_).get(key[1])
else:
merged = None
-
+
if merged is None:
merged = mapper.class_manager.new_instance()
merged_state = attributes.instance_state(merged)
@@ -1236,15 +1236,15 @@ class Session(object):
else:
merged_state = attributes.instance_state(merged)
merged_dict = attributes.instance_dict(merged)
-
+
_recursive[state] = merged
# check that we didn't just pull the exact same
- # state out.
+ # state out.
if state is not merged_state:
merged_state.load_path = state.load_path
merged_state.load_options = state.load_options
-
+
for prop in mapper.iterate_properties:
prop.merge(self, state, state_dict,
merged_state, merged_dict,
@@ -1252,7 +1252,7 @@ class Session(object):
if not load:
# remove any history
- merged_state.commit_all(merged_dict, self.identity_map)
+ merged_state.commit_all(merged_dict, self.identity_map)
if new_instance:
merged_state.manager.dispatch.load(merged_state)
@@ -1279,7 +1279,7 @@ class Session(object):
raise sa_exc.InvalidRequestError(
"Object '%s' already has an identity - it can't be registered "
"as pending" % mapperutil.state_str(state))
-
+
self._attach(state)
if state not in self._new:
self._new[state] = state.obj()
@@ -1289,12 +1289,12 @@ class Session(object):
if (self.identity_map.contains_state(state) and
state not in self._deleted):
return
-
+
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
mapperutil.state_str(state))
-
+
if state.deleted:
raise sa_exc.InvalidRequestError(
"Instance '%s' has been deleted. Use the make_transient() "
@@ -1317,11 +1317,11 @@ class Session(object):
if state.key is None:
return
-
+
self._attach(state)
self._deleted[state] = state.obj()
self.identity_map.add(state)
-
+
def _attach(self, state):
if state.key and \
state.key in self.identity_map and \
@@ -1330,13 +1330,13 @@ class Session(object):
"%s; another instance with key %s is already "
"present in this session."
% (mapperutil.state_str(state), state.key))
-
+
if state.session_id and state.session_id is not self.hash_key:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
"(this is '%s')" % (mapperutil.state_str(state),
state.session_id, self.hash_key))
-
+
if state.session_id != self.hash_key:
state.session_id = self.hash_key
if self.dispatch.after_attach:
@@ -1393,16 +1393,16 @@ class Session(object):
"The 'objects' argument to session.flush() is deprecated; "
"Please do not add objects to the session which should not "
"yet be persisted.")
-
+
if self._flushing:
raise sa_exc.InvalidRequestError("Session is already flushing")
-
+
try:
self._flushing = True
self._flush(objects)
finally:
self._flushing = False
-
+
def _flush(self, objects=None):
if (not self.identity_map.check_modified() and
not self._deleted and not self._new):
@@ -1414,13 +1414,13 @@ class Session(object):
return
flush_context = UOWTransaction(self)
-
+
if self.dispatch.before_flush:
self.dispatch.before_flush(self, flush_context, objects)
# re-establish "dirty states" in case the listeners
# added new dirty states
dirty = self._dirty_states
-
+
deleted = set(self._deleted)
new = set(self._new)
@@ -1448,7 +1448,7 @@ class Session(object):
proc = new.union(dirty).intersection(objset).difference(deleted)
else:
proc = new.union(dirty).difference(deleted)
-
+
for state in proc:
is_orphan = _state_mapper(state)._is_orphan(state) and state.has_identity
flush_context.register_object(state, isdelete=is_orphan)
@@ -1475,7 +1475,7 @@ class Session(object):
except:
transaction.rollback(_capture_exception=True)
raise
-
+
flush_context.finalize_flush_changes()
# useful assertions:
@@ -1485,7 +1485,7 @@ class Session(object):
# assert self.identity_map._modified == \
# self.identity_map._modified.difference(objects)
#self.identity_map._modified.clear()
-
+
self.dispatch.after_flush_postexec(self, flush_context)
def is_modified(self, instance, include_collections=True, passive=False):
@@ -1493,7 +1493,7 @@ class Session(object):
This method retrieves a history instance for each instrumented
attribute on the instance and performs a comparison of the current
- value to its previously committed value.
+ value to its previously committed value.
``include_collections`` indicates if multivalued collections should be
included in the operation. Setting this to False is a way to detect
@@ -1503,9 +1503,9 @@ class Session(object):
The ``passive`` flag indicates if unloaded attributes and collections
should not be loaded in the course of performing this test.
-
+
A few caveats to this method apply:
-
+
* Instances present in the 'dirty' collection may result in a value
of ``False`` when tested with this method. This is because while
the object may have received attribute set events, there may be
@@ -1520,7 +1520,7 @@ class Session(object):
based on the assumption that an UPDATE of the scalar value is
usually needed, and in those few cases where it isn't, is less
expensive on average than issuing a defensive SELECT.
-
+
The "old" value is fetched unconditionally only if the attribute
container has the "active_history" flag set to ``True``. This flag
is set typically for primary key attributes and scalar references
@@ -1539,10 +1539,10 @@ class Session(object):
hasattr(attr.impl, 'get_collection')
) or not hasattr(attr.impl, 'get_history'):
continue
-
+
(added, unchanged, deleted) = \
attr.impl.get_history(state, dict_, passive=passive)
-
+
if added or deleted:
return True
return False
@@ -1604,18 +1604,18 @@ _sessions = weakref.WeakValueDictionary()
def make_transient(instance):
"""Make the given instance 'transient'.
-
+
This will remove its association with any
session and additionally will remove its "identity key",
such that it's as though the object were newly constructed,
except retaining its values. It also resets the
"deleted" flag on the state if this object
had been explicitly deleted by its session.
-
+
Attributes which were "expired" or deferred at the
instance level are reverted to undefined, and
will not trigger any loads.
-
+
"""
state = attributes.instance_state(instance)
s = _state_session(state)
@@ -1629,19 +1629,19 @@ def make_transient(instance):
del state.key
if state.deleted:
del state.deleted
-
+
def object_session(instance):
"""Return the ``Session`` to which instance belongs.
-
+
If the instance is not a mapped instance, an error is raised.
"""
-
+
try:
return _state_session(attributes.instance_state(instance))
except exc.NO_STATE:
raise exc.UnmappedInstanceError(instance)
-
+
def _state_session(state):
if state.session_id:
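``make_transient()`` and ``object_session()`` combine as follows; ``User`` and ``session`` are assumed::

    from sqlalchemy.orm import make_transient, object_session

    user = session.query(User).first()
    assert object_session(user) is session

    # strip the identity key and session association; attribute
    # values are retained, expired attributes revert to undefined
    make_transient(user)
    assert object_session(user) is None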
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index 974b3d500..da91a353e 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -37,14 +37,14 @@ class InstanceState(object):
modified = False
expired = False
deleted = False
-
+
def __init__(self, obj, manager):
self.class_ = obj.__class__
self.manager = manager
self.obj = weakref.ref(obj, self._cleanup)
self.callables = {}
self.committed_state = {}
-
+
@util.memoized_property
def parents(self):
return {}
@@ -56,14 +56,14 @@ class InstanceState(object):
@property
def has_identity(self):
return bool(self.key)
-
+
def detach(self):
self.session_id = None
def dispose(self):
self.detach()
del self.obj
-
+
def _cleanup(self, ref):
instance_dict = self._instance_dict()
if instance_dict:
@@ -71,14 +71,14 @@ class InstanceState(object):
instance_dict.remove(self)
except AssertionError:
pass
-
+
self.callables = {}
self.session_id = None
del self.obj
-
+
def obj(self):
return None
-
+
@property
def dict(self):
o = self.obj()
@@ -86,7 +86,7 @@ class InstanceState(object):
return attributes.instance_dict(o)
else:
return {}
-
+
@property
def sort_key(self):
return self.key and self.key[1] or (self.insert_order, )
@@ -96,7 +96,7 @@ class InstanceState(object):
manager = self.manager
manager.dispatch.init(self, args, kwargs)
-
+
#if manager.mutable_attributes:
# assert self.__class__ is MutableAttrInstanceState
@@ -148,7 +148,7 @@ class InstanceState(object):
if self.load_path:
d['load_path'] = interfaces.serialize_path(self.load_path)
return d
-
+
def __setstate__(self, state):
from sqlalchemy.orm import instrumentation
self.obj = weakref.ref(state['instance'], self._cleanup)
@@ -162,17 +162,17 @@ class InstanceState(object):
self.class_)
elif manager.is_mapped and not manager.mapper.configured:
mapperlib.configure_mappers()
-
+
self.committed_state = state.get('committed_state', {})
self.pending = state.get('pending', {})
self.parents = state.get('parents', {})
self.modified = state.get('modified', False)
self.expired = state.get('expired', False)
self.callables = state.get('callables', {})
-
+
if self.modified:
self._strong_obj = state['instance']
-
+
self.__dict__.update([
(k, state[k]) for k in (
'key', 'load_options', 'mutable_dict'
@@ -181,13 +181,13 @@ class InstanceState(object):
if 'load_path' in state:
self.load_path = interfaces.deserialize_path(state['load_path'])
-
+
# TODO: need an event here, link to composite, mutable
-
+
def initialize(self, key):
"""Set this attribute to an empty value or collection,
based on the AttributeImpl in use."""
-
+
self.manager.get_impl(key).initialize(self, self.dict)
def reset(self, dict_, key):
@@ -212,10 +212,10 @@ class InstanceState(object):
def set_callable(self, dict_, key, callable_):
"""Remove the given attribute and set the given callable
as a loader."""
-
+
dict_.pop(key, None)
self.callables[key] = callable_
-
+
def expire(self, dict_, modified_set):
self.expired = True
if self.modified:
@@ -230,26 +230,26 @@ class InstanceState(object):
mutable_dict.clear()
if pending:
pending.clear()
-
+
for key in self.manager:
impl = self.manager[key].impl
if impl.accepts_scalar_loader and \
(impl.expire_missing or key in dict_):
self.callables[key] = self
dict_.pop(key, None)
-
+
self.manager.dispatch.expire(self, None)
def expire_attributes(self, dict_, attribute_names):
pending = self.__dict__.get('pending', None)
mutable_dict = self.mutable_dict
-
+
for key in attribute_names:
impl = self.manager[key].impl
if impl.accepts_scalar_loader:
self.callables[key] = self
dict_.pop(key, None)
-
+
self.committed_state.pop(key, None)
if mutable_dict:
mutable_dict.pop(key, None)
@@ -267,10 +267,10 @@ class InstanceState(object):
if passive is PASSIVE_NO_FETCH:
return PASSIVE_NO_RESULT
-
+
toload = self.expired_attributes.\
intersection(self.unmodified)
-
+
self.manager.deferred_scalar_loader(self, toload)
# if the loader failed, or this
@@ -279,13 +279,13 @@ class InstanceState(object):
# dict. ensure they are removed.
for k in toload.intersection(self.callables):
del self.callables[k]
-
+
return ATTR_WAS_SET
@property
def unmodified(self):
"""Return the set of keys which have no uncommitted changes"""
-
+
return set(self.manager).difference(self.committed_state)
def unmodified_intersection(self, keys):
@@ -311,11 +311,11 @@ class InstanceState(object):
def expired_attributes(self):
"""Return the set of keys which are 'expired' to be loaded by
the manager's deferred scalar loader, assuming no pending
- changes.
-
+ changes.
+
see also the ``unmodified`` collection which is intersected
against this set when a refresh operation occurs.
-
+
"""
return set([k for k, v in self.callables.items() if v is self])
@@ -324,24 +324,24 @@ class InstanceState(object):
def _is_really_none(self):
return self.obj()
-
+
def modified_event(self, dict_, attr, previous, collection=False):
if attr.key not in self.committed_state:
if collection:
if previous is NEVER_SET:
if attr.key in dict_:
previous = dict_[attr.key]
-
+
if previous not in (None, NO_VALUE, NEVER_SET):
previous = attr.copy(previous)
self.committed_state[attr.key] = previous
-
+
# the "or not self.modified" is defensive at
# this point. The assertion below is expected
# to be True:
# assert self._strong_obj is None or self.modified
-
+
if self._strong_obj is None or not self.modified:
instance_dict = self._instance_dict()
if instance_dict:
@@ -350,7 +350,7 @@ class InstanceState(object):
self._strong_obj = self.obj()
self.modified = True
-
+
def commit(self, dict_, keys):
"""Commit attributes.
@@ -371,14 +371,14 @@ class InstanceState(object):
else:
for key in keys:
self.committed_state.pop(key, None)
-
+
self.expired = False
-
+
for key in set(self.callables).\
intersection(keys).\
intersection(dict_):
del self.callables[key]
-
+
def commit_all(self, dict_, instance_dict=None):
"""commit all attributes unconditionally.
@@ -402,30 +402,30 @@ class InstanceState(object):
for key in list(callables):
if key in dict_ and callables[key] is self:
del callables[key]
-
+
for key in self.manager.mutable_attributes:
if key in dict_:
self.committed_state[key] = self.manager[key].impl.copy(dict_[key])
-
+
if instance_dict and self.modified:
instance_dict._modified.discard(self)
-
+
self.modified = self.expired = False
self._strong_obj = None
class MutableAttrInstanceState(InstanceState):
"""InstanceState implementation for objects that reference 'mutable'
attributes.
-
+
Has a more involved "cleanup" handler that checks mutable attributes
for changes upon dereference, resurrecting if needed.
-
+
"""
-
+
@util.memoized_property
def mutable_dict(self):
return {}
-
+
def _get_modified(self, dict_=None):
if self.__dict__.get('modified', False):
return True
@@ -437,44 +437,44 @@ class MutableAttrInstanceState(InstanceState):
return True
else:
return False
-
+
def _set_modified(self, value):
self.__dict__['modified'] = value
-
+
modified = property(_get_modified, _set_modified)
-
+
@property
def unmodified(self):
"""a set of keys which have no uncommitted changes"""
dict_ = self.dict
-
+
return set([
key for key in self.manager
if (key not in self.committed_state or
(key in self.manager.mutable_attributes and
not self.manager[key].impl.check_mutable_modified(self, dict_)))])
-
+
def unmodified_intersection(self, keys):
"""Return self.unmodified.intersection(keys)."""
dict_ = self.dict
-
+
return set([
key for key in keys
if (key not in self.committed_state or
(key in self.manager.mutable_attributes and
not self.manager[key].impl.check_mutable_modified(self, dict_)))])
-
-
+
+
def _is_really_none(self):
"""do a check modified/resurrect.
-
+
This would be called in the extremely rare
race condition that the weakref returned None but
the cleanup handler had not yet established the
__resurrect callable as its replacement.
-
+
"""
if self.modified:
self.obj = self.__resurrect
@@ -485,19 +485,19 @@ class MutableAttrInstanceState(InstanceState):
def reset(self, dict_, key):
self.mutable_dict.pop(key, None)
InstanceState.reset(self, dict_, key)
-
+
def _cleanup(self, ref):
"""weakref callback.
-
+
This method may be called by an asynchronous
gc.
-
+
If the state shows pending changes, the weakref
is replaced by the __resurrect callable which will
re-establish an object reference on next access,
else removes this InstanceState from the owning
identity map, if any.
-
+
"""
if self._get_modified(self.mutable_dict):
self.obj = self.__resurrect
@@ -509,13 +509,13 @@ class MutableAttrInstanceState(InstanceState):
except AssertionError:
pass
self.dispose()
-
+
def __resurrect(self):
"""A substitute for the obj() weakref function which resurrects."""
-
+
# store strong ref'ed version of the object; will revert
# to weakref when changes are persisted
-
+
obj = self.manager.new_instance(state=self)
self.obj = weakref.ref(obj, self._cleanup)
self._strong_obj = obj
@@ -523,7 +523,7 @@ class MutableAttrInstanceState(InstanceState):
# re-establishes identity attributes from the key
self.manager.dispatch.resurrect(self)
-
+
return obj
class PendingCollection(object):
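The expiration machinery above is what backs ``Session.expire()``; observed from the outside (a sketch, with ``User`` and ``session`` assumed)::

    user = session.query(User).get(5)

    # mark 'name' expired; the InstanceState installs itself as
    # the attribute's loader callable
    session.expire(user, ['name'])

    # the next access invokes the deferred scalar loader, emitting
    # a SELECT within the current transactional context
    print user.name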
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index d62bf3771..7d3e563f4 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -32,15 +32,15 @@ def _register_attribute(strategy, mapper, useobject,
proxy_property=None,
active_history=False,
impl_class=None,
- **kw
+ **kw
):
prop = strategy.parent_property
attribute_ext = list(util.to_list(prop.extension, default=[]))
-
+
listen_hooks = []
-
+
if useobject and prop.single_parent:
listen_hooks.append(single_parent_validator)
@@ -50,10 +50,10 @@ def _register_attribute(strategy, mapper, useobject,
prop.key,
prop.parent._validators[prop.key])
)
-
+
if useobject:
listen_hooks.append(unitofwork.track_cascade_events)
-
+
# need to assemble backref listeners
# after the singleparentvalidator, mapper validator
backref = kw.pop('backref', None)
@@ -63,10 +63,10 @@ def _register_attribute(strategy, mapper, useobject,
backref,
uselist)
)
-
+
for m in mapper.self_and_descendants:
if prop is m._props.get(prop.key):
-
+
desc = attributes.register_attribute_impl(
m.class_,
prop.key,
@@ -85,16 +85,16 @@ def _register_attribute(strategy, mapper, useobject,
doc=prop.doc,
**kw
)
-
+
for hook in listen_hooks:
hook(desc, prop)
class UninstrumentedColumnLoader(LoaderStrategy):
"""Represent the a non-instrumented MapperProperty.
-
+
The polymorphic_on argument of mapper() often results in this,
if the argument is against the with_polymorphic selectable.
-
+
"""
def init(self):
self.columns = self.parent_property.columns
@@ -111,24 +111,24 @@ class UninstrumentedColumnLoader(LoaderStrategy):
class ColumnLoader(LoaderStrategy):
"""Strategize the loading of a plain column-based MapperProperty."""
-
+
def init(self):
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')
-
+
def setup_query(self, context, entity, path, reduced_path, adapter,
column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
column_collection.append(c)
-
+
def init_class_attribute(self, mapper):
self.is_class_level = True
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
active_history = self.parent_property.active_history or \
- self.columns[0].primary_key
+ self.columns[0].primary_key
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@@ -136,7 +136,7 @@ class ColumnLoader(LoaderStrategy):
mutable_scalars=self.columns[0].type.is_mutable(),
active_history = active_history
)
-
+
def create_row_processor(self, selectcontext, path, reduced_path, mapper, row, adapter):
key = self.key
# look through list of columns represented here
@@ -189,7 +189,7 @@ class DeferredColumnLoader(LoaderStrategy):
def init_class_attribute(self, mapper):
self.is_class_level = True
-
+
_register_attribute(self, mapper, useobject=False,
compare_function=self.columns[0].type.compare_values,
copy_function=self.columns[0].type.copy_value,
@@ -207,17 +207,17 @@ class DeferredColumnLoader(LoaderStrategy):
self.parent_property._get_strategy(ColumnLoader).\
setup_query(context, entity,
path, reduced_path, adapter, **kwargs)
-
+
def _load_for_state(self, state, passive):
if not state.key:
return attributes.ATTR_EMPTY
if passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
-
+
prop = self.parent_property
localparent = state.manager.mapper
-
+
if self.group:
toload = [
p.key for p in
@@ -244,7 +244,7 @@ class DeferredColumnLoader(LoaderStrategy):
query._load_on_ident(state.key,
only_load_props=group, refresh_state=state)
return attributes.ATTR_WAS_SET
-
+
log.class_logger(DeferredColumnLoader)
class LoadDeferredColumns(object):
@@ -253,7 +253,7 @@ class LoadDeferredColumns(object):
def __init__(self, state, key):
self.state = state
self.key = key
-
+
def __call__(self, passive=False):
state, key = self.state, self.key
@@ -264,7 +264,7 @@ class LoadDeferredColumns(object):
class DeferredOption(StrategizedOption):
propagate_to_loaders = True
-
+
def __init__(self, key, defer=False):
super(DeferredOption, self).__init__(key)
self.defer = defer
@@ -280,7 +280,7 @@ class UndeferGroupOption(MapperOption):
def __init__(self, group):
self.group = group
-
+
def process_query(self, query):
query._attributes[('undefer', self.group)] = True
@@ -311,7 +311,7 @@ class NoLoader(AbstractRelationshipLoader):
return new_execute, None, None
log.class_logger(NoLoader)
-
+
class LazyLoader(AbstractRelationshipLoader):
"""Strategize a relationship() that loads when first accessed."""
@@ -320,7 +320,7 @@ class LazyLoader(AbstractRelationshipLoader):
self.__lazywhere, \
self.__bind_to_col, \
self._equated_columns = self._create_lazy_clause(self.parent_property)
-
+
self.logger.info("%s lazy loading clause %s", self, self.__lazywhere)
# determine if our "lazywhere" clause is the same as the mapper's
@@ -332,19 +332,19 @@ class LazyLoader(AbstractRelationshipLoader):
use_proxies=True,
equivalents=self.mapper._equivalent_columns
)
-
+
if self.use_get:
for col in self._equated_columns.keys():
if col in self.mapper._equivalent_columns:
for c in self.mapper._equivalent_columns[col]:
self._equated_columns[c] = self._equated_columns[col]
-
+
self.logger.info("%s will use query.get() to "
"optimize instance loads" % self)
def init_class_attribute(self, mapper):
self.is_class_level = True
-
+
# MANYTOONE currently only needs the
# "old" value for delete-orphan
# cascades. the required _SingleParentValidator
@@ -372,7 +372,7 @@ class LazyLoader(AbstractRelationshipLoader):
return self._lazy_none_clause(
reverse_direction,
adapt_source=adapt_source)
-
+
if not reverse_direction:
criterion, bind_to_col, rev = \
self.__lazywhere, \
@@ -391,10 +391,10 @@ class LazyLoader(AbstractRelationshipLoader):
o = state.obj() # strong ref
dict_ = attributes.instance_dict(o)
-
+
# use the "committed state" only if we're in a flush
# for this state.
-
+
sess = sessionlib._state_session(state)
if sess is not None and sess._flushing:
def visit_bindparam(bindparam):
@@ -407,8 +407,8 @@ class LazyLoader(AbstractRelationshipLoader):
if bindparam.key in bind_to_col:
bindparam.callable = lambda: mapper._get_state_attr_by_column(
state, dict_, bind_to_col[bindparam.key])
-
-
+
+
if self.parent_property.secondary is not None and alias_secondary:
criterion = sql_util.ClauseAdapter(
self.parent_property.secondary.alias()).\
@@ -420,7 +420,7 @@ class LazyLoader(AbstractRelationshipLoader):
if adapt_source:
criterion = adapt_source(criterion)
return criterion
-
+
def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
if not reverse_direction:
criterion, bind_to_col, rev = \
@@ -438,18 +438,18 @@ class LazyLoader(AbstractRelationshipLoader):
if adapt_source:
criterion = adapt_source(criterion)
return criterion
-
+
def _load_for_state(self, state, passive):
if not state.key and \
(not self.parent_property.load_on_pending or not state.session_id):
return attributes.ATTR_EMPTY
-
+
instance_mapper = state.manager.mapper
prop = self.parent_property
key = self.key
prop_mapper = self.mapper
pending = not state.key
-
+
if (
passive is attributes.PASSIVE_NO_FETCH and
not self.use_get
@@ -458,7 +458,7 @@ class LazyLoader(AbstractRelationshipLoader):
pending
):
return attributes.PASSIVE_NO_RESULT
-
+
session = sessionlib._state_session(state)
if not session:
raise orm_exc.DetachedInstanceError(
@@ -474,7 +474,7 @@ class LazyLoader(AbstractRelationshipLoader):
get_attr = instance_mapper._get_committed_state_attr_by_column
else:
get_attr = instance_mapper._get_state_attr_by_column
-
+
dict_ = state.dict
ident = [
get_attr(
@@ -486,23 +486,23 @@ class LazyLoader(AbstractRelationshipLoader):
]
if attributes.PASSIVE_NO_RESULT in ident:
return attributes.PASSIVE_NO_RESULT
-
+
if _none_set.issuperset(ident):
return None
-
+
ident_key = prop_mapper.identity_key_from_primary_key(ident)
instance = Query._get_from_identity(session, ident_key, passive)
if instance is not None:
return instance
elif passive is attributes.PASSIVE_NO_FETCH:
return attributes.PASSIVE_NO_RESULT
-
+
q = session.query(prop_mapper)._adapt_all_clauses()
-
+
# don't autoflush on pending
if pending:
q = q.autoflush(False)
-
+
if state.load_path:
q = q._with_current_path(state.load_path + (key,))
@@ -524,12 +524,12 @@ class LazyLoader(AbstractRelationshipLoader):
q = q.options(EagerLazyOption((rev.key,), lazy='select'))
lazy_clause = self.lazy_clause(state)
-
+
if pending:
bind_values = sql_util.bind_values(lazy_clause)
if None in bind_values:
return None
-
+
q = q.filter(lazy_clause)
result = q.all()
@@ -543,7 +543,7 @@ class LazyLoader(AbstractRelationshipLoader):
"Multiple rows returned with "
"uselist=False for lazily-loaded attribute '%s' "
% prop)
-
+
return result[0]
else:
return None
@@ -568,14 +568,14 @@ class LazyLoader(AbstractRelationshipLoader):
# this class - reset its
# per-instance attribute state, so that the class-level
# lazy loader is
- # executed when next referenced on this instance.
+ # executed when next referenced on this instance.
# this is needed in
# populate_existing() types of scenarios to reset
# any existing state.
state.reset(dict_, key)
return new_execute, None, None
-
+
@classmethod
def _create_lazy_clause(cls, prop, reverse_direction=False):
binds = util.column_dict()
@@ -592,7 +592,7 @@ class LazyLoader(AbstractRelationshipLoader):
_list = lookup.setdefault(l, [])
_list.append((l, r))
equated_columns[r] = l
-
+
def col_to_bind(col):
if col in lookup:
for tobind, equated in lookup[col]:
@@ -602,48 +602,48 @@ class LazyLoader(AbstractRelationshipLoader):
binds[col] = sql.bindparam(None, None, type_=col.type)
return binds[col]
return None
-
+
lazywhere = prop.primaryjoin
if prop.secondaryjoin is None or not reverse_direction:
lazywhere = visitors.replacement_traverse(
lazywhere, {}, col_to_bind)
-
+
if prop.secondaryjoin is not None:
secondaryjoin = prop.secondaryjoin
if reverse_direction:
secondaryjoin = visitors.replacement_traverse(
secondaryjoin, {}, col_to_bind)
lazywhere = sql.and_(lazywhere, secondaryjoin)
-
+
bind_to_col = dict((binds[col].key, col) for col in binds)
-
+
return lazywhere, bind_to_col, equated_columns
-
+
log.class_logger(LazyLoader)
class LoadLazyAttribute(object):
"""serializable loader object used by LazyLoader"""
-
+
def __init__(self, state, key):
self.state = state
self.key = key
-
+
def __call__(self, passive=False):
state, key = self.state, self.key
instance_mapper = state.manager.mapper
prop = instance_mapper._props[key]
strategy = prop._strategies[LazyLoader]
-
+
return strategy._load_for_state(state, passive)
-
+
class ImmediateLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy(LazyLoader).\
init_class_attribute(mapper)
-
+
def setup_query(self, context, entity,
path, reduced_path, adapter, column_collection=None,
parentmapper=None, **kwargs):
@@ -652,29 +652,29 @@ class ImmediateLoader(AbstractRelationshipLoader):
def create_row_processor(self, context, path, reduced_path, mapper, row, adapter):
def execute(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
-
+
return None, None, execute
-
+
class SubqueryLoader(AbstractRelationshipLoader):
def init(self):
super(SubqueryLoader, self).init()
self.join_depth = self.parent_property.join_depth
-
+
def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy(LazyLoader).\
init_class_attribute(mapper)
-
+
def setup_query(self, context, entity,
path, reduced_path, adapter, column_collection=None,
parentmapper=None, **kwargs):
if not context.query._enable_eagerloads:
return
-
+
path = path + (self.key, )
reduced_path = reduced_path + (self.key, )
-
+
# build up a path indicating the path from the leftmost
# entity to the thing we're subquery loading.
subq_path = context.attributes.get(('subquery_path', None), ())
@@ -689,13 +689,13 @@ class SubqueryLoader(AbstractRelationshipLoader):
else:
if self.mapper.base_mapper in interfaces._reduce_path(subq_path):
return
-
+
orig_query = context.attributes.get(
("orig_query", SubqueryLoader),
context.query)
subq_mapper = mapperutil._class_to_mapper(subq_path[0])
-
+
# determine attributes of the leftmost mapper
if self.parent.isa(subq_mapper) and self.key==subq_path[1]:
leftmost_mapper, leftmost_prop = \
@@ -705,7 +705,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
subq_mapper, \
subq_mapper._props[subq_path[1]]
leftmost_cols, remote_cols = self._local_remote_columns(leftmost_prop)
-
+
leftmost_attr = [
leftmost_mapper._columntoproperty[c].class_attribute
for c in leftmost_cols
@@ -728,11 +728,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
# which we'll join onto.
embed_q = q.with_labels().subquery()
left_alias = mapperutil.AliasedClass(leftmost_mapper, embed_q)
-
+
# q becomes a new query. basically doing a longhand
# "from_self()". (from_self() itself not quite industrial
# strength enough for all contingencies...but very close)
-
+
q = q.session.query(self.mapper)
q._attributes = {
("orig_query", SubqueryLoader): orig_query,
@@ -760,25 +760,25 @@ class SubqueryLoader(AbstractRelationshipLoader):
]
q = q.order_by(*local_attr)
q = q.add_columns(*local_attr)
-
+
for i, (mapper, key) in enumerate(to_join):
-
+
# we need to use query.join() as opposed to
# orm.join() here because of the
# rich behavior it brings when dealing with
# "with_polymorphic" mappers. "aliased"
# and "from_joinpoint" take care of most of
# the chaining and aliasing for us.
-
+
first = i == 0
middle = i < len(to_join) - 1
second_to_last = i == len(to_join) - 2
-
+
if first:
attr = getattr(left_alias, key)
else:
attr = key
-
+
if second_to_last:
q = q.join(parent_alias, attr, from_joinpoint=True)
else:
@@ -804,11 +804,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
)
)
q = q.order_by(*eager_order_by)
-
+
# add new query to attributes to be picked up
# by create_row_processor
context.attributes[('subquery', reduced_path)] = q
-
+
def _local_remote_columns(self, prop):
if prop.secondary is None:
return zip(*prop.local_remote_pairs)
@@ -819,7 +819,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
p[0] for p in prop.
secondary_synchronize_pairs
]
-
+
def create_row_processor(self, context, path, reduced_path,
mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
@@ -827,30 +827,30 @@ class SubqueryLoader(AbstractRelationshipLoader):
"'%s' does not support object "
"population - eager loading cannot be applied." %
self)
-
+
reduced_path = reduced_path + (self.key,)
-
+
if ('subquery', reduced_path) not in context.attributes:
return None, None, None
-
+
local_cols, remote_cols = self._local_remote_columns(self.parent_property)
remote_attr = [
self.mapper._columntoproperty[c].key
for c in remote_cols]
-
+
q = context.attributes[('subquery', reduced_path)]
-
+
collections = dict(
(k, [v[0] for v in v])
for k, v in itertools.groupby(
q,
lambda x:x[1:]
))
-
+
if adapter:
local_cols = [adapter.columns[c] for c in local_cols]
-
+
if self.uselist:
def execute(state, dict_, row):
collection = collections.get(
@@ -870,11 +870,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
"Multiple rows returned with "
"uselist=False for eagerly-loaded attribute '%s' "
% self)
-
+
scalar = collection[0]
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
-
+
return execute, None, None
log.class_logger(SubqueryLoader)
@@ -882,7 +882,7 @@ log.class_logger(SubqueryLoader)
class EagerLoader(AbstractRelationshipLoader):
"""Strategize a relationship() that loads within the process
of the parent object being selected."""
-
+
def init(self):
super(EagerLoader, self).init()
self.join_depth = self.parent_property.join_depth
@@ -890,27 +890,27 @@ class EagerLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy(LazyLoader).init_class_attribute(mapper)
-
+
def setup_query(self, context, entity, path, reduced_path, adapter, \
column_collection=None, parentmapper=None,
allow_innerjoin=True,
**kwargs):
"""Add a left outer join to the statement thats being constructed."""
-
+
if not context.query._enable_eagerloads:
return
-
+
path = path + (self.key,)
reduced_path = reduced_path + (self.key,)
-
+
# check for user-defined eager alias
if ("user_defined_eager_row_processor", reduced_path) in\
context.attributes:
clauses = context.attributes[
("user_defined_eager_row_processor",
reduced_path)]
-
+
adapter = entity._get_entity_clauses(context.query, context)
if adapter and clauses:
context.attributes[
@@ -920,9 +920,9 @@ class EagerLoader(AbstractRelationshipLoader):
context.attributes[
("user_defined_eager_row_processor",
reduced_path)] = clauses = adapter
-
+
add_to_collection = context.primary_columns
-
+
else:
# check for join_depth or basic recursion,
# if the current path was not explicitly stated as
@@ -950,7 +950,7 @@ class EagerLoader(AbstractRelationshipLoader):
# if this is an outer join, all eager joins from
# here must also be outer joins
allow_innerjoin = False
-
+
context.create_eager_joins.append(
(self._create_eager_join, context,
entity, path, adapter,
@@ -961,10 +961,10 @@ class EagerLoader(AbstractRelationshipLoader):
context.attributes[
("eager_row_processor", reduced_path)
] = clauses
-
+
path += (self.mapper,)
reduced_path += (self.mapper.base_mapper,)
-
+
for value in self.mapper._polymorphic_properties:
value.setup(
context,
@@ -975,22 +975,22 @@ class EagerLoader(AbstractRelationshipLoader):
parentmapper=self.mapper,
column_collection=add_to_collection,
allow_innerjoin=allow_innerjoin)
-
+
def _create_eager_join(self, context, entity,
path, adapter, parentmapper,
clauses, innerjoin):
-
+
if parentmapper is None:
localparent = entity.mapper
else:
localparent = parentmapper
-
+
# whether or not the Query will wrap the selectable in a subquery,
# and then attach eager load joins to that (i.e., in the case of
# LIMIT/OFFSET etc.)
should_nest_selectable = context.multi_row_eager_loaders and \
context.query._should_nest_selectable
-
+
entity_key = None
if entity not in context.eager_joins and \
not should_nest_selectable and \
@@ -1024,7 +1024,7 @@ class EagerLoader(AbstractRelationshipLoader):
),
self.key, self.parent_property
)
-
+
if onclause is self.parent_property:
# TODO: this is a temporary hack to
# account for polymorphic eager loads where
@@ -1051,7 +1051,7 @@ class EagerLoader(AbstractRelationshipLoader):
# ensure all the parent cols in the primaryjoin are actually
# in the
# columns clause (i.e. are not deferred), so that aliasing applied
- # by the Query propagates those columns outward.
+ # by the Query propagates those columns outward.
# This has the effect
# of "undefering" those columns.
for col in sql_util.find_columns(
@@ -1060,7 +1060,7 @@ class EagerLoader(AbstractRelationshipLoader):
if adapter:
col = adapter.columns[col]
context.primary_columns.append(col)
-
+
if self.parent_property.order_by:
context.eager_order_by += \
eagerjoin._target_adapter.\
@@ -1070,7 +1070,7 @@ class EagerLoader(AbstractRelationshipLoader):
)
)
-
+
def _create_eager_adapter(self, context, row, adapter, path, reduced_path):
if ("user_defined_eager_row_processor", reduced_path) in \
context.attributes:
@@ -1107,13 +1107,13 @@ class EagerLoader(AbstractRelationshipLoader):
our_path = path + (self.key,)
our_reduced_path = reduced_path + (self.key,)
-
+
eager_adapter = self._create_eager_adapter(
context,
row,
adapter, our_path,
our_reduced_path)
-
+
if eager_adapter is not False:
key = self.key
_instance = self.mapper._instance_processor(
@@ -1121,7 +1121,7 @@ class EagerLoader(AbstractRelationshipLoader):
our_path + (self.mapper,),
our_reduced_path + (self.mapper.base_mapper,),
eager_adapter)
-
+
if not self.uselist:
def new_execute(state, dict_, row):
# set a scalar object instance directly on the parent
@@ -1184,11 +1184,11 @@ class EagerLazyOption(StrategizedOption):
self.chained = chained
self.propagate_to_loaders = propagate_to_loaders
self.strategy_cls = factory(lazy)
-
+
@property
def is_eager(self):
return self.lazy in (False, 'joined', 'subquery')
-
+
@property
def is_chained(self):
return self.is_eager and self.chained
@@ -1209,16 +1209,16 @@ def factory(identifier):
return ImmediateLoader
else:
return LazyLoader
-
-
-
+
+
+
class EagerJoinOption(PropertyOption):
-
+
def __init__(self, key, innerjoin, chained=False):
super(EagerJoinOption, self).__init__(key)
self.innerjoin = innerjoin
self.chained = chained
-
+
def is_chained(self):
return self.chained
@@ -1228,9 +1228,9 @@ class EagerJoinOption(PropertyOption):
query._attributes[("eager_join_type", path)] = self.innerjoin
else:
query._attributes[("eager_join_type", paths[-1])] = self.innerjoin
-
+
class LoadEagerFromAliasOption(PropertyOption):
-
+
def __init__(self, key, alias=None):
super(LoadEagerFromAliasOption, self).__init__(key)
if alias is not None:
@@ -1270,13 +1270,13 @@ def single_parent_validator(desc, prop):
(mapperutil.instance_str(value), state.class_, prop)
)
return value
-
+
def append(state, value, initiator):
return _do_check(state, value, None, initiator)
def set_(state, value, oldvalue, initiator):
return _do_check(state, value, oldvalue, initiator)
-
+
event.listen(desc, 'append', append, raw=True, retval=True, active_history=True)
event.listen(desc, 'set', set_, raw=True, retval=True, active_history=True)
-
+
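The loader strategies above correspond to the public options roughly as follows; ``User.addresses`` is a hypothetical relationship()::

    from sqlalchemy.orm import joinedload, subqueryload

    # EagerLoader: LEFT OUTER JOIN within the parent statement
    q = session.query(User).options(joinedload(User.addresses))

    # EagerJoinOption: request an INNER JOIN instead
    q = session.query(User).options(
        joinedload(User.addresses, innerjoin=True))

    # SubqueryLoader: a second SELECT that wraps the original query
    q = session.query(User).options(subqueryload(User.addresses))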
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index 30d56d168..bc250b226 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -14,7 +14,7 @@ def populate(source, source_mapper, dest, dest_mapper,
synchronize_pairs, uowcommit, flag_cascaded_pks):
source_dict = source.dict
dest_dict = dest.dict
-
+
for l, r in synchronize_pairs:
try:
# inline of source_mapper._get_state_attr_by_column
@@ -29,7 +29,7 @@ def populate(source, source_mapper, dest, dest_mapper,
dest.manager[prop.key].impl.set(dest, dest_dict, value, None)
except exc.UnmappedColumnError:
_raise_col_to_prop(True, source_mapper, l, dest_mapper, r)
-
+
# technically the "r.primary_key" check isn't
# needed here, but we check for this condition to limit
# how often this logic is invoked for memory/performance
@@ -75,7 +75,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs):
def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
"""return true if the source object has changes from an old to a
new value on the given synchronize pairs
-
+
"""
for l, r in synchronize_pairs:
try:
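``populate()`` is what copies primary key values onto foreign key columns during flush; the observable effect, assuming hypothetical ``Parent``/``Child`` classes linked by a foreign key relationship()::

    parent = Parent()
    parent.children.append(Child())
    session.add(parent)

    # the parent row is INSERTed first; populate() then copies
    # parent.id into child.parent_id before the child INSERT
    session.flush()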
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index e6b1c0483..f1c5fcfc6 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -21,10 +21,10 @@ session = util.importlater("sqlalchemy.orm", "session")
def track_cascade_events(descriptor, prop):
"""Establish event listeners on object attributes which handle
cascade-on-set/append.
-
+
"""
key = prop.key
-
+
def append(state, item, initiator):
# process "save_update" cascade rules for when
# an instance is appended to the list of another instance
@@ -38,7 +38,7 @@ def track_cascade_events(descriptor, prop):
not sess._contains_state(item_state):
sess._save_or_update_state(item_state)
return item
-
+
def remove(state, item, initiator):
sess = session._state_session(state)
if sess:
@@ -65,15 +65,15 @@ def track_cascade_events(descriptor, prop):
(prop.cascade_backrefs or key == initiator.key) and \
not sess._contains_state(newvalue_state):
sess._save_or_update_state(newvalue_state)
-
+
if oldvalue is not None and prop.cascade.delete_orphan:
oldvalue_state = attributes.instance_state(oldvalue)
-
+
if oldvalue_state in sess._new and \
prop.mapper._is_orphan(oldvalue_state):
sess.expunge(oldvalue)
return newvalue
-
+
event.listen(descriptor, 'append', append, raw=True, retval=True)
event.listen(descriptor, 'remove', remove, raw=True, retval=True)
event.listen(descriptor, 'set', set_, raw=True, retval=True)
@@ -86,45 +86,45 @@ class UOWTransaction(object):
# dictionary used by external actors to
# store arbitrary state information.
self.attributes = {}
-
+
# dictionary of mappers to sets of
# DependencyProcessors, which are also
# set to be part of the sorted flush actions,
# which have that mapper as a parent.
self.deps = util.defaultdict(set)
-
+
# dictionary of mappers to sets of InstanceState
# items pending for flush which have that mapper
# as a parent.
self.mappers = util.defaultdict(set)
-
+
# a dictionary of Preprocess objects, which gather
# additional states impacted by the flush
# and determine if a flush action is needed
self.presort_actions = {}
-
+
# dictionary of PostSortRec objects, each
# one issues work during the flush within
# a certain ordering.
self.postsort_actions = {}
-
+
# a set of 2-tuples, each containing two
# PostSortRec objects where the second
# is dependent on the first being executed
# first
self.dependencies = set()
-
+
# dictionary of InstanceState-> (isdelete, listonly)
# tuples, indicating if this state is to be deleted
# or insert/updated, or just refreshed
self.states = {}
-
+
# tracks InstanceStates which will be receiving
# a "post update" call. Keys are mappers,
# values are a set of states and a set of the
# columns which should be included in the update.
self.post_update_states = util.defaultdict(lambda: (set(), set()))
-
+
@property
def has_work(self):
return bool(self.states)
@@ -132,23 +132,23 @@ class UOWTransaction(object):
def is_deleted(self, state):
"""return true if the given state is marked as deleted
within this uowtransaction."""
-
+
return state in self.states and self.states[state][0]
-
+
def memo(self, key, callable_):
if key in self.attributes:
return self.attributes[key]
else:
self.attributes[key] = ret = callable_()
return ret
-
+
def remove_state_actions(self, state):
"""remove pending actions for a state from the uowtransaction."""
-
+
isdelete = self.states[state][0]
-
+
self.states[state] = (isdelete, True)
-
+
def get_attribute_history(self, state, key, passive=True):
"""facade to attributes.get_state_history(), including caching of results."""
@@ -157,7 +157,7 @@ class UOWTransaction(object):
# cache the objects, not the states; the strong reference here
# prevents newly loaded objects from being dereferenced during the
# flush process
-
+
if hashkey in self.attributes:
history, state_history, cached_passive = self.attributes[hashkey]
# if the cached lookup was "passive" and now
@@ -180,17 +180,17 @@ class UOWTransaction(object):
else:
state_history = history
self.attributes[hashkey] = (history, state_history, passive)
-
+
return state_history
-
+
def has_dep(self, processor):
return (processor, True) in self.presort_actions
-
+
def register_preprocessor(self, processor, fromparent):
key = (processor, fromparent)
if key not in self.presort_actions:
self.presort_actions[key] = Preprocess(processor, fromparent)
-
+
def register_object(self, state, isdelete=False,
listonly=False, cancel_delete=False,
operation=None, prop=None):
@@ -203,56 +203,56 @@ class UOWTransaction(object):
if state not in self.states:
mapper = state.manager.mapper
-
+
if mapper not in self.mappers:
mapper._per_mapper_flush_actions(self)
-
+
self.mappers[mapper].add(state)
self.states[state] = (isdelete, listonly)
else:
if not listonly and (isdelete or cancel_delete):
self.states[state] = (isdelete, False)
return True
-
+
def issue_post_update(self, state, post_update_cols):
mapper = state.manager.mapper.base_mapper
states, cols = self.post_update_states[mapper]
states.add(state)
cols.update(post_update_cols)
-
+
@util.memoized_property
def _mapper_for_dep(self):
"""return a dynamic mapping of (Mapper, DependencyProcessor) to
True or False, indicating if the DependencyProcessor operates
on objects of that Mapper.
-
+
The result is stored in the dictionary persistently once
calculated.
-
+
"""
return util.PopulateDict(
lambda tup:tup[0]._props.get(tup[1].key) is tup[1].prop
)
-
+
def filter_states_for_dep(self, dep, states):
"""Filter the given list of InstanceStates to those relevant to the
given DependencyProcessor.
-
+
"""
mapper_for_dep = self._mapper_for_dep
return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]
-
+
def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
checktup = (isdelete, listonly)
for mapper in mapper.base_mapper.self_and_descendants:
for state in self.mappers[mapper]:
if self.states[state] == checktup:
yield state
-
+
def _generate_actions(self):
"""Generate the full, unsorted collection of PostSortRecs as
well as dependency pairs for this UOWTransaction.
-
+
"""
# execute presort_actions, until all states
# have been processed. a presort_action might
@@ -269,7 +269,7 @@ class UOWTransaction(object):
self.cycles = cycles = topological.find_cycles(
self.dependencies,
self.postsort_actions.values())
-
+
if cycles:
# if yes, break the per-mapper actions into
# per-state actions
@@ -294,7 +294,7 @@ class UOWTransaction(object):
self.dependencies.remove(edge)
for dep in convert[edge[1]]:
self.dependencies.add((edge[0], dep))
-
+
return set([a for a in self.postsort_actions.values()
if not a.disabled
]
@@ -302,13 +302,13 @@ class UOWTransaction(object):
def execute(self):
postsort_actions = self._generate_actions()
-
+
#sort = topological.sort(self.dependencies, postsort_actions)
#print "--------------"
#print self.dependencies
#print list(sort)
#print "COUNT OF POSTSORT ACTIONS", len(postsort_actions)
-
+
# execute
if self.cycles:
for set_ in topological.sort_as_subsets(
@@ -322,14 +322,14 @@ class UOWTransaction(object):
self.dependencies,
postsort_actions):
rec.execute(self)
-
-
+
+
def finalize_flush_changes(self):
"""mark processed objects as clean / deleted after a successful flush().
this method is called within the flush() method after the
execute() method has succeeded and the transaction has been committed.
-
+
"""
for state, (isdelete, listonly) in self.states.iteritems():
if isdelete:
@@ -348,18 +348,18 @@ class IterateMappersMixin(object):
)
else:
return self.dependency_processor.mapper.self_and_descendants
-
+
class Preprocess(IterateMappersMixin):
def __init__(self, dependency_processor, fromparent):
self.dependency_processor = dependency_processor
self.fromparent = fromparent
self.processed = set()
self.setup_flush_actions = False
-
+
def execute(self, uow):
delete_states = set()
save_states = set()
-
+
for mapper in self._mappers(uow):
for state in uow.mappers[mapper].difference(self.processed):
(isdelete, listonly) = uow.states[state]
@@ -375,7 +375,7 @@ class Preprocess(IterateMappersMixin):
if save_states:
self.dependency_processor.presort_saves(uow, save_states)
self.processed.update(save_states)
-
+
if (delete_states or save_states):
if not self.setup_flush_actions and (
self.dependency_processor.\
@@ -391,7 +391,7 @@ class Preprocess(IterateMappersMixin):
class PostSortRec(object):
disabled = False
-
+
def __new__(cls, uow, *args):
key = (cls, ) + args
if key in uow.postsort_actions:
@@ -401,10 +401,10 @@ class PostSortRec(object):
ret = \
object.__new__(cls)
return ret
-
+
def execute_aggregate(self, uow, recs):
self.execute(uow)
-
+
def __repr__(self):
return "%s(%s)" % (
self.__class__.__name__,
@@ -417,7 +417,7 @@ class ProcessAll(IterateMappersMixin, PostSortRec):
self.delete = delete
self.fromparent = fromparent
uow.deps[dependency_processor.parent.base_mapper].add(dependency_processor)
-
+
def execute(self, uow):
states = self._elements(uow)
if self.delete:
@@ -454,20 +454,20 @@ class IssuePostUpdate(PostSortRec):
def execute(self, uow):
states, cols = uow.post_update_states[self.mapper]
states = [s for s in states if uow.states[s][0] == self.isdelete]
-
+
self.mapper._post_update(states, uow, cols)
class SaveUpdateAll(PostSortRec):
def __init__(self, uow, mapper):
self.mapper = mapper
assert mapper is mapper.base_mapper
-
+
def execute(self, uow):
self.mapper._save_obj(
uow.states_for_mapper_hierarchy(self.mapper, False, False),
uow
)
-
+
def per_state_flush_actions(self, uow):
states = list(uow.states_for_mapper_hierarchy(self.mapper, False, False))
for rec in self.mapper._per_state_flush_actions(
@@ -475,11 +475,11 @@ class SaveUpdateAll(PostSortRec):
states,
False):
yield rec
-
+
for dep in uow.deps[self.mapper]:
states_for_prop = uow.filter_states_for_dep(dep, states)
dep.per_state_flush_actions(uow, states_for_prop, False)
-
+
class DeleteAll(PostSortRec):
def __init__(self, uow, mapper):
self.mapper = mapper
@@ -498,7 +498,7 @@ class DeleteAll(PostSortRec):
states,
True):
yield rec
-
+
for dep in uow.deps[self.mapper]:
states_for_prop = uow.filter_states_for_dep(dep, states)
dep.per_state_flush_actions(uow, states_for_prop, True)
@@ -531,12 +531,12 @@ class ProcessState(PostSortRec):
mapperutil.state_str(self.state),
self.delete
)
-
+
class SaveUpdateState(PostSortRec):
def __init__(self, uow, state, mapper):
self.state = state
self.mapper = mapper
-
+
def execute_aggregate(self, uow, recs):
cls_ = self.__class__
mapper = self.mapper
@@ -559,7 +559,7 @@ class DeleteState(PostSortRec):
def __init__(self, uow, state, mapper):
self.state = state
self.mapper = mapper
-
+
def execute_aggregate(self, uow, recs):
cls_ = self.__class__
mapper = self.mapper
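
The memo() method shown above is a plain cache-or-compute idiom over the transaction's ``attributes`` dictionary, the same dictionary get_attribute_history() uses for its own caching. A standalone equivalent, for illustration::

    def memo(attributes, key, callable_):
        # return the cached value for key, computing and caching
        # it on first access
        if key in attributes:
            return attributes[key]
        attributes[key] = ret = callable_()
        return ret

    cache = {}
    memo(cache, 'answer', lambda: 42)   # computes and stores 42
    memo(cache, 'answer', lambda: 99)   # cache hit; still returns 42
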
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index a69670c29..7866aab2b 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -28,7 +28,7 @@ class CascadeOptions(dict):
values = set()
else:
values = set(c.strip() for c in arg.split(','))
-
+
for name in ['save-update', 'delete', 'refresh-expire',
'merge', 'expunge']:
boolean = name in values or 'all' in values
@@ -38,7 +38,7 @@ class CascadeOptions(dict):
self.delete_orphan = "delete-orphan" in values
if self.delete_orphan:
self['delete-orphan'] = True
-
+
if self.delete_orphan and not self.delete:
util.warn("The 'delete-orphan' cascade option requires "
"'delete'. This will raise an error in 0.6.")
@@ -61,10 +61,10 @@ def _validator_events(desc, key, validator):
def set_(state, value, oldvalue, initiator):
return validator(state.obj(), key, value)
-
+
event.listen(desc, 'append', append, raw=True, retval=True)
event.listen(desc, 'set', set_, raw=True, retval=True)
-
+
def polymorphic_union(table_map, typecolname, aliasname='p_union'):
"""Create a ``UNION`` statement used by a polymorphic mapper.
@@ -197,8 +197,8 @@ class AliasedClass(object):
The ORM equivalent of a :func:`sqlalchemy.sql.expression.alias`
construct, this object mimics the mapped class using a
__getattr__ scheme and maintains a reference to a
- real :class:`~sqlalchemy.sql.expression.Alias` object.
-
+ real :class:`~sqlalchemy.sql.expression.Alias` object.
+
Usage is via the :class:`~sqlalchemy.orm.aliased()` synonym::
# find all pairs of users with the same name
@@ -264,7 +264,7 @@ class AliasedClass(object):
break
else:
raise AttributeError(key)
-
+
if isinstance(attr, attributes.QueryableAttribute):
return self.__adapt_prop(attr, key)
elif hasattr(attr, 'func_code'):
@@ -391,19 +391,19 @@ def with_parent(instance, prop):
"""Create filtering criterion that relates this query's primary entity
to the given related instance, using established :func:`.relationship()`
configuration.
-
+
The SQL rendered is the same as that rendered when a lazy loader
would fire off from the given parent on that attribute, meaning
that the appropriate state is taken from the parent object in
Python without the need to render joins to the parent table
in the rendered statement.
-
+
As of 0.6.4, this method accepts parent instances in all
persistence states, including transient, persistent, and detached.
Only the requisite primary key/foreign key attributes need to
be populated. Previous versions didn't work with transient
instances.
-
+
:param instance:
An instance which has some :func:`.relationship`.
@@ -411,7 +411,7 @@ def with_parent(instance, prop):
String property name, or class-bound attribute, which indicates
what relationship from the instance should be used to reconcile the
parent/child relationship.
-
+
"""
if isinstance(prop, basestring):
mapper = object_mapper(instance)
@@ -440,24 +440,24 @@ def _entity_info(entity, compile=True):
if isinstance(entity, mapperlib.Mapper):
mapper = entity
-
+
elif isinstance(entity, type):
class_manager = attributes.manager_of_class(entity)
-
+
if class_manager is None:
return None, entity, False
-
+
mapper = class_manager.mapper
else:
return None, entity, False
-
+
if compile and mapperlib.module._new_mappers:
mapperlib.configure_mappers()
return mapper, mapper._with_polymorphic_selectable, False
def _entity_descriptor(entity, key):
"""Return a class attribute given an entity and string name.
-
+
May return :class:`.InstrumentedAttribute` or user-defined
attribute.
@@ -516,7 +516,7 @@ def class_mapper(class_, compile=True):
Raises UnmappedClassError if no mapping is configured.
"""
-
+
try:
class_manager = attributes.manager_of_class(class_)
mapper = class_manager.mapper
@@ -542,7 +542,7 @@ def _class_to_mapper(class_or_mapper, compile=True):
mapper = class_or_mapper
else:
raise exc.UnmappedClassError(class_or_mapper)
-
+
if compile and mapperlib.module._new_mappers:
mapperlib.configure_mappers()
return mapper
@@ -581,7 +581,7 @@ def state_class_str(state):
return "None"
else:
return '<%s>' % (state.class_.__name__, )
-
+
def attribute_str(instance, attribute):
return instance_str(instance) + "." + attribute
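
The self-join usage referenced in the AliasedClass docstring above typically looks like the following; this is a sketch with a hypothetical ``User`` mapping, not code from this commit::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import aliased, sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    ualias = aliased(User)

    # find all pairs of users with the same name
    pairs = session.query(User, ualias).\
                filter(User.name == ualias.name).\
                filter(User.id < ualias.id).all()
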
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 7c88d663a..23a4c6946 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -61,7 +61,7 @@ class Pool(log.Identified):
"""Abstract base class for connection pools."""
_no_finalize = False
-
+
def __init__(self,
creator, recycle=-1, echo=None,
use_threadlocal=False,
@@ -106,13 +106,13 @@ class Pool(log.Identified):
connections returned to the pool. This is typically a
ROLLBACK to release locks and transaction resources.
Disable at your own peril. Defaults to True.
-
+
:param events: a list of 2-tuples, each of the form
``(callable, target)`` which will be passed to event.listen()
upon construction. Provided here so that event listeners
can be assigned via ``create_engine`` before dialect-level
listeners are applied.
-
+
:param listeners: Deprecated. A list of
:class:`~sqlalchemy.interfaces.PoolListener`-like objects or
dictionaries of callables that receive events when DB-API
@@ -125,7 +125,7 @@ class Pool(log.Identified):
self.logging_name = self._orig_logging_name = logging_name
else:
self._orig_logging_name = None
-
+
log.instance_logger(self, echoflag=echo)
self._threadconns = threading.local()
self._creator = creator
@@ -146,42 +146,42 @@ class Pool(log.Identified):
self.add_listener(l)
dispatch = event.dispatcher(events.PoolEvents)
-
+
@util.deprecated(2.7, "Pool.add_listener is deprecated. Use event.listen()")
def add_listener(self, listener):
"""Add a :class:`.PoolListener`-like object to this pool.
-
+
``listener`` may be an object that implements some or all of
PoolListener, or a dictionary of callables containing implementations
of some or all of the named methods in PoolListener.
"""
interfaces.PoolListener._adapt_listener(self, listener)
-
+
def unique_connection(self):
"""Produce a DBAPI connection that is not referenced by any
thread-local context.
-
+
This method is different from :meth:`.Pool.connect` only if the
``use_threadlocal`` flag has been set to ``True``.
-
+
"""
-
+
return _ConnectionFairy(self).checkout()
def _create_connection(self):
"""Called by subclasses to create a new ConnectionRecord."""
-
+
return _ConnectionRecord(self)
def recreate(self):
"""Return a new :class:`.Pool`, of the same class as this one
and configured with identical creation arguments.
-
+
        This method is used in conjunction with :meth:`dispose`
to close out an entire :class:`.Pool` and create a new one in
its place.
-
+
"""
raise NotImplementedError()
@@ -193,18 +193,18 @@ class Pool(log.Identified):
        remaining open. It is advised not to reuse the pool once dispose()
is called, and to instead use a new pool constructed by the
recreate() method.
-
+
"""
raise NotImplementedError()
def connect(self):
"""Return a DBAPI connection from the pool.
-
+
The connection is instrumented such that when its
``close()`` method is called, the connection will be returned to
the pool.
-
+
"""
if not self._use_threadlocal:
return _ConnectionFairy(self).checkout()
@@ -222,10 +222,10 @@ class Pool(log.Identified):
def _return_conn(self, record):
"""Given a _ConnectionRecord, return it to the :class:`.Pool`.
-
+
This method is called when an instrumented DBAPI connection
has its ``close()`` method called.
-
+
"""
if self._use_threadlocal:
try:
@@ -236,12 +236,12 @@ class Pool(log.Identified):
def _do_get(self):
"""Implementation for :meth:`get`, supplied by subclasses."""
-
+
raise NotImplementedError()
def _do_return_conn(self, conn):
"""Implementation for :meth:`return_conn`, supplied by subclasses."""
-
+
raise NotImplementedError()
def status(self):
@@ -321,10 +321,10 @@ class _ConnectionRecord(object):
def _finalize_fairy(connection, connection_record, pool, ref, echo):
_refs.discard(connection_record)
-
+
if pool._no_finalize:
return
-
+
if ref is not None and \
connection_record.fairy is not ref:
return
@@ -341,7 +341,7 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo):
connection_record.invalidate(e=e)
if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise
-
+
if connection_record is not None:
connection_record.fairy = None
if echo:
@@ -360,7 +360,7 @@ class _ConnectionFairy(object):
__slots__ = '_pool', '__counter', 'connection', \
'_connection_record', '__weakref__', \
'_detached_info', '_echo'
-
+
def __init__(self, pool):
self._pool = pool
self.__counter = 0
@@ -501,7 +501,7 @@ class SingletonThreadPool(Pool):
at once. Defaults to five.
:class:`.SingletonThreadPool` is used by the SQLite dialect
- automatically when a memory-based database is used.
+ automatically when a memory-based database is used.
See :ref:`sqlite_toplevel`.
"""
@@ -535,9 +535,9 @@ class SingletonThreadPool(Pool):
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
-
+
self._all_conns.clear()
-
+
def _cleanup(self):
while len(self._all_conns) > self.size:
self._all_conns.pop()
@@ -565,10 +565,10 @@ class SingletonThreadPool(Pool):
class QueuePool(Pool):
"""A :class:`Pool` that imposes a limit on the number of open connections.
-
+
:class:`.QueuePool` is the default pooling implementation used for
all :class:`.Engine` objects, unless the SQLite dialect is in use.
-
+
"""
def __init__(self, creator, pool_size=5, max_overflow=10, timeout=30,
@@ -741,9 +741,9 @@ class NullPool(Pool):
Reconnect-related functions such as ``recycle`` and connection
invalidation are not supported by this Pool implementation, since
no connections are held persistently.
-
+
     :class:`.NullPool` is used by the SQLite dialect automatically
- when a file-based database is used (as of SQLAlchemy 0.7).
+ when a file-based database is used (as of SQLAlchemy 0.7).
See :ref:`sqlite_toplevel`.
"""
@@ -788,7 +788,7 @@ class StaticPool(Pool):
@memoized_property
def connection(self):
return _ConnectionRecord(self)
-
+
def status(self):
return "StaticPool"
@@ -826,12 +826,12 @@ class AssertionPool(Pool):
"""
_no_finalize = True
-
+
def __init__(self, *args, **kw):
self._conn = None
self._checked_out = False
Pool.__init__(self, *args, **kw)
-
+
def status(self):
return "AssertionPool"
@@ -851,14 +851,14 @@ class AssertionPool(Pool):
return AssertionPool(self._creator, echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch)
-
+
def _do_get(self):
if self._checked_out:
raise AssertionError("connection is already checked out")
-
+
if not self._conn:
self._conn = self._create_connection()
-
+
self._checked_out = True
return self._conn
@@ -880,7 +880,7 @@ class _DBProxy(object):
a Pool class, defaulting to QueuePool
Other parameters are sent to the Pool object's constructor.
-
+
"""
self.module = module
@@ -888,7 +888,7 @@ class _DBProxy(object):
self.poolclass = poolclass
self.pools = {}
self._create_pool_mutex = threading.Lock()
-
+
def close(self):
for key in self.pools.keys():
del self.pools[key]
@@ -915,7 +915,7 @@ class _DBProxy(object):
return self.pools[key]
finally:
self._create_pool_mutex.release()
-
+
def connect(self, *args, **kw):
"""Activate a connection to the database.
@@ -927,7 +927,7 @@ class _DBProxy(object):
If the pool has no available connections and allows new connections
to be created, a new database connection will be made.
-
+
"""
return self.get_pool(*args, **kw).connect()
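
As a usage sketch for the pool classes above: a QueuePool wraps any zero-argument creator function returning a DBAPI connection, and a checked-out connection is returned to the pool rather than closed when its close() method is called. sqlite3 serves here purely as a convenient stand-in DBAPI::

    import sqlite3
    from sqlalchemy.pool import QueuePool

    def getconn():
        return sqlite3.connect("pooltest.db")

    pool = QueuePool(getconn, pool_size=5, max_overflow=10, timeout=30)

    conn = pool.connect()   # checks a connection out of the pool
    conn.close()            # returns it to the pool for reuse
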
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index 422991ec0..cb5f00bde 100644
--- a/lib/sqlalchemy/processors.py
+++ b/lib/sqlalchemy/processors.py
@@ -46,7 +46,7 @@ try:
return UnicodeResultProcessor(encoding, errors).process
else:
return UnicodeResultProcessor(encoding).process
-
+
def to_decimal_processor_factory(target_class, scale=10):
# Note that the scale argument is not taken into account for integer
# values in the C implementation while it is in the Python one.
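
The pure-Python counterpart of to_decimal_processor_factory referenced in that comment is roughly the following: the incoming float is formatted to the requested scale, and the resulting string is handed to the target class (normally decimal.Decimal)::

    from decimal import Decimal

    def to_decimal_processor_factory(target_class, scale=10):
        fstring = "%%.%df" % scale

        def process(value):
            # None passes through; anything else is stringified
            # at the given scale before conversion
            if value is None:
                return None
            return target_class(fstring % value)
        return process

    proc = to_decimal_processor_factory(Decimal, scale=2)
    proc(1.5)    # Decimal('1.50')
    proc(None)   # None
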
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index 2156bc546..eb03fae98 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -83,12 +83,12 @@ def _get_table_key(name, schema):
else:
return schema + "." + name
-
+
class Table(SchemaItem, expression.TableClause):
"""Represent a table in a database.
-
+
e.g.::
-
+
mytable = Table("mytable", metadata,
Column('mytable_id', Integer, primary_key=True),
Column('value', String(50))
@@ -97,7 +97,7 @@ class Table(SchemaItem, expression.TableClause):
The Table object constructs a unique instance of itself based on its
name within the given MetaData object. Constructor
arguments are as follows:
-
+
:param name: The name of this table as represented in the database.
This property, along with the *schema*, indicates the *singleton
@@ -123,7 +123,7 @@ class Table(SchemaItem, expression.TableClause):
table. Similar to the style of a CREATE TABLE statement, other
:class:`.SchemaItem` constructs may be added here, including
:class:`PrimaryKeyConstraint`, and :class:`ForeignKeyConstraint`.
-
+
:param autoload: Defaults to False: the Columns for this table should
be reflected from the database. Usually there will be no Column
objects in the constructor if this property is set.
@@ -174,7 +174,7 @@ class Table(SchemaItem, expression.TableClause):
:class:`Table` are overwritten.
"""
-
+
__visit_name__ = 'table'
dispatch = event.dispatcher(events.DDLEvents)
@@ -183,12 +183,12 @@ class Table(SchemaItem, expression.TableClause):
if not args:
# python3k pickle seems to call this
return object.__new__(cls)
-
+
try:
name, metadata, args = args[0], args[1], args[2:]
except IndexError:
raise TypeError("Table() takes at least two arguments")
-
+
schema = kw.get('schema', None)
useexisting = kw.pop('useexisting', False)
mustexist = kw.pop('mustexist', False)
@@ -206,7 +206,7 @@ class Table(SchemaItem, expression.TableClause):
if mustexist:
raise exc.InvalidRequestError(
"Table '%s' not defined" % (key))
- table = object.__new__(cls)
+ table = object.__new__(cls)
metadata._add_table(name, schema, table)
try:
table._init(name, metadata, *args, **kw)
@@ -214,12 +214,12 @@ class Table(SchemaItem, expression.TableClause):
except:
metadata._remove_table(name, schema)
raise
-
+
def __init__(self, *args, **kw):
# __init__ is overridden to prevent __new__ from
# calling the superclass constructor.
pass
-
+
def _init(self, name, metadata, *args, **kwargs):
super(Table, self).__init__(name)
self.metadata = metadata
@@ -309,7 +309,7 @@ class Table(SchemaItem, expression.TableClause):
def _init_collections(self):
pass
-
+
def _set_primary_key(self, pk):
if self.primary_key in self.constraints:
self.constraints.remove(self.primary_key)
@@ -350,18 +350,18 @@ class Table(SchemaItem, expression.TableClause):
def add_is_dependent_on(self, table):
"""Add a 'dependency' for this Table.
-
+
        This is another Table object which must be created
        before this one can be, or dropped after this one.
-
+
Usually, dependencies between tables are determined via
ForeignKey objects. However, for other situations that
create dependencies outside of foreign keys (rules, inheriting),
this method can manually establish such a link.
-
+
"""
self._extra_dependencies.add(table)
-
+
def append_column(self, column):
"""Append a ``Column`` to this ``Table``."""
@@ -374,14 +374,14 @@ class Table(SchemaItem, expression.TableClause):
def append_ddl_listener(self, event_name, listener):
"""Append a DDL event listener to this ``Table``.
-
+
Deprecated. See :class:`.DDLEvents`.
"""
-
+
def adapt_listener(target, connection, **kw):
listener(event_name, target, connection, **kw)
-
+
event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
def _set_parent(self, metadata):
@@ -428,14 +428,14 @@ class Table(SchemaItem, expression.TableClause):
if bind is None:
bind = _bind_or_error(self)
bind.drop(self, checkfirst=checkfirst)
-
+
def tometadata(self, metadata, schema=RETAIN_SCHEMA):
"""Return a copy of this :class:`Table` associated with a different
:class:`MetaData`.
-
+
E.g.::
-
+
# create two metadata
meta1 = MetaData('sqlite:///querytest.db')
meta2 = MetaData()
@@ -445,7 +445,7 @@ class Table(SchemaItem, expression.TableClause):
# create the same Table object for the plain metadata
users_table_2 = users_table.tometadata(meta2)
-
+
"""
if schema is RETAIN_SCHEMA:
@@ -481,38 +481,38 @@ class Column(SchemaItem, expression.ColumnClause):
"""Represents a column in a database table."""
__visit_name__ = 'column'
-
+
def __init__(self, *args, **kwargs):
"""
Construct a new ``Column`` object.
-
+
:param name: The name of this column as represented in the database.
This argument may be the first positional argument, or specified
via keyword.
-
+
Names which contain no upper case characters
will be treated as case insensitive names, and will not be quoted
unless they are a reserved word. Names with any number of upper
case characters will be quoted and sent exactly. Note that this
behavior applies even for databases which standardize upper
case names as case insensitive such as Oracle.
-
+
The name field may be omitted at construction time and applied
later, at any time before the Column is associated with a
:class:`Table`. This is to support convenient
usage within the :mod:`~sqlalchemy.ext.declarative` extension.
-
+
:param type\_: The column's type, indicated using an instance which
subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments
are required for the type, the class of the type can be sent
as well, e.g.::
-
+
# use a type with arguments
Column('data', String(50))
-
+
# use no arguments
Column('level', Integer)
-
+
The ``type`` argument may be the second positional argument
or specified by keyword.
@@ -541,21 +541,21 @@ class Column(SchemaItem, expression.ColumnClause):
has a composite primary key consisting of more than one
integer column, set this flag to True only on the
column that should be considered "autoincrement".
-
+
The setting *only* has an effect for columns which are:
-
+
* Integer derived (i.e. INT, SMALLINT, BIGINT)
-
+
* Part of the primary key
-
+
* Are not referenced by any foreign keys
-
+
        * Have no server side or client side defaults (with the exception
of Postgresql SERIAL).
-
+
The setting has these two effects on columns that meet the
above criteria:
-
+
* DDL issued for the column will include database-specific
keywords intended to signify this column as an
"autoincrement" column, such as AUTO INCREMENT on MySQL,
@@ -564,7 +564,7 @@ class Column(SchemaItem, expression.ColumnClause):
special SQLite flag that is not required for autoincrementing
behavior. See the SQLite dialect documentation for
information on SQLite's AUTOINCREMENT.
-
+
* The column will be considered to be available as
cursor.lastrowid or equivalent, for those dialects which
"post fetch" newly inserted identifiers after a row has
@@ -579,14 +579,14 @@ class Column(SchemaItem, expression.ColumnClause):
if this column is otherwise not specified in the VALUES clause of
the insert. This is a shortcut to using :class:`ColumnDefault` as
a positional argument.
-
+
Contrast this argument to ``server_default`` which creates a
default generator on the database side.
-
+
:param doc: optional String that can be used by the ORM or similar
to document attributes. This attribute does not render SQL
comments (a future attribute 'comment' will achieve that).
-
+
:param key: An optional string identifier which will identify this
``Column`` object on the :class:`Table`. When a key is provided,
this is the only identifier referencing the ``Column`` within the
@@ -614,7 +614,7 @@ class Column(SchemaItem, expression.ColumnClause):
present in the SET clause of the update. This is a shortcut to
using :class:`ColumnDefault` as a positional argument with
``for_update=True``.
-
+
:param primary_key: If ``True``, marks this column as a primary key
column. Multiple columns can have this flag set to specify
composite primary keys. As an alternative, the primary key of a
@@ -640,7 +640,7 @@ class Column(SchemaItem, expression.ColumnClause):
Strings and text() will be converted into a :class:`DefaultClause`
object upon initialization.
-
+
Use :class:`FetchedValue` to indicate that an already-existing
column will generate a default value on the database side which
will be available to SQLAlchemy for post-fetch after inserts. This
@@ -682,7 +682,7 @@ class Column(SchemaItem, expression.ColumnClause):
name = args.pop(0)
if args:
coltype = args[0]
-
+
if (isinstance(coltype, types.TypeEngine) or
(isinstance(coltype, type) and
issubclass(coltype, types.TypeEngine))):
@@ -690,9 +690,9 @@ class Column(SchemaItem, expression.ColumnClause):
raise exc.ArgumentError(
"May not pass type_ positionally and as a keyword.")
type_ = args.pop(0)
-
+
no_type = type_ is None
-
+
super(Column, self).__init__(name, None, type_)
self.key = kwargs.pop('key', name)
self.primary_key = kwargs.pop('primary_key', False)
@@ -716,7 +716,7 @@ class Column(SchemaItem, expression.ColumnClause):
# otherwise, add DDL-related events
elif isinstance(self.type, types.SchemaType):
self.type._set_parent(self)
-
+
if self.default is not None:
if isinstance(self.default, (ColumnDefault, Sequence)):
args.append(self.default)
@@ -728,13 +728,13 @@ class Column(SchemaItem, expression.ColumnClause):
args.append(self.server_default)
else:
args.append(DefaultClause(self.server_default))
-
+
if self.onupdate is not None:
if isinstance(self.onupdate, (ColumnDefault, Sequence)):
args.append(self.onupdate)
else:
args.append(ColumnDefault(self.onupdate, for_update=True))
-
+
if self.server_onupdate is not None:
if isinstance(self.server_onupdate, FetchedValue):
args.append(self.server_default)
@@ -750,7 +750,7 @@ class Column(SchemaItem, expression.ColumnClause):
if 'info' in kwargs:
self.info = kwargs.pop('info')
-
+
if kwargs:
raise exc.ArgumentError(
"Unknown arguments passed to Column: " + repr(kwargs.keys()))
@@ -822,7 +822,7 @@ class Column(SchemaItem, expression.ColumnClause):
# already, if it's a composite constraint
# and more than one col being replaced
table.constraints.remove(fk.constraint)
-
+
table._columns.replace(self)
if self.primary_key:
@@ -854,25 +854,25 @@ class Column(SchemaItem, expression.ColumnClause):
for fn in self._table_events:
fn(table, self)
del self._table_events
-
+
def _on_table_attach(self, fn):
if self.table is not None:
fn(self.table, self)
else:
self._table_events.add(fn)
-
+
def copy(self, **kw):
"""Create a copy of this ``Column``, unitialized.
This is used in ``Table.tometadata``.
"""
-
+
# Constraint objects plus non-constraint-bound ForeignKey objects
args = \
[c.copy(**kw) for c in self.constraints] + \
[c.copy(**kw) for c in self.foreign_keys if not c.constraint]
-
+
c = Column(
name=self.name,
type_=self.type,
@@ -893,7 +893,7 @@ class Column(SchemaItem, expression.ColumnClause):
if hasattr(self, '_table_events'):
c._table_events = list(self._table_events)
return c
-
+
def _make_proxy(self, selectable, name=None):
"""Create a *proxy* for this column.
@@ -901,7 +901,7 @@ class Column(SchemaItem, expression.ColumnClause):
(such as an alias or select statement). The column should
be used only in select scenarios, as its full DDL/default
information is not transferred.
-
+
"""
fk = [ForeignKey(f.column) for f in self.foreign_keys]
if name is None and self.name is None:
@@ -938,11 +938,11 @@ class ForeignKey(SchemaItem):
``ForeignKey`` is specified as an argument to a :class:`Column` object,
e.g.::
-
+
t = Table("remote_table", metadata,
Column("remote_id", ForeignKey("main_table.id"))
)
-
+
Note that ``ForeignKey`` is only a marker object that defines
a dependency between two columns. The actual constraint
is in all cases represented by the :class:`ForeignKeyConstraint`
@@ -953,18 +953,18 @@ class ForeignKey(SchemaItem):
``ForeignKey`` markers are automatically generated to be
present on each associated :class:`Column`, which are also
associated with the constraint object.
-
+
Note that you cannot define a "composite" foreign key constraint,
that is a constraint between a grouping of multiple parent/child
columns, using ``ForeignKey`` objects. To define this grouping,
the :class:`ForeignKeyConstraint` object must be used, and applied
to the :class:`Table`. The associated ``ForeignKey`` objects
are created automatically.
-
+
The ``ForeignKey`` objects associated with an individual
:class:`Column` object are available in the `foreign_keys` collection
of that column.
-
+
Further examples of foreign key configuration are in
:ref:`metadata_foreignkeys`.
@@ -976,8 +976,8 @@ class ForeignKey(SchemaItem):
onupdate=None, ondelete=None, deferrable=None,
initially=None, link_to_name=False):
"""
- Construct a column-level FOREIGN KEY.
-
+ Construct a column-level FOREIGN KEY.
+
The :class:`ForeignKey` object when constructed generates a
:class:`ForeignKeyConstraint` which is associated with the parent
:class:`Table` object's collection of constraints.
@@ -1005,28 +1005,28 @@ class ForeignKey(SchemaItem):
:param initially: Optional string. If set, emit INITIALLY <value> when
issuing DDL for this constraint.
-
+
:param link_to_name: if True, the string name given in ``column`` is
the rendered name of the referenced column, not its locally
assigned ``key``.
-
+
:param use_alter: passed to the underlying
:class:`ForeignKeyConstraint` to indicate the constraint should be
generated/dropped externally from the CREATE TABLE/ DROP TABLE
            statement. See that class's constructor for details.
-
+
"""
self._colspec = column
-
+
# the linked ForeignKeyConstraint.
# ForeignKey will create this when parent Column
# is attached to a Table, *or* ForeignKeyConstraint
# object passes itself in when creating ForeignKey
# markers.
self.constraint = _constraint
-
-
+
+
self.use_alter = use_alter
self.name = name
self.onupdate = onupdate
@@ -1040,20 +1040,20 @@ class ForeignKey(SchemaItem):
def copy(self, schema=None):
"""Produce a copy of this :class:`ForeignKey` object.
-
+
The new :class:`ForeignKey` will not be bound
to any :class:`Column`.
-
+
This method is usually used by the internal
copy procedures of :class:`Column`, :class:`Table`,
and :class:`MetaData`.
-
+
:param schema: The returned :class:`ForeignKey` will
reference the original table and column name, qualified
by the given string schema name.
-
+
"""
-
+
return ForeignKey(
self._get_colspec(schema=schema),
use_alter=self.use_alter,
@@ -1067,10 +1067,10 @@ class ForeignKey(SchemaItem):
def _get_colspec(self, schema=None):
"""Return a string based 'column specification' for this :class:`ForeignKey`.
-
+
This is usually the equivalent of the string-based "tablename.colname"
argument first passed to the object's constructor.
-
+
"""
if schema:
return schema + "." + self.column.table.name + \
@@ -1081,14 +1081,14 @@ class ForeignKey(SchemaItem):
_column = self._colspec.__clause_element__()
else:
_column = self._colspec
-
+
return "%s.%s" % (_column.table.fullname, _column.key)
target_fullname = property(_get_colspec)
def references(self, table):
"""Return True if the given :class:`Table` is referenced by this :class:`ForeignKey`."""
-
+
return table.corresponding_column(self.column) is not None
def get_referent(self, table):
@@ -1105,7 +1105,7 @@ class ForeignKey(SchemaItem):
@util.memoized_property
def column(self):
"""Return the target :class:`.Column` referenced by this :class:`.ForeignKey`.
-
+
If this :class:`ForeignKey` was created using a
string-based target column specification, this
attribute will on first access initiate a resolution
@@ -1114,7 +1114,7 @@ class ForeignKey(SchemaItem):
to the parent :class:`.Column`, :class:`.Table`, and
:class:`.MetaData` to proceed - if any of these aren't
yet present, an error is raised.
-
+
"""
# ForeignKey inits its remote column as late as possible, so tables
# can be defined without dependencies
@@ -1165,7 +1165,7 @@ class ForeignKey(SchemaItem):
"foreign key to target column '%s'" % (self.parent, tname, colname))
table = Table(tname, parenttable.metadata,
mustexist=True, schema=schema)
-
+
_column = None
if colname is None:
# colname is None in the case that ForeignKey argument
@@ -1208,7 +1208,7 @@ class ForeignKey(SchemaItem):
self.parent = column
self.parent.foreign_keys.add(self)
self.parent._on_table_attach(self._set_table)
-
+
def _set_table(self, table, column):
# standalone ForeignKey - create ForeignKeyConstraint
# on the hosting Table when attached to the Table.
@@ -1221,14 +1221,14 @@ class ForeignKey(SchemaItem):
self.constraint._elements[self.parent] = self
self.constraint._set_parent(table)
table.foreign_keys.add(self)
-
+
class DefaultGenerator(SchemaItem):
"""Base class for column *default* values."""
__visit_name__ = 'default_generator'
is_sequence = False
-
+
def __init__(self, for_update=False):
self.for_update = for_update
@@ -1261,21 +1261,21 @@ class ColumnDefault(DefaultGenerator):
This could correspond to a constant, a callable function,
or a SQL clause.
-
+
:class:`.ColumnDefault` is generated automatically
whenever the ``default``, ``onupdate`` arguments of
:class:`.Column` are used. A :class:`.ColumnDefault`
can be passed positionally as well.
-
+
For example, the following::
-
+
Column('foo', Integer, default=50)
-
+
Is equivalent to::
-
+
Column('foo', Integer, ColumnDefault(50))
-
+
"""
def __init__(self, arg, **kwargs):
@@ -1286,21 +1286,21 @@ class ColumnDefault(DefaultGenerator):
if util.callable(arg):
arg = self._maybe_wrap_callable(arg)
self.arg = arg
-
+
@util.memoized_property
def is_callable(self):
return util.callable(self.arg)
-
+
@util.memoized_property
def is_clause_element(self):
return isinstance(self.arg, expression.ClauseElement)
-
+
@util.memoized_property
def is_scalar(self):
return not self.is_callable and \
not self.is_clause_element and \
not self.is_sequence
-
+
def _maybe_wrap_callable(self, fn):
"""Backward compat: Wrap callables that don't accept a context."""
@@ -1319,7 +1319,7 @@ class ColumnDefault(DefaultGenerator):
return lambda ctx: fn()
positionals = len(argspec[0])
-
+
# Py3K compat - no unbound methods
if inspect.ismethod(inspectable) or inspect.isclass(fn):
positionals -= 1
@@ -1350,7 +1350,7 @@ class Sequence(DefaultGenerator):
__visit_name__ = 'sequence'
is_sequence = True
-
+
def __init__(self, name, start=None, increment=None, schema=None,
optional=False, quote=None, metadata=None, for_update=False):
super(Sequence, self).__init__(for_update=for_update)
@@ -1379,17 +1379,17 @@ class Sequence(DefaultGenerator):
def _set_parent(self, column):
super(Sequence, self)._set_parent(column)
column._on_table_attach(self._set_table)
-
+
def _set_table(self, table, column):
self.metadata = table.metadata
-
+
@property
def bind(self):
if self.metadata:
return self.metadata.bind
else:
return None
-
+
def create(self, bind=None, checkfirst=True):
"""Creates this sequence in the database."""
@@ -1407,18 +1407,18 @@ class Sequence(DefaultGenerator):
class FetchedValue(object):
"""A marker for a transparent database-side default.
-
+
Use :class:`.FetchedValue` when the database is configured
to provide some automatic default for a column.
-
+
E.g.::
-
+
Column('foo', Integer, FetchedValue())
-
+
Would indicate that some trigger or default generator
will create a new value for the ``foo`` column during an
INSERT.
-
+
"""
def __init__(self, for_update=False):
@@ -1437,24 +1437,24 @@ class FetchedValue(object):
class DefaultClause(FetchedValue):
"""A DDL-specified DEFAULT column value.
-
+
:class:`.DefaultClause` is a :class:`.FetchedValue`
that also generates a "DEFAULT" clause when
"CREATE TABLE" is emitted.
-
+
:class:`.DefaultClause` is generated automatically
whenever the ``server_default``, ``server_onupdate`` arguments of
:class:`.Column` are used. A :class:`.DefaultClause`
can be passed positionally as well.
-
+
For example, the following::
-
+
Column('foo', Integer, server_default="50")
-
+
Is equivalent to::
-
+
Column('foo', Integer, DefaultClause("50"))
-
+
"""
def __init__(self, arg, for_update=False):
@@ -1470,7 +1470,7 @@ class DefaultClause(FetchedValue):
class PassiveDefault(DefaultClause):
"""A DDL-specified DEFAULT column value.
-
+
.. deprecated:: 0.6 :class:`.PassiveDefault` is deprecated.
Use :class:`.DefaultClause`.
"""
@@ -1500,7 +1500,7 @@ class Constraint(SchemaItem):
:param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
-
+
:param _create_rule:
a callable which is passed the DDLCompiler object during
compilation. Returns True or False to signal inline generation of
@@ -1517,7 +1517,7 @@ class Constraint(SchemaItem):
_create_rule is used by some types to create constraints.
Currently, its call signature is subject to change at any time.
-
+
"""
self.name = name
@@ -1545,7 +1545,7 @@ class Constraint(SchemaItem):
class ColumnCollectionConstraint(Constraint):
"""A constraint that proxies a ColumnCollection."""
-
+
def __init__(self, *columns, **kw):
"""
:param \*columns:
@@ -1561,7 +1561,7 @@ class ColumnCollectionConstraint(Constraint):
:param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
-
+
"""
super(ColumnCollectionConstraint, self).__init__(**kw)
self.columns = expression.ColumnCollection()
@@ -1571,7 +1571,7 @@ class ColumnCollectionConstraint(Constraint):
isinstance(self._pending_colargs[0], Column) and \
self._pending_colargs[0].table is not None:
self._set_parent(self._pending_colargs[0].table)
-
+
def _set_parent(self, table):
super(ColumnCollectionConstraint, self)._set_parent(table)
for col in self._pending_colargs:
@@ -1613,7 +1613,7 @@ class CheckConstraint(Constraint):
:param sqltext:
A string containing the constraint definition, which will be used
verbatim, or a SQL expression construct.
-
+
:param name:
Optional, the in-database name of the constraint.
@@ -1624,7 +1624,7 @@ class CheckConstraint(Constraint):
:param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
-
+
"""
super(CheckConstraint, self).\
@@ -1632,7 +1632,7 @@ class CheckConstraint(Constraint):
self.sqltext = expression._literal_as_text(sqltext)
if table is not None:
self._set_parent(table)
-
+
def __visit_name__(self):
if isinstance(self.parent, Table):
return "check_constraint"
@@ -1650,9 +1650,9 @@ class ForeignKeyConstraint(Constraint):
constraint. For a no-frills, single column foreign key, adding a
:class:`ForeignKey` to the definition of a :class:`Column` is a shorthand
equivalent for an unnamed, single column :class:`ForeignKeyConstraint`.
-
+
Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.
-
+
"""
__visit_name__ = 'foreign_key_constraint'
@@ -1698,7 +1698,7 @@ class ForeignKeyConstraint(Constraint):
as "after-create" and "before-drop" events on the MetaData object.
This is normally used to generate/drop constraints on objects that
are mutually dependent on each other.
-
+
"""
super(ForeignKeyConstraint, self).\
__init__(name, deferrable, initially)
@@ -1711,7 +1711,7 @@ class ForeignKeyConstraint(Constraint):
self.use_alter = use_alter
self._elements = util.OrderedDict()
-
+
# standalone ForeignKeyConstraint - create
# associated ForeignKey objects which will be applied to hosted
# Column objects (in col.foreign_keys), either now or when attached
@@ -1729,15 +1729,15 @@ class ForeignKeyConstraint(Constraint):
if table is not None:
self._set_parent(table)
-
+
@property
def columns(self):
return self._elements.keys()
-
+
@property
def elements(self):
return self._elements.values()
-
+
def _set_parent(self, table):
super(ForeignKeyConstraint, self)._set_parent(table)
for col, fk in self._elements.iteritems():
@@ -1746,16 +1746,16 @@ class ForeignKeyConstraint(Constraint):
if isinstance(col, basestring):
col = table.c[col]
fk._set_parent(col)
-
+
if self.use_alter:
def supports_alter(ddl, event, schema_item, bind, **kw):
return table in set(kw['tables']) and \
bind.dialect.supports_alter
-
+
event.listen(table.metadata, "after_create", AddConstraint(self, on=supports_alter))
event.listen(table.metadata, "before_drop", DropConstraint(self, on=supports_alter))
-
-
+
+
def copy(self, **kw):
return ForeignKeyConstraint(
[x.parent.name for x in self._elements.values()],
@@ -1823,7 +1823,7 @@ class Index(SchemaItem):
:param \**kw:
Other keyword arguments may be interpreted by specific dialects.
-
+
"""
self.name = name
@@ -1851,7 +1851,7 @@ class Index(SchemaItem):
@property
def bind(self):
"""Return the connectable associated with this Index."""
-
+
return self.table.bind
def create(self, bind=None):
@@ -1943,7 +1943,7 @@ class MetaData(SchemaItem):
dict.__setitem__(self.tables, key, table)
if schema:
self._schemas.add(schema)
-
+
def _remove_table(self, name, schema):
key = _get_table_key(name, schema)
dict.pop(self.tables, key, None)
@@ -1951,7 +1951,7 @@ class MetaData(SchemaItem):
self._schemas = set([t.schema
for t in self.tables.values()
if t.schema is not None])
-
+
def __getstate__(self):
return {'tables': self.tables}
@@ -1970,7 +1970,7 @@ class MetaData(SchemaItem):
This property may be assigned an ``Engine`` or ``Connection``, or
assigned a string or URL to automatically create a basic ``Engine``
for this bind with ``create_engine()``.
-
+
"""
return self._bind
@@ -1989,10 +1989,10 @@ class MetaData(SchemaItem):
dict.clear(self.tables)
self._schemas.clear()
-
+
def remove(self, table):
"""Remove the given Table object from this MetaData."""
-
+
self._remove_table(table.name, table.schema)
@property
@@ -2001,7 +2001,7 @@ class MetaData(SchemaItem):
dependency.
"""
return sqlutil.sort_tables(self.tables.itervalues())
-
+
def reflect(self, bind=None, schema=None, views=False, only=None):
"""Load all available table definitions from the database.
@@ -2018,10 +2018,10 @@ class MetaData(SchemaItem):
:param schema:
          Optional, query and reflect tables from an alternate schema.
-
+
:param views:
If True, also reflect views.
-
+
:param only:
Optional. Load only a sub-set of available named tables. May be
specified as a sequence of names or a callable.
@@ -2054,7 +2054,7 @@ class MetaData(SchemaItem):
available.update(
bind.dialect.get_view_names(conn or bind, schema)
)
-
+
current = set(self.tables.iterkeys())
if only is None:
@@ -2083,7 +2083,7 @@ class MetaData(SchemaItem):
"""
def adapt_listener(target, connection, **kw):
listener(event, target, connection, **kw)
-
+
event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
def create_all(self, bind=None, tables=None, checkfirst=True):
@@ -2104,7 +2104,7 @@ class MetaData(SchemaItem):
:param checkfirst:
Defaults to True, don't issue CREATEs for tables already present
in the target database.
-
+
"""
if bind is None:
bind = _bind_or_error(self)
@@ -2206,16 +2206,16 @@ class SchemaVisitor(visitors.ClauseVisitor):
class DDLElement(expression.Executable, expression.ClauseElement):
"""Base class for DDL expression constructs.
-
+
This class is the base for the general purpose :class:`.DDL` class,
as well as the various create/drop clause constructs such as
:class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
etc.
-
+
:class:`.DDLElement` integrates closely with SQLAlchemy events,
introduced in :ref:`event_toplevel`. An instance of one is
itself an event receiving callable::
-
+
event.listen(
users,
'after_create',
@@ -2223,17 +2223,17 @@ class DDLElement(expression.Executable, expression.ClauseElement):
)
See also:
-
+
:class:`.DDL`
-
+
:class:`.DDLEvents`
-
+
:ref:`event_toplevel`
:ref:`schema_ddl_sequences`
-
+
"""
-
+
_execution_options = expression.Executable.\
_execution_options.union({'autocommit':True})
@@ -2241,7 +2241,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
on = None
dialect = None
callable_ = None
-
+
def execute(self, bind=None, target=None):
"""Execute this DDL immediately.
@@ -2277,13 +2277,13 @@ class DDLElement(expression.Executable, expression.ClauseElement):
":meth:`.DDLElement.execute_if`.")
def execute_at(self, event_name, target):
"""Link execution of this DDL to the DDL lifecycle of a SchemaItem.
-
+
Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
executing it when that schema item is created or dropped. The DDL
statement will be executed using the same Connection and transactional
context as the Table create/drop itself. The ``.bind`` property of
this statement is ignored.
-
+
:param event:
One of the events defined in the schema item's ``.ddl_events``;
e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
@@ -2300,14 +2300,14 @@ class DDLElement(expression.Executable, expression.ClauseElement):
Caveat: Creating or dropping a Table in isolation will also trigger
        any DDL set to ``execute_at`` on that Table's MetaData. This may change
in a future release.
-
+
"""
-
+
def call_event(target, connection, **kw):
if self._should_execute_deprecated(event_name,
target, connection, **kw):
return connection.execute(self.against(target))
-
+
event.listen(target, "" + event_name.replace('-', '_'), call_event)
@expression._generative
@@ -2320,15 +2320,15 @@ class DDLElement(expression.Executable, expression.ClauseElement):
def execute_if(self, dialect=None, callable_=None):
"""Return a callable that will execute this
DDLElement conditionally.
-
+
Used to provide a wrapper for event listening::
-
+
event.listen(
metadata,
'before_create',
DDL("my_ddl").execute_if(dialect='postgresql')
)
-
+
:param dialect: May be a string, tuple or a callable
predicate. If a string, it will be compared to the name of the
executing database dialect::
@@ -2338,14 +2338,14 @@ class DDLElement(expression.Executable, expression.ClauseElement):
If a tuple, specifies multiple dialect names::
DDL('something').execute_if(dialect=('postgresql', 'mysql'))
-
+
:param callable_: A callable, which will be invoked with
four positional arguments as well as optional keyword
arguments:
-
+
:ddl:
This DDL element.
-
+
:target:
The :class:`.Table` or :class:`.MetaData` object which is the target of
this event. May be None if the DDL is executed explicitly.
@@ -2353,20 +2353,20 @@ class DDLElement(expression.Executable, expression.ClauseElement):
:bind:
The :class:`.Connection` being used for DDL execution
- :tables:
+ :tables:
Optional keyword argument - a list of Table objects which are to
be created/ dropped within a MetaData.create_all() or drop_all()
method call.
If the callable returns a true value, the DDL statement will be
executed.
-
+
See also:
-
+
:class:`.DDLEvents`
-
+
:ref:`event_toplevel`
-
+
"""
self.dialect = dialect
self.callable_ = callable_
@@ -2375,7 +2375,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
if self.on is not None and \
not self._should_execute_deprecated(None, target, bind, **kw):
return False
-
+
if isinstance(self.dialect, basestring):
if self.dialect != bind.engine.name:
return False
@@ -2385,7 +2385,7 @@ class DDLElement(expression.Executable, expression.ClauseElement):
if self.callable_ is not None and \
not self.callable_(self, target, bind, **kw):
return False
-
+
return True
def _should_execute_deprecated(self, event, target, bind, **kw):
@@ -2397,10 +2397,10 @@ class DDLElement(expression.Executable, expression.ClauseElement):
return bind.engine.name in self.on
else:
return self.on(self, event, target, bind, **kw)
-
+
def __call__(self, target, bind, **kw):
"""Execute the DDL as a ddl_listener."""
-
+
if self._should_execute(target, bind, **kw):
return bind.execute(self.against(target))
@@ -2424,11 +2424,11 @@ class DDLElement(expression.Executable, expression.ClauseElement):
s = self.__class__.__new__(self.__class__)
s.__dict__ = self.__dict__.copy()
return s
-
+
def _compiler(self, dialect, **kw):
"""Return a compiler appropriate for this ClauseElement, given a
Dialect."""
-
+
return dialect.ddl_compiler(dialect, self, **kw)
class DDL(DDLElement):
@@ -2441,9 +2441,9 @@ class DDL(DDLElement):
to handle repetitive tasks for multiple tables.
Examples::
-
+
from sqlalchemy import event, DDL
-
+
tbl = Table('users', metadata, Column('uid', Integer))
event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
@@ -2467,7 +2467,7 @@ class DDL(DDLElement):
"""
__visit_name__ = "ddl"
-
+
def __init__(self, statement, on=None, context=None, bind=None):
"""Create a DDL statement.
@@ -2495,10 +2495,10 @@ class DDL(DDLElement):
If a callable, it will be invoked with four positional arguments
as well as optional keyword arguments:
-
+
:ddl:
This DDL element.
-
+
:event:
The name of the event that has triggered this DDL, such as
'after-create' Will be None if the DDL is executed explicitly.
@@ -2510,12 +2510,12 @@ class DDL(DDLElement):
:connection:
The ``Connection`` being used for DDL execution
- :tables:
+ :tables:
Optional keyword argument - a list of Table objects which are to
be created/ dropped within a MetaData.create_all() or drop_all()
method call.
-
+
If the callable returns a true value, the DDL statement will be
executed.
@@ -2529,10 +2529,10 @@ class DDL(DDLElement):
See also:
-
+
:class:`.DDLEvents`
:mod:`sqlalchemy.event`
-
+
"""
if not isinstance(statement, basestring):
@@ -2575,9 +2575,9 @@ class _CreateDropBase(DDLElement):
The common theme of _CreateDropBase is a single
``element`` attribute which refers to the element
to be created or dropped.
-
+
"""
-
+
def __init__(self, element, on=None, bind=None):
self.element = element
self._check_ddl_on(on)
@@ -2586,19 +2586,19 @@ class _CreateDropBase(DDLElement):
def _create_rule_disable(self, compiler):
"""Allow disable of _create_rule using a callable.
-
+
Pass to _create_rule using
util.portable_instancemethod(self._create_rule_disable)
to retain serializability.
-
+
"""
return False
class CreateTable(_CreateDropBase):
"""Represent a CREATE TABLE statement."""
-
+
__visit_name__ = "create_table"
-
+
class DropTable(_CreateDropBase):
"""Represent a DROP TABLE statement."""
@@ -2606,17 +2606,17 @@ class DropTable(_CreateDropBase):
class CreateSequence(_CreateDropBase):
"""Represent a CREATE SEQUENCE statement."""
-
+
__visit_name__ = "create_sequence"
class DropSequence(_CreateDropBase):
"""Represent a DROP SEQUENCE statement."""
__visit_name__ = "drop_sequence"
-
+
class CreateIndex(_CreateDropBase):
"""Represent a CREATE INDEX statement."""
-
+
__visit_name__ = "create_index"
class DropIndex(_CreateDropBase):
@@ -2626,19 +2626,19 @@ class DropIndex(_CreateDropBase):
class AddConstraint(_CreateDropBase):
"""Represent an ALTER TABLE ADD CONSTRAINT statement."""
-
+
__visit_name__ = "add_constraint"
def __init__(self, element, *args, **kw):
super(AddConstraint, self).__init__(element, *args, **kw)
element._create_rule = util.portable_instancemethod(
self._create_rule_disable)
-
+
class DropConstraint(_CreateDropBase):
"""Represent an ALTER TABLE DROP CONSTRAINT statement."""
__visit_name__ = "drop_constraint"
-
+
def __init__(self, element, cascade=False, **kw):
self.cascade = cascade
super(DropConstraint, self).__init__(element, **kw)
@@ -2659,7 +2659,7 @@ def _bind_or_error(schemaitem, msg=None):
bindable = "the %s's .bind" % name
else:
bindable = "this %s's .metadata.bind" % name
-
+
if msg is None:
msg = "The %s is not bound to an Engine or Connection. "\
"Execution can not proceed without a database to execute "\
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index 07ef0f50a..39d320ede 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -152,15 +152,15 @@ class _CompileLabel(visitors.Visitable):
__visit_name__ = 'label'
__slots__ = 'element', 'name'
-
+
def __init__(self, col, name):
self.element = col
self.name = name
-
+
@property
def type(self):
return self.element.type
-
+
@property
def quote(self):
return self.element.quote
@@ -176,28 +176,28 @@ class SQLCompiler(engine.Compiled):
extract_map = EXTRACT_MAP
compound_keywords = COMPOUND_KEYWORDS
-
+
# class-level defaults which can be set at the instance
# level to define if this Compiled instance represents
# INSERT/UPDATE/DELETE
isdelete = isinsert = isupdate = False
-
+
# holds the "returning" collection of columns if
# the statement is CRUD and defines returning columns
# either implicitly or explicitly
returning = None
-
+
# set to True classwide to generate RETURNING
# clauses before the VALUES or WHERE clause (i.e. MSSQL)
returning_precedes_values = False
-
+
# SQL 92 doesn't allow bind parameters to be used
# in the columns clause of a SELECT, nor does it allow
# ambiguous expressions like "? = ?". A compiler
# subclass can set this flag to False if the target
# driver/DB enforces this
ansi_bind_rules = False
-
+
def __init__(self, dialect, statement, column_keys=None,
inline=False, **kwargs):
"""Construct a new ``DefaultCompiler`` object.
@@ -256,7 +256,7 @@ class SQLCompiler(engine.Compiled):
self.truncated_names = {}
engine.Compiled.__init__(self, dialect, statement, **kwargs)
-
+
@util.memoized_property
def _bind_processors(self):
@@ -267,14 +267,14 @@ class SQLCompiler(engine.Compiled):
for bindparam in self.bind_names )
if value is not None
)
-
+
def is_subquery(self):
return len(self.stack) > 1
@property
def sql_compiler(self):
return self
-
+
def construct_params(self, params=None, _group_number=None):
"""return a dictionary of bind parameter keys and values"""
@@ -353,25 +353,25 @@ class SQLCompiler(engine.Compiled):
return label.element._compiler_dispatch(self,
within_columns_clause=False,
**kw)
-
+
def visit_column(self, column, result_map=None, **kwargs):
name = column.name
if name is None:
raise exc.CompileError("Cannot compile Column object until "
"it's 'name' is assigned.")
-
+
is_literal = column.is_literal
if not is_literal and isinstance(name, sql._generated_label):
name = self._truncated_identifier("colident", name)
if result_map is not None:
result_map[name.lower()] = (name, (column, ), column.type)
-
+
if is_literal:
name = self.escape_literal_column(name)
else:
name = self.preparer.quote(name, column.quote)
-
+
table = column.table
if table is None or not table.named_with_column:
return name
@@ -385,7 +385,7 @@ class SQLCompiler(engine.Compiled):
tablename = table.name
if isinstance(tablename, sql._generated_label):
tablename = self._truncated_identifier("alias", tablename)
-
+
return schema_prefix + \
self.preparer.quote(tablename, table.quote) + \
"." + name
@@ -407,7 +407,7 @@ class SQLCompiler(engine.Compiled):
def post_process_text(self, text):
return text
-
+
def visit_textclause(self, textclause, **kwargs):
if textclause.typemap is not None:
for colname, type_ in textclause.typemap.iteritems():
@@ -486,14 +486,14 @@ class SQLCompiler(engine.Compiled):
self.stack.append({'from':entry.get('from', None), 'iswrapper':True})
keyword = self.compound_keywords.get(cs.keyword)
-
+
text = (" " + keyword + " ").join(
(c._compiler_dispatch(self,
asfrom=asfrom, parens=False,
compound_index=i, **kwargs)
for i, c in enumerate(cs.selects))
)
-
+
group_by = cs._group_by_clause._compiler_dispatch(
self, asfrom=asfrom, **kwargs)
if group_by:
@@ -523,7 +523,7 @@ class SQLCompiler(engine.Compiled):
isinstance(binary.left, sql._BindParamClause) and \
isinstance(binary.right, sql._BindParamClause):
kw['literal_binds'] = True
-
+
return self._operator_dispatch(binary.operator,
binary,
lambda opstr: binary.left._compiler_dispatch(self, **kw) +
@@ -550,7 +550,7 @@ class SQLCompiler(engine.Compiled):
+ (escape and
(' ESCAPE ' + self.render_literal_value(escape, None))
or '')
-
+
def visit_ilike_op(self, binary, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) LIKE lower(%s)' % (
@@ -559,7 +559,7 @@ class SQLCompiler(engine.Compiled):
+ (escape and
(' ESCAPE ' + self.render_literal_value(escape, None))
or '')
-
+
def visit_notilike_op(self, binary, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) NOT LIKE lower(%s)' % (
@@ -568,7 +568,7 @@ class SQLCompiler(engine.Compiled):
+ (escape and
(' ESCAPE ' + self.render_literal_value(escape, None))
or '')
-
+
def _operator_dispatch(self, operator, element, fn, **kw):
if util.callable(operator):
disp = getattr(self, "visit_%s" % operator.__name__, None)
@@ -578,7 +578,7 @@ class SQLCompiler(engine.Compiled):
return fn(OPERATORS[operator])
else:
return fn(" " + operator + " ")
-
+
def visit_bindparam(self, bindparam, within_columns_clause=False,
literal_binds=False, **kwargs):
if literal_binds or \
@@ -589,7 +589,7 @@ class SQLCompiler(engine.Compiled):
"renderable value not allowed here.")
return self.render_literal_bindparam(bindparam,
within_columns_clause=True, **kwargs)
-
+
name = self._truncate_bindparam(bindparam)
if name in self.binds:
existing = self.binds[name]
@@ -610,26 +610,26 @@ class SQLCompiler(engine.Compiled):
"with insert() or update() (for example, 'b_%s')."
% (bindparam.key, bindparam.key)
)
-
+
self.binds[bindparam.key] = self.binds[name] = bindparam
return self.bindparam_string(name)
-
+
def render_literal_bindparam(self, bindparam, **kw):
value = bindparam.value
processor = bindparam.type._cached_bind_processor(self.dialect)
if processor:
value = processor(value)
return self.render_literal_value(value, bindparam.type)
-
+
def render_literal_value(self, value, type_):
"""Render the value of a bind parameter as a quoted literal.
-
+
        This is used for statement sections that do not accept bind parameters
on the target driver/database.
-
+
This should be implemented by subclasses using the quoting services
of the DBAPI.
-
+
"""
if isinstance(value, basestring):
value = value.replace("'", "''")
@@ -643,7 +643,7 @@ class SQLCompiler(engine.Compiled):
else:
raise NotImplementedError(
"Don't know how to literal-quote value %r" % value)
-
+
def _truncate_bindparam(self, bindparam):
if bindparam in self.bind_names:
return self.bind_names[bindparam]
@@ -672,10 +672,10 @@ class SQLCompiler(engine.Compiled):
truncname = anonname
self.truncated_names[(ident_class, name)] = truncname
return truncname
-
+
def _anonymize(self, name):
return name % self.anon_map
-
+
def _process_anon(self, key):
(ident, derived) = key.split(' ', 1)
anonymous_counter = self.anon_map.get(derived, 1)
@@ -705,12 +705,12 @@ class SQLCompiler(engine.Compiled):
asfrom=True, **kwargs) + \
" AS " + \
self.preparer.format_alias(alias, alias_name)
-
+
if fromhints and alias in fromhints:
hinttext = self.get_from_hint_text(alias, fromhints[alias])
if hinttext:
ret += " " + hinttext
-
+
return ret
else:
return alias.original._compiler_dispatch(self, **kwargs)
@@ -742,16 +742,16 @@ class SQLCompiler(engine.Compiled):
def get_select_hint_text(self, byfroms):
return None
-
+
def get_from_hint_text(self, table, text):
return None
-
+
def visit_select(self, select, asfrom=False, parens=True,
iswrapper=False, fromhints=None,
compound_index=1, **kwargs):
entry = self.stack and self.stack[-1] or {}
-
+
existingfroms = entry.get('from', None)
froms = select._get_display_froms(existingfroms)
@@ -782,7 +782,7 @@ class SQLCompiler(engine.Compiled):
]
if c is not None
]
-
+
text = "SELECT " # we're off to a good start !
if select._hints:
@@ -798,7 +798,7 @@ class SQLCompiler(engine.Compiled):
hint_text = self.get_select_hint_text(byfrom)
if hint_text:
text += hint_text + " "
-
+
if select._prefixes:
text += " ".join(
x._compiler_dispatch(self, **kwargs)
@@ -808,7 +808,7 @@ class SQLCompiler(engine.Compiled):
if froms:
text += " \nFROM "
-
+
if select._hints:
text += ', '.join([f._compiler_dispatch(self,
asfrom=True, fromhints=byfrom,
@@ -854,7 +854,7 @@ class SQLCompiler(engine.Compiled):
def get_select_precolumns(self, select):
"""Called when building a ``SELECT`` statement, position is just
before column list.
-
+
"""
return select._distinct and "DISTINCT " or ""
@@ -924,15 +924,15 @@ class SQLCompiler(engine.Compiled):
preparer = self.preparer
supports_default_values = self.dialect.supports_default_values
-
+
text = "INSERT"
-
+
prefixes = [self.process(x) for x in insert_stmt._prefixes]
if prefixes:
text += " " + " ".join(prefixes)
-
+
text += " INTO " + preparer.format_table(insert_stmt.table)
-
+
if colparams or not supports_default_values:
text += " (%s)" % ', '.join([preparer.format_column(c[0])
for c in colparams])
@@ -941,7 +941,7 @@ class SQLCompiler(engine.Compiled):
self.returning = self.returning or insert_stmt._returning
returning_clause = self.returning_clause(
insert_stmt, self.returning)
-
+
if self.returning_precedes_values:
text += " " + returning_clause
@@ -950,12 +950,12 @@ class SQLCompiler(engine.Compiled):
else:
text += " VALUES (%s)" % \
', '.join([c[1] for c in colparams])
-
+
if self.returning and not self.returning_precedes_values:
text += " " + returning_clause
-
+
return text
-
+
def visit_update(self, update_stmt):
self.stack.append({'from': set([update_stmt.table])})
@@ -963,7 +963,7 @@ class SQLCompiler(engine.Compiled):
colparams = self._get_colparams(update_stmt)
text = "UPDATE " + self.preparer.format_table(update_stmt.table)
-
+
text += ' SET ' + \
', '.join(
self.preparer.quote(c[0].name, c[0].quote) +
@@ -976,14 +976,14 @@ class SQLCompiler(engine.Compiled):
if self.returning_precedes_values:
text += " " + self.returning_clause(
update_stmt, update_stmt._returning)
-
+
if update_stmt._whereclause is not None:
text += " WHERE " + self.process(update_stmt._whereclause)
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
update_stmt, update_stmt._returning)
-
+
self.stack.pop(-1)
return text
@@ -1001,10 +1001,10 @@ class SQLCompiler(engine.Compiled):
"with insert() or update() (for example, 'b_%s')."
% (col.key, col.key)
)
-
+
self.binds[col.key] = bindparam
return self.bindparam_string(self._truncate_bindparam(bindparam))
-
+
def _get_colparams(self, stmt):
"""create a set of tuples representing column/string pairs for use
in an INSERT or UPDATE statement.
@@ -1030,7 +1030,7 @@ class SQLCompiler(engine.Compiled):
]
required = object()
-
+
# if we have statement parameters - set defaults in the
# compiled params
if self.column_keys is None:
@@ -1047,17 +1047,17 @@ class SQLCompiler(engine.Compiled):
# create a list of column assignment clauses as tuples
values = []
-
+
need_pks = self.isinsert and \
not self.inline and \
not stmt._returning
-
+
implicit_returning = need_pks and \
self.dialect.implicit_returning and \
stmt.table.implicit_returning
-
+
postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid
-
+
# iterating through columns at the top to maintain ordering.
# otherwise we might iterate through individual sets of
# "defaults", "primary key cols", etc.
@@ -1071,7 +1071,7 @@ class SQLCompiler(engine.Compiled):
self.postfetch.append(c)
value = self.process(value.self_group())
values.append((c, value))
-
+
elif self.isinsert:
if c.primary_key and \
need_pks and \
@@ -1080,7 +1080,7 @@ class SQLCompiler(engine.Compiled):
not postfetch_lastrowid or
c is not stmt.table._autoincrement_column
):
-
+
if implicit_returning:
if c.default is not None:
if c.default.is_sequence:
@@ -1115,7 +1115,7 @@ class SQLCompiler(engine.Compiled):
(c, self._create_crud_bind_param(c, None))
)
self.prefetch.append(c)
-
+
elif c.default is not None:
if c.default.is_sequence:
proc = self.process(c.default)
@@ -1127,7 +1127,7 @@ class SQLCompiler(engine.Compiled):
values.append(
(c, self.process(c.default.arg.self_group()))
)
-
+
if not c.primary_key:
                        # don't add primary key column to postfetch
self.postfetch.append(c)
@@ -1139,7 +1139,7 @@ class SQLCompiler(engine.Compiled):
elif c.server_default is not None:
if not c.primary_key:
self.postfetch.append(c)
-
+
elif self.isupdate:
if c.onupdate is not None and not c.onupdate.is_sequence:
if c.onupdate.is_clause_element:
@@ -1167,14 +1167,14 @@ class SQLCompiler(engine.Compiled):
if self.returning_precedes_values:
text += " " + self.returning_clause(
delete_stmt, delete_stmt._returning)
-
+
if delete_stmt._whereclause is not None:
text += " WHERE " + self.process(delete_stmt._whereclause)
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
delete_stmt, delete_stmt._returning)
-
+
self.stack.pop(-1)
return text
@@ -1192,18 +1192,18 @@ class SQLCompiler(engine.Compiled):
class DDLCompiler(engine.Compiled):
-
+
@util.memoized_property
def sql_compiler(self):
return self.dialect.statement_compiler(self.dialect, None)
-
+
@property
def preparer(self):
return self.dialect.identifier_preparer
def construct_params(self, params=None):
return None
-
+
def visit_ddl(self, ddl, **kwargs):
# table events can substitute table and schema name
context = ddl.context
@@ -1220,7 +1220,7 @@ class DDLCompiler(engine.Compiled):
context.setdefault('table', table)
context.setdefault('schema', sch)
context.setdefault('fullname', preparer.format_table(ddl.target))
-
+
return self.sql_compiler.post_process_text(ddl.statement % context)
def visit_create_table(self, create):
@@ -1259,16 +1259,16 @@ class DDLCompiler(engine.Compiled):
return text
def create_table_constraints(self, table):
-
+
# On some DB order is significant: visit PK first, then the
# other constraints (engine.ReflectionTest.testbasic failed on FB2)
constraints = []
if table.primary_key:
constraints.append(table.primary_key)
-
+
constraints.extend([c for c in table.constraints
if c is not table.primary_key])
-
+
return ", \n\t".join(p for p in
(self.process(constraint)
for constraint in constraints
@@ -1280,7 +1280,7 @@ class DDLCompiler(engine.Compiled):
not getattr(constraint, 'use_alter', False)
)) if p is not None
)
-
+
def visit_drop_table(self, drop):
return "\nDROP TABLE " + self.preparer.format_table(drop.element)
@@ -1302,7 +1302,7 @@ class DDLCompiler(engine.Compiled):
preparer = self.preparer
text = "CREATE "
if index.unique:
- text += "UNIQUE "
+ text += "UNIQUE "
text += "INDEX %s ON %s (%s)" \
% (preparer.quote(self._index_identifier(index.name),
index.quote),
@@ -1332,7 +1332,7 @@ class DDLCompiler(engine.Compiled):
if create.element.start is not None:
text += " START WITH %d" % create.element.start
return text
-
+
def visit_drop_sequence(self, drop):
return "DROP SEQUENCE %s" % \
self.preparer.format_sequence(drop.element)
@@ -1344,7 +1344,7 @@ class DDLCompiler(engine.Compiled):
self.preparer.format_constraint(drop.element),
drop.cascade and " CASCADE" or ""
)
-
+
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column) + " " + \
self.dialect.type_compiler.process(column.type)
@@ -1417,7 +1417,7 @@ class DDLCompiler(engine.Compiled):
def define_constraint_remote_table(self, constraint, table, preparer):
"""Format the remote table clause of a CREATE CONSTRAINT clause."""
-
+
return preparer.format_table(table)
def visit_unique_constraint(self, constraint):
@@ -1438,7 +1438,7 @@ class DDLCompiler(engine.Compiled):
if constraint.onupdate is not None:
text += " ON UPDATE %s" % constraint.onupdate
return text
-
+
def define_constraint_deferrability(self, constraint):
text = ""
if constraint.deferrable is not None:
@@ -1449,15 +1449,15 @@ class DDLCompiler(engine.Compiled):
if constraint.initially is not None:
text += " INITIALLY %s" % constraint.initially
return text
-
-
+
+
class GenericTypeCompiler(engine.TypeCompiler):
def visit_CHAR(self, type_):
return "CHAR" + (type_.length and "(%d)" % type_.length or "")
def visit_NCHAR(self, type_):
return "NCHAR" + (type_.length and "(%d)" % type_.length or "")
-
+
def visit_FLOAT(self, type_):
return "FLOAT"
@@ -1474,7 +1474,7 @@ class GenericTypeCompiler(engine.TypeCompiler):
def visit_DECIMAL(self, type_):
return "DECIMAL"
-
+
def visit_INTEGER(self, type_):
return "INTEGER"
@@ -1516,46 +1516,46 @@ class GenericTypeCompiler(engine.TypeCompiler):
def visit_VARBINARY(self, type_):
return "VARBINARY" + (type_.length and "(%d)" % type_.length or "")
-
+
def visit_BOOLEAN(self, type_):
return "BOOLEAN"
-
+
def visit_TEXT(self, type_):
return "TEXT"
-
+
def visit_large_binary(self, type_):
return self.visit_BLOB(type_)
-
+
def visit_boolean(self, type_):
return self.visit_BOOLEAN(type_)
-
+
def visit_time(self, type_):
return self.visit_TIME(type_)
-
+
def visit_datetime(self, type_):
return self.visit_DATETIME(type_)
-
+
def visit_date(self, type_):
return self.visit_DATE(type_)
def visit_big_integer(self, type_):
return self.visit_BIGINT(type_)
-
+
def visit_small_integer(self, type_):
return self.visit_SMALLINT(type_)
-
+
def visit_integer(self, type_):
return self.visit_INTEGER(type_)
-
+
def visit_float(self, type_):
return self.visit_FLOAT(type_)
-
+
def visit_numeric(self, type_):
return self.visit_NUMERIC(type_)
-
+
def visit_string(self, type_):
return self.visit_VARCHAR(type_)
-
+
def visit_unicode(self, type_):
return self.visit_VARCHAR(type_)
@@ -1564,19 +1564,19 @@ class GenericTypeCompiler(engine.TypeCompiler):
def visit_unicode_text(self, type_):
return self.visit_TEXT(type_)
-
+
def visit_enum(self, type_):
return self.visit_VARCHAR(type_)
-
+
def visit_null(self, type_):
raise NotImplementedError("Can't generate DDL for the null type")
-
+
def visit_type_decorator(self, type_):
return self.process(type_.type_engine(self.dialect))
-
+
def visit_user_defined(self, type_):
return type_.get_col_spec()
-
+
class IdentifierPreparer(object):
"""Handle quoting and case-folding of identifiers based on options."""
@@ -1609,7 +1609,7 @@ class IdentifierPreparer(object):
self.escape_to_quote = self.escape_quote * 2
self.omit_schema = omit_schema
self._strings = {}
-
+
def _escape_identifier(self, value):
"""Escape an identifier.
@@ -1689,7 +1689,7 @@ class IdentifierPreparer(object):
def format_constraint(self, constraint):
return self.quote(constraint.name, constraint.quote)
-
+
def format_table(self, table, use_schema=True, name=None):
"""Prepare a quoted table and schema name."""
@@ -1754,7 +1754,7 @@ class IdentifierPreparer(object):
'final': final,
'escaped': escaped_final })
return r
-
+
def unformat_identifiers(self, identifiers):
"""Unpack 'schema.table.column'-like strings into components."""
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index c1312c853..ede194f7c 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -474,7 +474,7 @@ def case(whens, value=None, else_=None):
do not support bind parameters in the ``then`` clause. The type
    can be specified, which determines the type of the :func:`case()` construct
overall::
-
+
case([(orderline.c.qty > 100,
literal_column("'greaterthan100'", String)),
(orderline.c.qty > 10, literal_column("'greaterthan10'",
@@ -482,7 +482,7 @@ def case(whens, value=None, else_=None):
            ], else_=literal_column("'lessthan10'", String))
"""
-
+
return _Case(whens, value=value, else_=else_)
def cast(clause, totype, **kwargs):
@@ -678,29 +678,29 @@ def literal(value, type_=None):
return _BindParamClause(None, value, type_=type_, unique=True)
def tuple_(*expr):
- """Return a SQL tuple.
-
+ """Return a SQL tuple.
+
Main usage is to produce a composite IN construct::
-
+
tuple_(table.c.col1, table.c.col2).in_(
[(1, 2), (5, 12), (10, 19)]
)
-
+
"""
return _Tuple(*expr)
def type_coerce(expr, type_):
"""Coerce the given expression into the given type, on the Python side only.
-
+
    :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
"CAST" expression is rendered - the given type is only applied towards
expression typing and against received result values.
-
+
e.g.::
-
+
from sqlalchemy.types import TypeDecorator
import uuid
-
+
class AsGuid(TypeDecorator):
impl = String
@@ -709,25 +709,25 @@ def type_coerce(expr, type_):
return str(value)
else:
return None
-
+
def process_result_value(self, value, dialect):
if value is not None:
return uuid.UUID(value)
else:
return None
-
+
conn.execute(
select([type_coerce(mytable.c.ident, AsGuid)]).\\
where(
type_coerce(mytable.c.ident, AsGuid) ==
uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
)
- )
-
+ )
+
"""
if hasattr(expr, '__clause_expr__'):
return type_coerce(expr.__clause_expr__())
-
+
elif not isinstance(expr, Visitable):
if expr is None:
return null()
@@ -735,8 +735,8 @@ def type_coerce(expr, type_):
return literal(expr, type_=type_)
else:
return _Label(None, expr, type_=type_)
-
-
+
+
def label(name, obj):
"""Return a :class:`_Label` object for the
given :class:`ColumnElement`.
@@ -826,14 +826,14 @@ def bindparam(key, value=None, type_=None, unique=False, required=False, callabl
Initial value for this bind param. This value may be
overridden by the dictionary of parameters sent to statement
compilation/execution.
-
+
:param callable\_:
A callable function that takes the place of "value". The function
will be called at statement execution time to determine the
ultimate value. Used for scenarios where the actual bind
value cannot be determined at the point at which the clause
      construct is created, but embedded bind values are still desirable.
-
+
:param type\_:
A ``TypeEngine`` object that will be used to pre-process the
value corresponding to this :class:`_BindParamClause` at
@@ -844,10 +844,10 @@ def bindparam(key, value=None, type_=None, unique=False, required=False, callabl
modified if another :class:`_BindParamClause` of the same name
already has been located within the containing
:class:`ClauseElement`.
-
+
:param required:
a value is required at execution time.
-
+
"""
if isinstance(key, ColumnClause):
return _BindParamClause(key.name, value, type_=key.type,
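# A short sketch applying the parameters documented above; the table and
# connection here are assumptions made for the example.
from sqlalchemy import MetaData, Table, Column, Integer, Unicode
from sqlalchemy.sql import select, bindparam

metadata = MetaData()
users = Table('users', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', Unicode(50)))

# type_=Unicode pre-processes the value at execution time; the actual
# value arrives with the execute() call rather than at construction
stmt = select([users]).where(users.c.name == bindparam('uname', type_=Unicode))
# result = connection.execute(stmt, uname=u'jack')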
@@ -873,65 +873,65 @@ def outparam(key, type_=None):
def text(text, bind=None, *args, **kwargs):
"""Create a SQL construct that is represented by a literal string.
-
+
E.g.::
-
+
t = text("SELECT * FROM users")
result = connection.execute(t)
-
+
The advantages :func:`text` provides over a plain string are
backend-neutral support for bind parameters, per-statement
execution options, as well as
bind parameter and result-column typing behavior, allowing
SQLAlchemy type constructs to play a role when executing
a statement that is specified literally.
-
+
Bind parameters are specified by name, using the format ``:name``.
E.g.::
-
+
t = text("SELECT * FROM users WHERE id=:user_id")
result = connection.execute(t, user_id=12)
-
+
To invoke SQLAlchemy typing logic for bind parameters, the
``bindparams`` list allows specification of :func:`bindparam`
constructs which specify the type for a given name::
-
+
t = text("SELECT id FROM users WHERE updated_at>:updated",
bindparams=[bindparam('updated', DateTime())]
)
-
- Typing during result row processing is also an important concern.
+
+ Typing during result row processing is also an important concern.
Result column types
are specified using the ``typemap`` dictionary, where the keys
match the names of columns. These names are taken from what
the DBAPI returns as ``cursor.description``::
-
+
t = text("SELECT id, name FROM users",
typemap={
'id':Integer,
'name':Unicode
}
)
-
+
The :func:`text` construct is used internally for most cases when
a literal string is specified for part of a larger query, such as
within :func:`select()`, :func:`update()`,
:func:`insert()` or :func:`delete()`. In those cases, the same
bind parameter syntax is applied::
-
+
s = select([users.c.id, users.c.name]).where("id=:user_id")
result = connection.execute(s, user_id=12)
-
+
Using :func:`text` explicitly usually implies the construction
of a full, standalone statement. As such, SQLAlchemy refers
to it as an :class:`Executable` object, and it supports
the :meth:`Executable.execution_options` method. For example,
a :func:`text` construct that should be subject to "autocommit"
can be set explicitly so using the ``autocommit`` option::
-
+
t = text("EXEC my_procedural_thing()").\\
execution_options(autocommit=True)
-
+
Note that SQLAlchemy's usual "autocommit" behavior applies to
:func:`text` constructs - that is, statements which begin
with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
@@ -970,7 +970,7 @@ def text(text, bind=None, *args, **kwargs):
def null():
"""Return a :class:`_Null` object, which compiles to ``NULL`` in a sql
statement.
-
+
"""
return _Null()
@@ -1023,24 +1023,24 @@ def _escape_for_generated(x):
return x
else:
return x.replace('%', '%%')
-
+
def _clone(element):
return element._clone()
def _expand_cloned(elements):
"""expand the given set of ClauseElements to be the set of all 'cloned'
predecessors.
-
+
"""
return itertools.chain(*[x._cloned_set for x in elements])
def _select_iterables(elements):
"""expand tables into individual columns in the
given list of column expressions.
-
+
"""
return itertools.chain(*[c._select_iterable for c in elements])
-
+
def _cloned_intersection(a, b):
"""return the intersection of sets a and b, counting
any overlap between 'cloned' predecessors.
@@ -1072,7 +1072,7 @@ def _column_as_key(element):
if hasattr(element, '__clause_element__'):
element = element.__clause_element__()
return element.key
-
+
def _literal_as_text(element):
if hasattr(element, '__clause_element__'):
return element.__clause_element__()
@@ -1088,7 +1088,7 @@ def _clause_element_as_expr(element):
return element.__clause_element__()
else:
return element
-
+
def _literal_as_column(element):
if isinstance(element, Visitable):
return element
@@ -1133,7 +1133,7 @@ def _only_column_elements(element, name):
raise exc.ArgumentError("Column-based expression object expected for argument '%s'; "
"got: '%s', type %s" % (name, element, type(element)))
return element
-
+
def _corresponding_column_or_error(fromclause, column,
require_embedded=False):
c = fromclause.corresponding_column(column,
@@ -1159,14 +1159,14 @@ def _generative(fn, *args, **kw):
def is_column(col):
"""True if ``col`` is an instance of :class:`ColumnElement`."""
-
+
return isinstance(col, ColumnElement)
class ClauseElement(Visitable):
"""Base class for elements of a programmatically constructed SQL
expression.
-
+
"""
__visit_name__ = 'clause'
@@ -1174,7 +1174,7 @@ class ClauseElement(Visitable):
supports_execution = False
_from_objects = []
_bind = None
-
+
def _clone(self):
"""Create a shallow copy of this ClauseElement.
@@ -1199,9 +1199,9 @@ class ClauseElement(Visitable):
@property
def _constructor(self):
"""return the 'constructor' for this ClauseElement.
-
+
        This is for the purposes of creating a new object of
- this type. Usually, its just the element's __class__.
+        this type. Usually, it's just the element's __class__.
However, the "Annotated" version of the object overrides
to return the class of its proxied element.
@@ -1229,7 +1229,7 @@ class ClauseElement(Visitable):
d = self.__dict__.copy()
d.pop('_is_clone_of', None)
return d
-
+
if util.jython:
def __hash__(self):
"""Return a distinct hash code.
@@ -1240,18 +1240,18 @@ class ClauseElement(Visitable):
unique values on platforms with moving GCs.
"""
return id(self)
-
+
def _annotate(self, values):
"""return a copy of this ClauseElement with the given annotations
dictionary.
-
+
"""
return sqlutil.Annotated(self, values)
def _deannotate(self):
"""return a copy of this ClauseElement with an empty annotations
dictionary.
-
+
"""
return self._clone()
@@ -1299,7 +1299,7 @@ class ClauseElement(Visitable):
Subclasses should override the default behavior, which is a
straight identity comparison.
-
+
\**kw are arguments consumed by subclass compare() methods and
may be used to modify the criteria for comparison.
(see :class:`ColumnElement`)
@@ -1332,7 +1332,7 @@ class ClauseElement(Visitable):
def self_group(self, against=None):
"""Apply a 'grouping' to this :class:`.ClauseElement`.
-
+
This method is overridden by subclasses to return a
"grouping" construct, i.e. parenthesis. In particular
it's used by "binary" expressions to provide a grouping
@@ -1342,7 +1342,7 @@ class ClauseElement(Visitable):
        subqueries should normally be created using the
:func:`.Select.alias` method, as many platforms require
nested SELECT statements to be named).
-
+
As expressions are composed together, the application of
:meth:`self_group` is automatic - end-user code should never
need to use this method directly. Note that SQLAlchemy's
@@ -1350,7 +1350,7 @@ class ClauseElement(Visitable):
so parenthesis might not be needed, for example, in
an expression like ``x OR (y AND z)`` - AND takes precedence
over OR.
-
+
The base :meth:`self_group` method of :class:`.ClauseElement`
just returns self.
"""
@@ -1365,7 +1365,7 @@ class ClauseElement(Visitable):
def bind(self):
"""Returns the Engine or Connection to which this ClauseElement is
bound, or None if none found.
-
+
"""
if self._bind is not None:
return self._bind
@@ -1378,14 +1378,14 @@ class ClauseElement(Visitable):
return engine
else:
return None
-
+
@util.pending_deprecation('0.7',
'Only SQL expressions which subclass '
':class:`.Executable` may provide the '
':func:`.execute` method.')
def execute(self, *multiparams, **params):
"""Compile and execute this :class:`ClauseElement`.
-
+
"""
e = self.bind
if e is None:
@@ -1405,7 +1405,7 @@ class ClauseElement(Visitable):
def scalar(self, *multiparams, **params):
"""Compile and execute this :class:`ClauseElement`, returning
the result's scalar representation.
-
+
"""
return self.execute(*multiparams, **params).scalar()
@@ -1442,7 +1442,7 @@ class ClauseElement(Visitable):
associated with a primary key `Column`.
"""
-
+
if not dialect:
if bind:
dialect = bind.dialect
@@ -1454,13 +1454,13 @@ class ClauseElement(Visitable):
c= self._compiler(dialect, bind=bind, **kw)
#c.string = c.process(c.statement)
return c
-
+
def _compiler(self, dialect, **kw):
"""Return a compiler appropriate for this ClauseElement, given a
Dialect."""
-
+
return dialect.statement_compiler(dialect, self, **kw)
-
+
def __str__(self):
# Py3K
#return unicode(self.compile())
@@ -1544,7 +1544,7 @@ class ColumnOperators(Operators):
return self.operate(operators.le, other)
__hash__ = Operators.__hash__
-
+
def __eq__(self, other):
return self.operate(operators.eq, other)
@@ -1673,12 +1673,12 @@ class _CompareMixin(ColumnOperators):
def __operate(self, op, obj, reverse=False):
obj = self._check_literal(op, obj)
-
+
if reverse:
left, right = obj, self
else:
left, right = self, obj
-
+
if left.type is None:
op, result_type = sqltypes.NULLTYPE._adapt_expression(op,
right.type)
@@ -1689,7 +1689,7 @@ class _CompareMixin(ColumnOperators):
op, result_type = left.type._adapt_expression(op,
right.type)
return _BinaryExpression(left, right, op, type_=result_type)
-
+
# a mapping of operators with the method they use, along with their negated
# operator for comparison operators
@@ -1722,12 +1722,12 @@ class _CompareMixin(ColumnOperators):
def in_(self, other):
"""Compare this element to the given element or collection using IN."""
-
+
return self._in_impl(operators.in_op, operators.notin_op, other)
def _in_impl(self, op, negate_op, seq_or_selectable):
seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
-
+
if isinstance(seq_or_selectable, _ScalarSelect):
return self.__compare(op, seq_or_selectable,
negate=negate_op)
@@ -1743,8 +1743,8 @@ class _CompareMixin(ColumnOperators):
elif isinstance(seq_or_selectable, (Selectable, _TextClause)):
return self.__compare(op, seq_or_selectable,
negate=negate_op)
-
-
+
+
# Handle non selectable arguments as sequences
args = []
@@ -1778,7 +1778,7 @@ class _CompareMixin(ColumnOperators):
def __neg__(self):
return _UnaryExpression(self, operator=operators.neg)
-
+
def startswith(self, other, escape=None):
"""Produce the clause ``LIKE '<other>%'``"""
@@ -1821,7 +1821,7 @@ class _CompareMixin(ColumnOperators):
def label(self, name):
"""Produce a column label, i.e. ``<columnname> AS <name>``.
-
+
This is a shortcut to the :func:`~.expression.label` function.
        If 'name' is None, an anonymous label name will be generated.
@@ -1894,7 +1894,7 @@ class _CompareMixin(ColumnOperators):
somecolumn.op('&')(0xff)
is a bitwise AND of the value in somecolumn.
-
+
"""
return lambda other: self.__operate(operator, other)
@@ -1948,7 +1948,7 @@ class ColumnElement(ClauseElement, _CompareMixin):
foreign_keys = []
quote = None
_label = None
-
+
@property
def _select_iterable(self):
return (self, )
@@ -1986,7 +1986,7 @@ class ColumnElement(ClauseElement, _CompareMixin):
key = str(self)
else:
key = name
-
+
co = ColumnClause(name, selectable, type_=getattr(self,
'type', None))
co.proxies = [self]
@@ -1995,12 +1995,12 @@ class ColumnElement(ClauseElement, _CompareMixin):
def compare(self, other, use_proxies=False, equivalents=None, **kw):
"""Compare this ColumnElement to another.
-
+
Special arguments understood:
-
+
:param use_proxies: when True, consider two columns that
share a common base column as equivalent (i.e. shares_lineage())
-
+
:param equivalents: a dictionary of columns as keys mapped to sets
of columns. If the given "other" column is present in this
        dictionary, if any of the columns in the corresponding set() pass the
@@ -2051,10 +2051,10 @@ class ColumnCollection(util.OrderedProperties):
super(ColumnCollection, self).__init__()
self._data.update((c.key, c) for c in cols)
self.__dict__['_all_cols'] = util.column_set(self)
-
+
def __str__(self):
return repr([str(c) for c in self])
-
+
def replace(self, column):
"""add the given column to this collection, removing unaliased
versions of this column as well as existing columns with the
@@ -2089,13 +2089,13 @@ class ColumnCollection(util.OrderedProperties):
"""
self[column.key] = column
-
+
def __delitem__(self, key):
raise NotImplementedError()
def __setattr__(self, key, object):
raise NotImplementedError()
-
+
def __setitem__(self, key, value):
if key in self:
@@ -2116,7 +2116,7 @@ class ColumnCollection(util.OrderedProperties):
def clear(self):
self._data.clear()
self._all_cols.clear()
-
+
def remove(self, column):
del self._data[column.key]
self._all_cols.remove(column)
@@ -2125,12 +2125,12 @@ class ColumnCollection(util.OrderedProperties):
self._data.update(value)
self._all_cols.clear()
self._all_cols.update(self._data.values())
-
+
def extend(self, iter):
self.update((c.key, c) for c in iter)
__hash__ = None
-
+
def __eq__(self, other):
l = []
for c in other:
@@ -2151,15 +2151,15 @@ class ColumnCollection(util.OrderedProperties):
def contains_column(self, col):
# this has to be done via set() membership
return col in self._all_cols
-
+
def as_immutable(self):
return ImmutableColumnCollection(self._data, self._all_cols)
-
+
class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
def __init__(self, data, colset):
util.ImmutableProperties.__init__(self, data)
self.__dict__['_all_cols'] = colset
-
+
extend = remove = util.ImmutableProperties._immutable
@@ -2192,7 +2192,7 @@ class Selectable(ClauseElement):
class FromClause(Selectable):
"""Represent an element that can be used within the ``FROM``
clause of a ``SELECT`` statement.
-
+
"""
__visit_name__ = 'fromclause'
named_with_column = False
@@ -2233,19 +2233,19 @@ class FromClause(Selectable):
def alias(self, name=None):
"""return an alias of this :class:`FromClause`.
-
+
For table objects, this has the effect of the table being rendered
- as ``tablename AS aliasname`` in a SELECT statement.
+ as ``tablename AS aliasname`` in a SELECT statement.
For select objects, the effect is that of creating a named
subquery, i.e. ``(select ...) AS aliasname``.
The :func:`alias()` method is the general way to create
a "subquery" out of an existing SELECT.
-
+
The ``name`` parameter is optional, and if left blank an
"anonymous" name will be generated at compile time, guaranteed
to be unique against other anonymous constructs used in the
same statement.
-
+
"""
return Alias(self, name)
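# A brief sketch of named vs. anonymous aliases as described above; the
# table name is illustrative.
from sqlalchemy import MetaData, Table, Column, Integer
from sqlalchemy.sql import select

metadata = MetaData()
users = Table('users', metadata, Column('id', Integer, primary_key=True))

u = users.alias('u')              # renders as "users AS u"
anon = select([users]).alias()    # name such as "anon_1" assigned at compile time
print(select([u.c.id]))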
@@ -2262,7 +2262,7 @@ class FromClause(Selectable):
def replace_selectable(self, old, alias):
"""replace all occurences of FromClause 'old' with the given Alias
object, returning a copy of this :class:`FromClause`.
-
+
"""
return sqlutil.ClauseAdapter(alias).traverse(self)
@@ -2270,7 +2270,7 @@ class FromClause(Selectable):
def correspond_on_equivalents(self, column, equivalents):
"""Return corresponding_column for the given column, or if None
search for a match in the given dictionary.
-
+
"""
col = self.corresponding_column(column, require_embedded=True)
if col is None and col in equivalents:
@@ -2286,16 +2286,16 @@ class FromClause(Selectable):
which corresponds to that original
          :class:`~sqlalchemy.schema.Column` via a common ancestor
column.
-
+
:param column: the target :class:`ColumnElement` to be matched
-
+
:param require_embedded: only return corresponding columns for
the given :class:`ColumnElement`, if the given
:class:`ColumnElement` is actually present within a sub-element
of this :class:`FromClause`. Normally the column will match if
          it merely shares a common ancestor with one of the exported
columns of this :class:`FromClause`.
-
+
"""
        # don't dig around if the column is locally present
@@ -2365,42 +2365,42 @@ class FromClause(Selectable):
def columns(self):
"""Return the collection of Column objects contained by this
FromClause."""
-
+
if '_columns' not in self.__dict__:
self._init_collections()
self._populate_column_collection()
return self._columns.as_immutable()
-
+
@util.memoized_property
def primary_key(self):
"""Return the collection of Column objects which comprise the
primary key of this FromClause."""
-
+
self._init_collections()
self._populate_column_collection()
return self.primary_key
-
+
@util.memoized_property
def foreign_keys(self):
"""Return the collection of ForeignKey objects which this
FromClause references."""
-
+
self._init_collections()
self._populate_column_collection()
return self.foreign_keys
c = property(attrgetter('columns'))
_select_iterable = property(attrgetter('columns'))
-
+
def _init_collections(self):
assert '_columns' not in self.__dict__
assert 'primary_key' not in self.__dict__
assert 'foreign_keys' not in self.__dict__
-
+
self._columns = ColumnCollection()
self.primary_key = ColumnSet()
self.foreign_keys = set()
-
+
def _populate_column_collection(self):
pass
@@ -2433,14 +2433,14 @@ class _BindParamClause(ColumnElement):
Initial value for this bind param. This value may be
overridden by the dictionary of parameters sent to statement
compilation/execution.
-
+
:param callable\_:
A callable function that takes the place of "value". The function
will be called at statement execution time to determine the
ultimate value. Used for scenarios where the actual bind
value cannot be determined at the point at which the clause
      construct is created, but embedded bind values are still desirable.
-
+
:param type\_:
A ``TypeEngine`` object that will be used to pre-process the
value corresponding to this :class:`_BindParamClause` at
@@ -2451,10 +2451,10 @@ class _BindParamClause(ColumnElement):
modified if another :class:`_BindParamClause` of the same name
already has been located within the containing
:class:`ClauseElement`.
-
+
:param required:
a value is required at execution time.
-
+
:param isoutparam:
if True, the parameter should be treated like a stored procedure
"OUT" parameter.
@@ -2484,7 +2484,7 @@ class _BindParamClause(ColumnElement):
self.type = type_()
else:
self.type = type_
-
+
def _clone(self):
c = ClauseElement._clone(self)
if self.unique:
@@ -2537,9 +2537,9 @@ class _TypeClause(ClauseElement):
class _Generative(object):
"""Allow a ClauseElement to generate itself via the
@_generative decorator.
-
+
"""
-
+
def _generate(self):
s = self.__class__.__new__(self.__class__)
s.__dict__ = self.__dict__.copy()
@@ -2548,11 +2548,11 @@ class _Generative(object):
class Executable(_Generative):
"""Mark a ClauseElement as supporting execution.
-
+
:class:`Executable` is a superclass for all "statement" types
of objects, including :func:`select`, :func:`delete`, :func:`update`,
:func:`insert`, :func:`text`.
-
+
"""
supports_execution = True
@@ -2562,9 +2562,9 @@ class Executable(_Generative):
def execution_options(self, **kw):
""" Set non-SQL options for the statement which take effect during
execution.
-
+
Current options include:
-
+
* autocommit - when True, a COMMIT will be invoked after execution
when executed in 'autocommit' mode, i.e. when an explicit
transaction is not begun on the connection. Note that DBAPI
@@ -2576,7 +2576,7 @@ class Executable(_Generative):
constructs do not. Use this option when invoking a SELECT or other
specific SQL construct where COMMIT is desired (typically when
calling stored procedures and such).
-
+
* stream_results - indicate to the dialect that results should be
"streamed" and not pre-buffered, if possible. This is a limitation
of many DBAPIs. The flag is currently understood only by the
@@ -2592,19 +2592,19 @@ class Executable(_Generative):
as well as the "batch" mode for an INSERT or UPDATE statement.
The format of this dictionary is not guaranteed to stay the
same in future releases.
-
+
This option is usually more appropriate
to use via the
:meth:`sqlalchemy.engine.base.Connection.execution_options()`
method of :class:`Connection`, rather than upon individual
statement objects, though the effect is the same.
-
+
See also:
-
+
:meth:`sqlalchemy.engine.base.Connection.execution_options()`
:meth:`sqlalchemy.orm.query.Query.execution_options()`
-
+
"""
self._execution_options = self._execution_options.union(kw)
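# A minimal sketch of ``stream_results`` from the list above; per the note,
# only the psycopg2 dialect currently honors it and other backends simply
# ignore the flag.  The table here is an assumption for the example.
from sqlalchemy import MetaData, Table, Column, Integer
from sqlalchemy.sql import select

metadata = MetaData()
events_tbl = Table('events', metadata, Column('id', Integer, primary_key=True))

# with psycopg2 this requests a server-side cursor, so rows are fetched
# incrementally instead of being pre-buffered by the DBAPI
stmt = select([events_tbl]).execution_options(stream_results=True)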
@@ -2625,13 +2625,13 @@ class Executable(_Generative):
def scalar(self, *multiparams, **params):
"""Compile and execute this :class:`.Executable`, returning the
result's scalar representation.
-
+
"""
return self.execute(*multiparams, **params).scalar()
# legacy, some outside users may be calling this
_Executable = Executable
-
+
class _TextClause(Executable, ClauseElement):
"""Represent a literal SQL text fragment.
@@ -2645,7 +2645,7 @@ class _TextClause(Executable, ClauseElement):
_execution_options = \
Executable._execution_options.union({'autocommit'
: PARSE_AUTOCOMMIT})
-
+
@property
def _select_iterable(self):
return (self,)
@@ -2685,7 +2685,7 @@ class _TextClause(Executable, ClauseElement):
if bindparams is not None:
for b in bindparams:
self.bindparams[b.key] = b
-
+
@property
def type(self):
if self.typemap is not None and len(self.typemap) == 1:
@@ -2740,14 +2740,14 @@ class ClauseList(ClauseElement):
self.clauses = [
_literal_as_text(clause)
for clause in clauses if clause is not None]
-
+
@util.memoized_property
def type(self):
if self.clauses:
return self.clauses[0].type
else:
return sqltypes.NULLTYPE
-
+
def __iter__(self):
return iter(self.clauses)
@@ -2815,7 +2815,7 @@ class BooleanClauseList(ClauseList, ColumnElement):
return (self, )
class _Tuple(ClauseList, ColumnElement):
-
+
def __init__(self, *clauses, **kw):
clauses = [_literal_as_binds(c) for c in clauses]
super(_Tuple, self).__init__(*clauses, **kw)
@@ -2831,7 +2831,7 @@ class _Tuple(ClauseList, ColumnElement):
_compared_to_type=self.type, unique=True)
for o in obj
]).self_group()
-
+
class _Case(ColumnElement):
__visit_name__ = 'case'
@@ -2893,7 +2893,7 @@ class _Case(ColumnElement):
class FunctionElement(Executable, ColumnElement, FromClause):
"""Base for SQL function-oriented constructs."""
-
+
def __init__(self, *clauses, **kwargs):
args = [_literal_as_binds(c, self.name) for c in clauses]
self.clause_expr = ClauseList(
@@ -2937,7 +2937,7 @@ class FunctionElement(Executable, ColumnElement, FromClause):
return _BindParamClause(None, obj, _compared_to_operator=operator,
_compared_to_type=self.type, unique=True)
-
+
class Function(FunctionElement):
"""Describe a named SQL function."""
@@ -2948,7 +2948,7 @@ class Function(FunctionElement):
self.name = name
self._bind = kw.get('bind', None)
self.type = sqltypes.to_instance(kw.get('type_', None))
-
+
FunctionElement.__init__(self, *clauses, **kw)
def _bind_param(self, operator, obj):
@@ -3069,13 +3069,13 @@ class _BinaryExpression(ColumnElement):
self.modifiers = {}
else:
self.modifiers = modifiers
-
+
def __nonzero__(self):
try:
return self.operator(hash(self.left), hash(self.right))
except:
raise TypeError("Boolean value of this clause is not defined")
-
+
@property
def _from_objects(self):
return self.left._from_objects + self.right._from_objects
@@ -3149,7 +3149,7 @@ class _Exists(_UnaryExpression):
def select_from(self, clause):
"""return a new exists() construct with the given expression set as
its FROM clause.
-
+
"""
e = self._clone()
e.element = self.element.select_from(clause).self_group()
@@ -3158,7 +3158,7 @@ class _Exists(_UnaryExpression):
def where(self, clause):
"""return a new exists() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
-
+
"""
e = self._clone()
e.element = self.element.where(clause).self_group()
@@ -3326,7 +3326,7 @@ class Alias(FromClause):
except AttributeError:
raise AttributeError("Element %s does not support "
"'as_scalar()'" % self.element)
-
+
def is_derived_from(self, fromclause):
if fromclause in self._cloned_set:
return True
@@ -3400,10 +3400,10 @@ class _FromGrouping(FromClause):
def __init__(self, element):
self.element = element
-
+
def _init_collections(self):
pass
-
+
@property
def columns(self):
return self.element.columns
@@ -3466,7 +3466,7 @@ class _Label(ColumnElement):
self._type = type_
self.quote = element.quote
self.proxies = [element]
-
+
@util.memoized_property
def type(self):
return sqltypes.to_instance(
@@ -3476,7 +3476,7 @@ class _Label(ColumnElement):
@util.memoized_property
def element(self):
return self._element.self_group(against=operators.as_)
-
+
def self_group(self, against=None):
sub_element = self._element.self_group(against=against)
if sub_element is not self._element:
@@ -3485,7 +3485,7 @@ class _Label(ColumnElement):
type_=self._type)
else:
return self._element
-
+
@property
def primary_key(self):
return self.element.primary_key
@@ -3509,7 +3509,7 @@ class _Label(ColumnElement):
e = self.element._make_proxy(selectable, name=self.name)
else:
e = column(self.name)._make_proxy(selectable=selectable)
-
+
e.proxies.append(self)
return e
@@ -3549,14 +3549,14 @@ class ColumnClause(_Immutable, ColumnElement):
self.table = selectable
self.type = sqltypes.to_instance(type_)
self.is_literal = is_literal
-
+
@util.memoized_property
def _from_objects(self):
if self.table is not None:
return [self.table]
else:
return []
-
+
@util.memoized_property
def description(self):
# Py3K
@@ -3634,7 +3634,7 @@ class TableClause(_Immutable, FromClause):
__visit_name__ = 'table'
named_with_column = True
-
+
def __init__(self, name, *columns):
super(TableClause, self).__init__()
self.name = self.fullname = name
@@ -3643,10 +3643,10 @@ class TableClause(_Immutable, FromClause):
self.foreign_keys = set()
for c in columns:
self.append_column(c)
-
+
def _init_collections(self):
pass
-
+
@util.memoized_property
def description(self):
# Py3K
@@ -3668,7 +3668,7 @@ class TableClause(_Immutable, FromClause):
def count(self, whereclause=None, **params):
"""return a SELECT COUNT generated against this
:class:`TableClause`."""
-
+
if self.primary_key:
col = list(self.primary_key)[0]
else:
@@ -3704,7 +3704,7 @@ class _SelectBase(Executable, FromClause):
_order_by_clause = ClauseList()
_group_by_clause = ClauseList()
-
+
def __init__(self,
use_labels=False,
for_update=False,
@@ -3726,12 +3726,12 @@ class _SelectBase(Executable, FromClause):
self._limit = limit
self._offset = offset
self._bind = bind
-
+
if order_by is not None:
self._order_by_clause = ClauseList(*util.to_list(order_by))
if group_by is not None:
self._group_by_clause = ClauseList(*util.to_list(group_by))
-
+
def as_scalar(self):
"""return a 'scalar' representation of this selectable, which can be
used as a column expression.
@@ -3806,10 +3806,10 @@ class _SelectBase(Executable, FromClause):
def order_by(self, *clauses):
"""return a new selectable with the given list of ORDER BY
criterion applied.
-
+
The criterion will be appended to any pre-existing ORDER BY
criterion.
-
+
"""
self.append_order_by(*clauses)
@@ -3818,10 +3818,10 @@ class _SelectBase(Executable, FromClause):
def group_by(self, *clauses):
"""return a new selectable with the given list of GROUP BY
criterion applied.
-
+
The criterion will be appended to any pre-existing GROUP BY
criterion.
-
+
"""
self.append_group_by(*clauses)
@@ -3889,7 +3889,7 @@ class CompoundSelect(_SelectBase):
EXCEPT_ALL = util.symbol('EXCEPT ALL')
INTERSECT = util.symbol('INTERSECT')
INTERSECT_ALL = util.symbol('INTERSECT ALL')
-
+
def __init__(self, keyword, *selects, **kwargs):
self._should_correlate = kwargs.pop('correlate', False)
self.keyword = keyword
@@ -3900,7 +3900,7 @@ class CompoundSelect(_SelectBase):
# some DBs do not like ORDER BY in the inner queries of a UNION, etc.
for n, s in enumerate(selects):
s = _clause_element_as_expr(s)
-
+
if not numcols:
numcols = len(s.c)
elif len(s.c) != numcols:
@@ -3913,10 +3913,10 @@ class CompoundSelect(_SelectBase):
self.selects.append(s.self_group(self))
_SelectBase.__init__(self, **kwargs)
-
+
def _scalar_type(self):
return self.selects[0]._scalar_type()
-
+
def self_group(self, against=None):
return _FromGrouping(self)
@@ -3950,7 +3950,7 @@ class CompoundSelect(_SelectBase):
proxy.proxies = [c._annotate({'weight': i + 1}) for (i,
c) in enumerate(cols)]
-
+
def _copy_internals(self, clone=_clone):
self._reset_exported()
self.selects = [clone(s) for s in self.selects]
@@ -3987,10 +3987,10 @@ class Select(_SelectBase):
"""
__visit_name__ = 'select'
-
+
_prefixes = ()
_hints = util.frozendict()
-
+
def __init__(self,
columns,
whereclause=None,
@@ -4015,13 +4015,13 @@ class Select(_SelectBase):
self._correlate = set()
self._froms = util.OrderedSet()
-
+
try:
cols_present = bool(columns)
except TypeError:
raise exc.ArgumentError("columns argument to select() must "
"be a Python list or other iterable")
-
+
if cols_present:
self._raw_columns = []
for c in columns:
@@ -4099,12 +4099,12 @@ class Select(_SelectBase):
"""Return the displayed list of FromClause elements."""
return self._get_display_froms()
-
+
@_generative
def with_hint(self, selectable, text, dialect_name='*'):
"""Add an indexing hint for the given selectable to this
:class:`Select`.
-
+
The text of the hint is rendered in the appropriate
location for the database backend in use, relative
to the given :class:`.Table` or :class:`.Alias` passed as the
@@ -4113,25 +4113,25 @@ class Select(_SelectBase):
with the token ``%(name)s`` to render the name of
the table or alias. E.g. when using Oracle, the
following::
-
+
select([mytable]).\\
with_hint(mytable, "+ index(%(name)s ix_mytable)")
-
+
Would render SQL as::
-
+
select /*+ index(mytable ix_mytable) */ ... from mytable
-
+
The ``dialect_name`` option will limit the rendering of a particular
hint to a particular backend. Such as, to add hints for both Oracle
and Sybase simultaneously::
-
+
select([mytable]).\\
with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
-
+
"""
self._hints = self._hints.union({(selectable, dialect_name):text})
-
+
@property
def type(self):
raise exc.InvalidRequestError("Select objects don't have a type. "
@@ -4141,7 +4141,7 @@ class Select(_SelectBase):
@util.memoized_instancemethod
def locate_all_froms(self):
"""return a Set of all FromClause elements referenced by this Select.
-
+
This set is a superset of that returned by the ``froms`` property,
which is specifically for those FromClause elements that would
actually be rendered.
@@ -4192,7 +4192,7 @@ class Select(_SelectBase):
def column(self, column):
"""return a new select() construct with the given column expression
added to its columns clause.
-
+
"""
column = _literal_as_column(column)
@@ -4207,7 +4207,7 @@ class Select(_SelectBase):
def with_only_columns(self, columns):
"""return a new select() construct with its columns clause replaced
with the given columns.
-
+
"""
self._raw_columns = [
@@ -4220,7 +4220,7 @@ class Select(_SelectBase):
def where(self, whereclause):
"""return a new select() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
-
+
"""
self.append_whereclause(whereclause)
@@ -4229,7 +4229,7 @@ class Select(_SelectBase):
def having(self, having):
"""return a new select() construct with the given expression added to
its HAVING clause, joined to the existing clause via AND, if any.
-
+
"""
self.append_having(having)
@@ -4237,7 +4237,7 @@ class Select(_SelectBase):
def distinct(self):
"""return a new select() construct which will apply DISTINCT to its
columns clause.
-
+
"""
self._distinct = True
@@ -4263,15 +4263,15 @@ class Select(_SelectBase):
def correlate(self, *fromclauses):
"""return a new select() construct which will correlate the given FROM
clauses to that of an enclosing select(), if a match is found.
-
+
By "match", the given fromclause must be present in this select's
list of FROM objects and also present in an enclosing select's list of
FROM objects.
-
+
Calling this method turns off the select's default behavior of
"auto-correlation". Normally, select() auto-correlates all of its FROM
clauses to those of an embedded select when compiled.
-
+
If the fromclause is None, correlation is disabled for the returned
select().
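# A compact sketch of the auto-correlation behavior described above; the
# table names are illustrative.
from sqlalchemy import MetaData, Table, Column, Integer
from sqlalchemy.sql import select

metadata = MetaData()
users = Table('users', metadata, Column('id', Integer, primary_key=True))
addresses = Table('addresses', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('user_id', Integer))

subq = select([addresses.c.id]).where(addresses.c.user_id == users.c.id)
# embedded in an enclosing select, the subquery auto-correlates and omits
# "users" from its own FROM list; subq.correlate(None) would instead
# render the inner FROM in full
print(select([users.c.id, subq.as_scalar()]))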
@@ -4292,7 +4292,7 @@ class Select(_SelectBase):
def append_column(self, column):
"""append the given column expression to the columns clause of this
select() construct.
-
+
"""
column = _literal_as_column(column)
@@ -4306,7 +4306,7 @@ class Select(_SelectBase):
def append_prefix(self, clause):
"""append the given columns clause prefix expression to this select()
construct.
-
+
"""
clause = _literal_as_text(clause)
self._prefixes = self._prefixes + (clause,)
@@ -4365,7 +4365,7 @@ class Select(_SelectBase):
def self_group(self, against=None):
"""return a 'grouping' construct as per the ClauseElement
specification.
-
+
This produces an element that can be embedded in an expression. Note
that this method is called automatically as needed when constructing
expressions.
@@ -4384,7 +4384,7 @@ class Select(_SelectBase):
def union_all(self, other, **kwargs):
"""return a SQL UNION ALL of this select() construct against the given
selectable.
-
+
"""
return union_all(self, other, **kwargs)
@@ -4397,21 +4397,21 @@ class Select(_SelectBase):
def except_all(self, other, **kwargs):
"""return a SQL EXCEPT ALL of this select() construct against the
given selectable.
-
+
"""
return except_all(self, other, **kwargs)
def intersect(self, other, **kwargs):
"""return a SQL INTERSECT of this select() construct against the given
selectable.
-
+
"""
return intersect(self, other, **kwargs)
def intersect_all(self, other, **kwargs):
"""return a SQL INTERSECT ALL of this select() construct against the
given selectable.
-
+
"""
return intersect_all(self, other, **kwargs)
@@ -4444,7 +4444,7 @@ class _UpdateBase(Executable, ClauseElement):
_execution_options = \
Executable._execution_options.union({'autocommit': True})
kwargs = util.frozendict()
-
+
def _process_colparams(self, parameters):
if isinstance(parameters, (list, tuple)):
pp = {}
@@ -4478,21 +4478,21 @@ class _UpdateBase(Executable, ClauseElement):
"use statement.returning(col1, col2, ...)" % k
)
return kwargs
-
+
@_generative
def returning(self, *cols):
"""Add a RETURNING or equivalent clause to this statement.
-
+
        The given list of columns represents columns within the table that is
the target of the INSERT, UPDATE, or DELETE. Each element can be any
column expression. :class:`~sqlalchemy.schema.Table` objects will be
expanded into their individual columns.
-
+
Upon compilation, a RETURNING clause, or database equivalent,
will be rendered within the statement. For INSERT and UPDATE,
the values are the newly inserted/updated values. For DELETE,
the values are those of the rows which were deleted.
-
+
Upon execution, the values of the columns to be returned
are made available via the result set and can be iterated
using ``fetchone()`` and similar. For DBAPIs which do not
@@ -4500,7 +4500,7 @@ class _UpdateBase(Executable, ClauseElement):
SQLAlchemy will approximate this behavior at the result level
so that a reasonable amount of behavioral neutrality is
provided.
-
+
Note that not all databases/DBAPIs
support RETURNING. For those backends with no support,
an exception is raised upon compilation and/or execution.
@@ -4509,10 +4509,10 @@ class _UpdateBase(Executable, ClauseElement):
and other statements which return multiple rows. Please
read the documentation notes for the database in use in
order to determine the availability of RETURNING.
-
+
"""
self._returning = cols
-
+
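# A minimal sketch of returning(), assuming a RETURNING-capable backend
# such as postgresql; per the note above, unsupported backends raise at
# compilation and/or execution time.
from sqlalchemy import MetaData, Table, Column, Integer, String

metadata = MetaData()
users = Table('users', metadata,
              Column('id', Integer, primary_key=True),
              Column('name', String(40)))

stmt = users.insert().returning(users.c.id).values(name='jack')
# row = connection.execute(stmt).fetchone()   # row['id'] -> new primary key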
class _ValuesBase(_UpdateBase):
__visit_name__ = 'values_base'
@@ -4555,9 +4555,9 @@ class Insert(_ValuesBase):
"""
__visit_name__ = 'insert'
-
+
_prefixes = ()
-
+
def __init__(self,
table,
values=None,
@@ -4573,7 +4573,7 @@ class Insert(_ValuesBase):
self._returning = returning
if prefixes:
self._prefixes = tuple([_literal_as_text(p) for p in prefixes])
-
+
if kwargs:
self.kwargs = self._process_deprecated_kw(kwargs)
@@ -4641,7 +4641,7 @@ class Update(_ValuesBase):
def where(self, whereclause):
"""return a new update() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
-
+
"""
if self._whereclause is not None:
self._whereclause = and_(self._whereclause,
@@ -4668,7 +4668,7 @@ class Delete(_UpdateBase):
self._bind = bind
self.table = table
self._returning = returning
-
+
if whereclause is not None:
self._whereclause = _literal_as_text(whereclause)
else:
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index a85d49a0a..10eaa577b 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -35,7 +35,7 @@ class AnsiFunction(GenericFunction):
class ReturnTypeFromArgs(GenericFunction):
"""Define a function whose return type is the same as its arguments."""
-
+
def __init__(self, *args, **kwargs):
kwargs.setdefault('type_', _type_from_args(args))
GenericFunction.__init__(self, args=args, **kwargs)
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 3f96d5402..494f76f14 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -12,7 +12,7 @@
from operator import (
and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg
)
-
+
# Py2K
from operator import (div,)
# end Py2K
@@ -101,7 +101,7 @@ def is_commutative(op):
return op in _commutative
_associative = _commutative.union([concat_op, and_, or_])
-
+
_smallest = symbol('_smallest')
_largest = symbol('_largest')
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index e4ad7c498..331f74b7c 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -13,7 +13,7 @@ from itertools import chain
def sort_tables(tables):
"""sort a collection of Table objects in order of their foreign-key dependency."""
-
+
tables = list(tables)
tuples = []
def visit_foreign_key(fkey):
@@ -33,7 +33,7 @@ def sort_tables(tables):
tuples.extend(
[parent, table] for parent in table._extra_dependencies
)
-
+
return list(topological.sort(tuples, tables))
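sort_tables() orders tables so that referenced tables come before the tables that reference them; a minimal sketch with hypothetical tables:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.sql.util import sort_tables

    meta = MetaData()
    parent = Table('parent', meta, Column('id', Integer, primary_key=True))
    child = Table('child', meta,
                  Column('id', Integer, primary_key=True),
                  Column('parent_id', Integer, ForeignKey('parent.id')))

    # child carries the foreign key, so parent sorts first
    print([t.name for t in sort_tables([child, parent])])
    # ['parent', 'child']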
def find_join_source(clauses, join_to):
@@ -41,18 +41,18 @@ def find_join_source(clauses, join_to):
return the first index and element from the list of
clauses which can be joined against the selectable. returns
None, None if no match is found.
-
+
e.g.::
-
+
clause1 = table1.join(table2)
clause2 = table4.join(table5)
-
+
join_to = table2.join(table3)
-
+
find_join_source([clause1, clause2], join_to) == clause1
-
+
"""
-
+
selectables = list(expression._from_objects(join_to))
for i, f in enumerate(clauses):
for s in selectables:
@@ -65,23 +65,23 @@ def find_tables(clause, check_columns=False,
include_aliases=False, include_joins=False,
include_selects=False, include_crud=False):
"""locate Table objects within the given expression."""
-
+
tables = []
_visitors = {}
-
+
if include_selects:
_visitors['select'] = _visitors['compound_select'] = tables.append
-
+
if include_joins:
_visitors['join'] = tables.append
-
+
if include_aliases:
_visitors['alias'] = tables.append
-
+
if include_crud:
_visitors['insert'] = _visitors['update'] = \
_visitors['delete'] = lambda ent: tables.append(ent.table)
-
+
if check_columns:
def visit_column(column):
tables.append(column.table)
@@ -94,7 +94,7 @@ def find_tables(clause, check_columns=False,
def find_columns(clause):
"""locate Column objects within the given expression."""
-
+
cols = util.column_set()
visitors.traverse(clause, {}, {'column':cols.add})
return cols
@@ -103,9 +103,9 @@ def clause_is_present(clause, search):
"""Given a target clause and a second to search within, return True
if the target is plainly present in the search without any
subqueries or aliases involved.
-
+
Basically descends through Joins.
-
+
"""
stack = [search]
@@ -116,30 +116,30 @@ def clause_is_present(clause, search):
elif isinstance(elem, expression.Join):
stack.extend((elem.left, elem.right))
return False
-
-
+
+
def bind_values(clause):
"""Return an ordered list of "bound" values in the given clause.
E.g.::
-
+
>>> expr = and_(
... table.c.foo==5, table.c.foo==7
... )
>>> bind_values(expr)
[5, 7]
"""
-
+
v = []
def visit_bindparam(bind):
value = bind.value
-
+
# evaluate callables
if callable(value):
value = value()
-
+
v.append(value)
-
+
visitors.traverse(clause, {}, {'bindparam':visit_bindparam})
return v
@@ -149,15 +149,15 @@ def _quote_ddl_expr(element):
return "'%s'" % element
else:
return repr(element)
-
+
def expression_as_ddl(clause):
"""Given a SQL expression, convert for usage in DDL, such as
CREATE INDEX and CHECK CONSTRAINT.
-
+
Converts bind params into quoted literals, column identifiers
into detached column constructs so that the parent table
identifier is not included.
-
+
"""
def repl(element):
if isinstance(element, expression._BindParamClause):
@@ -167,9 +167,9 @@ def expression_as_ddl(clause):
return expression.column(element.name)
else:
return None
-
+
return visitors.replacement_traverse(clause, {}, repl)
-
+
def adapt_criterion_to_null(crit, nulls):
"""given criterion containing bind params, convert selected elements to IS NULL."""
@@ -186,23 +186,23 @@ def adapt_criterion_to_null(crit, nulls):
binary.negate = operators.isnot
return visitors.cloned_traverse(crit, {}, {'binary':visit_binary})
-
-
+
+
def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
"""create a join condition between two tables or selectables.
-
+
e.g.::
-
+
join_condition(tablea, tableb)
-
+
would produce an expression along the lines of::
-
+
tablea.c.id==tableb.c.tablea_id
-
+
The join is determined based on the foreign key relationships
between the two selectables. If there are multiple ways
to join, or no way to join, an error is raised.
-
+
:param ignore_nonexistent_tables: This flag will cause the
function to silently skip over foreign key resolution errors
due to nonexistent tables - the assumption is that these
@@ -215,11 +215,11 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
will be successful even if there are other ways to join to ``a``.
This allows the "right side" of a join to be passed thereby
providing a "natural join".
-
+
"""
crit = []
constraints = set()
-
+
for left in (a_subset, a):
if left is None:
continue
@@ -233,7 +233,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
continue
else:
raise
-
+
if col is not None:
crit.append(col == fk.parent)
constraints.add(fk.constraint)
@@ -254,7 +254,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
constraints.add(fk.constraint)
if crit:
break
-
+
if len(crit) == 0:
if isinstance(b, expression._FromGrouping):
hint = " Perhaps you meant to convert the right side to a "\
@@ -279,17 +279,17 @@ def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
class Annotated(object):
"""clones a ClauseElement and applies an 'annotations' dictionary.
-
+
Unlike regular clones, this clone also mimics __hash__() and
__cmp__() of the original element so that it takes its place
in hashed collections.
-
+
A reference to the original element is maintained, for the important
reason of keeping its hash value current. When GC'ed, the
hash value may be reused, causing conflicts.
"""
-
+
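The hash-mimicking behavior described in the docstring above can be illustrated in isolation; this is a generic sketch of the pattern, not SQLAlchemy API:

    class HashMimic(object):
        # hash/compare as the wrapped element, so the wrapper can
        # take the element's place in sets and dict keys
        def __init__(self, element, annotations):
            self._element = element
            self._annotations = annotations

        def __hash__(self):
            return hash(self._element)

        def __eq__(self, other):
            return self._element == other

    seen = set(['x'])
    assert HashMimic('x', {'tag': 1}) in seen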
def __new__(cls, *args):
if not args:
# clone constructor
@@ -311,11 +311,11 @@ class Annotated(object):
# collections into __dict__
if isinstance(element, expression.FromClause):
element.c
-
+
self.__dict__ = element.__dict__.copy()
self.__element = element
self._annotations = values
-
+
def _annotate(self, values):
_values = self._annotations.copy()
_values.update(values)
@@ -323,17 +323,17 @@ class Annotated(object):
clone.__dict__ = self.__dict__.copy()
clone._annotations = _values
return clone
-
+
def _deannotate(self):
return self.__element
-
+
def _compiler_dispatch(self, visitor, **kw):
return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
-
+
@property
def _constructor(self):
return self.__element._constructor
-
+
def _clone(self):
clone = self.__element._clone()
if clone is self.__element:
@@ -344,7 +344,7 @@ class Annotated(object):
# to this object's __dict__.
clone.__dict__.update(self.__dict__)
return Annotated(clone, self._annotations)
-
+
def __hash__(self):
return hash(self.__element)
@@ -400,7 +400,7 @@ def _deep_deannotate(element):
def splice_joins(left, right, stop_on=None):
if left is None:
return right
-
+
stack = [(right, None)]
adapter = ClauseAdapter(left)
@@ -420,7 +420,7 @@ def splice_joins(left, right, stop_on=None):
ret = right
return ret
-
+
def reduce_columns(columns, *clauses, **kw):
"""given a list of columns, return a 'reduced' set based on natural equivalents.
@@ -433,14 +433,14 @@ def reduce_columns(columns, *clauses, **kw):
\**kw may specify 'ignore_nonexistent_tables' to ignore foreign keys
whose tables are not yet configured.
-
+
This function is primarily used to determine the minimal "primary key"
from a selectable, by reducing the set of primary key columns present
in the selectable to just those that are not repeated.
"""
ignore_nonexistent_tables = kw.pop('ignore_nonexistent_tables', False)
-
+
columns = util.ordered_column_set(columns)
omit = util.column_set()
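A minimal sketch of reduce_columns(); the tables are hypothetical, and the expectation is that a foreign-key-equated pair collapses to a single column:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
    from sqlalchemy.sql.util import reduce_columns

    meta = MetaData()
    parent = Table('parent', meta, Column('id', Integer, primary_key=True))
    child = Table('child', meta,
                  Column('id', Integer, ForeignKey('parent.id'),
                         primary_key=True))

    # parent.c.id and child.c.id are equated via the foreign key,
    # so the reduced set should contain only one of them
    print(list(reduce_columns([parent.c.id, child.c.id])))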
@@ -477,12 +477,12 @@ def reduce_columns(columns, *clauses, **kw):
def criterion_as_pairs(expression, consider_as_foreign_keys=None,
consider_as_referenced_keys=None, any_operator=False):
"""traverse an expression and locate binary criterion pairs."""
-
+
if consider_as_foreign_keys and consider_as_referenced_keys:
raise exc.ArgumentError("Can only specify one of "
"'consider_as_foreign_keys' or "
"'consider_as_referenced_keys'")
-
+
def visit_binary(binary):
if not any_operator and binary.operator is not operators.eq:
return
@@ -521,14 +521,14 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
def folded_equivalents(join, equivs=None):
"""Return a list of uniquely named columns.
-
+
The column list of the given Join will be narrowed
down to a list of all equivalently-named,
equated columns folded into one column, where 'equated' means they are
equated to each other in the ON clause of this join.
This function is used by Join.select(fold_equivalents=True).
-
+
Deprecated. This function is used for a certain kind of
"polymorphic_union" which is designed to achieve joined
table inheritance where the base table has no "discriminator"
@@ -564,10 +564,10 @@ def folded_equivalents(join, equivs=None):
class AliasedRow(object):
"""Wrap a RowProxy with a translation map.
-
+
This object allows a set of keys to be translated
to those present in a RowProxy.
-
+
"""
def __init__(self, row, map):
# AliasedRow objects don't nest, so un-nest
@@ -577,7 +577,7 @@ class AliasedRow(object):
else:
self.row = row
self.map = map
-
+
def __contains__(self, key):
return self.map[key] in self.row
@@ -593,7 +593,7 @@ class AliasedRow(object):
class ClauseAdapter(visitors.ReplacingCloningVisitor):
"""Clones and modifies clauses based on column correspondence.
-
+
E.g.::
table1 = Table('sometable', metadata,
@@ -623,7 +623,7 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
self.include = include
self.exclude = exclude
self.equivalents = util.column_dict(equivalents or {})
-
+
def _corresponding_column(self, col, require_embedded, _seen=util.EMPTY_SET):
newcol = self.selectable.corresponding_column(col, require_embedded=require_embedded)
@@ -646,17 +646,17 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
return None
elif self.exclude and col in self.exclude:
return None
-
+
return self._corresponding_column(col, True)
class ColumnAdapter(ClauseAdapter):
"""Extends ClauseAdapter with extra utility functions.
-
+
Provides the ability to "wrap" this ClauseAdapter
around another, a columns dictionary which returns
adapted elements given an original, and an
adapted_row() factory.
-
+
"""
def __init__(self, selectable, equivalents=None,
chain_to=None, include=None,
@@ -689,11 +689,11 @@ class ColumnAdapter(ClauseAdapter):
c = self._corresponding_column(col, True)
if c is None:
c = self.adapt_clause(col)
-
+
# anonymize labels in case they have a hardcoded name
if isinstance(c, expression._Label):
c = c.label(None)
-
+
# adapt_required indicates that if we got the same column
# back which we put in (i.e. it passed through),
# it's not correct. this is used by eagerloading which
@@ -702,17 +702,17 @@ class ColumnAdapter(ClauseAdapter):
# the wrong column.
if self.adapt_required and c is col:
return None
-
- return c
+
+ return c
def adapted_row(self, row):
return AliasedRow(row, self.columns)
-
+
def __getstate__(self):
d = self.__dict__.copy()
del d['columns']
return d
-
+
def __setstate__(self, state):
self.__dict__.update(state)
self.columns = util.PopulateDict(self._locate_col)
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index 91ded1130..8011aa109 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -32,18 +32,18 @@ __all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
'iterate_depthfirst', 'traverse_using', 'traverse',
'cloned_traverse', 'replacement_traverse']
-
+
class VisitableType(type):
"""Metaclass which checks for a `__visit_name__` attribute and
applies `_compiler_dispatch` method to classes.
-
+
"""
-
+
def __init__(cls, clsname, bases, clsdict):
if cls.__name__ == 'Visitable' or not hasattr(cls, '__visit_name__'):
super(VisitableType, cls).__init__(clsname, bases, clsdict)
return
-
+
# set up an optimized visit dispatch function
# for use by the compiler
if '__visit_name__' in cls.__dict__:
@@ -57,13 +57,13 @@ class VisitableType(type):
return getattr(visitor, 'visit_%s' % self.__visit_name__)(self, **kw)
cls._compiler_dispatch = _compiler_dispatch
-
+
super(VisitableType, cls).__init__(clsname, bases, clsdict)
class Visitable(object):
"""Base class for visitable objects, applies the
``VisitableType`` metaclass.
-
+
"""
__metaclass__ = VisitableType
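The dispatch that VisitableType generates is, in spirit, a getattr() lookup of visit_<__visit_name__> on the visitor; a standalone sketch of the pattern (not the actual generated code):

    class Visitor(object):
        def visit_select(self, element, **kw):
            return "SELECT ..."

    class Element(object):
        __visit_name__ = 'select'

        def accept(self, visitor, **kw):
            # the equivalent of the generated _compiler_dispatch
            return getattr(visitor,
                           'visit_%s' % self.__visit_name__)(self, **kw)

    assert Element().accept(Visitor()) == "SELECT ..."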
@@ -71,27 +71,27 @@ class Visitable(object):
class ClauseVisitor(object):
"""Base class for visitor objects which can traverse using
the traverse() function.
-
+
"""
-
+
__traverse_options__ = {}
-
+
def traverse_single(self, obj, **kw):
for v in self._visitor_iterator:
meth = getattr(v, "visit_%s" % obj.__visit_name__, None)
if meth:
return meth(obj, **kw)
-
+
def iterate(self, obj):
"""traverse the given expression structure, returning an iterator of all elements."""
return iterate(obj, self.__traverse_options__)
-
+
def traverse(self, obj):
"""traverse and visit the given expression structure."""
return traverse(obj, self.__traverse_options__, self._visitor_dict)
-
+
@util.memoized_property
def _visitor_dict(self):
visitors = {}
@@ -100,11 +100,11 @@ class ClauseVisitor(object):
if name.startswith('visit_'):
visitors[name[6:]] = getattr(self, name)
return visitors
-
+
@property
def _visitor_iterator(self):
"""iterate through this visitor and each 'chained' visitor."""
-
+
v = self
while v:
yield v
@@ -112,9 +112,9 @@ class ClauseVisitor(object):
def chain(self, visitor):
"""'chain' an additional ClauseVisitor onto this ClauseVisitor.
-
+
the chained visitor will receive all visit events after this one.
-
+
"""
tail = list(self._visitor_iterator)[-1]
tail._next = visitor
@@ -123,7 +123,7 @@ class ClauseVisitor(object):
class CloningVisitor(ClauseVisitor):
"""Base class for visitor objects which can traverse using
the cloned_traverse() function.
-
+
"""
def copy_and_process(self, list_):
@@ -139,12 +139,12 @@ class CloningVisitor(ClauseVisitor):
class ReplacingCloningVisitor(CloningVisitor):
"""Base class for visitor objects which can traverse using
the replacement_traverse() function.
-
+
"""
def replace(self, elem):
"""receive pre-copied elements during a cloning traversal.
-
+
If the method returns a new element, the element is used
instead of creating a simple copy of the element. Traversal
will halt on the newly returned element if it is re-encountered.
@@ -163,9 +163,9 @@ class ReplacingCloningVisitor(CloningVisitor):
def iterate(obj, opts):
"""traverse the given expression structure, returning an iterator.
-
+
traversal is configured to be breadth-first.
-
+
"""
stack = deque([obj])
while stack:
@@ -176,9 +176,9 @@ def iterate(obj, opts):
def iterate_depthfirst(obj, opts):
"""traverse the given expression structure, returning an iterator.
-
+
traversal is configured to be depth-first.
-
+
"""
stack = deque([obj])
traversal = deque()
@@ -197,7 +197,7 @@ def traverse_using(iterator, obj, visitors):
if meth:
meth(target)
return obj
-
+
def traverse(obj, opts, visitors):
"""traverse and visit the given expression structure using the default iterator."""
@@ -210,7 +210,7 @@ def traverse_depthfirst(obj, opts, visitors):
def cloned_traverse(obj, opts, visitors):
"""clone the given expression structure, allowing modifications by visitors."""
-
+
cloned = util.column_dict()
def clone(element):
@@ -237,7 +237,7 @@ def cloned_traverse(obj, opts, visitors):
def replacement_traverse(obj, opts, replace):
"""clone the given expression structure, allowing element replacement by a given replacement function."""
-
+
cloned = util.column_dict()
stop_on = util.column_set(opts.get('stop_on', []))
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 5d3ec08fc..613dad82a 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -46,7 +46,7 @@ class AbstractType(Visitable):
class TypeEngine(AbstractType):
"""Base for built-in types."""
-
+
def copy_value(self, value):
return value
@@ -93,22 +93,22 @@ class TypeEngine(AbstractType):
objects alone. Values such as dicts, lists which
are serialized into strings are examples of "mutable"
column structures.
-
+
.. note:: This functionality is now superseded by the
``sqlalchemy.ext.mutable`` extension described in
:ref:`mutable_toplevel`.
-
+
When this method is overridden, :meth:`copy_value` should
also be supplied. The :class:`.MutableType` mixin
is recommended as a helper.
-
+
"""
return False
def get_dbapi_type(self, dbapi):
"""Return the corresponding type object from the underlying DB-API, if
any.
-
+
This can be useful for calling ``setinputsizes()``, for example.
"""
@@ -117,10 +117,10 @@ class TypeEngine(AbstractType):
def _adapt_expression(self, op, othertype):
"""evaluate the return type of <self> <op> <othertype>,
and apply any adaptations to the given operator.
-
+
"""
return op, self
-
+
@util.memoized_property
def _type_affinity(self):
"""Return a rudimental 'affinity' value expressing the general class
@@ -137,12 +137,12 @@ class TypeEngine(AbstractType):
def dialect_impl(self, dialect):
"""Return a dialect-specific implementation for this type."""
-
+
try:
return dialect._type_memos[self]['impl']
except KeyError:
return self._dialect_info(dialect)['impl']
-
+
def _cached_bind_processor(self, dialect):
"""Return a dialect-specific bind processor for this type."""
@@ -152,7 +152,7 @@ class TypeEngine(AbstractType):
d = self._dialect_info(dialect)
d['bind'] = bp = d['impl'].bind_processor(dialect)
return bp
-
+
def _cached_result_processor(self, dialect, coltype):
"""Return a dialect-specific result processor for this type."""
@@ -170,7 +170,7 @@ class TypeEngine(AbstractType):
"""Return a dialect-specific registry which
caches a dialect-specific implementation, bind processing
function, and one or more result processing functions."""
-
+
if self in dialect._type_memos:
return dialect._type_memos[self]
else:
@@ -184,10 +184,10 @@ class TypeEngine(AbstractType):
def _gen_dialect_impl(self, dialect):
return dialect.type_descriptor(self)
-
+
def adapt(self, cls, **kw):
return util.constructor_copy(self, cls, **kw)
-
+
def _coerce_compared_value(self, op, value):
_coerced_type = _type_map.get(type(value), NULLTYPE)
if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
@@ -195,19 +195,19 @@ class TypeEngine(AbstractType):
return self
else:
return _coerced_type
-
+
def _compare_type_affinity(self, other):
return self._type_affinity is other._type_affinity
def compile(self, dialect=None):
# arg, return value is inconsistent with
# ClauseElement.compile()....this is a mistake.
-
+
if not dialect:
dialect = self._default_dialect
-
+
return dialect.type_compiler.process(self)
-
+
@property
def _default_dialect(self):
if self.__class__.__module__.startswith("sqlalchemy.dialects"):
@@ -216,7 +216,7 @@ class TypeEngine(AbstractType):
return getattr(__import__(mod).dialects, tokens[-1]).dialect()
else:
return default.DefaultDialect()
-
+
def __str__(self):
# Py3K
#return unicode(self.compile())
@@ -276,24 +276,24 @@ class UserDefinedType(TypeEngine):
def _adapt_expression(self, op, othertype):
"""evaluate the return type of <self> <op> <othertype>,
and apply any adaptations to the given operator.
-
+
"""
return self.adapt_operator(op), self
def adapt_operator(self, op):
"""A hook which allows the given operator to be adapted
to something new.
-
+
See also UserDefinedType._adapt_expression(), an as-yet-
semi-public method with greater capability in this regard.
-
+
"""
return op
class TypeDecorator(TypeEngine):
"""Allows the creation of types which add additional functionality
to an existing type.
-
+
This method is preferred to direct subclassing of SQLAlchemy's
built-in types as it ensures that all required functionality of
the underlying type is kept in place.
@@ -308,7 +308,7 @@ class TypeDecorator(TypeEngine):
'''
impl = types.Unicode
-
+
def process_bind_param(self, value, dialect):
return "PREFIX:" + value
@@ -329,40 +329,40 @@ class TypeDecorator(TypeEngine):
method. This is used to give the expression system a hint when coercing
Python objects into bind parameters within expressions. Consider this
expression::
-
+
mytable.c.somecol + datetime.date(2009, 5, 15)
-
+
Above, if "somecol" is an ``Integer`` variant, it makes sense that
we're doing date arithmetic, where above is usually interpreted
by databases as adding a number of days to the given date.
The expression system does the right thing by not attempting to
coerce the "date()" value into an integer-oriented bind parameter.
-
+
However, in the case of ``TypeDecorator``, we are usually changing an
incoming Python type to something new - ``TypeDecorator`` by default will
"coerce" the non-typed side to be the same type as itself. Such as below,
we define an "epoch" type that stores a date value as an integer::
-
+
class MyEpochType(types.TypeDecorator):
impl = types.Integer
-
+
epoch = datetime.date(1970, 1, 1)
-
+
def process_bind_param(self, value, dialect):
return (value - self.epoch).days
-
+
def process_result_value(self, value, dialect):
return self.epoch + timedelta(days=value)
Our expression of ``somecol + date`` with the above type will coerce the
- "date" on the right side to also be treated as ``MyEpochType``.
-
+ "date" on the right side to also be treated as ``MyEpochType``.
+
This behavior can be overridden via the
:meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
that should be used for the value of the expression. Below we set it such
that an integer value will be treated as an ``Integer``, and any other
value is assumed to be a date and will be treated as a ``MyEpochType``::
-
+
def coerce_compared_value(self, op, value):
if isinstance(value, int):
return Integer()
@@ -380,8 +380,8 @@ class TypeDecorator(TypeEngine):
"'impl' which refers to the class of "
"type being decorated")
self.impl = to_instance(self.__class__.impl, *args, **kwargs)
-
-
+
+
def _gen_dialect_impl(self, dialect):
adapted = dialect.type_descriptor(self)
if adapted is not self:
@@ -406,7 +406,7 @@ class TypeDecorator(TypeEngine):
def type_engine(self, dialect):
"""Return a TypeEngine instance for this TypeDecorator.
-
+
"""
adapted = dialect.type_descriptor(self)
if adapted is not self:
@@ -471,28 +471,28 @@ class TypeDecorator(TypeEngine):
return process
else:
return self.impl.result_processor(dialect, coltype)
-
+
def coerce_compared_value(self, op, value):
"""Suggest a type for a 'coerced' Python value in an expression.
-
+
By default, returns self. This method is called by
the expression system when an object using this type is
on the left or right side of an expression against a plain Python
object which does not yet have a SQLAlchemy type assigned::
-
+
expr = table.c.somecolumn + 35
-
+
Where above, if ``somecolumn`` uses this type, this method will
be called with the value ``operator.add``
and ``35``. The return value is whatever SQLAlchemy type should
be used for ``35`` for this particular operation.
-
+
"""
return self
def _coerce_compared_value(self, op, value):
return self.coerce_compared_value(op, value)
-
+
def copy(self):
instance = self.__class__.__new__(self.__class__)
instance.__dict__.update(self.__dict__)
@@ -519,7 +519,7 @@ class TypeDecorator(TypeEngine):
.. note:: This functionality is now superseded by the
``sqlalchemy.ext.mutable`` extension described in
:ref:`mutable_toplevel`.
-
+
"""
return self.impl.is_mutable()
@@ -534,14 +534,14 @@ class MutableType(object):
"""A mixin that marks a :class:`TypeEngine` as representing
a mutable Python object type. This functionality is used
only by the ORM.
-
+
.. note:: :class:`.MutableType` is superseded as of SQLAlchemy 0.7
by the ``sqlalchemy.ext.mutable`` extension described in
:ref:`mutable_toplevel`. This extension provides an event
driven approach to in-place mutation detection that does not
incur the severe performance penalty of the :class:`.MutableType`
approach.
-
+
"mutable" means that changes can occur in place to a value
of this type. Examples include Python lists, dictionaries,
and sets, as well as user-defined objects. The primary
@@ -549,7 +549,7 @@ class MutableType(object):
which applies special rules to such values in order to guarantee
that changes are detected. These rules may have a significant
performance impact, described below.
-
+
A :class:`MutableType` usually allows a flag called
``mutable=False`` to enable/disable the "mutability" flag,
represented on this class by :meth:`is_mutable`. Examples
@@ -557,12 +557,12 @@ class MutableType(object):
:class:`~sqlalchemy.dialects.postgresql.base.ARRAY`. Setting
this flag to ``True`` enables mutability-specific behavior
by the ORM.
-
+
The :meth:`copy_value` and :meth:`compare_values` functions
represent a copy and compare function for values of this
type - implementing subclasses should override these
appropriately.
-
+
.. warning:: The usage of mutable types has significant performance
implications when using the ORM. In order to detect changes, the
ORM must create a copy of the value when it is first
@@ -577,21 +577,21 @@ class MutableType(object):
execution of :class:`Query` will require a full scan of that subset of
the 6000 objects that have mutable attributes, possibly resulting
in tens of thousands of additional method calls for every query.
-
+
As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` extension is provided, which
allows an event driven approach to in-place mutation detection. This
approach should now be favored over the usage of :class:`.MutableType`
with ``mutable=True``. ``sqlalchemy.ext.mutable`` is described in
:ref:`mutable_toplevel`.
-
+
"""
def is_mutable(self):
"""Return True if the target Python type is 'mutable'.
-
+
For :class:`.MutableType`, this method is set to
return ``True``.
-
+
"""
return True
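The event-driven alternative referenced in the note can be sketched following the pattern in the 0.7 documentation for ``sqlalchemy.ext.mutable``; the JSON type and MutableDict here illustrate that pattern and are not built-in classes:

    import json
    from sqlalchemy.ext.mutable import Mutable
    from sqlalchemy.types import TypeDecorator, VARCHAR

    class JSONEncodedDict(TypeDecorator):
        impl = VARCHAR

        def process_bind_param(self, value, dialect):
            return json.dumps(value) if value is not None else None

        def process_result_value(self, value, dialect):
            return json.loads(value) if value is not None else None

    class MutableDict(Mutable, dict):
        @classmethod
        def coerce(cls, key, value):
            if not isinstance(value, MutableDict):
                if isinstance(value, dict):
                    return MutableDict(value)
                return Mutable.coerce(key, value)
            return value

        def __setitem__(self, key, value):
            dict.__setitem__(self, key, value)
            self.changed()        # flag the parent attribute as dirty

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            self.changed()

    MutableDict.associate_with(JSONEncodedDict)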
@@ -672,13 +672,13 @@ class Concatenable(object):
class _DateAffinity(object):
"""Mixin date/time specific expression adaptations.
-
+
Rules are implemented within Date, Time, Interval, DateTime, Numeric,
Integer. Based on http://www.postgresql.org/docs/current/static
/functions-datetime.html.
-
+
"""
-
+
@property
def _expression_adaptations(self):
raise NotImplementedError()
@@ -737,12 +737,12 @@ class String(Concatenable, TypeEngine):
for all String types by setting
:attr:`sqlalchemy.engine.base.Dialect.convert_unicode`
on create_engine().
-
+
To instruct SQLAlchemy to perform Unicode encoding/decoding
even on a platform that already handles Unicode natively,
set convert_unicode='force'. This will incur significant
performance overhead when fetching unicode result columns.
-
+
:param assert_unicode: Deprecated. A warning is raised in all cases
when a non-Unicode object is passed when SQLAlchemy would coerce
into an encoding (note: but **not** when the DBAPI handles unicode
@@ -767,7 +767,7 @@ class String(Concatenable, TypeEngine):
if unicode_error is not None and convert_unicode != 'force':
raise exc.ArgumentError("convert_unicode must be 'force' "
"when unicode_error is set.")
-
+
if assert_unicode:
util.warn_deprecated('assert_unicode is deprecated. '
'SQLAlchemy emits a warning in all '
@@ -781,7 +781,7 @@ class String(Concatenable, TypeEngine):
self.convert_unicode = convert_unicode
self.unicode_error = unicode_error
self._warn_on_bytestring = _warn_on_bytestring
-
+
def bind_processor(self, dialect):
if self.convert_unicode or dialect.convert_unicode:
if dialect.supports_unicode_binds and \
@@ -818,16 +818,16 @@ class String(Concatenable, TypeEngine):
needs_convert = wants_unicode and \
(dialect.returns_unicode_strings is not True or
self.convert_unicode == 'force')
-
+
if needs_convert:
to_unicode = processors.to_unicode_processor_factory(
dialect.encoding, self.unicode_error)
-
+
if dialect.returns_unicode_strings:
# we wouldn't be here unless convert_unicode='force'
# was specified, or the driver has erratic unicode-returning
# habits. since we will be getting back unicode
- # in most cases, we check for it (decode will fail).
+ # in most cases, we check for it (decode will fail).
def process(value):
if isinstance(value, unicode):
return value
@@ -862,7 +862,7 @@ class Unicode(String):
``u'somevalue'``) into encoded bytestrings when passing the value
to the database driver, and similarly decodes values from the
database back into Python ``unicode`` objects.
-
+
It's roughly equivalent to using a ``String`` object with
``convert_unicode=True``, however
the type has other significances in that it implies the usage
@@ -870,7 +870,7 @@ class Unicode(String):
This may affect what type is emitted when issuing CREATE TABLE
and also may affect some DBAPI-specific details, such as type
information passed along to ``setinputsizes()``.
-
+
When using the ``Unicode`` type, it is only appropriate to pass
Python ``unicode`` objects, and not plain ``str``. If a
bytestring (``str``) is passed, a runtime warning is issued. If
@@ -890,7 +890,7 @@ class Unicode(String):
"""
__visit_name__ = 'unicode'
-
+
def __init__(self, length=None, **kwargs):
"""
Create a Unicode-converting String type.
@@ -901,10 +901,10 @@ class Unicode(String):
*length* for use in DDL, and will raise an exception when
the ``CREATE TABLE`` DDL is issued. Whether the value is
interpreted as bytes or characters is database specific.
-
+
:param \**kwargs: passed through to the underlying ``String``
type.
-
+
"""
kwargs.setdefault('convert_unicode', True)
kwargs.setdefault('_warn_on_bytestring', True)
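A minimal usage sketch; the table is hypothetical, and per the docstring only ``unicode`` objects should be passed for this column:

    from sqlalchemy import MetaData, Table, Column, Integer, Unicode

    meta = MetaData()
    docs = Table('docs', meta,
                 Column('id', Integer, primary_key=True),
                 Column('title', Unicode(200)))

    # conn.execute(docs.insert(), title=u'r\xe9sum\xe9')   # unicode, not str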
@@ -947,7 +947,7 @@ class Integer(_DateAffinity, TypeEngine):
def get_dbapi_type(self, dbapi):
return dbapi.NUMBER
-
+
@util.memoized_property
def _expression_adaptations(self):
# TODO: need a dictionary object that will
@@ -1019,16 +1019,16 @@ class Numeric(_DateAffinity, TypeEngine):
foolproof way to use "cdecimal" given current DBAPI and Python support
is to patch it directly into sys.modules before anything else is
imported::
-
+
import sys
import cdecimal
sys.modules["decimal"] = cdecimal
-
+
While the global patch is a little ugly, it's particularly
important to use just one decimal library at a time since
Python Decimal and cdecimal Decimal objects
are not currently compatible *with each other*::
-
+
>>> import cdecimal
>>> import decimal
>>> decimal.Decimal("10") == cdecimal.Decimal("10")
@@ -1055,14 +1055,14 @@ class Numeric(_DateAffinity, TypeEngine):
values should be sent as Python Decimal objects, or
as floats. Different DBAPIs send one or the other based on
datatypes - the Numeric type will ensure that return values
- are one or the other across DBAPIs consistently.
-
+ are one or the other across DBAPIs consistently.
+
When using the ``Numeric`` type, care should be taken to ensure
that the asdecimal setting is appropriate for the DBAPI in use -
when Numeric applies a conversion from Decimal->float or float->
Decimal, this conversion incurs an additional performance overhead
for all result columns received.
-
+
DBAPIs that return Decimal natively (e.g. psycopg2) will have
better accuracy and higher performance with a setting of ``True``,
as the native translation to Decimal reduces the amount of floating-
@@ -1072,7 +1072,7 @@ class Numeric(_DateAffinity, TypeEngine):
overhead, and is still subject to floating point data loss - in
which case ``asdecimal=False`` will at least remove the extra
conversion overhead.
-
+
"""
self.precision = precision
self.scale = scale
@@ -1100,7 +1100,7 @@ class Numeric(_DateAffinity, TypeEngine):
'consider storing Decimal numbers as strings '
'or integers on this platform for lossless '
'storage.' % (dialect.name, dialect.driver))
-
+
# we're a "numeric", DBAPI returns floats, convert.
if self.scale is not None:
return processors.to_decimal_processor_factory(
@@ -1143,24 +1143,24 @@ class Numeric(_DateAffinity, TypeEngine):
}
class Float(Numeric):
- """A type for ``float`` numbers.
-
+ """A type for ``float`` numbers.
+
Returns Python ``float`` objects by default, applying
conversion as needed.
-
+
"""
__visit_name__ = 'float'
-
+
scale = None
-
+
def __init__(self, precision=None, asdecimal=False, **kwargs):
"""
Construct a Float.
:param precision: the numeric precision for use in DDL ``CREATE
TABLE``.
-
+
:param asdecimal: the same flag as that of :class:`Numeric`, but
defaults to ``False``. Note that setting this flag to ``True``
results in floating point conversion.
@@ -1229,7 +1229,7 @@ class DateTime(_DateAffinity, TypeEngine):
DateTime:Interval,
},
}
-
+
class Date(_DateAffinity, TypeEngine):
"""A type for ``datetime.date()`` objects."""
@@ -1250,12 +1250,12 @@ class Date(_DateAffinity,TypeEngine):
operators.sub:{
# date - integer = date
Integer:Date,
-
+
# date - date = integer.
Date:Integer,
Interval:DateTime,
-
+
# date - datetime = interval,
# this one is not in the PG docs
# but works
@@ -1330,10 +1330,10 @@ class _Binary(TypeEngine):
return self
else:
return super(_Binary, self)._coerce_compared_value(op, value)
-
+
def get_dbapi_type(self, dbapi):
return dbapi.BINARY
-
+
class LargeBinary(_Binary):
"""A type for large binary byte data.
@@ -1363,7 +1363,7 @@ class LargeBinary(_Binary):
class Binary(LargeBinary):
"""Deprecated. Renamed to LargeBinary."""
-
+
def __init__(self, *arg, **kw):
util.warn_deprecated('The Binary type has been renamed to '
'LargeBinary.')
@@ -1371,13 +1371,13 @@ class Binary(LargeBinary):
class SchemaType(object):
"""Mark a type as possibly requiring schema-level DDL for usage.
-
+
Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
as well as types that are complemented by table or schema level
constraints, triggers, and other rules.
-
+
"""
-
+
def __init__(self, **kw):
self.name = kw.pop('name', None)
self.quote = kw.pop('quote', None)
@@ -1388,10 +1388,10 @@ class SchemaType(object):
util.portable_instancemethod(self._on_metadata_create))
self.metadata.append_ddl_listener('after-drop',
util.portable_instancemethod(self._on_metadata_drop))
-
+
def _set_parent(self, column):
column._on_table_attach(util.portable_instancemethod(self._set_table))
-
+
def _set_table(self, table, column):
table.append_ddl_listener('before-create',
util.portable_instancemethod(
@@ -1404,14 +1404,14 @@ class SchemaType(object):
util.portable_instancemethod(self._on_metadata_create))
table.metadata.append_ddl_listener('after-drop',
util.portable_instancemethod(self._on_metadata_drop))
-
+
@property
def bind(self):
return self.metadata and self.metadata.bind or None
-
+
def create(self, bind=None, checkfirst=False):
"""Issue CREATE ddl for this type, if applicable."""
-
+
if bind is None:
bind = schema._bind_or_error(self)
t = self.dialect_impl(bind.dialect)
@@ -1426,7 +1426,7 @@ class SchemaType(object):
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
t.drop(bind=bind, checkfirst=checkfirst)
-
+
def _on_table_create(self, event, target, bind, **kw):
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
@@ -1446,22 +1446,22 @@ class SchemaType(object):
t = self.dialect_impl(bind.dialect)
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
t._on_metadata_drop(event, target, bind, **kw)
-
+
class Enum(String, SchemaType):
"""Generic Enum Type.
-
+
The Enum type provides a set of possible string values which the
column is constrained towards.
-
+
By default, uses the backend's native ENUM type if available,
else uses VARCHAR + a CHECK constraint.
"""
-
+
__visit_name__ = 'enum'
-
+
def __init__(self, *enums, **kw):
"""Construct an enum.
-
+
Keyword arguments which don't apply to a specific backend are ignored
by that backend.
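A minimal sketch of the Enum type described above; the table is hypothetical:

    from sqlalchemy import MetaData, Table, Column, Integer, Enum

    meta = MetaData()
    posts = Table('posts', meta,
                  Column('id', Integer, primary_key=True),
                  Column('status', Enum('draft', 'published', 'archived',
                                        name='post_status')))

    # on a backend with native ENUM support this emits CREATE TYPE;
    # elsewhere it falls back to VARCHAR(9) plus
    # CHECK (status IN ('draft', 'published', 'archived'))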
@@ -1513,7 +1513,7 @@ class Enum(String, SchemaType):
break
else:
convert_unicode = False
-
+
if self.enums:
length = max(len(x) for x in self.enums)
else:
@@ -1527,11 +1527,11 @@ class Enum(String, SchemaType):
def _should_create_constraint(self, compiler):
return not self.native_enum or \
not compiler.dialect.supports_native_enum
-
+
def _set_table(self, table, column):
if self.native_enum:
SchemaType._set_table(self, table, column)
-
+
e = schema.CheckConstraint(
column.in_(self.enums),
@@ -1540,7 +1540,7 @@ class Enum(String, SchemaType):
self._should_create_constraint)
)
table.append_constraint(e)
-
+
def adapt(self, impltype, **kw):
if issubclass(impltype, Enum):
return impltype(name=self.name,
@@ -1601,13 +1601,13 @@ class PickleType(MutableType, TypeDecorator):
self.mutable = mutable
self.comparator = comparator
super(PickleType, self).__init__()
-
+
def __reduce__(self):
return PickleType, (self.protocol,
None,
self.mutable,
self.comparator)
-
+
def bind_processor(self, dialect):
impl_processor = self.impl.bind_processor(dialect)
dumps = self.pickler.dumps
@@ -1655,11 +1655,11 @@ class PickleType(MutableType, TypeDecorator):
def is_mutable(self):
"""Return True if the target Python type is 'mutable'.
-
+
When this method is overridden, :meth:`copy_value` should
also be supplied. The :class:`.MutableType` mixin
is recommended as a helper.
-
+
"""
return self.mutable
@@ -1676,25 +1676,25 @@ class Boolean(TypeEngine, SchemaType):
def __init__(self, create_constraint=True, name=None):
"""Construct a Boolean.
-
+
:param create_constraint: defaults to True. If the boolean
is generated as an int/smallint, also create a CHECK constraint
on the table that ensures 1 or 0 as a value.
-
+
:param name: if a CHECK constraint is generated, specify
the name of the constraint.
-
+
"""
self.create_constraint = create_constraint
self.name = name
-
+
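A minimal sketch of the constraint behavior documented above; the table name is hypothetical:

    from sqlalchemy import MetaData, Table, Column, Integer, Boolean

    meta = MetaData()
    flags = Table('flags', meta,
                  Column('id', Integer, primary_key=True),
                  Column('active', Boolean(create_constraint=True,
                                           name='ck_flags_active')))

    # on a backend without a native boolean type, 'active' renders
    # as an integer column plus CHECK (active IN (0, 1))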
def _should_create_constraint(self, compiler):
return not compiler.dialect.supports_native_boolean
-
+
def _set_table(self, table, column):
if not self.create_constraint:
return
-
+
e = schema.CheckConstraint(
column.in_([0, 1]),
name=self.name,
@@ -1702,13 +1702,13 @@ class Boolean(TypeEngine, SchemaType):
self._should_create_constraint)
)
table.append_constraint(e)
-
+
def bind_processor(self, dialect):
if dialect.supports_native_boolean:
return None
else:
return processors.boolean_to_int
-
+
def result_processor(self, dialect, coltype):
if dialect.supports_native_boolean:
return None
@@ -1729,7 +1729,7 @@ class Interval(_DateAffinity, TypeDecorator):
(such as, conversion of both sides into integer epoch values first) which
currently is a manual procedure (such as via
:attr:`~sqlalchemy.sql.expression.func`).
-
+
"""
impl = DateTime
@@ -1739,20 +1739,20 @@ class Interval(_DateAffinity, TypeDecorator):
second_precision=None,
day_precision=None):
"""Construct an Interval object.
-
+
:param native: when True, use the actual
INTERVAL type provided by the database, if
- supported (currently Postgresql, Oracle).
+ supported (currently Postgresql, Oracle).
Otherwise, represent the interval data as
an epoch value regardless.
-
+
:param second_precision: For native interval types
which support a "fractional seconds precision" parameter,
i.e. Oracle and Postgresql
-
+
:param day_precision: for native interval types which
support a "day precision" parameter, i.e. Oracle.
-
+
"""
super(Interval, self).__init__()
self.native = native
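A minimal sketch of the Interval options documented above; the table is hypothetical:

    from sqlalchemy import MetaData, Table, Column, Integer, Interval

    meta = MetaData()
    jobs = Table('jobs', meta,
                 Column('id', Integer, primary_key=True),
                 # native=False forces the epoch-based representation
                 # even where a native INTERVAL type exists
                 Column('duration', Interval(native=False)))

    # Python-side values are datetime.timedelta objects regardless
    # of how the backend stores them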
@@ -1764,7 +1764,7 @@ class Interval(_DateAffinity, TypeDecorator):
return cls._adapt_from_generic_interval(self, **kw)
else:
return cls(**kw)
-
+
def bind_processor(self, dialect):
impl_processor = self.impl.bind_processor(dialect)
epoch = self.epoch
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index 9ce223939..6950aa8e6 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -15,7 +15,7 @@ from _collections import NamedTuple, ImmutableContainer, frozendict, \
UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
to_column_set, update_copy, flatten_iterator, WeakIdentityMapping, \
LRUCache, ScopedRegistry, ThreadLocalRegistry
-
+
from langhelpers import iterate_attributes, class_hierarchy, \
portable_instancemethod, unbound_method_to_callable, \
getargspec_init, format_argspec_init, format_argspec_plus, \
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index 4f9c5dc8a..269a3d539 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -18,9 +18,9 @@ EMPTY_SET = frozenset()
class NamedTuple(tuple):
"""tuple() subclass that adds labeled names.
-
+
Is also pickleable.
-
+
"""
def __new__(cls, vals, labels=None):
@@ -40,7 +40,7 @@ class ImmutableContainer(object):
__delitem__ = __setitem__ = __setattr__ = _immutable
class frozendict(ImmutableContainer, dict):
-
+
clear = pop = popitem = setdefault = \
update = ImmutableContainer._immutable
@@ -62,7 +62,7 @@ class frozendict(ImmutableContainer, dict):
d2 = frozendict(self)
dict.update(d2, d)
return d2
-
+
def __repr__(self):
return "frozendict(%s)" % dict.__repr__(self)
@@ -107,12 +107,12 @@ class Properties(object):
def __contains__(self, key):
return key in self._data
-
+
def as_immutable(self):
"""Return an immutable proxy for this :class:`.Properties`."""
-
+
return ImmutableProperties(self._data)
-
+
def update(self, value):
self._data.update(value)
@@ -136,12 +136,12 @@ class OrderedProperties(Properties):
as backing store."""
def __init__(self):
Properties.__init__(self, OrderedDict())
-
+
class ImmutableProperties(ImmutableContainer, Properties):
"""Provide immutable dict/object attribute to an underlying dictionary."""
-
-
+
+
class OrderedDict(dict):
"""A dict that returns keys/values/items in the order they were added."""
@@ -266,10 +266,10 @@ class OrderedSet(set):
def __iter__(self):
return iter(self._list)
-
+
def __add__(self, other):
return self.union(other)
-
+
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self._list)
@@ -340,11 +340,11 @@ class IdentitySet(object):
This strategy has edge cases for builtin types - it's possible to have
two 'foo' strings in one of these sets, for example. Use sparingly.
-
+
"""
_working_set = set
-
+
def __init__(self, iterable=None):
self._members = dict()
if iterable:
@@ -501,10 +501,10 @@ class IdentitySet(object):
result._members.update(
self._working_set(self._member_id_tuples()).symmetric_difference(_iter_id(iterable)))
return result
-
+
def _member_id_tuples(self):
return ((id(v), v) for v in self._members.itervalues())
-
+
def __xor__(self, other):
if not isinstance(other, IdentitySet):
return NotImplemented
@@ -544,7 +544,7 @@ class OrderedIdentitySet(IdentitySet):
# but it's safe here: IDS operates on (id, instance) tuples in the
# working set.
__sa_hash_exempt__ = True
-
+
def __init__(self, iterable=None):
IdentitySet.__init__(self)
self._members = OrderedDict()
@@ -564,7 +564,7 @@ if sys.version_info >= (2, 5):
def __init__(self, creator):
self.creator = creator
-
+
def __missing__(self, key):
self[key] = val = self.creator(key)
return val
@@ -574,7 +574,7 @@ else:
def __init__(self, creator):
self.creator = creator
-
+
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
@@ -652,13 +652,13 @@ def to_column_set(x):
def update_copy(d, _new=None, **kw):
"""Copy the given dict and update with the given values."""
-
+
d = d.copy()
if _new:
d.update(_new)
d.update(**kw)
return d
-
+
def flatten_iterator(x):
"""Given an iterator of which further sub-elements may also be
iterators, flatten the sub-elements into a single iterator.
@@ -748,7 +748,7 @@ class WeakIdentityMapping(weakref.WeakKeyDictionary):
del self.by_id[key]
except (KeyError, AttributeError): # pragma: no cover
pass # pragma: no cover
-
+
class _keyed_weakref(weakref.ref):
def __init__(self, object, callback):
weakref.ref.__init__(self, object, callback)
@@ -761,7 +761,7 @@ class WeakIdentityMapping(weakref.WeakKeyDictionary):
class LRUCache(dict):
"""Dictionary with 'squishy' removal of least
recently used items.
-
+
"""
def __init__(self, capacity=100, threshold=.5):
self.capacity = capacity
@@ -809,7 +809,7 @@ class LRUCache(dict):
class ScopedRegistry(object):
"""A Registry that can store one or multiple instances of a single
class on the basis of a "scope" function.
-
+
The object implements ``__call__`` as the "getter", so by
calling ``myregistry()`` the contained object is returned
for the current scope.
@@ -823,14 +823,14 @@ class ScopedRegistry(object):
def __init__(self, createfunc, scopefunc):
"""Construct a new :class:`.ScopedRegistry`.
-
+
:param createfunc: A creation function that will generate
a new value for the current scope, if none is present.
-
+
:param scopefunc: A function that returns a hashable
token representing the current scope (such as, current
thread identifier).
-
+
"""
self.createfunc = createfunc
self.scopefunc = scopefunc
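A minimal sketch of a per-thread registry; the Session class here is hypothetical:

    from sqlalchemy.util import ScopedRegistry

    class Session(object):
        pass

    try:
        from thread import get_ident        # Py2K
    except ImportError:
        from threading import get_ident     # Py3K

    registry = ScopedRegistry(createfunc=Session, scopefunc=get_ident)

    s1 = registry()        # creates a Session for the current scope
    s2 = registry()
    assert s1 is s2        # same scope token -> same instance
    registry.clear()       # discard the current scope's instance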
@@ -845,17 +845,17 @@ class ScopedRegistry(object):
def has(self):
"""Return True if an object is present in the current scope."""
-
+
return self.scopefunc() in self.registry
def set(self, obj):
"""Set the value forthe current scope."""
-
+
self.registry[self.scopefunc()] = obj
def clear(self):
"""Clear the current scope, if any."""
-
+
try:
del self.registry[self.scopefunc()]
except KeyError:
@@ -864,7 +864,7 @@ class ScopedRegistry(object):
class ThreadLocalRegistry(ScopedRegistry):
"""A :class:`.ScopedRegistry` that uses a ``threading.local()``
variable for storage.
-
+
"""
def __init__(self, createfunc):
self.createfunc = createfunc
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index fb2a14633..6b6051973 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -52,7 +52,7 @@ else:
# a controversial feature, required by MySQLdb currently
def buffer(x):
return x
-
+
# Py2K
buffer = getattr(__builtin__, 'buffer', buffer)
# end Py2K
@@ -136,7 +136,7 @@ except ImportError:
class _probe(dict):
def __missing__(self, key):
return 1
-
+
try:
try:
_probe()['missing']
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index 532594b72..d9018a26d 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -73,7 +73,7 @@ def pending_deprecation(version, message=None,
if message is None:
message = "Call to deprecated function %(func)s"
-
+
def decorate(fn):
return _decorate_with_warning(
fn, exc.SAPendingDeprecationWarning,
@@ -87,13 +87,13 @@ def _sanitize_restructured_text(text):
name += "()"
return name
return re.sub(r'\:(\w+)\:`~?\.?(.+?)`', repl, text)
-
-
+
+
def _decorate_with_warning(func, wtype, message, docstring_header=None):
"""Wrap a function with a warnings.warn and augmented docstring."""
message = _sanitize_restructured_text(message)
-
+
@decorator
def warned(fn, *args, **kwargs):
warnings.warn(wtype(message), stacklevel=3)
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index 38260cd66..4ac78bd16 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -59,10 +59,10 @@ def get_cls_kwargs(cls):
__init__ defines a \**kwargs catch-all, then the constructor is presumed to
pass along unrecognized keywords to its base classes, and the collection
process is repeated recursively on each of the bases.
-
+
Uses a subset of inspect.getargspec() to cut down on method overhead.
No anonymous tuple arguments, please!
-
+
"""
for c in cls.__mro__:
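A minimal sketch of the \**kwargs pass-through behavior described above (the import path from ``sqlalchemy.util`` is assumed):

    from sqlalchemy.util import get_cls_kwargs

    class Base(object):
        def __init__(self, a=None):
            pass

    class Sub(Base):
        def __init__(self, b=None, **kwargs):
            Base.__init__(self, **kwargs)

    # Sub's **kwargs catch-all causes the base constructors
    # to be scanned as well
    print(sorted(get_cls_kwargs(Sub)))      # ['a', 'b']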
@@ -79,10 +79,10 @@ def get_cls_kwargs(cls):
if not ctr or not isinstance(ctr, types.FunctionType):
stack.update(class_.__bases__)
continue
-
+
# this is shorthand for
# names, _, has_kw, _ = inspect.getargspec(ctr)
-
+
names, has_kw = inspect_func_args(ctr)
args.update(names)
if has_kw:
@@ -106,12 +106,12 @@ except ImportError:
def get_func_kwargs(func):
"""Return the set of legal kwargs for the given `func`.
-
+
Uses getargspec so is safe to call for methods, functions,
etc.
-
+
"""
-
+
return inspect.getargspec(func)[0]
def format_argspec_plus(fn, grouped=True):
@@ -203,7 +203,7 @@ def getargspec_init(method):
else:
return (['self'], 'args', 'kwargs', None)
-
+
def unbound_method_to_callable(func_or_cls):
"""Adjust the incoming callable such that a 'self' argument is not required."""
@@ -215,7 +215,7 @@ def unbound_method_to_callable(func_or_cls):
class portable_instancemethod(object):
"""Turn an instancemethod into a (parent, name) pair
to produce a serializable callable.
-
+
"""
def __init__(self, meth):
self.target = meth.im_self
@@ -223,7 +223,7 @@ class portable_instancemethod(object):
def __call__(self, *arg, **kw):
return getattr(self.target, self.name)(*arg, **kw)
-
+
def class_hierarchy(cls):
"""Return an unordered sequence of all classes related to cls.
@@ -468,22 +468,22 @@ class group_expirable_memoized_property(object):
class importlater(object):
"""Deferred import object.
-
+
e.g.::
-
+
somesubmod = importlater("mypackage.somemodule", "somesubmod")
-
+
is equivalent to::
-
+
from mypackage.somemodule import somesubmod
-
+
except evaluated upon attribute access to "somesubmod".
-
+
"""
def __init__(self, path, addtl=None):
self._il_path = path
self._il_addtl = addtl
-
+
@memoized_property
def module(self):
if self._il_addtl:
@@ -501,7 +501,7 @@ class importlater(object):
for token in self._il_path.split(".")[1:]:
m = getattr(m, token)
return m
-
+
def __getattr__(self, key):
try:
attr = getattr(self.module, key)
@@ -528,7 +528,7 @@ def asbool(obj):
def bool_or_str(*text):
"""Return a callable that will evaulate a string as
boolean, or one of a set of "alternate" string values.
-
+
"""
def bool_or_value(obj):
if obj in text:
@@ -536,7 +536,7 @@ def bool_or_str(*text):
else:
return asbool(obj)
return bool_or_value
-
+
def coerce_kw_type(kw, key, type_, flexi_bool=True):
"""If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
necessary. If 'flexi_bool' is True, the string '0' is considered false
@@ -552,11 +552,11 @@ def coerce_kw_type(kw, key, type_, flexi_bool=True):
def constructor_copy(obj, cls, **kw):
"""Instantiate cls using the __dict__ of obj as constructor arguments.
-
+
Uses inspect to match the named arguments of ``cls``.
-
+
"""
-
+
names = get_cls_kwargs(cls)
kw.update((k, obj.__dict__[k]) for k in names if k in obj.__dict__)
return cls(**kw)
@@ -645,13 +645,13 @@ class classproperty(property):
module, but note that the
:class:`~.sqlalchemy.ext.declarative.declared_attr`
decorator should be used for this purpose with declarative.
-
+
"""
-
+
def __init__(self, fget, *arg, **kw):
super(classproperty, self).__init__(fget, *arg, **kw)
self.__doc__ = fget.__doc__
-
+
def __get__(desc, self, cls):
return desc.fget(cls)
@@ -719,15 +719,15 @@ def warn_exception(func, *args, **kwargs):
def warn(msg, stacklevel=3):
"""Issue a warning.
-
+
If msg is a string, :class:`.exc.SAWarning` is used as
the category.
-
+
.. note:: This function is swapped out when the test suite
runs, with a compatible version that uses
warnings.warn_explicit, so that the warnings registry can
be controlled.
-
+
"""
if isinstance(msg, basestring):
warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel)
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index aeb212d4d..8f3406472 100644
--- a/lib/sqlalchemy/util/topological.py
+++ b/lib/sqlalchemy/util/topological.py
@@ -17,7 +17,7 @@ def sort_as_subsets(tuples, allitems):
edges = util.defaultdict(set)
for parent, child in tuples:
edges[child].add(parent)
-
+
todo = set(allitems)
while todo:
@@ -55,7 +55,7 @@ def find_cycles(tuples, allitems):
edges[parent].add(child)
output = set()
-
+
while todo:
node = todo.pop()
stack = [node]
@@ -66,7 +66,7 @@ def find_cycles(tuples, allitems):
cyc = stack[stack.index(node):]
todo.difference_update(cyc)
output.update(cyc)
-
+
if node in todo:
stack.append(node)
todo.remove(node)
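A minimal sketch of find_cycles(); the node names are arbitrary:

    from sqlalchemy.util import topological

    # a -> b -> c -> a forms a cycle; d hangs off the cycle
    tuples = [('a', 'b'), ('b', 'c'), ('c', 'a'), ('c', 'd')]
    nodes = ['a', 'b', 'c', 'd']

    print(topological.find_cycles(tuples, nodes))
    # expected: set(['a', 'b', 'c']), the members of the cycle only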