Diffstat (limited to 'lib/sqlalchemy')
-rw-r--r--  lib/sqlalchemy/dialects/firebird/base.py           18
-rw-r--r--  lib/sqlalchemy/dialects/firebird/kinterbasdb.py    24
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py             128
-rw-r--r--  lib/sqlalchemy/dialects/mssql/mxodbc.py             4
-rw-r--r--  lib/sqlalchemy/dialects/mysql/gaerdbms.py           8
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pymysql.py           16
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py             96
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py     54
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py             16
-rw-r--r--  lib/sqlalchemy/interfaces.py                       12
-rw-r--r--  lib/sqlalchemy/orm/relationships.py               138
-rw-r--r--  lib/sqlalchemy/types.py                           232
12 files changed, 373 insertions, 373 deletions
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index a0e48da2c..f3ab73467 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -135,7 +135,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR):
__visit_name__ = 'VARCHAR'
def __init__(self, length = None, **kwargs):
- super(VARCHAR, self).__init__(length=length, **kwargs)
+ super(VARCHAR, self).__init__(length=length, **kwargs)
class CHAR(_StringType, sqltypes.CHAR):
"""Firebird CHAR type"""
@@ -164,7 +164,7 @@ ischema_names = {
}
-# TODO: date conversion types (should be implemented as _FBDateTime,
+# TODO: date conversion types (should be implemented as _FBDateTime,
# _FBDate, etc. as bind/result functionality is required)
class FBTypeCompiler(compiler.GenericTypeCompiler):
@@ -339,7 +339,7 @@ class FBExecutionContext(default.DefaultExecutionContext):
"""Get the next value from the sequence using ``gen_id()``."""
return self._execute_scalar(
- "SELECT gen_id(%s, 1) FROM rdb$database" %
+ "SELECT gen_id(%s, 1) FROM rdb$database" %
self.dialect.identifier_preparer.format_sequence(seq),
type_
)
@@ -418,7 +418,7 @@ class FBDialect(default.DefaultDialect):
return name
def has_table(self, connection, table_name, schema=None):
- """Return ``True`` if the given table exists, ignoring
+ """Return ``True`` if the given table exists, ignoring
the `schema`."""
tblqry = """
@@ -489,8 +489,8 @@ class FBDialect(default.DefaultDialect):
return {'constrained_columns':pkfields, 'name':None}
@reflection.cache
- def get_column_sequence(self, connection,
- table_name, column_name,
+ def get_column_sequence(self, connection,
+ table_name, column_name,
schema=None, **kw):
tablename = self.denormalize_name(table_name)
colname = self.denormalize_name(column_name)
@@ -528,7 +528,7 @@ class FBDialect(default.DefaultDialect):
COALESCE(cs.rdb$bytes_per_character,1) AS flen,
f.rdb$field_precision AS fprec,
f.rdb$field_scale AS fscale,
- COALESCE(r.rdb$default_source,
+ COALESCE(r.rdb$default_source,
f.rdb$default_source) AS fdefault
FROM rdb$relation_fields r
JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
@@ -564,7 +564,7 @@ class FBDialect(default.DefaultDialect):
coltype = sqltypes.NULLTYPE
elif colspec == 'INT64':
coltype = coltype(
- precision=row['fprec'],
+ precision=row['fprec'],
scale=row['fscale'] * -1)
elif colspec in ('VARYING', 'CSTRING'):
coltype = coltype(row['flen'])
@@ -583,7 +583,7 @@ class FBDialect(default.DefaultDialect):
if row['fdefault'] is not None:
# the value comes down as "DEFAULT 'value'": there may be
# more than one whitespace around the "DEFAULT" keyword
- # and it may also be lower case
+ # and it may also be lower case
# (see also http://tracker.firebirdsql.org/browse/CORE-356)
defexpr = row['fdefault'].lstrip()
assert defexpr[:8].rstrip().upper() == \
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index d9d749b3c..a5dc821be 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -17,20 +17,20 @@ Kinterbasedb backend specific keyword arguments are:
SQLAlchemy uses 200 with Unicode, datetime and decimal support (see
details__).
-* concurrency_level - set the backend policy with regards to threading
+* concurrency_level - set the backend policy with regards to threading
issues: by default SQLAlchemy uses policy 1 (see details__).
-* enable_rowcount - True by default, setting this to False disables
- the usage of "cursor.rowcount" with the
+* enable_rowcount - True by default, setting this to False disables
+ the usage of "cursor.rowcount" with the
Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically
- after any UPDATE or DELETE statement. When disabled, SQLAlchemy's
- ResultProxy will return -1 for result.rowcount. The rationale here is
- that Kinterbasdb requires a second round trip to the database when
- .rowcount is called - since SQLA's resultproxy automatically closes
- the cursor after a non-result-returning statement, rowcount must be
+ after any UPDATE or DELETE statement. When disabled, SQLAlchemy's
+ ResultProxy will return -1 for result.rowcount. The rationale here is
+ that Kinterbasdb requires a second round trip to the database when
+ .rowcount is called - since SQLA's resultproxy automatically closes
+ the cursor after a non-result-returning statement, rowcount must be
called, if at all, before the result object is returned. Additionally,
cursor.rowcount may not return correct results with older versions
- of Firebird, and setting this flag to False will also cause the
+ of Firebird, and setting this flag to False will also cause the
SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a
per-execution basis using the `enable_rowcount` option with
:meth:`execution_options()`::
@@ -64,7 +64,7 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric):
class FBExecutionContext_kinterbasdb(FBExecutionContext):
@property
def rowcount(self):
- if self.execution_options.get('enable_rowcount',
+ if self.execution_options.get('enable_rowcount',
self.dialect.enable_rowcount):
return self.cursor.rowcount
else:
@@ -135,7 +135,7 @@ class FBDialect_kinterbasdb(FBDialect):
# that for backward compatibility reasons returns a string like
# LI-V6.3.3.12981 Firebird 2.0
# where the first version is a fake one resembling the old
- # Interbase signature.
+ # Interbase signature.
fbconn = connection.connection
version = fbconn.server_version
@@ -159,7 +159,7 @@ class FBDialect_kinterbasdb(FBDialect):
msg = str(e)
return ('Unable to complete network request to host' in msg or
'Invalid connection state' in msg or
- 'Invalid cursor state' in msg or
+ 'Invalid cursor state' in msg or
'connection shutdown' in msg)
else:
return False
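The ``enable_rowcount`` flag documented above can be set dialect-wide at engine
creation, or re-enabled per execution. A minimal sketch, assuming a placeholder
connection URL and a throwaway UPDATE statement::

    from sqlalchemy import create_engine, text

    # dialect-wide: never consult cursor.rowcount; result.rowcount returns -1
    engine = create_engine('firebird+kinterbasdb://user:pass@host/db.fdb',
                           enable_rowcount=False)

    conn = engine.connect()
    # per-execution: accept the extra round trip for this statement only
    result = conn.execution_options(enable_rowcount=True).\
                  execute(text("UPDATE t SET x=1"))
    print(result.rowcount)
    conn.close()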
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index b893541d3..e38aa8c1e 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -74,7 +74,7 @@ will yield::
SELECT TOP n
If using SQL Server 2005 or above, LIMIT with OFFSET
-support is available through the ``ROW_NUMBER OVER`` construct.
+support is available through the ``ROW_NUMBER OVER`` construct.
For versions below 2005, LIMIT with OFFSET usage will fail.
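A sketch of which construct triggers each code path, using throwaway Core
objects (table and column names are placeholders)::

    from sqlalchemy import select, table, column

    t = table('users', column('id'), column('name'))

    select([t]).limit(10)             # compiles to SELECT TOP 10 ...
    select([t]).limit(10).offset(20)  # wrapped with ROW_NUMBER() OVER (2005+)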
Nullability
@@ -119,14 +119,14 @@ Triggers
SQLAlchemy by default uses OUTPUT INSERTED to get at newly
generated primary key values via IDENTITY columns or other
-server side defaults. MS-SQL does not
+server side defaults. MS-SQL does not
allow the usage of OUTPUT INSERTED on tables that have triggers.
To disable the usage of OUTPUT INSERTED on a per-table basis,
specify ``implicit_returning=False`` for each :class:`.Table`
which has triggers::
- Table('mytable', metadata,
- Column('id', Integer, primary_key=True),
+ Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
# ...,
implicit_returning=False
)
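The same flag may also be passed engine-wide; a minimal sketch, assuming a
placeholder connection URL::

    from sqlalchemy import create_engine

    engine = create_engine('mssql+pyodbc://user:pass@mydsn',
                           implicit_returning=False)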
@@ -144,11 +144,11 @@ This option can also be specified engine-wide using the
Enabling Snapshot Isolation
---------------------------
-Not necessarily specific to SQLAlchemy, SQL Server has a default transaction
+Not necessarily specific to SQLAlchemy, SQL Server has a default transaction
isolation mode that locks entire tables, and causes even mildly concurrent
applications to have long held locks and frequent deadlocks.
-Enabling snapshot isolation for the database as a whole is recommended
-for modern levels of concurrency support. This is accomplished via the
+Enabling snapshot isolation for the database as a whole is recommended
+for modern levels of concurrency support. This is accomplished via the
following ALTER DATABASE commands executed at the SQL prompt::
ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON
@@ -249,7 +249,7 @@ class _MSDate(sqltypes.Date):
return value.date()
elif isinstance(value, basestring):
return datetime.date(*[
- int(x or 0)
+ int(x or 0)
for x in self._reg.match(value).groups()
])
else:
@@ -280,7 +280,7 @@ class TIME(sqltypes.TIME):
return value.time()
elif isinstance(value, basestring):
return datetime.time(*[
- int(x or 0)
+ int(x or 0)
for x in self._reg.match(value).groups()])
else:
return value
@@ -585,7 +585,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
return self._extend("TEXT", type_)
def visit_VARCHAR(self, type_):
- return self._extend("VARCHAR", type_,
+ return self._extend("VARCHAR", type_,
length = type_.length or 'max')
def visit_CHAR(self, type_):
@@ -595,7 +595,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
return self._extend("NCHAR", type_)
def visit_NVARCHAR(self, type_):
- return self._extend("NVARCHAR", type_,
+ return self._extend("NVARCHAR", type_,
length = type_.length or 'max')
def visit_date(self, type_):
@@ -618,8 +618,8 @@ class MSTypeCompiler(compiler.GenericTypeCompiler):
def visit_VARBINARY(self, type_):
return self._extend(
- "VARBINARY",
- type_,
+ "VARBINARY",
+ type_,
length=type_.length or 'max')
def visit_boolean(self, type_):
@@ -666,8 +666,8 @@ class MSExecutionContext(default.DefaultExecutionContext):
not self.executemany
if self._enable_identity_insert:
- self.root_connection._cursor_execute(self.cursor,
- "SET IDENTITY_INSERT %s ON" %
+ self.root_connection._cursor_execute(self.cursor,
+ "SET IDENTITY_INSERT %s ON" %
self.dialect.identifier_preparer.format_table(tbl),
())
@@ -677,10 +677,10 @@ class MSExecutionContext(default.DefaultExecutionContext):
conn = self.root_connection
if self._select_lastrowid:
if self.dialect.use_scope_identity:
- conn._cursor_execute(self.cursor,
+ conn._cursor_execute(self.cursor,
"SELECT scope_identity() AS lastrowid", ())
else:
- conn._cursor_execute(self.cursor,
+ conn._cursor_execute(self.cursor,
"SELECT @@identity AS lastrowid", ())
# fetchall() ensures the cursor is consumed without closing it
row = self.cursor.fetchall()[0]
@@ -691,7 +691,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
self._result_proxy = base.FullyBufferedResultProxy(self)
if self._enable_identity_insert:
- conn._cursor_execute(self.cursor,
+ conn._cursor_execute(self.cursor,
"SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer.
format_table(self.compiled.statement.table),
@@ -705,7 +705,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
if self._enable_identity_insert:
try:
self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
+ "SET IDENTITY_INSERT %s OFF" %
self.dialect.identifier_preparer.\
format_table(self.compiled.statement.table)
)
@@ -748,12 +748,12 @@ class MSSQLCompiler(compiler.SQLCompiler):
def visit_concat_op(self, binary, **kw):
return "%s + %s" % \
- (self.process(binary.left, **kw),
+ (self.process(binary.left, **kw),
self.process(binary.right, **kw))
def visit_match_op(self, binary, **kw):
return "CONTAINS (%s, %s)" % (
- self.process(binary.left, **kw),
+ self.process(binary.left, **kw),
self.process(binary.right, **kw))
def get_select_precolumns(self, select):
@@ -843,7 +843,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt)
def visit_rollback_to_savepoint(self, savepoint_stmt):
- return ("ROLLBACK TRANSACTION %s"
+ return ("ROLLBACK TRANSACTION %s"
% self.preparer.format_savepoint(savepoint_stmt))
def visit_column(self, column, result_map=None, **kwargs):
@@ -856,18 +856,18 @@ class MSSQLCompiler(compiler.SQLCompiler):
t, column)
if result_map is not None:
- result_map[column.name
- if self.dialect.case_sensitive
+ result_map[column.name
+ if self.dialect.case_sensitive
else column.name.lower()] = \
- (column.name, (column, ),
+ (column.name, (column, ),
column.type)
return super(MSSQLCompiler, self).\
- visit_column(converted,
+ visit_column(converted,
result_map=None, **kwargs)
- return super(MSSQLCompiler, self).visit_column(column,
- result_map=result_map,
+ return super(MSSQLCompiler, self).visit_column(column,
+ result_map=result_map,
**kwargs)
def visit_binary(self, binary, **kwargs):
@@ -876,14 +876,14 @@ class MSSQLCompiler(compiler.SQLCompiler):
"""
if (
- isinstance(binary.left, expression.BindParameter)
+ isinstance(binary.left, expression.BindParameter)
and binary.operator == operator.eq
and not isinstance(binary.right, expression.BindParameter)
):
return self.process(
- expression.BinaryExpression(binary.right,
- binary.left,
- binary.operator),
+ expression.BinaryExpression(binary.right,
+ binary.left,
+ binary.operator),
**kwargs)
return super(MSSQLCompiler, self).visit_binary(binary, **kwargs)
@@ -904,10 +904,10 @@ class MSSQLCompiler(compiler.SQLCompiler):
columns = [
self.process(
- col_label(c),
- within_columns_clause=True,
+ col_label(c),
+ within_columns_clause=True,
result_map=self.result_map
- )
+ )
for c in expression._select_iterables(returning_cols)
]
return 'OUTPUT ' + ', '.join(columns)
@@ -927,7 +927,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
label_select_column(select, column, asfrom)
def for_update_clause(self, select):
- # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which
+ # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which
# SQLAlchemy doesn't use
return ''
@@ -945,11 +945,11 @@ class MSSQLCompiler(compiler.SQLCompiler):
from_hints,
**kw):
"""Render the UPDATE..FROM clause specific to MSSQL.
-
+
In MSSQL, if the UPDATE statement involves an alias of the table to
be updated, then the table itself must be added to the FROM list as
well. Otherwise, it is optional. Here, we add it regardless.
-
+
"""
return "FROM " + ', '.join(
t._compiler_dispatch(self, asfrom=True,
@@ -969,14 +969,14 @@ class MSSQLStrictCompiler(MSSQLCompiler):
def visit_in_op(self, binary, **kw):
kw['literal_binds'] = True
return "%s IN %s" % (
- self.process(binary.left, **kw),
+ self.process(binary.left, **kw),
self.process(binary.right, **kw)
)
def visit_notin_op(self, binary, **kw):
kw['literal_binds'] = True
return "%s NOT IN %s" % (
- self.process(binary.left, **kw),
+ self.process(binary.left, **kw),
self.process(binary.right, **kw)
)
@@ -1005,7 +1005,7 @@ class MSSQLStrictCompiler(MSSQLCompiler):
class MSDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
- colspec = (self.preparer.format_column(column) + " "
+ colspec = (self.preparer.format_column(column) + " "
+ self.dialect.type_compiler.process(column.type))
if column.nullable is not None:
@@ -1016,7 +1016,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
if column.table is None:
raise exc.CompileError(
- "mssql requires Table-bound columns "
+ "mssql requires Table-bound columns "
"in order to generate DDL")
seq_col = column.table._autoincrement_column
@@ -1051,7 +1051,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = RESERVED_WORDS
def __init__(self, dialect):
- super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[',
+ super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[',
final_quote=']')
def _escape_identifier(self, value):
@@ -1116,7 +1116,7 @@ class MSDialect(default.DefaultDialect):
super(MSDialect, self).initialize(connection)
if self.server_version_info[0] not in range(8, 17):
# FreeTDS with version 4.2 seems to report here
- # a number like "95.10.255". Don't know what
+ # a number like "95.10.255". Don't know what
# that is. So emit warning.
util.warn(
"Unrecognized server version info '%s'. Version specific "
@@ -1217,11 +1217,11 @@ class MSDialect(default.DefaultDialect):
"join sys.schemas as sch on sch.schema_id=tab.schema_id "
"where tab.name = :tabname "
"and sch.name=:schname "
- "and ind.is_primary_key=0",
+ "and ind.is_primary_key=0",
bindparams=[
- sql.bindparam('tabname', tablename,
+ sql.bindparam('tabname', tablename,
sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', current_schema,
+ sql.bindparam('schname', current_schema,
sqltypes.String(convert_unicode=True))
],
typemap = {
@@ -1248,9 +1248,9 @@ class MSDialect(default.DefaultDialect):
"where tab.name=:tabname "
"and sch.name=:schname",
bindparams=[
- sql.bindparam('tabname', tablename,
+ sql.bindparam('tabname', tablename,
sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', current_schema,
+ sql.bindparam('schname', current_schema,
sqltypes.String(convert_unicode=True))
],
typemap = {
@@ -1278,9 +1278,9 @@ class MSDialect(default.DefaultDialect):
"views.schema_id=sch.schema_id and "
"views.name=:viewname and sch.name=:schname",
bindparams=[
- sql.bindparam('viewname', viewname,
+ sql.bindparam('viewname', viewname,
sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', current_schema,
+ sql.bindparam('schname', current_schema,
sqltypes.String(convert_unicode=True))
]
)
@@ -1309,7 +1309,7 @@ class MSDialect(default.DefaultDialect):
row = c.fetchone()
if row is None:
break
- (name, type, nullable, charlen,
+ (name, type, nullable, charlen,
numericprec, numericscale, default, collation) = (
row[columns.c.column_name],
row[columns.c.data_type],
@@ -1323,7 +1323,7 @@ class MSDialect(default.DefaultDialect):
coltype = self.ischema_names.get(type, None)
kwargs = {}
- if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
+ if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
MSNText, MSBinary, MSVarBinary,
sqltypes.LargeBinary):
kwargs['length'] = charlen
@@ -1335,7 +1335,7 @@ class MSDialect(default.DefaultDialect):
if coltype is None:
util.warn(
- "Did not recognize type '%s' of column '%s'" %
+ "Did not recognize type '%s' of column '%s'" %
(type, name))
coltype = sqltypes.NULLTYPE
else:
@@ -1359,7 +1359,7 @@ class MSDialect(default.DefaultDialect):
colmap[col['name']] = col
# We also run an sp_columns to check for identity columns:
cursor = connection.execute("sp_columns @table_name = '%s', "
- "@table_owner = '%s'"
+ "@table_owner = '%s'"
% (tablename, current_schema))
ic = None
while True:
@@ -1378,7 +1378,7 @@ class MSDialect(default.DefaultDialect):
if ic is not None and self.server_version_info >= MS_2005_VERSION:
table_fullname = "%s.%s" % (current_schema, tablename)
cursor = connection.execute(
- "select ident_seed('%s'), ident_incr('%s')"
+ "select ident_seed('%s'), ident_incr('%s')"
% (table_fullname, table_fullname)
)
@@ -1398,12 +1398,12 @@ class MSDialect(default.DefaultDialect):
RR = ischema.ref_constraints
# information_schema.table_constraints
TC = ischema.constraints
- # information_schema.constraint_column_usage:
+ # information_schema.constraint_column_usage:
# the constrained column
- C = ischema.key_constraints.alias('C')
- # information_schema.constraint_column_usage:
+ C = ischema.key_constraints.alias('C')
+ # information_schema.constraint_column_usage:
# the referenced column
- R = ischema.key_constraints.alias('R')
+ R = ischema.key_constraints.alias('R')
# Primary key constraints
s = sql.select([C.c.column_name, TC.c.constraint_type],
@@ -1425,12 +1425,12 @@ class MSDialect(default.DefaultDialect):
RR = ischema.ref_constraints
# information_schema.table_constraints
TC = ischema.constraints
- # information_schema.constraint_column_usage:
+ # information_schema.constraint_column_usage:
# the constrained column
- C = ischema.key_constraints.alias('C')
- # information_schema.constraint_column_usage:
+ C = ischema.key_constraints.alias('C')
+ # information_schema.constraint_column_usage:
# the referenced column
- R = ischema.key_constraints.alias('R')
+ R = ischema.key_constraints.alias('R')
# Foreign key constraints
s = sql.select([C.c.column_name,
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index 94f0a2cbb..15ebad1f8 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -41,7 +41,7 @@ simplistic statements.
For this reason, the mxODBC dialect uses the "native" mode by default only for
INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for
-all other statements.
+all other statements.
This behavior can be controlled via
:meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the
@@ -55,7 +55,7 @@ of ``False`` will unconditionally use string-escaped parameters.
from sqlalchemy import types as sqltypes
from sqlalchemy.connectors.mxodbc import MxODBCConnector
from sqlalchemy.dialects.mssql.pyodbc import MSExecutionContext_pyodbc
-from sqlalchemy.dialects.mssql.base import (MSDialect,
+from sqlalchemy.dialects.mssql.base import (MSDialect,
MSSQLStrictCompiler,
_MSDateTime, _MSDate, TIME)
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index f84a5d2dd..0fe5d635a 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -27,7 +27,7 @@ Pooling
Google App Engine connections appear to be randomly recycled,
so the dialect does not pool connections. The :class:`.NullPool`
-implementation is installed within the :class:`.Engine` by
+implementation is installed within the :class:`.Engine` by
default.
"""
@@ -37,10 +37,10 @@ from sqlalchemy.pool import NullPool
import re
-class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):
+class MySQLDialect_gaerdbms(MySQLDialect_mysqldb):
- @classmethod
- def dbapi(cls):
+ @classmethod
+ def dbapi(cls):
from google.appengine.api import rdbms
return rdbms
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index e05d50d30..8387dfecb 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -20,20 +20,20 @@ Connect string::
MySQL-Python Compatibility
--------------------------
-The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
-and targets 100% compatibility. Most behavioral notes for MySQL-python apply to
+The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
+and targets 100% compatibility. Most behavioral notes for MySQL-python apply to
the pymysql driver as well.
"""
-from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb
+from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb
-class MySQLDialect_pymysql(MySQLDialect_mysqldb):
+class MySQLDialect_pymysql(MySQLDialect_mysqldb):
driver = 'pymysql'
description_encoding = None
- @classmethod
- def dbapi(cls):
- return __import__('pymysql')
+ @classmethod
+ def dbapi(cls):
+ return __import__('pymysql')
-dialect = MySQLDialect_pymysql
\ No newline at end of file
+dialect = MySQLDialect_pymysql
\ No newline at end of file
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 439568dd7..cbeac7791 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -14,7 +14,7 @@ for that driver.
Connect Arguments
-----------------
-The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
+The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
affect the behavior of the dialect regardless of driver in use.
* *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
@@ -29,32 +29,32 @@ Auto Increment Behavior
SQLAlchemy Table objects which include integer primary keys are usually assumed to have
"autoincrementing" behavior, meaning they can generate their own primary key values upon
-INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences
+INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences
to produce these values. With the Oracle dialect, *a sequence must always be explicitly
-specified to enable autoincrement*. This is divergent with the majority of documentation
+specified to enable autoincrement*. This is divergent with the majority of documentation
examples which assume the usage of an autoincrement-capable database. To specify sequences,
use the sqlalchemy.schema.Sequence object which is passed to a Column construct::
- t = Table('mytable', metadata,
+ t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
Column(...), ...
)
This step is also required when using table reflection, i.e. autoload=True::
- t = Table('mytable', metadata,
+ t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
autoload=True
- )
+ )
Identifier Casing
-----------------
-In Oracle, the data dictionary represents all case insensitive identifier names
+In Oracle, the data dictionary represents all case insensitive identifier names
using UPPERCASE text. SQLAlchemy on the other hand considers an all-lower case identifier
name to be case insensitive. The Oracle dialect converts all case insensitive identifiers
to and from those two formats during schema level communication, such as reflection of
-tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a
+tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a
case sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches
against data dictionary data received from Oracle, so unless identifier names have been
truly created as case sensitive (i.e. using quoted names), all lowercase names should be
@@ -72,16 +72,16 @@ Unicode
Also note that Oracle supports unicode data through the NVARCHAR and NCLOB data types.
When using the SQLAlchemy Unicode and UnicodeText types, these DDL types will be used
-within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still
+within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still
requires NLS_LANG to be set.
LIMIT/OFFSET Support
--------------------
-Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses
-a wrapped subquery approach in conjunction with ROWNUM. The exact methodology
+Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses
+a wrapped subquery approach in conjunction with ROWNUM. The exact methodology
is taken from
-http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html .
+http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html .
There are two options which affect its behavior:
@@ -89,13 +89,13 @@ There are two options which affect its behavior:
optimization directive, specify ``optimize_limits=True`` to :func:`.create_engine`.
* the values passed for the limit/offset are sent as bound parameters. Some users have observed
that Oracle produces a poor query plan when the values are sent as binds and not
- rendered literally. To render the limit/offset values literally within the SQL
+ rendered literally. To render the limit/offset values literally within the SQL
statement, specify ``use_binds_for_limits=False`` to :func:`.create_engine`.
-Some users have reported better performance when the entirely different approach of a
-window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note
-that the majority of users don't observe this). To suit this case the
-method used for LIMIT/OFFSET can be replaced entirely. See the recipe at
+Some users have reported better performance when the entirely different approach of a
+window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note
+that the majority of users don't observe this). To suit this case the
+method used for LIMIT/OFFSET can be replaced entirely. See the recipe at
http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset with
a window function.
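Both limit-related flags described above are plain :func:`.create_engine`
arguments; a sketch, assuming a placeholder connection URL::

    from sqlalchemy import create_engine

    engine = create_engine('oracle://scott:tiger@dsn',
                           optimize_limits=True,        # emit the optimization
                                                        # directive noted above
                           use_binds_for_limits=False)  # render limit/offset
                                                        # values literally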
@@ -103,11 +103,11 @@ a window function.
ON UPDATE CASCADE
-----------------
-Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution
+Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based solution
is available at http://asktom.oracle.com/tkyte/update_cascade/index.html .
When using the SQLAlchemy ORM, the ORM has limited ability to manually issue
-cascading updates - specify ForeignKey objects using the
+cascading updates - specify ForeignKey objects using the
"deferrable=True, initially='deferred'" keyword arguments,
and specify "passive_updates=False" on each relationship().
@@ -121,21 +121,21 @@ behaviors:
JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN
makes use of Oracle's (+) operator.
-* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when
- the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued
+* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when
+ the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued
instead. This because these types don't seem to work correctly on Oracle 8
- even though they are available. The :class:`~sqlalchemy.types.NVARCHAR`
+ even though they are available. The :class:`~sqlalchemy.types.NVARCHAR`
and :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate NVARCHAR2 and NCLOB.
-* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy
+* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy
encodes all Python unicode objects to "string" before passing in as bind parameters.
Synonym/DBLINK Reflection
-------------------------
When using reflection with Table objects, the dialect can optionally search for tables
-indicated by synonyms that reference DBLINK-ed tables by passing the flag
-oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK
+indicated by synonyms that reference DBLINK-ed tables by passing the flag
+oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK
is not in use this flag should be left off.
"""
@@ -217,8 +217,8 @@ class LONG(sqltypes.Text):
class INTERVAL(sqltypes.TypeEngine):
__visit_name__ = 'INTERVAL'
- def __init__(self,
- day_precision=None,
+ def __init__(self,
+ day_precision=None,
second_precision=None):
"""Construct an INTERVAL.
@@ -303,10 +303,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def visit_INTERVAL(self, type_):
return "INTERVAL DAY%s TO SECOND%s" % (
- type_.day_precision is not None and
+ type_.day_precision is not None and
"(%d)" % type_.day_precision or
"",
- type_.second_precision is not None and
+ type_.second_precision is not None and
"(%d)" % type_.second_precision or
"",
)
@@ -340,7 +340,7 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
else:
return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale}
- def visit_string(self, type_):
+ def visit_string(self, type_):
return self.visit_VARCHAR2(type_)
def visit_VARCHAR2(self, type_):
@@ -356,10 +356,10 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
def _visit_varchar(self, type_, n, num):
if not n and self.dialect._supports_char_length:
return "VARCHAR%(two)s(%(length)s CHAR)" % {
- 'length' : type_.length,
+ 'length' : type_.length,
'two':num}
else:
- return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length,
+ return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length,
'two':num, 'n':n}
def visit_text(self, type_):
@@ -431,7 +431,7 @@ class OracleCompiler(compiler.SQLCompiler):
return ""
def default_from(self):
- """Called when a ``SELECT`` statement has no froms,
+ """Called when a ``SELECT`` statement has no froms,
and no ``FROM`` clause is to be appended.
The Oracle compiler tacks a "FROM DUAL" to the statement.
@@ -613,7 +613,7 @@ class OracleDDLCompiler(compiler.DDLCompiler):
if constraint.ondelete is not None:
text += " ON DELETE %s" % constraint.ondelete
- # oracle has no ON UPDATE CASCADE -
+ # oracle has no ON UPDATE CASCADE -
# its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html
if constraint.onupdate is not None:
util.warn(
@@ -643,8 +643,8 @@ class OracleIdentifierPreparer(compiler.IdentifierPreparer):
class OracleExecutionContext(default.DefaultExecutionContext):
def fire_sequence(self, seq, type_):
- return self._execute_scalar("SELECT " +
- self.dialect.identifier_preparer.format_sequence(seq) +
+ return self._execute_scalar("SELECT " +
+ self.dialect.identifier_preparer.format_sequence(seq) +
".nextval FROM DUAL", type_)
class OracleDialect(default.DefaultDialect):
@@ -676,9 +676,9 @@ class OracleDialect(default.DefaultDialect):
reflection_options = ('oracle_resolve_synonyms', )
- def __init__(self,
- use_ansi=True,
- optimize_limits=False,
+ def __init__(self,
+ use_ansi=True,
+ optimize_limits=False,
use_binds_for_limits=True,
**kwargs):
default.DefaultDialect.__init__(self, **kwargs)
@@ -808,8 +808,8 @@ class OracleDialect(default.DefaultDialect):
if resolve_synonyms:
actual_name, owner, dblink, synonym = self._resolve_synonym(
- connection,
- desired_owner=self.denormalize_name(schema),
+ connection,
+ desired_owner=self.denormalize_name(schema),
desired_synonym=self.denormalize_name(table_name)
)
else:
@@ -876,11 +876,11 @@ class OracleDialect(default.DefaultDialect):
char_length_col = 'char_length'
else:
char_length_col = 'data_length'
-
+
c = connection.execute(sql.text(
"SELECT column_name, data_type, %(char_length_col)s, data_precision, data_scale, "
"nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "
- "WHERE table_name = :table_name AND owner = :owner "
+ "WHERE table_name = :table_name AND owner = :owner "
"ORDER BY column_id" % {'dblink': dblink, 'char_length_col':char_length_col}),
table_name=table_name, owner=schema)
@@ -892,7 +892,7 @@ class OracleDialect(default.DefaultDialect):
coltype = NUMBER(precision, scale)
elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'):
coltype = self.ischema_names.get(coltype)(length)
- elif 'WITH TIME ZONE' in coltype:
+ elif 'WITH TIME ZONE' in coltype:
coltype = TIMESTAMP(timezone=True)
else:
coltype = re.sub(r'\(\d+\)', '', coltype)
@@ -929,8 +929,8 @@ class OracleDialect(default.DefaultDialect):
indexes = []
q = sql.text("""
SELECT a.index_name, a.column_name, b.uniqueness
- FROM ALL_IND_COLUMNS%(dblink)s a,
- ALL_INDEXES%(dblink)s b
+ FROM ALL_IND_COLUMNS%(dblink)s a,
+ ALL_INDEXES%(dblink)s b
WHERE
a.index_name = b.index_name
AND a.table_owner = b.table_owner
@@ -1091,8 +1091,8 @@ class OracleDialect(default.DefaultDialect):
if resolve_synonyms:
ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
self._resolve_synonym(
- connection,
- desired_owner=self.denormalize_name(remote_owner),
+ connection,
+ desired_owner=self.denormalize_name(remote_owner),
desired_table=self.denormalize_name(remote_table)
)
if ref_synonym:
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 44ed7fc91..8976ca990 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -10,7 +10,7 @@ Driver
------
The psycopg2 driver is available at http://pypi.python.org/pypi/psycopg2/ .
-The dialect has several behaviors which are specifically tailored towards compatibility
+The dialect has several behaviors which are specifically tailored towards compatibility
with this module.
Note that psycopg1 is **not** supported.
@@ -48,7 +48,7 @@ which specifies Unix-domain communication rather than TCP/IP communication::
create_engine("postgresql+psycopg2://user:password@/dbname")
By default, the socket file used is to connect to a Unix-domain socket
-in ``/tmp``, or whatever socket directory was specified when PostgreSQL
+in ``/tmp``, or whatever socket directory was specified when PostgreSQL
was built. This value can be overridden by passing a pathname to psycopg2,
using ``host`` as an additional keyword argument::
@@ -61,11 +61,11 @@ See also:
Per-Statement/Connection Execution Options
-------------------------------------------
-The following DBAPI-specific options are respected when used with
+The following DBAPI-specific options are respected when used with
:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:
-* isolation_level - Set the transaction isolation level for the lifespan of a
+* isolation_level - Set the transaction isolation level for the lifespan of a
:class:`.Connection` (can only be set on a connection, not a statement or query).
This includes the options ``SERIALIZABLE``, ``READ COMMITTED``,
``READ UNCOMMITTED`` and ``REPEATABLE READ``.
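A sketch of setting the option on a connection (connection URL is a
placeholder)::

    from sqlalchemy import create_engine

    engine = create_engine('postgresql+psycopg2://user:pass@host/db')
    conn = engine.connect().execution_options(
                isolation_level='SERIALIZABLE')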
@@ -79,8 +79,8 @@ By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
extension, such that the DBAPI receives and returns all strings as Python
Unicode objects directly - SQLAlchemy passes these values through without
change. Psycopg2 here will encode/decode string values based on the
-current "client encoding" setting; by default this is the value in
-the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
+current "client encoding" setting; by default this is the value in
+the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf-8``, as a more useful default::
#client_encoding = sql_ascii # actually, defaults to database
@@ -90,7 +90,7 @@ Typically, this can be changed to ``utf-8``, as a more useful default::
A second way to affect the client encoding is to set it within Psycopg2
locally. SQLAlchemy will call psycopg2's ``set_client_encoding()``
method (see: http://initd.org/psycopg/docs/connection.html#connection.set_client_encoding)
-on all new connections based on the value passed to
+on all new connections based on the value passed to
:func:`.create_engine` using the ``client_encoding`` parameter::
engine = create_engine("postgresql://user:pass@host/dbname", client_encoding='utf8')
@@ -102,15 +102,15 @@ This overrides the encoding specified in the Postgresql client configuration.
SQLAlchemy can also be instructed to skip the usage of the psycopg2
``UNICODE`` extension and to instead utilize it's own unicode encode/decode
-services, which are normally reserved only for those DBAPIs that don't
-fully support unicode directly. Passing ``use_native_unicode=False``
+services, which are normally reserved only for those DBAPIs that don't
+fully support unicode directly. Passing ``use_native_unicode=False``
to :func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``.
-SQLAlchemy will instead encode data itself into Python bytestrings on the way
+SQLAlchemy will instead encode data itself into Python bytestrings on the way
in and coerce from bytes on the way back,
-using the value of the :func:`.create_engine` ``encoding`` parameter, which
+using the value of the :func:`.create_engine` ``encoding`` parameter, which
defaults to ``utf-8``.
SQLAlchemy's own unicode encode/decode functionality is steadily becoming
-obsolete as more DBAPIs support unicode fully along with the approach of
+obsolete as more DBAPIs support unicode fully along with the approach of
Python 3; in modern usage psycopg2 should be relied upon to handle unicode.
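For completeness, a sketch of disabling the extension (connection URL is a
placeholder)::

    from sqlalchemy import create_engine

    engine = create_engine('postgresql+psycopg2://user:pass@host/db',
                           use_native_unicode=False,
                           encoding='utf-8')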
Transactions
@@ -132,7 +132,7 @@ at the API level what level should be used.
NOTICE logging
---------------
-The psycopg2 dialect will log Postgresql NOTICE messages via the
+The psycopg2 dialect will log Postgresql NOTICE messages via the
``sqlalchemy.dialects.postgresql`` logger::
import logging
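    # the remainder of this example lies outside the hunk context; raising
    # the named logger to INFO is what surfaces the NOTICE messages
    logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)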
@@ -220,8 +220,8 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
(self.compiled and isinstance(self.compiled.statement, expression.Selectable) \
or \
(
- (not self.compiled or
- isinstance(self.compiled.statement, expression.TextClause))
+ (not self.compiled or
+ isinstance(self.compiled.statement, expression.TextClause))
and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement))
)
)
@@ -249,7 +249,7 @@ class PGExecutionContext_psycopg2(PGExecutionContext):
def _log_notices(self, cursor):
for notice in cursor.connection.notices:
- # NOTICE messages have a
+ # NOTICE messages have a
# newline character at the end
logger.info(notice.rstrip())
@@ -291,7 +291,7 @@ class PGDialect_psycopg2(PGDialect):
}
)
- def __init__(self, server_side_cursors=False, use_native_unicode=True,
+ def __init__(self, server_side_cursors=False, use_native_unicode=True,
client_encoding=None, **kwargs):
PGDialect.__init__(self, **kwargs)
self.server_side_cursors = server_side_cursors
@@ -299,12 +299,12 @@ class PGDialect_psycopg2(PGDialect):
self.supports_unicode_binds = use_native_unicode
self.client_encoding = client_encoding
if self.dbapi and hasattr(self.dbapi, '__version__'):
- m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
+ m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
self.dbapi.__version__)
if m:
self.psycopg2_version = tuple(
- int(x)
- for x in m.group(1, 2, 3)
+ int(x)
+ for x in m.group(1, 2, 3)
if x is not None)
@classmethod
@@ -316,8 +316,8 @@ class PGDialect_psycopg2(PGDialect):
def _isolation_lookup(self):
extensions = __import__('psycopg2.extensions').extensions
return {
- 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED,
- 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
+ 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED,
+ 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
'REPEATABLE READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ,
'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE
}
@@ -328,9 +328,9 @@ class PGDialect_psycopg2(PGDialect):
except KeyError:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
+ "Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
connection.set_isolation_level(level)
@@ -370,8 +370,8 @@ class PGDialect_psycopg2(PGDialect):
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.OperationalError):
# these error messages from libpq: interfaces/libpq/fe-misc.c.
- # TODO: these are sent through gettext in libpq and we can't
- # check within other locales - consider using connection.closed
+ # TODO: these are sent through gettext in libpq and we can't
+ # check within other locales - consider using connection.closed
return 'closed the connection' in str(e) or \
'connection not open' in str(e) or \
'could not receive data from server' in str(e)
@@ -380,7 +380,7 @@ class PGDialect_psycopg2(PGDialect):
return 'connection already closed' in str(e) or \
'cursor already closed' in str(e)
elif isinstance(e, self.dbapi.ProgrammingError):
- # not sure where this path is originally from, it may
+ # not sure where this path is originally from, it may
# be obsolete. It really says "losed", not "closed".
return "losed the connection unexpectedly" in str(e)
else:
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index affef974c..f551bff99 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -1,8 +1,8 @@
# sybase/base.py
# Copyright (C) 2010-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
-# copyright (C) 2007 Fisch Asset Management
-# AG http://www.fam.ch, with coding by Alexander Houben
+# copyright (C) 2007 Fisch Asset Management
+# AG http://www.fam.ch, with coding by Alexander Houben
# alexander.houben@thor-solutions.ch
#
# This module is part of SQLAlchemy and is released under
@@ -13,7 +13,7 @@
.. note::
The Sybase dialect functions on current SQLAlchemy versions
- but is not regularly tested, and may have many issues and
+ but is not regularly tested, and may have many issues and
caveats not currently handled. In particular, the table
and database reflection features are not implemented.
@@ -130,7 +130,7 @@ class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
class IMAGE(sqltypes.LargeBinary):
__visit_name__ = 'IMAGE'
-
+
class SybaseTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_):
@@ -224,12 +224,12 @@ class SybaseExecutionContext(default.DefaultExecutionContext):
self._enable_identity_insert = False
if self._enable_identity_insert:
- self.cursor.execute("SET IDENTITY_INSERT %s ON" %
+ self.cursor.execute("SET IDENTITY_INSERT %s ON" %
self.dialect.identifier_preparer.format_table(tbl))
if self.isddl:
# TODO: to enhance this, we can detect "ddl in tran" on the
- # database settings. this error message should be improved to
+ # database settings. this error message should be improved to
# include a note about that.
if not self.should_autocommit:
raise exc.InvalidRequestError(
@@ -240,7 +240,7 @@ class SybaseExecutionContext(default.DefaultExecutionContext):
"AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")
self.set_ddl_autocommit(
- self.root_connection.connection.connection,
+ self.root_connection.connection.connection,
True)
@@ -304,7 +304,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
field, self.process(extract.expr, **kw))
def for_update_clause(self, select):
- # "FOR UPDATE" is only allowed on "DECLARE CURSOR"
+ # "FOR UPDATE" is only allowed on "DECLARE CURSOR"
# which SQLAlchemy doesn't use
return ''
diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py
index 02f9105ad..8fd7d90ee 100644
--- a/lib/sqlalchemy/interfaces.py
+++ b/lib/sqlalchemy/interfaces.py
@@ -17,8 +17,8 @@ from . import event, util
class PoolListener(object):
"""Hooks into the lifecycle of connections in a :class:`.Pool`.
- .. note::
-
+ .. note::
+
:class:`.PoolListener` is deprecated. Please
refer to :class:`.PoolEvents`.
@@ -27,7 +27,7 @@ class PoolListener(object):
class MyListener(PoolListener):
def connect(self, dbapi_con, con_record):
'''perform connect operations'''
- # etc.
+ # etc.
# create a new pool with a listener
p = QueuePool(..., listeners=[MyListener()])
@@ -151,8 +151,8 @@ class PoolListener(object):
class ConnectionProxy(object):
"""Allows interception of statement execution by Connections.
- .. note::
-
+ .. note::
+
:class:`.ConnectionProxy` is deprecated. Please
refer to :class:`.ConnectionEvents`.
@@ -194,7 +194,7 @@ class ConnectionProxy(object):
event.listen(self, 'before_execute', adapt_execute)
- def adapt_cursor_execute(conn, cursor, statement,
+ def adapt_cursor_execute(conn, cursor, statement,
parameters,context, executemany, ):
def execute_wrapper(
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 85fe2f3ac..dd6f2442b 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -4,7 +4,7 @@
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Heuristics related to join conditions as used in
+"""Heuristics related to join conditions as used in
:func:`.relationship`.
Provides the :class:`.JoinCondition` object, which encapsulates
@@ -15,7 +15,7 @@ and `secondaryjoin` aspects of :func:`.relationship`.
from .. import sql, util, exc as sa_exc, schema
from ..sql.util import (
- ClauseAdapter,
+ ClauseAdapter,
join_condition, _shallow_annotate, visit_binary_product,
_deep_deannotate, find_tables
)
@@ -23,24 +23,24 @@ from ..sql import operators, expression, visitors
from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY
def remote(expr):
- """Annotate a portion of a primaryjoin expression
+ """Annotate a portion of a primaryjoin expression
with a 'remote' annotation.
-
+
:func:`.remote`, :func:`.foreign`, and :func:`.remote_foreign`
- are intended to be used with
- :func:`.relationship` in conjunction with a
+ are intended to be used with
+ :func:`.relationship` in conjunction with a
``primaryjoin`` expression which contains
indirect equality conditions, meaning the comparison
of mapped columns involves extraneous SQL functions
- such as :func:`.cast`. They can also be used in
+ such as :func:`.cast`. They can also be used in
lieu of the ``foreign_keys`` and ``remote_side``
- parameters to :func:`.relationship`, if a
+ parameters to :func:`.relationship`, if a
primaryjoin expression is also being sent explicitly.
-
+
Below, a mapped class ``DNSRecord`` relates to the
``DHCPHost`` class using a primaryjoin that casts
the ``content`` column to a string. The :func:`.foreign`
- and :func:`.remote` annotation functions are used
+ and :func:`.remote` annotation functions are used
to mark with full accuracy those mapped columns that
are significant to the :func:`.relationship`, in terms
of how they are joined::
@@ -48,30 +48,30 @@ def remote(expr):
from sqlalchemy import cast, String
from sqlalchemy.orm import remote, foreign
from sqlalchemy.dialects.postgresql import INET
-
+
class DNSRecord(Base):
__tablename__ = 'dns'
-
+
id = Column(Integer, primary_key=True)
content = Column(INET)
dhcphost = relationship(DHCPHost,
- primaryjoin=cast(foreign(content), String) ==
+ primaryjoin=cast(foreign(content), String) ==
remote(DHCPHost.ip_address)
)
.. versionadded:: 0.8
See also:
-
+
* :func:`.foreign`
-
+
* :func:`.remote_foreign`
-
+
"""
return _annotate_columns(expression._clause_element_as_expr(expr), {"remote":True})
def foreign(expr):
- """Annotate a portion of a primaryjoin expression
+ """Annotate a portion of a primaryjoin expression
with a 'foreign' annotation.
See the example at :func:`.remote`.
@@ -83,16 +83,16 @@ def foreign(expr):
return _annotate_columns(expression._clause_element_as_expr(expr), {"foreign":True})
def remote_foreign(expr):
- """Annotate a portion of a primaryjoin expression
+ """Annotate a portion of a primaryjoin expression
with a 'remote' and 'foreign' annotation.
-
+
See the example at :func:`.remote`.
.. versionadded:: 0.8
"""
- return _annotate_columns(expr, {"foreign":True,
+ return _annotate_columns(expr, {"foreign":True,
"remote":True})
def _annotate_columns(element, annotations):
@@ -107,8 +107,8 @@ def _annotate_columns(element, annotations):
return element
class JoinCondition(object):
- def __init__(self,
- parent_selectable,
+ def __init__(self,
+ parent_selectable,
child_selectable,
parent_local_selectable,
child_local_selectable,
@@ -197,7 +197,7 @@ class JoinCondition(object):
if self.secondaryjoin is None:
self.secondaryjoin = \
join_condition(
- self.child_selectable,
+ self.child_selectable,
self.secondary,
a_subset=self.child_local_selectable,
consider_as_foreign_keys=\
@@ -206,8 +206,8 @@ class JoinCondition(object):
if self.primaryjoin is None:
self.primaryjoin = \
join_condition(
- self.parent_selectable,
- self.secondary,
+ self.parent_selectable,
+ self.secondary,
a_subset=self.parent_local_selectable,
consider_as_foreign_keys=\
self.consider_as_foreign_keys or None
@@ -216,8 +216,8 @@ class JoinCondition(object):
if self.primaryjoin is None:
self.primaryjoin = \
join_condition(
- self.parent_selectable,
- self.child_selectable,
+ self.parent_selectable,
+ self.child_selectable,
a_subset=self.parent_local_selectable,
consider_as_foreign_keys=\
self.consider_as_foreign_keys or None
@@ -268,14 +268,14 @@ class JoinCondition(object):
@util.memoized_property
def primaryjoin_reverse_remote(self):
- """Return the primaryjoin condition suitable for the
- "reverse" direction.
-
+ """Return the primaryjoin condition suitable for the
+ "reverse" direction.
+
If the primaryjoin was delivered here with pre-existing
"remote" annotations, the local/remote annotations
are reversed. Otherwise, the local/remote annotations
are removed.
-
+
"""
if self._has_remote_annotations:
def replace(element):
@@ -294,7 +294,7 @@ class JoinCondition(object):
else:
if self._has_foreign_annotations:
# TODO: coverage
- return _deep_deannotate(self.primaryjoin,
+ return _deep_deannotate(self.primaryjoin,
values=("local", "remote"))
else:
return _deep_deannotate(self.primaryjoin)
@@ -318,7 +318,7 @@ class JoinCondition(object):
"""Annotate the primaryjoin and secondaryjoin
structures with 'foreign' annotations marking columns
considered as foreign.
-
+
"""
if self._has_foreign_annotations:
return
@@ -394,7 +394,7 @@ class JoinCondition(object):
def _refers_to_parent_table(self):
"""Return True if the join condition contains column
comparisons where both columns are in both tables.
-
+
"""
pt = self.parent_selectable
mt = self.child_selectable
@@ -430,7 +430,7 @@ class JoinCondition(object):
"""Annotate the primaryjoin and secondaryjoin
structures with 'remote' annotations marking columns
considered as part of the 'remote' side.
-
+
"""
if self._has_remote_annotations:
return
@@ -449,7 +449,7 @@ class JoinCondition(object):
def _annotate_remote_secondary(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
when 'secondary' is present.
-
+
"""
def repl(element):
if self.secondary.c.contains_column(element):
@@ -462,7 +462,7 @@ class JoinCondition(object):
def _annotate_selfref(self, fn):
"""annotate 'remote' in primaryjoin, secondaryjoin
when the relationship is detected as self-referential.
-
+
"""
def visit_binary(binary):
equated = binary.left.compare(binary.right)
@@ -479,14 +479,14 @@ class JoinCondition(object):
self._warn_non_column_elements()
self.primaryjoin = visitors.cloned_traverse(
- self.primaryjoin, {},
+ self.primaryjoin, {},
{"binary":visit_binary})
def _annotate_remote_from_args(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
when the 'remote_side' or '_local_remote_pairs'
arguments are used.
-
+
"""
if self._local_remote_pairs:
if self._remote_side:
@@ -510,15 +510,15 @@ class JoinCondition(object):
def _annotate_remote_with_overlap(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
- when the parent/child tables have some set of
+ when the parent/child tables have some set of
tables in common, though is not a fully self-referential
relationship.
-
+
"""
def visit_binary(binary):
- binary.left, binary.right = proc_left_right(binary.left,
+ binary.left, binary.right = proc_left_right(binary.left,
binary.right)
- binary.right, binary.left = proc_left_right(binary.right,
+ binary.right, binary.left = proc_left_right(binary.right,
binary.left)
def proc_left_right(left, right):
if isinstance(left, expression.ColumnClause) and \
@@ -532,14 +532,14 @@ class JoinCondition(object):
return left, right
self.primaryjoin = visitors.cloned_traverse(
- self.primaryjoin, {},
+ self.primaryjoin, {},
{"binary":visit_binary})
def _annotate_remote_distinct_selectables(self):
"""annotate 'remote' in primaryjoin, secondaryjoin
- when the parent/child tables are entirely
+ when the parent/child tables are entirely
separate.
-
+
"""
def repl(element):
if self.child_selectable.c.contains_column(element) and \
@@ -562,21 +562,21 @@ class JoinCondition(object):
)
def _annotate_local(self):
- """Annotate the primaryjoin and secondaryjoin
+ """Annotate the primaryjoin and secondaryjoin
structures with 'local' annotations.
-
- This annotates all column elements found
- simultaneously in the parent table
- and the join condition that don't have a
- 'remote' annotation set up from
+
+ This annotates all column elements found
+ simultaneously in the parent table
+ and the join condition that don't have a
+ 'remote' annotation set up from
_annotate_remote() or user-defined.
-
+
"""
if self._has_annotation(self.primaryjoin, "local"):
return
if self._local_remote_pairs:
- local_side = util.column_set([l for (l, r)
+ local_side = util.column_set([l for (l, r)
in self._local_remote_pairs])
else:
local_side = util.column_set(self.parent_selectable.c)
@@ -602,7 +602,7 @@ class JoinCondition(object):
% (self.prop, ))
def _check_foreign_cols(self, join_condition, primary):
- """Check the foreign key columns collected and emit error
+ """Check the foreign key columns collected and emit error
messages."""
can_sync = False
@@ -622,15 +622,15 @@ class JoinCondition(object):
return
# from here below is just determining the best error message
- # to report. Check for a join condition using any operator
+ # to report. Check for a join condition using any operator
# (not just ==), perhaps they need to turn on "viewonly=True".
if self.support_sync and has_foreign and not can_sync:
err = "Could not locate any simple equality expressions "\
"involving locally mapped foreign key columns for "\
"%s join condition "\
"'%s' on relationship %s." % (
- primary and 'primary' or 'secondary',
- join_condition,
+ primary and 'primary' or 'secondary',
+ join_condition,
self.prop
)
err += \
@@ -644,8 +644,8 @@ class JoinCondition(object):
else:
err = "Could not locate any relevant foreign key columns "\
"for %s join condition '%s' on relationship %s." % (
- primary and 'primary' or 'secondary',
- join_condition,
+ primary and 'primary' or 'secondary',
+ join_condition,
self.prop
)
err += \
@@ -656,7 +656,7 @@ class JoinCondition(object):
raise sa_exc.ArgumentError(err)
def _determine_direction(self):
- """Determine if this relationship is one to many, many to one,
+ """Determine if this relationship is one to many, many to one,
or many to many.
"""
@@ -713,13 +713,13 @@ class JoinCondition(object):
"nor the child's mapped tables" % self.prop)
def _deannotate_pairs(self, collection):
- """provide deannotation for the various lists of
+ """provide deannotation for the various lists of
pairs, so that using them in hashes doesn't incur
high-overhead __eq__() comparisons against
the original mapped columns.
-
+
"""
- return [(x._deannotate(), y._deannotate())
+ return [(x._deannotate(), y._deannotate())
for x, y in collection]
def _setup_pairs(self):
@@ -800,7 +800,7 @@ class JoinCondition(object):
])
- def join_targets(self, source_selectable,
+ def join_targets(self, source_selectable,
dest_selectable,
aliased,
single_crit=None):
@@ -816,10 +816,10 @@ class JoinCondition(object):
# place a barrier on the destination such that
# replacement traversals won't ever dig into it.
- # its internal structure remains fixed
+ # its internal structure remains fixed
# regardless of context.
dest_selectable = _shallow_annotate(
- dest_selectable,
+ dest_selectable,
{'no_replacement_traverse':True})
primaryjoin, secondaryjoin, secondary = self.primaryjoin, \
@@ -827,7 +827,7 @@ class JoinCondition(object):
# adjust the join condition for single table inheritance,
# in the case that the join is to a subclass
- # this is analogous to the
+ # this is analogous to the
# "_adjust_for_single_table_inheritance()" method in Query.
if single_crit is not None:
@@ -901,7 +901,7 @@ class JoinCondition(object):
if self.deannotated_secondaryjoin is None or not reverse_direction:
lazywhere = visitors.replacement_traverse(
- lazywhere, {}, col_to_bind)
+ lazywhere, {}, col_to_bind)
if self.deannotated_secondaryjoin is not None:
secondaryjoin = self.deannotated_secondaryjoin
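The annotation passes above ('remote', 'local', foreign-key checks, direction) all serve the user-facing join configuration on relationship(). As a reference point, a minimal self-referential mapping that exercises the self-referential branch of _annotate_remote() might look like the sketch below (the Node model is illustrative, standard declarative usage):

    from sqlalchemy import Column, Integer, ForeignKey
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Node(Base):
        __tablename__ = 'node'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('node.id'))

        # remote_side marks Node.id as the "remote" column of the
        # join condition, so JoinCondition annotates this as the
        # many-to-one side once the self-reference is detected.
        parent = relationship("Node", remote_side=[id])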
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index d524e0c40..fe7e4aab5 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -13,7 +13,7 @@ For more information see the SQLAlchemy documentation on types.
"""
__all__ = [ 'TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR','TEXT', 'Text',
- 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
+ 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', 'SMALLINT',
'INTEGER', 'DATE', 'TIME', 'String', 'Integer', 'SmallInteger',
'BigInteger', 'Numeric', 'Float', 'DateTime', 'Date', 'Time',
@@ -35,7 +35,7 @@ if util.jython:
import array
class AbstractType(Visitable):
- """Base for all types - not needed except for backwards
+ """Base for all types - not needed except for backwards
compatibility."""
class TypeEngine(AbstractType):
@@ -91,15 +91,15 @@ class TypeEngine(AbstractType):
@property
def python_type(self):
"""Return the Python type object expected to be returned
- by instances of this type, if known.
-
+ by instances of this type, if known.
+
Basically, those types which enforce a return type,
- or are known across the board to do such for all common
+ or are known across the board to do such for all common
DBAPIs (like ``int`` for example), will return that type.
-
+
If a return type is not defined, raises
``NotImplementedError``.
-
+
Note that any type also accommodates NULL in SQL which
means you can also get back ``None`` from any type
in practice.
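A quick illustration of that contract, assuming the common generic types that do define a Python-level return type:

    from sqlalchemy.types import Integer, DateTime

    Integer().python_type     # int
    DateTime().python_type    # datetime.datetime

    # a type that defines no return type raises NotImplementedError
    # when .python_type is accessed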
@@ -108,7 +108,7 @@ class TypeEngine(AbstractType):
raise NotImplementedError()
def with_variant(self, type_, dialect_name):
- """Produce a new type object that will utilize the given
+ """Produce a new type object that will utilize the given
type when applied to the dialect of the given name.
e.g.::
@@ -123,13 +123,13 @@ class TypeEngine(AbstractType):
The construction of :meth:`.TypeEngine.with_variant` is always
from the "fallback" type to that which is dialect specific.
The returned type is an instance of :class:`.Variant`, which
- itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` that can
+ itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` that can
be called repeatedly.
:param type_: a :class:`.TypeEngine` that will be selected
as a variant from the originating type, when a dialect
of the given name is in use.
- :param dialect_name: base name of the dialect which uses
+ :param dialect_name: base name of the dialect which uses
this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
.. versionadded:: 0.7.2
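For example, a sketch of a column type that stays a plain VARCHAR everywhere but uses a MySQL-specific collation on that backend (the collation name here is illustrative):

    from sqlalchemy import String
    from sqlalchemy.dialects import mysql

    # generic String(100) on all backends, except on MySQL where
    # the dialect-specific VARCHAR with a collation is rendered
    string_type = String(100).with_variant(
        mysql.VARCHAR(100, collation='utf8_bin'), 'mysql'
    )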
@@ -190,7 +190,7 @@ class TypeEngine(AbstractType):
return rp
def _dialect_info(self, dialect):
- """Return a dialect-specific registry which
+ """Return a dialect-specific registry which
caches a dialect-specific implementation, bind processing
function, and one or more result processing functions."""
@@ -209,10 +209,10 @@ class TypeEngine(AbstractType):
return dialect.type_descriptor(self)
def adapt(self, cls, **kw):
- """Produce an "adapted" form of this type, given an "impl" class
- to work with.
+ """Produce an "adapted" form of this type, given an "impl" class
+ to work with.
- This method is used internally to associate generic
+ This method is used internally to associate generic
types with "implementation" types that are specific to a particular
dialect.
"""
@@ -225,7 +225,7 @@ class TypeEngine(AbstractType):
to return a type which the value should be coerced into.
The default behavior here is conservative; if the right-hand
- side is already coerced into a SQL type based on its
+ side is already coerced into a SQL type based on its
Python type, it is usually left alone.
End-user functionality extension here should generally be via
@@ -335,7 +335,7 @@ class UserDefinedType(TypeEngine):
def adapt_operator(self, op):
"""A hook which allows the given operator to be adapted
- to something new.
+ to something new.
See also UserDefinedType._adapt_expression(), an as-yet-
semi-public method with greater capability in this regard.
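A minimal sketch of the UserDefinedType contract, with pass-through processors (the type name rendered in DDL is arbitrary here):

    from sqlalchemy import types

    class MyType(types.UserDefinedType):
        def get_col_spec(self):
            # the literal type name emitted in CREATE TABLE
            return "MYTYPE"

        def bind_processor(self, dialect):
            def process(value):
                return value      # transform outgoing values here
            return process

        def result_processor(self, dialect, coltype):
            def process(value):
                return value      # transform incoming values here
            return process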
@@ -348,7 +348,7 @@ class TypeDecorator(TypeEngine):
to an existing type.
This method is preferred to direct subclassing of SQLAlchemy's
- built-in types as it ensures that all required functionality of
+ built-in types as it ensures that all required functionality of
the underlying type is kept in place.
Typical usage::
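The usage block elided by the diff context follows the pattern below; this is a sketch (the PrefixedUnicode class is hypothetical), not the literal docstring snippet:

    from sqlalchemy import types

    class PrefixedUnicode(types.TypeDecorator):
        """Adds a fixed prefix on the way in, strips it on the way out."""

        impl = types.Unicode

        def process_bind_param(self, value, dialect):
            if value is not None:
                value = u"PREFIX:" + value
            return value

        def process_result_value(self, value, dialect):
            if value is not None:
                value = value[len(u"PREFIX:"):]
            return value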
@@ -385,9 +385,9 @@ class TypeDecorator(TypeEngine):
mytable.c.somecol + datetime.date(2009, 5, 15)
- Above, if "somecol" is an ``Integer`` variant, it makes sense that
+ Above, if "somecol" is an ``Integer`` variant, it makes sense that
we're doing date arithmetic, which is usually interpreted
- by databases as adding a number of days to the given date.
+ by databases as adding a number of days to the given date.
The expression system does the right thing by not attempting to
coerce the "date()" value into an integer-oriented bind parameter.
@@ -429,14 +429,14 @@ class TypeDecorator(TypeEngine):
def __init__(self, *args, **kwargs):
"""Construct a :class:`.TypeDecorator`.
- Arguments sent here are passed to the constructor
+ Arguments sent here are passed to the constructor
of the class assigned to the ``impl`` class level attribute,
assuming the ``impl`` is a callable, and the resulting
object is assigned to the ``self.impl`` instance attribute
(thus overriding the class attribute of the same name).
-
+
If the class level ``impl`` is not a callable (the unusual case),
- it will be assigned to the same instance attribute 'as-is',
+ it will be assigned to the same instance attribute 'as-is',
ignoring those arguments passed to the constructor.
Subclasses can override this to customize the generation
@@ -503,7 +503,7 @@ class TypeDecorator(TypeEngine):
This is an end-user override hook that can be used to provide
differing types depending on the given dialect. It is used
- by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
+ by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
to help determine what type should ultimately be returned
for a given :class:`.TypeDecorator`.
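A sketch of the override, assuming one wants MySQL's LONGTEXT in place of a generic Text on that backend only:

    from sqlalchemy import types
    from sqlalchemy.dialects import mysql

    class LargeText(types.TypeDecorator):
        impl = types.Text

        def load_dialect_impl(self, dialect):
            if dialect.name == 'mysql':
                return dialect.type_descriptor(mysql.LONGTEXT())
            else:
                return dialect.type_descriptor(types.Text())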
@@ -522,11 +522,11 @@ class TypeDecorator(TypeEngine):
Subclasses override this method to return the
value that should be passed along to the underlying
- :class:`.TypeEngine` object, and from there to the
+ :class:`.TypeEngine` object, and from there to the
DBAPI ``execute()`` method.
The operation could be anything desired to perform custom
- behavior, such as transforming or serializing data.
+ behavior, such as transforming or serializing data.
This could also be used as a hook for validating logic.
This operation should be designed with the reverse operation
@@ -554,7 +554,7 @@ class TypeDecorator(TypeEngine):
from the DBAPI cursor method ``fetchone()`` or similar.
The operation could be anything desired to perform custom
- behavior, such as transforming or serializing data.
+ behavior, such as transforming or serializing data.
This could also be used as a hook for validating logic.
:param value: Data to operate upon, of any type expected by
@@ -569,12 +569,12 @@ class TypeDecorator(TypeEngine):
raise NotImplementedError()
def bind_processor(self, dialect):
- """Provide a bound value processing function for the
+ """Provide a bound value processing function for the
given :class:`.Dialect`.
- This is the method that fulfills the :class:`.TypeEngine`
+ This is the method that fulfills the :class:`.TypeEngine`
contract for bound value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
+ will wrap a user-defined implementation of
:meth:`process_bind_param` here.
User-defined code can override this method directly,
@@ -606,9 +606,9 @@ class TypeDecorator(TypeEngine):
def result_processor(self, dialect, coltype):
"""Provide a result value processing function for the given :class:`.Dialect`.
- This is the method that fulfills the :class:`.TypeEngine`
+ This is the method that fulfills the :class:`.TypeEngine`
contract for result value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
+ will wrap a user-defined implementation of
:meth:`process_result_value` here.
User-defined code can override this method directly,
@@ -643,7 +643,7 @@ class TypeDecorator(TypeEngine):
"""Suggest a type for a 'coerced' Python value in an expression.
By default, returns self. This method is called by
- the expression system when an object using this type is
+ the expression system when an object using this type is
on the left or right side of an expression against a plain Python
object which does not yet have a SQLAlchemy type assigned::
@@ -665,7 +665,7 @@ class TypeDecorator(TypeEngine):
def copy(self):
"""Produce a copy of this :class:`.TypeDecorator` instance.
- This is a shallow copy and is provided to fulfill part of
+ This is a shallow copy and is provided to fulfill part of
the :class:`.TypeEngine` contract. It usually does not
need to be overridden unless the user-defined :class:`.TypeDecorator`
has local state that should be deep-copied.
@@ -679,7 +679,7 @@ class TypeDecorator(TypeEngine):
def get_dbapi_type(self, dbapi):
"""Return the DBAPI type object represented by this :class:`.TypeDecorator`.
- By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
+ By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
underlying "impl".
"""
return self.impl.get_dbapi_type(dbapi)
@@ -687,7 +687,7 @@ class TypeDecorator(TypeEngine):
def compare_values(self, x, y):
"""Given two values, compare them for equality.
- By default this calls upon :meth:`.TypeEngine.compare_values`
+ By default this calls upon :meth:`.TypeEngine.compare_values`
of the underlying "impl", which in turn usually
uses the Python equals operator ``==``.
@@ -712,21 +712,21 @@ class TypeDecorator(TypeEngine):
class Variant(TypeDecorator):
"""A wrapping type that selects among a variety of
implementations based on dialect in use.
-
+
The :class:`.Variant` type is typically constructed
using the :meth:`.TypeEngine.with_variant` method.
-
+
.. versionadded:: 0.7.2
-
+
"""
def __init__(self, base, mapping):
"""Construct a new :class:`.Variant`.
-
+
:param base: the base 'fallback' type
- :param mapping: dictionary of string dialect names to :class:`.TypeEngine`
+ :param mapping: dictionary of string dialect names to :class:`.TypeEngine`
instances.
-
+
"""
self.impl = base
self.mapping = mapping
@@ -739,13 +739,13 @@ class Variant(TypeDecorator):
def with_variant(self, type_, dialect_name):
"""Return a new :class:`.Variant` which adds the given
- type + dialect name to the mapping, in addition to the
+ type + dialect name to the mapping, in addition to the
mapping present in this :class:`.Variant`.
-
+
:param type_: a :class:`.TypeEngine` that will be selected
as a variant from the originating type, when a dialect
of the given name is in use.
- :param dialect_name: base name of the dialect which uses
+ :param dialect_name: base name of the dialect which uses
this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
"""
@@ -861,7 +861,7 @@ class String(Concatenable, TypeEngine):
__visit_name__ = 'string'
- def __init__(self, length=None, convert_unicode=False,
+ def __init__(self, length=None, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False
):
@@ -876,23 +876,23 @@ class String(Concatenable, TypeEngine):
with no length is included. Whether the value is
interpreted as bytes or characters is database specific.
- :param convert_unicode: When set to ``True``, the
+ :param convert_unicode: When set to ``True``, the
:class:`.String` type will assume that
input is to be passed as Python ``unicode`` objects,
and results returned as Python ``unicode`` objects.
If the DBAPI in use does not support Python unicode
(which is fewer and fewer these days), SQLAlchemy
- will encode/decode the value, using the
- value of the ``encoding`` parameter passed to
+ will encode/decode the value, using the
+ value of the ``encoding`` parameter passed to
:func:`.create_engine` as the encoding.
-
+
When using a DBAPI that natively supports Python
- unicode objects, this flag generally does not
+ unicode objects, this flag generally does not
need to be set. For columns that are explicitly
intended to store non-ASCII data, the :class:`.Unicode`
- or :class:`UnicodeText`
+ or :class:`UnicodeText`
types should be used regardless, which feature
- the same behavior of ``convert_unicode`` but
+ the same behavior of ``convert_unicode`` but
also indicate an underlying column type that
directly supports unicode, such as ``NVARCHAR``.
@@ -903,11 +903,11 @@ class String(Concatenable, TypeEngine):
cause SQLAlchemy's encode/decode services to be
used unconditionally.
- :param assert_unicode: Deprecated. A warning is emitted
- when a non-``unicode`` object is passed to the
- :class:`.Unicode` subtype of :class:`.String`,
- or the :class:`.UnicodeText` subtype of :class:`.Text`.
- See :class:`.Unicode` for information on how to
+ :param assert_unicode: Deprecated. A warning is emitted
+ when a non-``unicode`` object is passed to the
+ :class:`.Unicode` subtype of :class:`.String`,
+ or the :class:`.UnicodeText` subtype of :class:`.Text`.
+ See :class:`.Unicode` for information on how to
control this warning.
:param unicode_error: Optional, a method to use to handle Unicode
@@ -974,7 +974,7 @@ class String(Concatenable, TypeEngine):
def result_processor(self, dialect, coltype):
wants_unicode = self.convert_unicode or dialect.convert_unicode
needs_convert = wants_unicode and \
- (dialect.returns_unicode_strings is not True or
+ (dialect.returns_unicode_strings is not True or
self.convert_unicode == 'force')
if needs_convert:
@@ -1026,36 +1026,36 @@ class Unicode(String):
that assumes input and output as Python ``unicode`` data,
and in that regard is equivalent to the usage of the
``convert_unicode`` flag with the :class:`.String` type.
- However, unlike plain :class:`.String`, it also implies an
+ However, unlike plain :class:`.String`, it also implies an
underlying column type that explicitly supports non-ASCII
data, such as ``NVARCHAR`` on Oracle and SQL Server.
- This can impact the output of ``CREATE TABLE`` statements
- and ``CAST`` functions at the dialect level, and can
+ This can impact the output of ``CREATE TABLE`` statements
+ and ``CAST`` functions at the dialect level, and can
also affect the handling of bound parameters in some
specific DBAPI scenarios.
-
+
The encoding used by the :class:`.Unicode` type is usually
- determined by the DBAPI itself; most modern DBAPIs
+ determined by the DBAPI itself; most modern DBAPIs
feature support for Python ``unicode`` objects as bound
values and result set values, and the encoding should
be configured as detailed in the notes for the target
DBAPI in the :ref:`dialect_toplevel` section.
-
+
For those DBAPIs which do not support, or are not configured
to accommodate Python ``unicode`` objects
directly, SQLAlchemy does the encoding and decoding
- outside of the DBAPI. The encoding in this scenario
- is determined by the ``encoding`` flag passed to
+ outside of the DBAPI. The encoding in this scenario
+ is determined by the ``encoding`` flag passed to
:func:`.create_engine`.
- When using the :class:`.Unicode` type, it is only appropriate
+ When using the :class:`.Unicode` type, it is only appropriate
to pass Python ``unicode`` objects, and not plain ``str``.
If a plain ``str`` is passed under Python 2, a warning
- is emitted. If you notice your application emitting these warnings but
- you're not sure of the source of them, the Python
- ``warnings`` filter, documented at
- http://docs.python.org/library/warnings.html,
- can be used to turn these warnings into exceptions
+ is emitted. If you notice your application emitting these warnings but
+ you're not sure of the source of them, the Python
+ ``warnings`` filter, documented at
+ http://docs.python.org/library/warnings.html,
+ can be used to turn these warnings into exceptions
which will illustrate a stack trace::
import warnings
@@ -1063,7 +1063,7 @@ class Unicode(String):
For an application that wishes to pass plain bytestrings
and Python ``unicode`` objects to the ``Unicode`` type
- equally, the bytestrings must first be decoded into
+ equally, the bytestrings must first be decoded into
unicode. The recipe at :ref:`coerce_to_unicode` illustrates
how this is done.
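In table metadata this is simply (a sketch; table and column names are arbitrary):

    # -*- coding: utf-8 -*-
    from sqlalchemy import Table, Column, Integer, Unicode, MetaData

    metadata = MetaData()
    user = Table('user', metadata,
        Column('id', Integer, primary_key=True),
        # implies an NVARCHAR-style type on backends that
        # distinguish one; always bind ``unicode`` objects
        Column('name', Unicode(100)),
    )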
@@ -1079,7 +1079,7 @@ class Unicode(String):
def __init__(self, length=None, **kwargs):
"""
Create a :class:`.Unicode` object.
-
+
Parameters are the same as that of :class:`.String`,
with the exception that ``convert_unicode``
defaults to ``True``.
@@ -1095,8 +1095,8 @@ class UnicodeText(Text):
See :class:`.Unicode` for details on the unicode
behavior of this object.
- Like :class:`.Unicode`, usage the :class:`.UnicodeText` type implies a
- unicode-capable type being used on the backend, such as
+ Like :class:`.Unicode`, usage of the :class:`.UnicodeText` type implies a
+ unicode-capable type being used on the backend, such as
``NCLOB``, ``NTEXT``.
"""
@@ -1189,8 +1189,8 @@ class Numeric(_DateAffinity, TypeEngine):
``decimal.Decimal`` objects by default, applying
conversion as needed.
- .. note::
-
+ .. note::
+
The `cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library
is a high performing alternative to Python's built-in
``decimal.Decimal`` type, which performs very poorly in high volume
@@ -1209,9 +1209,9 @@ class Numeric(_DateAffinity, TypeEngine):
import cdecimal
sys.modules["decimal"] = cdecimal
- While the global patch is a little ugly, it's particularly
- important to use just one decimal library at a time since
- Python Decimal and cdecimal Decimal objects
+ While the global patch is a little ugly, it's particularly
+ important to use just one decimal library at a time since
+ Python Decimal and cdecimal Decimal objects
are not currently compatible *with each other*::
>>> import cdecimal
@@ -1219,7 +1219,7 @@ class Numeric(_DateAffinity, TypeEngine):
>>> decimal.Decimal("10") == cdecimal.Decimal("10")
False
- SQLAlchemy will provide more natural support of
+ SQLAlchemy will provide more natural support of
cdecimal if and when it becomes a standard part of Python
installations and is supported by all DBAPIs.
@@ -1246,15 +1246,15 @@ class Numeric(_DateAffinity, TypeEngine):
that the asdecimal setting is appropriate for the DBAPI in use -
when Numeric applies a conversion from Decimal->float or float->
Decimal, this conversion incurs an additional performance overhead
- for all result columns received.
+ for all result columns received.
- DBAPIs that return Decimal natively (e.g. psycopg2) will have
+ DBAPIs that return Decimal natively (e.g. psycopg2) will have
better accuracy and higher performance with a setting of ``True``,
as the native translation to Decimal reduces the number of floating-
point issues at play, and the Numeric type itself doesn't need
- to apply any further conversions. However, another DBAPI which
- returns floats natively *will* incur an additional conversion
- overhead, and is still subject to floating point data loss - in
+ to apply any further conversions. However, another DBAPI which
+ returns floats natively *will* incur an additional conversion
+ overhead, and is still subject to floating point data loss - in
which case ``asdecimal=False`` will at least remove the extra
conversion overhead.
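Concretely, the two configurations look like this (a sketch):

    from sqlalchemy import Numeric

    # DBAPI returns Decimal natively (e.g. psycopg2): keep the
    # default and receive decimal.Decimal with no extra conversion
    amount = Numeric(12, 2)

    # DBAPI returns floats natively and float precision is
    # acceptable: skip the float -> Decimal conversion entirely
    ratio = Numeric(12, 2, asdecimal=False)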
@@ -1358,10 +1358,10 @@ class Float(Numeric):
results in floating point conversion.
:param \**kwargs: deprecated. Additional arguments here are ignored
- by the default :class:`.Float` type. For database specific
- floats that support additional arguments, see that dialect's
+ by the default :class:`.Float` type. For database specific
+ floats that support additional arguments, see that dialect's
documentation for details, such as :class:`sqlalchemy.dialects.mysql.FLOAT`.
-
+
"""
self.precision = precision
self.asdecimal = asdecimal
@@ -1414,12 +1414,12 @@ class DateTime(_DateAffinity, TypeEngine):
def __init__(self, timezone=False):
"""Construct a new :class:`.DateTime`.
-
+
:param timezone: boolean. If True, and supported by the
backend, will produce 'TIMESTAMP WITH TIMEZONE'. For backends
that don't support timezone aware timestamps, has no
effect.
-
+
"""
self.timezone = timezone
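e.g., a sketch of a timezone-aware column:

    from sqlalchemy import Column, DateTime

    # renders TIMESTAMP WITH TIME ZONE on backends that support it;
    # plain DATETIME/TIMESTAMP semantics elsewhere
    created_at = Column('created_at', DateTime(timezone=True))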
@@ -1473,7 +1473,7 @@ class Date(_DateAffinity,TypeEngine):
Interval:DateTime,
# date - datetime = interval,
- # this one is not in the PG docs
+ # this one is not in the PG docs
# but works
DateTime:Interval,
},
@@ -1535,7 +1535,7 @@ class _Binary(TypeEngine):
return None
return process
- # Python 3 has native bytes() type
+ # Python 3 has native bytes() type
# both sqlite3 and pg8000 seem to return it
# (i.e. and not 'memoryview')
# Py2K
@@ -1606,7 +1606,7 @@ class SchemaType(events.SchemaEventTarget):
as well as types that are complemented by table or schema level
constraints, triggers, and other rules.
- :class:`.SchemaType` classes can also be targets for the
+ :class:`.SchemaType` classes can also be targets for the
:meth:`.DDLEvents.before_parent_attach` and :meth:`.DDLEvents.after_parent_attach`
events, where the events fire off surrounding the association of
the type object with a parent :class:`.Column`.
@@ -1704,17 +1704,17 @@ class SchemaType(events.SchemaEventTarget):
class Enum(String, SchemaType):
"""Generic Enum Type.
- The Enum type provides a set of possible string values which the
+ The Enum type provides a set of possible string values which the
column is constrained towards.
- By default, uses the backend's native ENUM type if available,
+ By default, uses the backend's native ENUM type if available,
else uses VARCHAR + a CHECK constraint.
-
+
See also:
-
+
:class:`~.postgresql.ENUM` - PostgreSQL-specific type,
which has additional functionality.
-
+
"""
__visit_name__ = 'enum'
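Typical usage, as a sketch (names arbitrary):

    from sqlalchemy import Table, Column, Integer, Enum, MetaData

    metadata = MetaData()
    article = Table('article', metadata,
        Column('id', Integer, primary_key=True),
        # native ENUM on backends such as Postgresql and MySQL;
        # VARCHAR plus a CHECK constraint elsewhere
        Column('status', Enum('draft', 'published',
                              name='article_status')),
    )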
@@ -1778,9 +1778,9 @@ class Enum(String, SchemaType):
length =max(len(x) for x in self.enums)
else:
length = 0
- String.__init__(self,
+ String.__init__(self,
length =length,
- convert_unicode=convert_unicode,
+ convert_unicode=convert_unicode,
)
SchemaType.__init__(self, **kw)
@@ -1803,9 +1803,9 @@ class Enum(String, SchemaType):
def adapt(self, impltype, **kw):
if issubclass(impltype, Enum):
- return impltype(name=self.name,
- quote=self.quote,
- schema=self.schema,
+ return impltype(name=self.name,
+ quote=self.quote,
+ schema=self.schema,
metadata=self.metadata,
convert_unicode=self.convert_unicode,
native_enum=self.native_enum,
@@ -1830,7 +1830,7 @@ class PickleType(TypeDecorator):
impl = LargeBinary
- def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
+ def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
pickler=None, comparator=None):
"""
Construct a PickleType.
@@ -1842,7 +1842,7 @@ class PickleType(TypeDecorator):
pickle-compatible ``dumps`` and ``loads`` methods.
:param comparator: a 2-arg callable predicate used
- to compare values of this type. If left as ``None``,
+ to compare values of this type. If left as ``None``,
the Python "equals" operator is used to compare values.
"""
@@ -1852,8 +1852,8 @@ class PickleType(TypeDecorator):
super(PickleType, self).__init__()
def __reduce__(self):
- return PickleType, (self.protocol,
- None,
+ return PickleType, (self.protocol,
+ None,
self.comparator)
def bind_processor(self, dialect):
@@ -1908,7 +1908,7 @@ class Boolean(TypeEngine, SchemaType):
def __init__(self, create_constraint=True, name=None):
"""Construct a Boolean.
- :param create_constraint: defaults to True. If the boolean
+ :param create_constraint: defaults to True. If the boolean
is generated as an int/smallint, also create a CHECK constraint
on the table that ensures 1 or 0 as a value.
@@ -1970,22 +1970,22 @@ class Interval(_DateAffinity, TypeDecorator):
impl = DateTime
epoch = dt.datetime.utcfromtimestamp(0)
- def __init__(self, native=True,
- second_precision=None,
+ def __init__(self, native=True,
+ second_precision=None,
day_precision=None):
"""Construct an Interval object.
:param native: when True, use the actual
INTERVAL type provided by the database, if
supported (currently Postgresql, Oracle).
- Otherwise, represent the interval data as
+ Otherwise, represent the interval data as
an epoch value regardless.
:param second_precision: For native interval types
which support a "fractional seconds precision" parameter,
i.e. Oracle and Postgresql
- :param day_precision: for native interval types which
+ :param day_precision: for native interval types which
support a "day precision" parameter, i.e. Oracle.
"""
@@ -1999,8 +1999,8 @@ class Interval(_DateAffinity, TypeDecorator):
return cls._adapt_from_generic_interval(self, **kw)
else:
return self.__class__(
- native=self.native,
- second_precision=self.second_precision,
+ native=self.native,
+ second_precision=self.second_precision,
day_precision=self.day_precision,
**kw)