summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/engine
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2007-02-25 22:44:52 +0000
committerMike Bayer <mike_mp@zzzcomputing.com>2007-02-25 22:44:52 +0000
commit962c22c9eda7d2ab7dc0b41bd1c7a52cf0c9d008 (patch)
treef0ab113c7947c80dfea42d4a1bef52217bf6ed96 /lib/sqlalchemy/engine
parent8fa3becd5fac57bb898a0090bafaac377b60f070 (diff)
downloadsqlalchemy-962c22c9eda7d2ab7dc0b41bd1c7a52cf0c9d008.tar.gz
migrated (most) docstrings to pep-257 format, docstring generator using straight <pre> + trim() func
for now. applies most of [ticket:214], compliments of Lele Gaifax
Diffstat (limited to 'lib/sqlalchemy/engine')
-rw-r--r--lib/sqlalchemy/engine/__init__.py163
-rw-r--r--lib/sqlalchemy/engine/base.py748
-rw-r--r--lib/sqlalchemy/engine/default.py112
-rw-r--r--lib/sqlalchemy/engine/strategies.py57
-rw-r--r--lib/sqlalchemy/engine/threadlocal.py69
-rw-r--r--lib/sqlalchemy/engine/url.py74
6 files changed, 837 insertions, 386 deletions
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 220707eb3..c3651c88b 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -10,82 +10,115 @@ from sqlalchemy.engine import strategies
import re
def engine_descriptors():
- """provides a listing of all the database implementations supported. this data
- is provided as a list of dictionaries, where each dictionary contains the following
- key/value pairs:
-
- name : the name of the engine, suitable for use in the create_engine function
-
- description: a plain description of the engine.
-
- arguments : a dictionary describing the name and description of each parameter
- used to connect to this engine's underlying DBAPI.
-
- This function is meant for usage in automated configuration tools that wish to
- query the user for database and connection information.
+ """Provide a listing of all the database implementations supported.
+
+ This data is provided as a list of dictionaries, where each
+ dictionary contains the following key/value pairs:
+
+ name
+ the name of the engine, suitable for use in the create_engine function
+
+ description
+ a plain description of the engine.
+
+ arguments
+ a dictionary describing the name and description of each
+ parameter used to connect to this engine's underlying DBAPI.
+
+ This function is meant for usage in automated configuration tools
+ that wish to query the user for database and connection
+ information.
"""
+
result = []
#for module in sqlalchemy.databases.__all__:
for module in ['sqlite', 'postgres', 'mysql']:
module = getattr(__import__('sqlalchemy.databases.%s' % module).databases, module)
result.append(module.descriptor())
return result
-
+
default_strategy = 'plain'
def create_engine(*args, **kwargs):
- """creates a new Engine instance. Using the given strategy name,
- locates that strategy and invokes its create() method to produce the Engine.
- The strategies themselves are instances of EngineStrategy, and the built in
- ones are present in the sqlalchemy.engine.strategies module. Current implementations
- include "plain" and "threadlocal". The default used by this function is "plain".
-
- "plain" provides support for a Connection object which can be used to execute SQL queries
- with a specific underlying DBAPI connection.
-
- "threadlocal" is similar to "plain" except that it adds support for a thread-local connection and
- transaction context, which allows a group of engine operations to participate using the same
- connection and transaction without the need for explicit passing of a Connection object.
-
- The standard method of specifying the engine is via URL as the first positional
- argument, to indicate the appropriate database dialect and connection arguments, with additional
- keyword arguments sent as options to the dialect and resulting Engine.
-
- The URL is in the form <dialect>://opt1=val1&opt2=val2.
- Where <dialect> is a name such as "mysql", "oracle", "postgres", and the options indicate
- username, password, database, etc. Supported keynames include "username", "user", "password",
- "pw", "db", "database", "host", "filename".
-
- **kwargs represents options to be sent to the Engine itself as well as the components of the Engine,
- including the Dialect, the ConnectionProvider, and the Pool. A list of common options is as follows:
-
- pool=None : an instance of sqlalchemy.pool.DBProxy or sqlalchemy.pool.Pool to be used as the
- underlying source for connections (DBProxy/Pool is described in the previous section). If None,
- a default DBProxy will be created using the engine's own database module with the given
- arguments.
-
- echo=False : if True, the Engine will log all statements as well as a repr() of their
- parameter lists to the engines logger, which defaults to sys.stdout. A Engine instances'
- "echo" data member can be modified at any time to turn logging on and off. If set to the string
- 'debug', result rows will be printed to the standard output as well.
-
- logger=None : a file-like object where logging output can be sent, if echo is set to True.
- This defaults to sys.stdout.
-
- encoding='utf-8' : the encoding to be used when encoding/decoding Unicode strings
-
- convert_unicode=False : True if unicode conversion should be applied to all str types
-
- module=None : used by Oracle and Postgres, this is a reference to a DBAPI2 module to be used
- instead of the engine's default module. For Postgres, the default is psycopg2, or psycopg1 if
- 2 cannot be found. For Oracle, its cx_Oracle. For mysql, MySQLdb.
-
- use_ansi=True : used only by Oracle; when False, the Oracle driver attempts to support a
- particular "quirk" of some Oracle databases, that the LEFT OUTER JOIN SQL syntax is not
- supported, and the "Oracle join" syntax of using <column1>(+)=<column2> must be used
- in order to achieve a LEFT OUTER JOIN. Its advised that the Oracle database be configured to
- have full ANSI support instead of using this feature.
+ """Create a new Engine instance.
+
+ Using the given strategy name, locates that strategy and invokes
+ its create() method to produce the Engine. The strategies
+ themselves are instances of EngineStrategy, and the built in ones
+ are present in the sqlalchemy.engine.strategies module. Current
+ implementations include *plain* and *threadlocal*. The default
+ used by this function is *plain*.
+
+ *plain* provides support for a Connection object which can be used
+ to execute SQL queries with a specific underlying DBAPI connection.
+
+ *threadlocal* is similar to *plain* except that it adds support
+ for a thread-local connection and transaction context, which
+ allows a group of engine operations to participate using the same
+ connection and transaction without the need for explicit passing
+ of a Connection object.
+ The standard method of specifying the engine is via URL as the
+ first positional argument, to indicate the appropriate database
+ dialect and connection arguments, with additional keyword
+ arguments sent as options to the dialect and resulting Engine.
+
+ The URL is in the form ``<dialect>://opt1=val1&opt2=val2``, where
+ ``<dialect>`` is a name such as *mysql*, *oracle*, *postgres*, and the
+ options indicate username, password, database, etc. Supported
+ keynames include `username`, `user`, `password`, `pw`, `db`,
+ `database`, `host`, `filename`.
+
+ `**kwargs` represents options to be sent to the Engine itself as
+ well as the components of the Engine, including the Dialect, the
+ ConnectionProvider, and the Pool. A list of common options is as
+ follows:
+
+ pool
+ Defaults to None: an instance of ``sqlalchemy.pool.DBProxy`` or
+ ``sqlalchemy.pool.Pool`` to be used as the underlying source for
+ connections (DBProxy/Pool is described in the previous
+ section). If None, a default DBProxy will be created using the
+ engine's own database module with the given arguments.
+
+ echo
+ Defaults to False: if True, the Engine will log all statements
+    as well as a repr() of their parameter lists to the engine's
+    logger, which defaults to ``sys.stdout``. An Engine instance's
+ `echo` data member can be modified at any time to turn logging
+ on and off. If set to the string 'debug', result rows will be
+ printed to the standard output as well.
+
+ logger
+ Defaults to None: a file-like object where logging output can be
+ sent, if `echo` is set to True. This defaults to
+ ``sys.stdout``.
+
+ encoding
+ Defaults to 'utf-8': the encoding to be used when
+ encoding/decoding Unicode strings.
+
+ convert_unicode
+ Defaults to False: true if unicode conversion should be applied
+ to all str types.
+
+ module
+ Defaults to None: used by Oracle and Postgres, this is a
+ reference to a DBAPI2 module to be used instead of the engine's
+ default module. For Postgres, the default is psycopg2, or
+    psycopg1 if 2 cannot be found. For Oracle, it's cx_Oracle. For
+ mysql, MySQLdb.
+
+ use_ansi
+ Defaults to True: used only by Oracle; when False, the Oracle
+ driver attempts to support a particular *quirk* of some Oracle
+ databases, that the ``LEFT OUTER JOIN`` SQL syntax is not
+ supported, and the *Oracle join* syntax of using
+ ``<column1>(+)=<column2>`` must be used in order to achieve a
+    ``LEFT OUTER JOIN``. It's advised that the Oracle database be
+ configured to have full ANSI support instead of using this
+ feature.
"""
+
strategy = kwargs.pop('strategy', default_strategy)
strategy = strategies.strategies[strategy]
return strategy.create(*args, **kwargs)
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 1985bcec1..10001e8a3 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1,257 +1,431 @@
from sqlalchemy import exceptions, sql, schema, util, types, logging
import StringIO, sys, re
+
class ConnectionProvider(object):
- """defines an interface that returns raw Connection objects (or compatible)."""
+ """Define an interface that returns raw Connection objects (or compatible)."""
+
def get_connection(self):
- """this method should return a Connection or compatible object from a DBAPI which
- also contains a close() method.
- It is not defined what context this connection belongs to. It may be newly connected,
- returned from a pool, part of some other kind of context such as thread-local,
- or can be a fixed member of this object."""
+ """Return a Connection or compatible object from a DBAPI which also contains a close() method.
+
+ It is not defined what context this connection belongs to. It
+ may be newly connected, returned from a pool, part of some
+ other kind of context such as thread-local, or can be a fixed
+ member of this object.
+ """
+
raise NotImplementedError()
+
def dispose(self):
- """releases all resources corresponding to this ConnectionProvider, such
- as any underlying connection pools."""
+ """Release all resources corresponding to this ConnectionProvider.
+
+ This includes any underlying connection pools.
+ """
+
raise NotImplementedError()
+
class Dialect(sql.AbstractDialect):
- """Defines the behavior of a specific database/DBAPI.
+ """Define the behavior of a specific database/DBAPI.
+
+ Any aspect of metadata definition, SQL query generation, execution,
+ result-set handling, or anything else which varies between
+ databases is defined under the general category of the Dialect.
+ The Dialect acts as a factory for other database-specific object
+ implementations including ExecutionContext, Compiled,
+ DefaultGenerator, and TypeEngine.
- Any aspect of metadata defintion, SQL query generation, execution, result-set handling,
- or anything else which varies between databases is defined under the general category of
- the Dialect. The Dialect acts as a factory for other database-specific object implementations
- including ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
-
All Dialects implement the following attributes:
- positional - True if the paramstyle for this Dialect is positional
+ positional
+ True if the paramstyle for this Dialect is positional
- paramstyle - the paramstyle to be used (some DBAPIs support multiple paramstyles)
+ paramstyle
+ The paramstyle to be used (some DBAPIs support multiple paramstyles)
- supports_autoclose_results - usually True; if False, indicates that rows returned by fetchone()
- might not be just plain tuples, and may be "live" proxy objects which still require the cursor
- to be open in order to be read (such as pyPgSQL which has active filehandles for BLOBs). in that
- case, an auto-closing ResultProxy cannot automatically close itself after results are consumed.
+ supports_autoclose_results
+ Usually True; if False, indicates that rows returned by
+ fetchone() might not be just plain tuples, and may be
+ "live" proxy objects which still require the cursor to be open
+ in order to be read (such as pyPgSQL which has active
+ filehandles for BLOBs). In that case, an auto-closing
+ ResultProxy cannot automatically close itself after results are
+ consumed.
- convert_unicode - True if unicode conversion should be applied to all str types
+ convert_unicode
+ True if unicode conversion should be applied to all str types
- encoding - type of encoding to use for unicode, usually defaults to 'utf-8'
+ encoding
+ type of encoding to use for unicode, usually defaults to 'utf-8'
"""
+
def create_connect_args(self, opts):
- """given a dictionary of key-valued connect parameters, returns a tuple
- consisting of a *args/**kwargs suitable to send directly to the dbapi's connect function.
- The connect args will have any number of the following keynames: host, hostname, database, dbanme,
- user,username, password, pw, passwd, filename."""
+ """Build DBAPI compatible connection arguments.
+
+ Given a dictionary of key-valued connect parameters, returns a
+ tuple consisting of a `*args`/`**kwargs` suitable to send directly
+ to the dbapi's connect function. The connect args will have
+ any number of the following keynames: host, hostname,
+ database, dbname, user, username, password, pw, passwd,
+ filename.
+ """
+
raise NotImplementedError()
+
def convert_compiled_params(self, parameters):
- """given a sql.ClauseParameters object, returns an array or dictionary suitable to pass
- directly to this Dialect's DBAPI's execute method."""
+ """Build DBAPI execute arguments from a ClauseParameters.
+
+ Given a sql.ClauseParameters object, returns an array or
+ dictionary suitable to pass directly to this Dialect's DBAPI's
+ execute method.
+ """
+
+ raise NotImplementedError()
+
def type_descriptor(self, typeobj):
- """provides a database-specific TypeEngine object, given the generic object
- which comes from the types module. Subclasses will usually use the adapt_type()
- method in the types module to make this job easy."""
+    """Transform the type from generic to database-specific.
+
+ Provides a database-specific TypeEngine object, given the
+ generic object which comes from the types module. Subclasses
+ will usually use the adapt_type() method in the types module
+ to make this job easy.
+ """
+
raise NotImplementedError()
+
def oid_column_name(self, column):
- """return the oid column name for this dialect, or None if the dialect cant/wont support OID/ROWID.
-
- the Column instance which represents OID for the query being compiled is passed, so that the dialect
- can inspect the column and its parent selectable to determine if OID/ROWID is not selected for a particular
- selectable (i.e. oracle doesnt support ROWID for UNION, GROUP BY, DISTINCT, etc.)
+ """Return the oid column name for this dialect, or None if the dialect cant/wont support OID/ROWID.
+
+ The Column instance which represents OID for the query being
+ compiled is passed, so that the dialect can inspect the column
+ and its parent selectable to determine if OID/ROWID is not
+ selected for a particular selectable (i.e. oracle doesnt
+ support ROWID for UNION, GROUP BY, DISTINCT, etc.)
"""
+
raise NotImplementedError()
+
def supports_sane_rowcount(self):
- """Provided to indicate when MySQL is being used, which does not have standard behavior
- for the "rowcount" function on a statement handle. """
+ """Indicate whether the dialect properly implements statements rowcount.
+
+ Provided to indicate when MySQL is being used, which does not
+ have standard behavior for the "rowcount" function on a statement handle.
+ """
+
raise NotImplementedError()
+
def schemagenerator(self, engine, proxy, **params):
- """returns a schema.SchemaVisitor instance that can generate schemas, when it is
- invoked to traverse a set of schema objects.
+ """Return a ``schema.SchemaVisitor`` instance that can generate schemas.
- schemagenerator is called via the create() method on Table, Index, and others.
+ `schemagenerator()` is called via the `create()` method on Table,
+ Index, and others.
"""
+
raise NotImplementedError()
+
def schemadropper(self, engine, proxy, **params):
- """returns a schema.SchemaVisitor instance that can drop schemas, when it is
- invoked to traverse a set of schema objects.
+ """Return a ``schema.SchemaVisitor`` instance that can drop schemas.
- schemagenerator is called via the drop() method on Table, Index, and others.
+ `schemadropper()` is called via the `drop()` method on Table,
+ Index, and others.
"""
+
raise NotImplementedError()
+
def defaultrunner(self, engine, proxy, **params):
- """returns a schema.SchemaVisitor instances that can execute defaults."""
+ """Return a ``schema.SchemaVisitor`` instance that can execute defaults."""
+
raise NotImplementedError()
+
def compiler(self, statement, parameters):
- """returns a sql.ClauseVisitor which will produce a string representation of the given
- ClauseElement and parameter dictionary. This object is usually a subclass of
- ansisql.ANSICompiler.
+ """Return a ``sql.ClauseVisitor`` able to transform a ``ClauseElement`` into a string.
+
+ The returned object is usually a subclass of
+ ansisql.ANSICompiler, and will produce a string representation
+ of the given ClauseElement and `parameters` dictionary.
+
+ `compiler()` is called within the context of the compile() method.
+ """
- compiler is called within the context of the compile() method."""
raise NotImplementedError()
+
def reflecttable(self, connection, table):
- """given an Connection and a Table object, reflects its columns and properties from the database."""
+ """Load table description from the database.
+
+ Given a ``Connection`` and a ``Table`` object, reflect its
+ columns and properties from the database.
+ """
+
raise NotImplementedError()
+
def has_table(self, connection, table_name, schema=None):
+ """Check the existence of a particular table in the database.
+
+ Given a ``Connection`` object and a `table_name`, return True
+ if the given table (possibly within the specified `schema`)
+ exists in the database, False otherwise.
+ """
+
raise NotImplementedError()
+
def has_sequence(self, connection, sequence_name):
+ """Check the existence of a particular sequence in the database.
+
+ Given a ``Connection`` object and a `sequence_name`, return
+ True if the given sequence exists in the database, False
+ otherwise.
+ """
+
raise NotImplementedError()
+
def dbapi(self):
- """subclasses override this method to provide the DBAPI module used to establish
- connections."""
+ """Establish a connection to the database.
+
+ Subclasses override this method to provide the DBAPI module
+ used to establish connections.
+ """
+
raise NotImplementedError()
+
def get_default_schema_name(self, connection):
- """returns the currently selected schema given an connection"""
+ """Return the currently selected schema given a connection"""
+
raise NotImplementedError()
+
def execution_context(self):
- """returns a new ExecutionContext object."""
+ """Return a new ExecutionContext object."""
+
raise NotImplementedError()
+
def do_begin(self, connection):
- """provides an implementation of connection.begin()"""
+ """Provide an implementation of connection.begin()."""
+
raise NotImplementedError()
+
def do_rollback(self, connection):
- """provides an implementation of connection.rollback()"""
+ """Provide an implementation of connection.rollback()."""
+
raise NotImplementedError()
+
def do_commit(self, connection):
- """provides an implementation of connection.commit()"""
+ """Provide an implementation of connection.commit()"""
+
raise NotImplementedError()
+
def do_executemany(self, cursor, statement, parameters):
+ """Execute a single SQL statement looping over a sequence of parameters."""
+
raise NotImplementedError()
+
def do_execute(self, cursor, statement, parameters):
+ """Execute a single SQL statement with given parameters."""
+
raise NotImplementedError()
+
def create_cursor(self, connection):
- """return a new cursor generated from the given connection"""
+ """Return a new cursor generated from the given connection."""
+
raise NotImplementedError()
+
def create_result_proxy_args(self, connection, cursor):
- """returns a dictionary of arguments that should be passed to ResultProxy()."""
+ """Return a dictionary of arguments that should be passed to ResultProxy()."""
+
raise NotImplementedError()
+
def compile(self, clauseelement, parameters=None):
- """compile the given ClauseElement using this Dialect.
-
- a convenience method which simply flips around the compile() call
- on ClauseElement."""
+ """Compile the given ClauseElement using this Dialect.
+
+ A convenience method which simply flips around the compile()
+ call on ClauseElement.
+ """
+
return clauseelement.compile(dialect=self, parameters=parameters)
-
+
+
class ExecutionContext(object):
- """a messenger object for a Dialect that corresponds to a single execution. The Dialect
- should provide an ExecutionContext via the create_execution_context() method.
- The pre_exec and post_exec methods will be called for compiled statements, afterwhich
- it is expected that the various methods last_inserted_ids, last_inserted_params, etc.
- will contain appropriate values, if applicable."""
+ """A messenger object for a Dialect that corresponds to a single execution.
+
+ The Dialect should provide an ExecutionContext via the
+ create_execution_context() method. The `pre_exec` and `post_exec`
+    methods will be called for compiled statements, after which it is
+ expected that the various methods `last_inserted_ids`,
+ `last_inserted_params`, etc. will contain appropriate values, if
+ applicable.
+ """
+
def pre_exec(self, engine, proxy, compiled, parameters):
- """called before an execution of a compiled statement. proxy is a callable that
- takes a string statement and a bind parameter list/dictionary."""
+ """Called before an execution of a compiled statement.
+
+ `proxy` is a callable that takes a string statement and a bind
+ parameter list/dictionary.
+ """
+
raise NotImplementedError()
+
def post_exec(self, engine, proxy, compiled, parameters):
- """called after the execution of a compiled statement. proxy is a callable that
- takes a string statement and a bind parameter list/dictionary."""
+ """Called after the execution of a compiled statement.
+
+ `proxy` is a callable that takes a string statement and a bind
+ parameter list/dictionary.
+ """
+
raise NotImplementedError()
+
def get_rowcount(self, cursor):
- """returns the count of rows updated/deleted for an UPDATE/DELETE statement"""
+ """Return the count of rows updated/deleted for an UPDATE/DELETE statement."""
+
raise NotImplementedError()
+
def supports_sane_rowcount(self):
- """Indicates if the "rowcount" DBAPI cursor function works properly.
-
- Currently, MySQLDB does not properly implement this function."""
+ """Indicate if the "rowcount" DBAPI cursor function works properly.
+
+ Currently, MySQLDB does not properly implement this function.
+ """
+
raise NotImplementedError()
+
def last_inserted_ids(self):
- """return the list of the primary key values for the last insert statement executed.
-
- This does not apply to straight textual clauses; only to sql.Insert objects compiled against
- a schema.Table object, which are executed via statement.execute(). The order of items in the
- list is the same as that of the Table's 'primary_key' attribute.
-
- In some cases, this method may invoke a query back to the database to retrieve the data, based on
- the "lastrowid" value in the cursor."""
+ """Return the list of the primary key values for the last insert statement executed.
+
+ This does not apply to straight textual clauses; only to
+ ``sql.Insert`` objects compiled against a ``schema.Table`` object,
+ which are executed via `statement.execute()`. The order of
+ items in the list is the same as that of the Table's
+ 'primary_key' attribute.
+
+ In some cases, this method may invoke a query back to the
+ database to retrieve the data, based on the "lastrowid" value
+ in the cursor.
+ """
+
raise NotImplementedError()
+
def last_inserted_params(self):
- """return a dictionary of the full parameter dictionary for the last compiled INSERT statement.
-
- Includes any ColumnDefaults or Sequences that were pre-executed."""
+ """Return a dictionary of the full parameter dictionary for the last compiled INSERT statement.
+
+ Includes any ColumnDefaults or Sequences that were pre-executed.
+ """
+
raise NotImplementedError()
+
def last_updated_params(self):
- """return a dictionary of the full parameter dictionary for the last compiled UPDATE statement.
-
- Includes any ColumnDefaults that were pre-executed."""
+ """Return a dictionary of the full parameter dictionary for the last compiled UPDATE statement.
+
+ Includes any ColumnDefaults that were pre-executed.
+ """
+
raise NotImplementedError()
+
def lastrow_has_defaults(self):
- """return True if the last row INSERTED via a compiled insert statement contained PassiveDefaults.
-
- The presence of PassiveDefaults indicates that the database inserted data beyond that which we
- passed to the query programmatically."""
+ """Return True if the last row INSERTED via a compiled insert statement contained PassiveDefaults.
+
+ The presence of PassiveDefaults indicates that the database
+ inserted data beyond that which we passed to the query
+ programmatically.
+ """
+
raise NotImplementedError()
+
class Connectable(object):
- """interface for an object that can provide an Engine and a Connection object which correponds to that Engine."""
+    """Interface for an object that can provide an Engine and a Connection object which corresponds to that Engine."""
+
def contextual_connect(self):
- """returns a Connection object which may be part of an ongoing context."""
+ """Return a Connection object which may be part of an ongoing context."""
+
raise NotImplementedError()
+
def create(self, entity, **kwargs):
- """creates a table or index given an appropriate schema object."""
+ """Create a table or index given an appropriate schema object."""
+
raise NotImplementedError()
+
def drop(self, entity, **kwargs):
+ """Drop a table or index given an appropriate schema object."""
+
raise NotImplementedError()
+
def execute(self, object, *multiparams, **params):
raise NotImplementedError()
+
def _not_impl(self):
raise NotImplementedError()
- engine = property(_not_impl, doc="returns the Engine which this Connectable is associated with.")
+
+ engine = property(_not_impl, doc="The Engine which this Connectable is associated with.")
class Connection(Connectable):
- """represents a single DBAPI connection returned from the underlying connection pool. Provides
- execution support for string-based SQL statements as well as ClauseElement, Compiled and DefaultGenerator objects.
- provides a begin method to return Transaction objects.
-
- The Connection object is **not** threadsafe."""
+ """Represent a single DBAPI connection returned from the underlying connection pool.
+
+ Provides execution support for string-based SQL statements as well
+ as ClauseElement, Compiled and DefaultGenerator objects. Provides
+ a begin method to return Transaction objects.
+
+ The Connection object is **not** threadsafe.
+ """
+
def __init__(self, engine, connection=None, close_with_result=False):
self.__engine = engine
self.__connection = connection or engine.raw_connection()
self.__transaction = None
self.__close_with_result = close_with_result
+
def _get_connection(self):
try:
return self.__connection
except AttributeError:
raise exceptions.InvalidRequestError("This Connection is closed")
+
engine = property(lambda s:s.__engine, doc="The Engine with which this Connection is associated (read only)")
connection = property(_get_connection, doc="The underlying DBAPI connection managed by this Connection.")
should_close_with_result = property(lambda s:s.__close_with_result, doc="Indicates if this Connection should be closed when a corresponding ResultProxy is closed; this is essentially an auto-release mode.")
+
def _create_transaction(self, parent):
return Transaction(self, parent)
+
def connect(self):
"""connect() is implemented to return self so that an incoming Engine or Connection object can be treated similarly."""
return self
+
def contextual_connect(self, **kwargs):
"""contextual_connect() is implemented to return self so that an incoming Engine or Connection object can be treated similarly."""
return self
+
def begin(self):
if self.__transaction is None:
self.__transaction = self._create_transaction(None)
return self.__transaction
else:
return self._create_transaction(self.__transaction)
+
def in_transaction(self):
return self.__transaction is not None
+
def _begin_impl(self):
self.__engine.logger.info("BEGIN")
self.__engine.dialect.do_begin(self.connection)
+
def _rollback_impl(self):
self.__engine.logger.info("ROLLBACK")
self.__engine.dialect.do_rollback(self.connection)
self.__connection.close_open_cursors()
self.__transaction = None
+
def _commit_impl(self):
self.__engine.logger.info("COMMIT")
self.__engine.dialect.do_commit(self.connection)
self.__transaction = None
+
def _autocommit(self, statement):
- """when no Transaction is present, this is called after executions to provide "autocommit" behavior."""
- # TODO: have the dialect determine if autocommit can be set on the connection directly without this
+ """When no Transaction is present, this is called after executions to provide "autocommit" behavior."""
+ # TODO: have the dialect determine if autocommit can be set on the connection directly without this
# extra step
if not self.in_transaction() and re.match(r'UPDATE|INSERT|CREATE|DELETE|DROP|ALTER', statement.lstrip().upper()):
self._commit_impl()
+
def _autorollback(self):
if not self.in_transaction():
self._rollback_impl()
+
def close(self):
try:
c = self.__connection
@@ -260,12 +434,16 @@ class Connection(Connectable):
self.__connection.close()
self.__connection = None
del self.__connection
+
def scalar(self, object, *multiparams, **params):
return self.execute(object, *multiparams, **params).scalar()
+
def execute(self, object, *multiparams, **params):
return Connection.executors[type(object).__mro__[-2]](self, object, *multiparams, **params)
+
def execute_default(self, default, **kwargs):
return default.accept_schema_visitor(self.__engine.dialect.defaultrunner(self.__engine, self.proxy, **kwargs))
+
def execute_text(self, statement, *multiparams, **params):
if len(multiparams) == 0:
parameters = params
@@ -276,6 +454,7 @@ class Connection(Connectable):
cursor = self._execute_raw(statement, parameters)
rpargs = self.__engine.dialect.create_result_proxy_args(self, cursor)
return ResultProxy(self.__engine, self, cursor, **rpargs)
+
def _params_to_listofdicts(self, *multiparams, **params):
if len(multiparams) == 0:
return [params]
@@ -288,6 +467,7 @@ class Connection(Connectable):
return [multiparams[0]]
else:
return multiparams
+
def execute_clauseelement(self, elem, *multiparams, **params):
executemany = len(multiparams) > 0
if executemany:
@@ -295,8 +475,10 @@ class Connection(Connectable):
else:
param = params
return self.execute_compiled(elem.compile(engine=self.__engine, parameters=param), *multiparams, **params)
+
def execute_compiled(self, compiled, *multiparams, **params):
- """executes a sql.Compiled object."""
+ """Execute a sql.Compiled object."""
+
if not compiled.can_execute:
raise exceptions.ArgumentError("Not an executeable clause: %s" % (str(compiled)))
cursor = self.__engine.dialect.create_cursor(self.connection)
@@ -316,7 +498,7 @@ class Connection(Connectable):
context.post_exec(self.__engine, proxy, compiled, parameters)
rpargs = self.__engine.dialect.create_result_proxy_args(self, cursor)
return ResultProxy(self.__engine, self, cursor, context, typemap=compiled.typemap, columns=compiled.columns, **rpargs)
-
+
# poor man's multimethod/generic function thingy
executors = {
sql.ClauseElement : execute_clauseelement,
@@ -324,20 +506,28 @@ class Connection(Connectable):
schema.SchemaItem:execute_default,
str.__mro__[-2] : execute_text
}
-
+
def create(self, entity, **kwargs):
- """creates a table or index given an appropriate schema object."""
+ """Create a table or index given an appropriate schema object."""
+
return self.__engine.create(entity, connection=self, **kwargs)
+
def drop(self, entity, **kwargs):
- """drops a table or index given an appropriate schema object."""
+ """Drop a table or index given an appropriate schema object."""
+
return self.__engine.drop(entity, connection=self, **kwargs)
+
def reflecttable(self, table, **kwargs):
- """reflects the columns in the given table from the database."""
+ """Reflect the columns in the given table from the database."""
+
return self.__engine.reflecttable(table, connection=self, **kwargs)
+
def default_schema_name(self):
return self.__engine.dialect.get_default_schema_name(self)
+
def run_callable(self, callable_):
return callable_(self)
+
def _execute_raw(self, statement, parameters=None, cursor=None, context=None, **kwargs):
if cursor is None:
cursor = self.__engine.dialect.create_cursor(self.connection)
@@ -367,6 +557,7 @@ class Connection(Connectable):
if self.__close_with_result:
self.close()
raise exceptions.SQLError(statement, parameters, e)
+
def _executemany(self, c, statement, parameters, context=None):
try:
self.__engine.dialect.do_executemany(c, statement, parameters, context=context)
@@ -376,27 +567,36 @@ class Connection(Connectable):
if self.__close_with_result:
self.close()
raise exceptions.SQLError(statement, parameters, e)
+
def proxy(self, statement=None, parameters=None):
- """executes the given statement string and parameter object.
- the parameter object is expected to be the result of a call to compiled.get_params().
- This callable is a generic version of a connection/cursor-specific callable that
- is produced within the execute_compiled method, and is used for objects that require
- this style of proxy when outside of an execute_compiled method, primarily the DefaultRunner."""
+ """Execute the given statement string and parameter object.
+
+ The parameter object is expected to be the result of a call to
+ ``compiled.get_params()``. This callable is a generic version
+ of a connection/cursor-specific callable that is produced
+ within the execute_compiled method, and is used for objects
+ that require this style of proxy when outside of an
+ execute_compiled method, primarily the DefaultRunner.
+ """
parameters = self.__engine.dialect.convert_compiled_params(parameters)
return self._execute_raw(statement, parameters)
class Transaction(object):
- """represents a Transaction in progress.
-
- the Transaction object is **not** threadsafe."""
+ """Represent a Transaction in progress.
+
+ The Transaction object is **not** threadsafe.
+ """
+
def __init__(self, connection, parent):
self.__connection = connection
self.__parent = parent or self
self.__is_active = True
if self.__parent is self:
self.__connection._begin_impl()
+
connection = property(lambda s:s.__connection, doc="The Connection object referenced by this Transaction")
is_active = property(lambda s:s.__is_active)
+
def rollback(self):
if not self.__parent.__is_active:
return
@@ -405,6 +605,7 @@ class Transaction(object):
self.__is_active = False
else:
self.__parent.rollback()
+
def commit(self):
if not self.__parent.__is_active:
raise exceptions.InvalidRequestError("This transaction is inactive")
@@ -414,9 +615,10 @@ class Transaction(object):
class Engine(sql.Executor, Connectable):
"""
- Connects a ConnectionProvider, a Dialect and a CompilerFactory together to
+ Connects a ConnectionProvider, a Dialect and a CompilerFactory together to
provide a default implementation of SchemaEngine.
"""
+
def __init__(self, connection_provider, dialect, echo=None):
self.connection_provider = connection_provider
self.dialect=dialect
@@ -426,27 +628,35 @@ class Engine(sql.Executor, Connectable):
name = property(lambda s:sys.modules[s.dialect.__module__].descriptor()['name'])
engine = property(lambda s:s)
echo = logging.echo_property()
-
+
def dispose(self):
self.connection_provider.dispose()
+
def create(self, entity, connection=None, **kwargs):
- """creates a table or index within this engine's database connection given a schema.Table object."""
+ """Create a table or index within this engine's database connection given a schema.Table object."""
+
self._run_visitor(self.dialect.schemagenerator, entity, connection=connection, **kwargs)
+
def drop(self, entity, connection=None, **kwargs):
- """drops a table or index within this engine's database connection given a schema.Table object."""
+ """Drop a table or index within this engine's database connection given a schema.Table object."""
+
self._run_visitor(self.dialect.schemadropper, entity, connection=connection, **kwargs)
+
def execute_default(self, default, **kwargs):
connection = self.contextual_connect()
try:
return connection.execute_default(default, **kwargs)
finally:
connection.close()
-
+
def _func(self):
return sql._FunctionGenerator(self)
+
func = property(_func)
+
def text(self, text, *args, **kwargs):
- """returns a sql.text() object for performing literal queries."""
+ """Return a sql.text() object for performing literal queries."""
+
return sql.text(text, engine=self, *args, **kwargs)
def _run_visitor(self, visitorcallable, element, connection=None, **kwargs):
@@ -459,12 +669,16 @@ class Engine(sql.Executor, Connectable):
finally:
if connection is None:
conn.close()
-
+
def transaction(self, callable_, connection=None, *args, **kwargs):
- """executes the given function within a transaction boundary. this is a shortcut for
- explicitly calling begin() and commit() and optionally rollback() when execptions are raised.
- The given *args and **kwargs will be passed to the function, as well as the Connection used
- in the transaction."""
+ """Execute the given function within a transaction boundary.
+
+ This is a shortcut for explicitly calling `begin()` and `commit()`
+ and optionally `rollback()` when exceptions are raised. The
+ given `*args` and `**kwargs` will be passed to the function, as
+ well as the Connection used in the transaction.
+ """
+
if connection is None:
conn = self.contextual_connect()
else:
@@ -481,7 +695,7 @@ class Engine(sql.Executor, Connectable):
finally:
if connection is None:
conn.close()
-
+
def run_callable(self, callable_, connection=None, *args, **kwargs):
if connection is None:
conn = self.contextual_connect()
@@ -492,32 +706,37 @@ class Engine(sql.Executor, Connectable):
finally:
if connection is None:
conn.close()
-
+
def execute(self, statement, *multiparams, **params):
connection = self.contextual_connect(close_with_result=True)
return connection.execute(statement, *multiparams, **params)
def scalar(self, statement, *multiparams, **params):
return self.execute(statement, *multiparams, **params).scalar()
-
+
def execute_compiled(self, compiled, *multiparams, **params):
connection = self.contextual_connect(close_with_result=True)
return connection.execute_compiled(compiled, *multiparams, **params)
-
+
def compiler(self, statement, parameters, **kwargs):
return self.dialect.compiler(statement, parameters, engine=self, **kwargs)
def connect(self, **kwargs):
- """returns a newly allocated Connection object."""
+ """Return a newly allocated Connection object."""
+
return Connection(self, **kwargs)
-
+
def contextual_connect(self, close_with_result=False, **kwargs):
- """returns a Connection object which may be newly allocated, or may be part of some
- ongoing context. This Connection is meant to be used by the various "auto-connecting" operations."""
+ """Return a Connection object which may be newly allocated, or may be part of some ongoing context.
+
+ This Connection is meant to be used by the various "auto-connecting" operations.
+ """
+
return Connection(self, close_with_result=close_with_result, **kwargs)
-
+
def reflecttable(self, table, connection=None):
- """given a Table object, reflects its columns and properties from the database."""
+ """Given a Table object, reflects its columns and properties from the database."""
+
if connection is None:
conn = self.contextual_connect()
else:
@@ -527,34 +746,42 @@ class Engine(sql.Executor, Connectable):
finally:
if connection is None:
conn.close()
+
def has_table(self, table_name, schema=None):
return self.run_callable(lambda c: self.dialect.has_table(c, table_name, schema=schema))
-
+
def raw_connection(self):
- """returns a DBAPI connection."""
+ """Return a DBAPI connection."""
+
return self.connection_provider.get_connection()
def log(self, msg):
- """logs a message using this SQLEngine's logger stream."""
+ """Log a message using this SQLEngine's logger stream."""
+
self.logger.info(msg)
class ResultProxy(object):
- """wraps a DBAPI cursor object to provide access to row columns based on integer
- position, case-insensitive column name, or by schema.Column object. e.g.:
-
- row = fetchone()
+ """Wraps a DBAPI cursor object to provide easier access to row columns.
+
+ Individual columns may be accessed by their integer position,
+ case-insensitive column name, or by ``schema.Column``
+ object. e.g.::
+
+ row = fetchone()
- col1 = row[0] # access via integer position
+ col1 = row[0] # access via integer position
- col2 = row['col2'] # access via name
+ col2 = row['col2'] # access via name
- col3 = row[mytable.c.mycol] # access via Column object.
-
- ResultProxy also contains a map of TypeEngine objects and will invoke the appropriate
- convert_result_value() method before returning columns, as well as the ExecutionContext
- corresponding to the statement execution. It provides several methods for which
+ col3 = row[mytable.c.mycol] # access via Column object.
+
+ ResultProxy also contains a map of TypeEngine objects and will
+ invoke the appropriate ``convert_result_value()`` method before
+ returning columns, as well as the ExecutionContext corresponding
+ to the statement execution. It provides several methods for which
to obtain information from the underlying ExecutionContext.
"""
+
class AmbiguousColumn(object):
def __init__(self, key):
self.key = key
@@ -562,15 +789,16 @@ class ResultProxy(object):
return self
def convert_result_value(self, arg, engine):
raise exceptions.InvalidRequestError("Ambiguous column name '%s' in result set! try 'use_labels' option on select statement." % (self.key))
-
+
def __new__(cls, *args, **kwargs):
if cls is ResultProxy and kwargs.has_key('should_prefetch') and kwargs['should_prefetch']:
return PrefetchingResultProxy(*args, **kwargs)
else:
return object.__new__(cls, *args, **kwargs)
-
+
def __init__(self, engine, connection, cursor, executioncontext=None, typemap=None, columns=None, should_prefetch=None):
"""ResultProxy objects are constructed via the execute() method on SQLEngine."""
+
self.connection = connection
self.dialect = engine.dialect
self.cursor = cursor
@@ -603,20 +831,25 @@ class ResultProxy(object):
self.keys.append(colname)
self.props[i] = rec
i+=1
+
def _executioncontext(self):
try:
return self.__executioncontext
except AttributeError:
raise exceptions.InvalidRequestError("This ResultProxy does not have an execution context with which to complete this operation. Execution contexts are not generated for literal SQL execution.")
executioncontext = property(_executioncontext)
-
+
def close(self):
- """close this ResultProxy, and the underlying DBAPI cursor corresponding to the execution.
-
- If this ResultProxy was generated from an implicit execution, the underlying Connection will
- also be closed (returns the underlying DBAPI connection to the connection pool.)
-
- This method is also called automatically when all result rows are exhausted."""
+ """Close this ResultProxy, and the underlying DBAPI cursor corresponding to the execution.
+
+ If this ResultProxy was generated from an implicit execution,
+ the underlying Connection will also be closed (returns the
+ underlying DBAPI connection to the connection pool.)
+
+ This method is also called automatically when all result rows
+ are exhausted.
+ """
+
if not self.closed:
self.closed = True
self.cursor.close()
@@ -624,8 +857,13 @@ class ResultProxy(object):
self.connection.close()
def _convert_key(self, key):
- """given a key, which could be a ColumnElement, string, etc., matches it to the
- appropriate key we got from the result set's metadata; then cache it locally for quick re-access."""
+ """Convert and cache a key.
+
+ Given a key, which could be a ColumnElement, string, etc.,
+ matches it to the appropriate key we got from the result set's
+ metadata; then cache it locally for quick re-access.
+ """
+
try:
return self.__key_cache[key]
except KeyError:
@@ -659,18 +897,18 @@ class ResultProxy(object):
raise exceptions.NoSuchColumnError("Could not locate column in row for column '%s'" % str(key))
self.__key_cache[key] = rec
return rec
-
+
def _has_key(self, row, key):
try:
self._convert_key(key)
return True
except KeyError:
return False
-
+
def _get_col(self, row, key):
rec = self._convert_key(key)
return rec[0].dialect_impl(self.dialect).convert_result_value(row[rec[1]], self.dialect)
-
+
def __iter__(self):
while True:
row = self.fetchone()
@@ -678,43 +916,59 @@ class ResultProxy(object):
raise StopIteration
else:
yield row
-
+
def last_inserted_ids(self):
- """return last_inserted_ids() from the underlying ExecutionContext.
-
- See ExecutionContext for details."""
+ """Return ``last_inserted_ids()`` from the underlying ExecutionContext.
+
+ See ExecutionContext for details.
+ """
+
return self.executioncontext.last_inserted_ids()
+
def last_updated_params(self):
- """return last_updated_params() from the underlying ExecutionContext.
-
- See ExecutionContext for details."""
+ """Return ``last_updated_params()`` from the underlying ExecutionContext.
+
+ See ExecutionContext for details.
+ """
+
return self.executioncontext.last_updated_params()
+
def last_inserted_params(self):
- """return last_inserted_params() from the underlying ExecutionContext.
-
- See ExecutionContext for details."""
+ """Return ``last_inserted_params()`` from the underlying ExecutionContext.
+
+ See ExecutionContext for details.
+ """
+
return self.executioncontext.last_inserted_params()
+
def lastrow_has_defaults(self):
- """return lastrow_has_defaults() from the underlying ExecutionContext.
-
- See ExecutionContext for details."""
+ """Return ``lastrow_has_defaults()`` from the underlying ExecutionContext.
+
+ See ExecutionContext for details.
+ """
+
return self.executioncontext.lastrow_has_defaults()
+
def supports_sane_rowcount(self):
- """return supports_sane_rowcount() from the underlying ExecutionContext.
-
- See ExecutionContext for details."""
+ """Return ``supports_sane_rowcount()`` from the underlying ExecutionContext.
+
+ See ExecutionContext for details.
+ """
+
return self.executioncontext.supports_sane_rowcount()
-
+
def fetchall(self):
- """fetch all rows, just like DBAPI cursor.fetchall()."""
+ """Fetch all rows, just like DBAPI ``cursor.fetchall()``."""
+
l = []
for row in self.cursor.fetchall():
l.append(RowProxy(self, row))
self.close()
return l
-
+
def fetchmany(self, size=None):
- """fetch many rows, juts like DBAPI cursor.fetchmany(size=cursor.arraysize)"""
+ """Fetch many rows, just like DBAPI ``cursor.fetchmany(size=cursor.arraysize)``."""
+
if size is None:
rows = self.cursor.fetchmany()
else:
@@ -725,9 +979,10 @@ class ResultProxy(object):
if len(l) == 0:
self.close()
return l
-
+
def fetchone(self):
- """fetch one row, just like DBAPI cursor.fetchone()."""
+ """Fetch one row, just like DBAPI ``cursor.fetchone()``."""
+
row = self.cursor.fetchone()
if row is not None:
return RowProxy(self, row)
@@ -736,7 +991,8 @@ class ResultProxy(object):
return None
def scalar(self):
- """fetch the first column of the first row, and close the result set."""
+ """Fetch the first column of the first row, and close the result set."""
+
row = self.cursor.fetchone()
try:
if row is not None:
@@ -745,16 +1001,17 @@ class ResultProxy(object):
return None
finally:
self.close()
-
+
class PrefetchingResultProxy(ResultProxy):
"""ResultProxy that loads all columns into memory each time fetchone() is
called. If fetchmany() or fetchall() are called, the full grid of results
is fetched.
"""
+
def _get_col(self, row, key):
rec = self._convert_key(key)
return row[rec[1]]
-
+
def fetchall(self):
l = []
while True:
@@ -764,7 +1021,7 @@ class PrefetchingResultProxy(ResultProxy):
else:
break
return l
-
+
def fetchmany(self, size=None):
if size is None:
return self.fetchall()
@@ -776,7 +1033,7 @@ class PrefetchingResultProxy(ResultProxy):
else:
break
return l
-
+
def fetchone(self):
sup = super(PrefetchingResultProxy, self)
row = self.cursor.fetchone()
@@ -786,81 +1043,114 @@ class PrefetchingResultProxy(ResultProxy):
else:
self.close()
return None
-
+
class RowProxy(object):
- """proxies a single cursor row for a parent ResultProxy. Mostly follows
- "ordered dictionary" behavior, mapping result values to the string-based column name,
- the integer position of the result in the row, as well as Column instances which
- can be mapped to the original Columns that produced this result set (for results
- that correspond to constructed SQL expressions)."""
+ """Proxie a single cursor row for a parent ResultProxy.
+
+ Mostly follows "ordered dictionary" behavior, mapping result
+ values to the string-based column name, the integer position of
+ the result in the row, as well as Column instances which can be
+ mapped to the original Columns that produced this result set (for
+ results that correspond to constructed SQL expressions).
+ """
+
def __init__(self, parent, row):
"""RowProxy objects are constructed by ResultProxy objects."""
+
self.__parent = parent
self.__row = row
if self.__parent._ResultProxy__echo:
self.__parent.engine.logger.debug("Row " + repr(row))
+
def close(self):
- """close the parent ResultProxy."""
+ """Close the parent ResultProxy."""
+
self.__parent.close()
+
def __iter__(self):
for i in range(0, len(self.__row)):
yield self.__parent._get_col(self.__row, i)
+
def __eq__(self, other):
return (other is self) or (other == tuple([self.__parent._get_col(self.__row, key) for key in range(0, len(self.__row))]))
+
def __repr__(self):
return repr(tuple([self.__parent._get_col(self.__row, key) for key in range(0, len(self.__row))]))
+
def has_key(self, key):
- """return True if this RowProxy contains the given key."""
+ """Return True if this RowProxy contains the given key."""
+
return self.__parent._has_key(self.__row, key)
+
def __getitem__(self, key):
return self.__parent._get_col(self.__row, key)
+
def __getattr__(self, name):
try:
return self.__parent._get_col(self.__row, name)
except KeyError, e:
raise AttributeError(e.args[0])
+
def items(self):
- """return a list of tuples, each tuple containing a key/value pair."""
+ """Return a list of tuples, each tuple containing a key/value pair."""
+
return [(key, getattr(self, key)) for key in self.keys()]
+
def keys(self):
- """return the list of keys as strings represented by this RowProxy."""
+ """Return the list of keys as strings represented by this RowProxy."""
+
return self.__parent.keys
+
def values(self):
- """return the values represented by this RowProxy as a list."""
+ """Return the values represented by this RowProxy as a list."""
+
return list(self)
- def __len__(self):
+
+ def __len__(self):
return len(self.__row)
class SchemaIterator(schema.SchemaVisitor):
- """a visitor that can gather text into a buffer and execute the contents of the buffer."""
+ """A visitor that can gather text into a buffer and execute the contents of the buffer."""
+
def __init__(self, engine, proxy, **params):
- """construct a new SchemaIterator.
-
- engine - the Engine used by this SchemaIterator
-
- proxy - a callable which takes a statement and bind parameters and executes it, returning
- the cursor (the actual DBAPI cursor). The callable should use the same cursor repeatedly."""
+ """Construct a new SchemaIterator.
+
+ engine
+ the Engine used by this SchemaIterator
+
+ proxy
+ a callable which takes a statement and bind parameters and
+ executes it, returning the cursor (the actual DBAPI cursor).
+ The callable should use the same cursor repeatedly.
+ """
+
self.proxy = proxy
self.engine = engine
self.buffer = StringIO.StringIO()
def append(self, s):
- """append content to the SchemaIterator's query buffer."""
+ """Append content to the SchemaIterator's query buffer."""
+
self.buffer.write(s)
def execute(self):
- """execute the contents of the SchemaIterator's buffer."""
+ """Execute the contents of the SchemaIterator's buffer."""
+
try:
return self.proxy(self.buffer.getvalue(), None)
finally:
self.buffer.truncate(0)
class DefaultRunner(schema.SchemaVisitor):
- """a visitor which accepts ColumnDefault objects, produces the dialect-specific SQL corresponding
- to their execution, and executes the SQL, returning the result value.
-
- DefaultRunners are used internally by Engines and Dialects. Specific database modules should provide
- their own subclasses of DefaultRunner to allow database-specific behavior."""
+ """A visitor which accepts ColumnDefault objects, produces the
+ dialect-specific SQL corresponding to their execution, and
+ executes the SQL, returning the result value.
+
+ DefaultRunners are used internally by Engines and Dialects.
+ Specific database modules should provide their own subclasses of
+ DefaultRunner to allow database-specific behavior.
+ """
+
def __init__(self, engine, proxy):
self.proxy = proxy
self.engine = engine
@@ -878,12 +1168,20 @@ class DefaultRunner(schema.SchemaVisitor):
return None
def visit_passive_default(self, default):
- """passive defaults by definition return None on the app side,
- and are post-fetched to get the DB-side value"""
+ """Do nothing.
+
+ Passive defaults by definition return None on the app side,
+ and are post-fetched to get the DB-side value.
+ """
+
return None
def visit_sequence(self, seq):
- """sequences are not supported by default"""
+ """Do nothing.
+
+ Sequences are not supported by default.
+ """
+
return None
def exec_default_sql(self, default):
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 40210e88f..ef0a6cc57 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -9,21 +9,23 @@ from sqlalchemy import schema, exceptions, util, sql, types
import StringIO, sys, re
from sqlalchemy.engine import base
-"""provides default implementations of the engine interfaces"""
-
+"""Provide default implementations of the engine interfaces"""
class PoolConnectionProvider(base.ConnectionProvider):
def __init__(self, pool):
self._pool = pool
+
def get_connection(self):
return self._pool.connect()
+
def dispose(self):
self._pool.dispose()
if hasattr(self, '_dbproxy'):
self._dbproxy.dispose()
-
+
class DefaultDialect(base.Dialect):
- """default implementation of Dialect"""
+ """Default implementation of Dialect"""
+
def __init__(self, convert_unicode=False, encoding='utf-8', default_paramstyle='named', **kwargs):
self.convert_unicode = convert_unicode
self.supports_autoclose_results = True
@@ -31,52 +33,75 @@ class DefaultDialect(base.Dialect):
self.positional = False
self._ischema = None
self._figure_paramstyle(default=default_paramstyle)
+
def create_execution_context(self):
return DefaultExecutionContext(self)
+
def type_descriptor(self, typeobj):
- """provides a database-specific TypeEngine object, given the generic object
- which comes from the types module. Subclasses will usually use the adapt_type()
- method in the types module to make this job easy."""
+ """Provide a database-specific ``TypeEngine`` object, given
+ the generic object which comes from the types module.
+
+ Subclasses will usually use the ``adapt_type()`` method in the
+ types module to make this job easy."""
+
if type(typeobj) is type:
typeobj = typeobj()
return typeobj
+
def oid_column_name(self, column):
return None
+
def supports_sane_rowcount(self):
return True
+
def do_begin(self, connection):
- """implementations might want to put logic here for turning autocommit on/off,
- etc."""
+ """Implementations might want to put logic here for turning
+ autocommit on/off, etc.
+ """
+
pass
+
def do_rollback(self, connection):
- """implementations might want to put logic here for turning autocommit on/off,
- etc."""
+ """Implementations might want to put logic here for turning
+ autocommit on/off, etc.
+ """
+
#print "ENGINE ROLLBACK ON ", connection.connection
connection.rollback()
+
def do_commit(self, connection):
- """implementations might want to put logic here for turning autocommit on/off, etc."""
+ """Implementations might want to put logic here for turning
+ autocommit on/off, etc.
+ """
+
#print "ENGINE COMMIT ON ", connection.connection
connection.commit()
+
def do_executemany(self, cursor, statement, parameters, **kwargs):
cursor.executemany(statement, parameters)
+
def do_execute(self, cursor, statement, parameters, **kwargs):
cursor.execute(statement, parameters)
+
def defaultrunner(self, engine, proxy):
return base.DefaultRunner(engine, proxy)
+
def create_cursor(self, connection):
return connection.cursor()
+
def create_result_proxy_args(self, connection, cursor):
return dict(should_prefetch=False)
-
+
def _set_paramstyle(self, style):
self._paramstyle = style
self._figure_paramstyle(style)
+
paramstyle = property(lambda s:s._paramstyle, _set_paramstyle)
def convert_compiled_params(self, parameters):
executemany = parameters is not None and isinstance(parameters, list)
# the bind params are a CompiledParams object. but all the DBAPI's hate
- # that object (or similar). so convert it to a clean
+ # that object (or similar). so convert it to a clean
# dictionary/list/tuple of dictionary/tuple of list
if parameters is not None:
if self.positional:
@@ -125,29 +150,40 @@ class DefaultDialect(base.Dialect):
class DefaultExecutionContext(base.ExecutionContext):
def __init__(self, dialect):
self.dialect = dialect
+
def pre_exec(self, engine, proxy, compiled, parameters):
self._process_defaults(engine, proxy, compiled, parameters)
+
def post_exec(self, engine, proxy, compiled, parameters):
pass
+
def get_rowcount(self, cursor):
if hasattr(self, '_rowcount'):
return self._rowcount
else:
return cursor.rowcount
+
def supports_sane_rowcount(self):
return self.dialect.supports_sane_rowcount()
+
def last_inserted_ids(self):
return self._last_inserted_ids
+
def last_inserted_params(self):
return self._last_inserted_params
+
def last_updated_params(self):
- return self._last_updated_params
+ return self._last_updated_params
+
def lastrow_has_defaults(self):
return self._lastrow_has_defaults
+
def set_input_sizes(self, cursor, parameters):
- """given a cursor and ClauseParameters, call the appropriate style of
- setinputsizes() on the cursor, using DBAPI types from the bind parameter's
- TypeEngine objects."""
+ """Given a cursor and ClauseParameters, call the appropriate
+ style of ``setinputsizes()`` on the cursor, using DBAPI types
+ from the bind parameter's ``TypeEngine`` objects.
+ """
+
if isinstance(parameters, list):
plist = parameters
else:
@@ -166,19 +202,27 @@ class DefaultExecutionContext(base.ExecutionContext):
typeengine = params.binds[key].type
inputsizes[key] = typeengine.get_dbapi_type(self.dialect.module)
cursor.setinputsizes(**inputsizes)
-
+
def _process_defaults(self, engine, proxy, compiled, parameters):
- """INSERT and UPDATE statements, when compiled, may have additional columns added to their
- VALUES and SET lists corresponding to column defaults/onupdates that are present on the
- Table object (i.e. ColumnDefault, Sequence, PassiveDefault). This method pre-execs those
- DefaultGenerator objects that require pre-execution and sets their values within the
- parameter list, and flags the thread-local state about
- PassiveDefault objects that may require post-fetching the row after it is inserted/updated.
- This method relies upon logic within the ANSISQLCompiler in its visit_insert and
- visit_update methods that add the appropriate column clauses to the statement when its
- being compiled, so that these parameters can be bound to the statement."""
+ """``INSERT`` and ``UPDATE`` statements, when compiled, may
+ have additional columns added to their ``VALUES`` and ``SET``
+ lists corresponding to column defaults/onupdates that are
+ present on the ``Table`` object (i.e. ``ColumnDefault``,
+ ``Sequence``, ``PassiveDefault``). This method pre-execs
+ those ``DefaultGenerator`` objects that require pre-execution
+ and sets their values within the parameter list, and flags the
+ thread-local state about ``PassiveDefault`` objects that may
+ require post-fetching the row after it is inserted/updated.
+
+ This method relies upon logic within the ``ANSISQLCompiler``
+ in its `visit_insert` and `visit_update` methods that add the
+ appropriate column clauses to the statement when its being
+ compiled, so that these parameters can be bound to the
+ statement.
+ """
+
if compiled is None: return
-
+
if getattr(compiled, "isinsert", False):
if isinstance(parameters, list):
plist = parameters
@@ -198,9 +242,9 @@ class DefaultExecutionContext(base.ExecutionContext):
if c.primary_key:
need_lastrowid = True
# check if its not present at all. see if theres a default
- # and fire it off, and add to bind parameters. if
+ # and fire it off, and add to bind parameters. if
# its a pk, add the value to our last_inserted_ids list,
- # or, if its a SQL-side default, dont do any of that, but we'll need
+ # or, if its a SQL-side default, dont do any of that, but we'll need
# the SQL-generated value after execution.
elif not param.has_key(c.key) or param[c.key] is None:
if isinstance(c.default, schema.PassiveDefault):
@@ -212,7 +256,7 @@ class DefaultExecutionContext(base.ExecutionContext):
last_inserted_ids.append(param[c.key])
elif c.primary_key:
need_lastrowid = True
- # its an explicitly passed pk value - add it to
+ # its an explicitly passed pk value - add it to
# our last_inserted_ids list.
elif c.primary_key:
last_inserted_ids.append(param[c.key])
@@ -229,7 +273,7 @@ class DefaultExecutionContext(base.ExecutionContext):
drunner = self.dialect.defaultrunner(engine, proxy)
self._lastrow_has_defaults = False
for param in plist:
- # check the "onupdate" status of each column in the table
+ # check the "onupdate" status of each column in the table
for c in compiled.statement.table.c:
# it will be populated by a SQL clause - we'll need that
# after execution.
@@ -242,5 +286,3 @@ class DefaultExecutionContext(base.ExecutionContext):
if value is not None:
param[c.key] = value
self._last_updated_params = param
-
-
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index a7f2cc003..7a7b84aa9 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -1,8 +1,13 @@
-"""defines different strategies for creating new instances of sql.Engine.
-by default there are two, one which is the "thread-local" strategy, one which is the "plain" strategy.
-new strategies can be added via constructing a new EngineStrategy object which will add itself to the
-list of available strategies here, or replace one of the existing name.
-this can be accomplished via a mod; see the sqlalchemy/mods package for details."""
+"""Define different strategies for creating new instances of sql.Engine.
+
+By default there are two, one which is the "thread-local" strategy,
+one which is the "plain" strategy.
+
+New strategies can be added via constructing a new EngineStrategy
+object which will add itself to the list of available strategies here,
+or replace one of the existing names. This can be accomplished via a
+mod; see the sqlalchemy/mods package for details.
+"""
from sqlalchemy.engine import base, default, threadlocal, url
@@ -12,22 +17,30 @@ from sqlalchemy import pool as poollib
strategies = {}
class EngineStrategy(object):
- """defines a function that receives input arguments and produces an instance of sql.Engine, typically
- an instance sqlalchemy.engine.base.Engine or a subclass."""
+ """Define a function that receives input arguments and produces an
+ instance of sql.Engine, typically an instance of
+ sqlalchemy.engine.base.Engine or a subclass.
+ """
+
def __init__(self, name):
- """construct a new EngineStrategy object and sets it in the list of available strategies
- under this name."""
+ """Construct a new EngineStrategy object.
+
+ Sets it in the list of available strategies under this name.
+ """
+
self.name = name
strategies[self.name] = self
+
def create(self, *args, **kwargs):
- """given arguments, returns a new sql.Engine instance."""
+ """Given arguments, returns a new sql.Engine instance."""
+
raise NotImplementedError()
class DefaultEngineStrategy(EngineStrategy):
def create(self, name_or_url, **kwargs):
# create url.URL object
u = url.make_url(name_or_url)
-
+
# get module from sqlalchemy.databases
module = u.get_module()
@@ -36,7 +49,7 @@ class DefaultEngineStrategy(EngineStrategy):
for k in util.get_cls_kwargs(module.dialect):
if k in kwargs:
dialect_args[k] = kwargs.pop(k)
-
+
# create dialect
dialect = module.dialect(**dialect_args)
@@ -50,6 +63,7 @@ class DefaultEngineStrategy(EngineStrategy):
dbapi = kwargs.pop('module', dialect.dbapi())
if dbapi is None:
raise exceptions.InvalidRequestError("Cant get DBAPI module for dialect '%s'" % dialect)
+
def connect():
try:
return dbapi.connect(*cargs, **cparams)
@@ -80,41 +94,48 @@ class DefaultEngineStrategy(EngineStrategy):
for k in util.get_cls_kwargs(engineclass):
if k in kwargs:
engine_args[k] = kwargs.pop(k)
-
+
# all kwargs should be consumed
if len(kwargs):
raise TypeError("Invalid argument(s) %s sent to create_engine(), using configuration %s/%s/%s. Please check that the keyword arguments are appropriate for this combination of components." % (','.join(["'%s'" % k for k in kwargs]), dialect.__class__.__name__, pool.__class__.__name__, engineclass.__name__))
-
+
return engineclass(provider, dialect, **engine_args)
def pool_threadlocal(self):
raise NotImplementedError()
+
def get_pool_provider(self, pool):
raise NotImplementedError()
+
def get_engine_cls(self):
raise NotImplementedError()
-
+
class PlainEngineStrategy(DefaultEngineStrategy):
def __init__(self):
DefaultEngineStrategy.__init__(self, 'plain')
+
def pool_threadlocal(self):
return False
+
def get_pool_provider(self, pool):
return default.PoolConnectionProvider(pool)
+
def get_engine_cls(self):
return base.Engine
+
PlainEngineStrategy()
class ThreadLocalEngineStrategy(DefaultEngineStrategy):
def __init__(self):
DefaultEngineStrategy.__init__(self, 'threadlocal')
+
def pool_threadlocal(self):
return True
+
def get_pool_provider(self, pool):
return threadlocal.TLocalConnectionProvider(pool)
+
def get_engine_cls(self):
return threadlocal.TLEngine
-ThreadLocalEngineStrategy()
-
-
+ThreadLocalEngineStrategy()
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index beac3ee3f..2bbb1ed43 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -2,19 +2,24 @@ from sqlalchemy import schema, exceptions, util, sql, types
import StringIO, sys, re
from sqlalchemy.engine import base, default
-"""provides a thread-local transactional wrapper around the basic ComposedSQLEngine. multiple calls to engine.connect()
-will return the same connection for the same thread. also provides begin/commit methods on the engine itself
-which correspond to a thread-local transaction."""
+"""Provide a thread-local transactional wrapper around the basic ComposedSQLEngine.
+
+Multiple calls to engine.connect() will return the same connection for
+the same thread. Also provides begin/commit methods on the engine
+itself which correspond to a thread-local transaction.
+"""
class TLSession(object):
def __init__(self, engine):
self.engine = engine
self.__tcount = 0
+
def get_connection(self, close_with_result=False):
try:
return self.__transaction._increment_connect()
except AttributeError:
return TLConnection(self, close_with_result=close_with_result)
+
def reset(self):
try:
self.__transaction._force_close()
@@ -23,20 +28,24 @@ class TLSession(object):
except AttributeError:
pass
self.__tcount = 0
+
def in_transaction(self):
return self.__tcount > 0
+
def begin(self):
if self.__tcount == 0:
self.__transaction = self.get_connection()
self.__trans = self.__transaction._begin()
self.__tcount += 1
return self.__trans
+
def rollback(self):
if self.__tcount > 0:
try:
self.__trans._rollback_impl()
finally:
self.reset()
+
def commit(self):
if self.__tcount == 1:
try:
@@ -45,6 +54,7 @@ class TLSession(object):
self.reset()
elif self.__tcount > 1:
self.__tcount -= 1
+
def is_begun(self):
return self.__tcount > 0
@@ -53,67 +63,96 @@ class TLConnection(base.Connection):
base.Connection.__init__(self, session.engine, close_with_result=close_with_result)
self.__session = session
self.__opencount = 1
+
session = property(lambda s:s.__session)
+
def _increment_connect(self):
self.__opencount += 1
return self
+
def _create_transaction(self, parent):
return TLTransaction(self, parent)
+
def _begin(self):
return base.Connection.begin(self)
+
def in_transaction(self):
return self.session.in_transaction()
+
def begin(self):
return self.session.begin()
+
def close(self):
if self.__opencount == 1:
base.Connection.close(self)
self.__opencount -= 1
+
def _force_close(self):
self.__opencount = 0
base.Connection.close(self)
-
+
class TLTransaction(base.Transaction):
def _commit_impl(self):
base.Transaction.commit(self)
+
def _rollback_impl(self):
base.Transaction.rollback(self)
+
def commit(self):
self.connection.session.commit()
+
def rollback(self):
self.connection.session.rollback()
-
+
class TLEngine(base.Engine):
- """an Engine that includes support for thread-local managed transactions. This engine
- is better suited to be used with threadlocal Pool object."""
+ """An Engine that includes support for thread-local managed transactions.
+
+ This engine is better suited to be used with threadlocal Pool
+ object.
+ """
+
def __init__(self, *args, **kwargs):
- """the TLEngine relies upon the ConnectionProvider having "threadlocal" behavior,
- so that once a connection is checked out for the current thread, you get that same connection
- repeatedly."""
+ """The TLEngine relies upon the ConnectionProvider having
+ "threadlocal" behavior, so that once a connection is checked out
+ for the current thread, you get that same connection
+ repeatedly.
+ """
+
super(TLEngine, self).__init__(*args, **kwargs)
self.context = util.ThreadLocal()
+
def raw_connection(self):
- """returns a DBAPI connection."""
+ """Return a DBAPI connection."""
+
return self.connection_provider.get_connection()
+
def connect(self, **kwargs):
- """returns a Connection that is not thread-locally scoped. this is the equilvalent to calling
- "connect()" on a ComposedSQLEngine."""
+ """Return a Connection that is not thread-locally scoped.
+
+ This is equivalent to calling ``connect()`` on a
+ ComposedSQLEngine.
+ """
+
return base.Connection(self, self.connection_provider.unique_connection())
def _session(self):
if not hasattr(self.context, 'session'):
self.context.session = TLSession(self)
return self.context.session
+
session = property(_session, doc="returns the current thread's TLSession")
def contextual_connect(self, **kwargs):
- """returns a TLConnection which is thread-locally scoped."""
+ """Return a TLConnection which is thread-locally scoped."""
+
return self.session.get_connection(**kwargs)
-
+
def begin(self):
return self.session.begin()
+
def commit(self):
self.session.commit()
+
def rollback(self):
self.session.rollback()
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index 2345a399b..6a0180d62 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -3,30 +3,40 @@ import cgi
import urllib
from sqlalchemy import exceptions
-"""provides the URL object as well as the make_url parsing function."""
+"""Provide the URL object as well as the make_url parsing function."""
class URL(object):
- """represents the components of a URL used to connect to a database.
-
- This object is suitable to be passed directly to a create_engine() call.
- The fields of the URL are parsed from a string by the module-level make_url() function.
- the string format of the URL is an RFC-1738-style string.
-
+ """Represent the components of a URL used to connect to a database.
+
+ This object is suitable to be passed directly to a ``create_engine()``
+ call. The fields of the URL are parsed from a string by the
+ module-level ``make_url()`` function. The string format of the URL is
+ an RFC-1738-style string.
+
Attributes on URL include:
-
+
drivername
-
+ The name of the database backend.
+
username
-
+ The user name for the connection.
+
password
-
+ The password for the connection.
+
host
-
+ The name of the host.
+
port
-
+ The port number.
+
database
-
- query - a dictionary containing key/value pairs representing the URL's query string."""
+ The database.
+
+ query
+ A dictionary containing key/value pairs representing the URL's query string.
+ """
+
def __init__(self, drivername, username=None, password=None, host=None, port=None, database=None, query=None):
self.drivername = drivername
self.username = username
@@ -38,6 +48,7 @@ class URL(object):
self.port = None
self.database= database
self.query = query or {}
+
def __str__(self):
s = self.drivername + "://"
if self.username is not None:
@@ -56,14 +67,21 @@ class URL(object):
keys.sort()
s += '?' + "&".join(["%s=%s" % (k, self.query[k]) for k in keys])
return s
+
def get_module(self):
- """return the SQLAlchemy database module corresponding to this URL's driver name."""
+ """Return the SQLAlchemy database module corresponding to this URL's driver name."""
+
return getattr(__import__('sqlalchemy.databases.%s' % self.drivername).databases, self.drivername)
+
def translate_connect_args(self, names):
- """translate this URL's attributes into a dictionary of connection arguments.
-
- given a list of argument names corresponding to the URL attributes ('host', 'database', 'username', 'password', 'port'),
- will assemble the attribute values of this URL into the dictionary using the given names."""
+ """Translate this URL's attributes into a dictionary of connection arguments.
+
+ Given a list of argument names corresponding to the URL
+ attributes (`host`, `database`, `username`, `password`,
+ `port`), will assemble the attribute values of this URL into
+ the dictionary using the given names.
+ """
+
a = {}
attribute_names = ['host', 'database', 'username', 'password', 'port']
for n in names:
@@ -73,18 +91,19 @@ class URL(object):
if getattr(self, sname, None):
a[n] = getattr(self, sname)
return a
-
def make_url(name_or_url):
- """given a string or unicode instance, produces a new URL instance.
-
- the given string is parsed according to the rfc1738 spec.
- if an existing URL object is passed, just returns the object."""
+ """Given a string or unicode instance, produce a new URL instance.
+
+ The given string is parsed according to the rfc1738 spec. If an
+ existing URL object is passed, just returns the object.
+ """
+
if isinstance(name_or_url, basestring):
return _parse_rfc1738_args(name_or_url)
else:
return name_or_url
-
+
def _parse_rfc1738_args(name):
pattern = re.compile(r'''
(\w+)://
@@ -99,7 +118,7 @@ def _parse_rfc1738_args(name):
(?:/(.*))?
'''
, re.X)
-
+
m = pattern.match(name)
if m is not None:
(name, username, password, host, port, database) = m.group(1, 2, 3, 4, 5, 6)
@@ -124,4 +143,3 @@ def _parse_keyvalue_args(name):
return URL(name, *opts)
else:
return None
-