Diffstat (limited to 'lib')
-rw-r--r--  lib/sqlalchemy/__init__.py | 29
-rw-r--r--  lib/sqlalchemy/cextension/processors.c | 287
-rw-r--r--  lib/sqlalchemy/cextension/resultproxy.c | 109
-rw-r--r--  lib/sqlalchemy/cextension/utils.c | 48
-rw-r--r--  lib/sqlalchemy/connectors/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/mysqldb.py | 7
-rw-r--r--  lib/sqlalchemy/connectors/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/connectors/zxJDBC.py | 2
-rw-r--r--  lib/sqlalchemy/databases/__init__.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/dialects/drizzle/base.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/firebird/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/base.py | 33
-rw-r--r--  lib/sqlalchemy/dialects/firebird/fdb.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/kinterbasdb.py | 11
-rw-r--r--  lib/sqlalchemy/dialects/informix/__init__.py | 9
-rw-r--r--  lib/sqlalchemy/dialects/informix/base.py | 590
-rw-r--r--  lib/sqlalchemy/dialects/informix/informixdb.py | 69
-rw-r--r--  lib/sqlalchemy/dialects/mssql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/adodbapi.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 110
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py | 3
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pyodbc.py | 14
-rw-r--r--  lib/sqlalchemy/dialects/mssql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 350
-rw-r--r--  lib/sqlalchemy/dialects/mysql/cymysql.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/mysql/gaerdbms.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 7
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqldb.py | 28
-rw-r--r--  lib/sqlalchemy/dialects/mysql/oursql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pymysql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py | 87
-rw-r--r--  lib/sqlalchemy/dialects/oracle/cx_oracle.py | 46
-rw-r--r--  lib/sqlalchemy/dialects/oracle/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgres.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/__init__.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 252
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/constraints.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/hstore.py | 13
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/json.py | 199
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pg8000.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 52
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pypostgresql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/ranges.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 34
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlite.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/sybase/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/sybase/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pysybase.py | 2
-rw-r--r--  lib/sqlalchemy/engine/__init__.py | 24
-rw-r--r--  lib/sqlalchemy/engine/base.py | 139
-rw-r--r--  lib/sqlalchemy/engine/ddl.py | 193
-rw-r--r--  lib/sqlalchemy/engine/default.py | 165
-rw-r--r--  lib/sqlalchemy/engine/interfaces.py | 140
-rw-r--r--  lib/sqlalchemy/engine/reflection.py | 164
-rw-r--r--  lib/sqlalchemy/engine/result.py | 33
-rw-r--r--  lib/sqlalchemy/engine/strategies.py | 29
-rw-r--r--  lib/sqlalchemy/engine/threadlocal.py | 2
-rw-r--r--  lib/sqlalchemy/engine/url.py | 48
-rw-r--r--  lib/sqlalchemy/engine/util.py | 24
-rw-r--r--  lib/sqlalchemy/event.py | 735
-rw-r--r--  lib/sqlalchemy/event/__init__.py | 10
-rw-r--r--  lib/sqlalchemy/event/api.py | 107
-rw-r--r--  lib/sqlalchemy/event/attr.py | 376
-rw-r--r--  lib/sqlalchemy/event/base.py | 217
-rw-r--r--  lib/sqlalchemy/event/legacy.py | 156
-rw-r--r--  lib/sqlalchemy/event/registry.py | 236
-rw-r--r--  lib/sqlalchemy/events.py | 143
-rw-r--r--  lib/sqlalchemy/exc.py | 28
-rw-r--r--  lib/sqlalchemy/ext/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/ext/associationproxy.py | 8
-rw-r--r--  lib/sqlalchemy/ext/automap.py | 840
-rw-r--r--  lib/sqlalchemy/ext/compiler.py | 4
-rw-r--r--  lib/sqlalchemy/ext/declarative/__init__.py | 60
-rw-r--r--  lib/sqlalchemy/ext/declarative/api.py | 106
-rw-r--r--  lib/sqlalchemy/ext/declarative/base.py | 95
-rw-r--r--  lib/sqlalchemy/ext/declarative/clsregistry.py | 93
-rw-r--r--  lib/sqlalchemy/ext/horizontal_shard.py | 2
-rw-r--r--  lib/sqlalchemy/ext/hybrid.py | 6
-rw-r--r--  lib/sqlalchemy/ext/instrumentation.py | 10
-rw-r--r--  lib/sqlalchemy/ext/mutable.py | 22
-rw-r--r--  lib/sqlalchemy/ext/orderinglist.py | 2
-rw-r--r--  lib/sqlalchemy/ext/serializer.py | 10
-rw-r--r--  lib/sqlalchemy/inspection.py | 13
-rw-r--r--  lib/sqlalchemy/interfaces.py | 2
-rw-r--r--  lib/sqlalchemy/log.py | 10
-rw-r--r--  lib/sqlalchemy/orm/__init__.py | 1639
-rw-r--r--  lib/sqlalchemy/orm/attributes.py | 307
-rw-r--r--  lib/sqlalchemy/orm/base.py | 453
-rw-r--r--  lib/sqlalchemy/orm/collections.py | 150
-rw-r--r--  lib/sqlalchemy/orm/dependency.py | 2
-rw-r--r--  lib/sqlalchemy/orm/deprecated_interfaces.py | 6
-rw-r--r--  lib/sqlalchemy/orm/descriptor_props.py | 216
-rw-r--r--  lib/sqlalchemy/orm/dynamic.py | 22
-rw-r--r--  lib/sqlalchemy/orm/evaluator.py | 2
-rw-r--r--  lib/sqlalchemy/orm/events.py | 311
-rw-r--r--  lib/sqlalchemy/orm/exc.py | 20
-rw-r--r--  lib/sqlalchemy/orm/identity.py | 4
-rw-r--r--  lib/sqlalchemy/orm/instrumentation.py | 85
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py | 452
-rw-r--r--  lib/sqlalchemy/orm/loading.py | 28
-rw-r--r--  lib/sqlalchemy/orm/mapper.py | 582
-rw-r--r--  lib/sqlalchemy/orm/path_registry.py | 261
-rw-r--r--  lib/sqlalchemy/orm/persistence.py | 134
-rw-r--r--  lib/sqlalchemy/orm/properties.py | 1212
-rw-r--r--  lib/sqlalchemy/orm/query.py | 562
-rw-r--r--  lib/sqlalchemy/orm/relationships.py | 1618
-rw-r--r--  lib/sqlalchemy/orm/scoping.py | 4
-rw-r--r--  lib/sqlalchemy/orm/session.py | 152
-rw-r--r--  lib/sqlalchemy/orm/state.py | 129
-rw-r--r--  lib/sqlalchemy/orm/strategies.py | 395
-rw-r--r--  lib/sqlalchemy/orm/strategy_options.py | 924
-rw-r--r--  lib/sqlalchemy/orm/sync.py | 2
-rw-r--r--  lib/sqlalchemy/orm/unitofwork.py | 10
-rw-r--r--  lib/sqlalchemy/orm/util.py | 536
-rw-r--r--  lib/sqlalchemy/pool.py | 269
-rw-r--r--  lib/sqlalchemy/processors.py | 29
-rw-r--r--  lib/sqlalchemy/schema.py | 3682
-rw-r--r--  lib/sqlalchemy/sql/__init__.py | 27
-rw-r--r--  lib/sqlalchemy/sql/annotation.py | 182
-rw-r--r--  lib/sqlalchemy/sql/base.py | 460
-rw-r--r--  lib/sqlalchemy/sql/compiler.py | 714
-rw-r--r--  lib/sqlalchemy/sql/ddl.py | 864
-rw-r--r--  lib/sqlalchemy/sql/default_comparator.py | 278
-rw-r--r--  lib/sqlalchemy/sql/dml.py | 769
-rw-r--r--  lib/sqlalchemy/sql/elements.py | 2880
-rw-r--r--  lib/sqlalchemy/sql/expression.py | 6688
-rw-r--r--  lib/sqlalchemy/sql/functions.py | 296
-rw-r--r--  lib/sqlalchemy/sql/operators.py | 25
-rw-r--r--  lib/sqlalchemy/sql/schema.py | 3273
-rw-r--r--  lib/sqlalchemy/sql/selectable.py | 3001
-rw-r--r--  lib/sqlalchemy/sql/sqltypes.py | 1628
-rw-r--r--  lib/sqlalchemy/sql/type_api.py | 1053
-rw-r--r--  lib/sqlalchemy/sql/util.py | 410
-rw-r--r--  lib/sqlalchemy/sql/visitors.py | 2
-rw-r--r--  lib/sqlalchemy/testing/__init__.py | 7
-rw-r--r--  lib/sqlalchemy/testing/assertions.py | 73
-rw-r--r--  lib/sqlalchemy/testing/assertsql.py | 5
-rw-r--r--  lib/sqlalchemy/testing/config.py | 6
-rw-r--r--  lib/sqlalchemy/testing/engines.py | 18
-rw-r--r--  lib/sqlalchemy/testing/entities.py | 6
-rw-r--r--  lib/sqlalchemy/testing/exclusions.py | 19
-rw-r--r--  lib/sqlalchemy/testing/fixtures.py | 6
-rw-r--r--  lib/sqlalchemy/testing/mock.py | 10
-rw-r--r--  lib/sqlalchemy/testing/pickleable.py | 6
-rw-r--r--  lib/sqlalchemy/testing/plugin/noseplugin.py | 23
-rw-r--r--  lib/sqlalchemy/testing/profiling.py | 47
-rw-r--r--  lib/sqlalchemy/testing/requirements.py | 47
-rw-r--r--  lib/sqlalchemy/testing/runner.py | 5
-rw-r--r--  lib/sqlalchemy/testing/schema.py | 5
-rw-r--r--  lib/sqlalchemy/testing/suite/test_insert.py | 33
-rw-r--r--  lib/sqlalchemy/testing/suite/test_reflection.py | 8
-rw-r--r--  lib/sqlalchemy/testing/suite/test_types.py | 139
-rw-r--r--  lib/sqlalchemy/testing/util.py | 6
-rw-r--r--  lib/sqlalchemy/testing/warnings.py | 6
-rw-r--r--  lib/sqlalchemy/types.py | 2544
-rw-r--r--  lib/sqlalchemy/util/__init__.py | 13
-rw-r--r--  lib/sqlalchemy/util/_collections.py | 34
-rw-r--r--  lib/sqlalchemy/util/compat.py | 29
-rw-r--r--  lib/sqlalchemy/util/deprecations.py | 2
-rw-r--r--  lib/sqlalchemy/util/langhelpers.py | 328
-rw-r--r--  lib/sqlalchemy/util/queue.py | 21
-rw-r--r--  lib/sqlalchemy/util/topological.py | 2
174 files changed, 26288 insertions(+), 20654 deletions(-)
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 2c805e607..1d1f8c4aa 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -1,11 +1,9 @@
# sqlalchemy/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-import inspect as _inspect
-import sys
from .sql import (
alias,
@@ -23,6 +21,7 @@ from .sql import (
except_all,
exists,
extract,
+ false,
func,
insert,
intersect,
@@ -40,6 +39,7 @@ from .sql import (
select,
subquery,
text,
+ true,
tuple_,
type_coerce,
union,
@@ -97,7 +97,6 @@ from .schema import (
Column,
ColumnDefault,
Constraint,
- DDL,
DefaultClause,
FetchedValue,
ForeignKey,
@@ -110,19 +109,25 @@ from .schema import (
Table,
ThreadLocalMetaData,
UniqueConstraint,
- )
+ DDL,
+)
-from .inspection import inspect
+from .inspection import inspect
from .engine import create_engine, engine_from_config
+__version__ = '0.9.2'
-__all__ = sorted(name for name, obj in locals().items()
- if not (name.startswith('_') or _inspect.ismodule(obj)))
+def __go(lcls):
+ global __all__
-__version__ = '0.9.0'
+ from . import events
+ from . import util as _sa_util
-del _inspect, sys
+ import inspect as _inspect
+
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
-from . import util as _sa_util
-_sa_util.importlater.resolve_all()
+ _sa_util.dependencies.resolve_all("sqlalchemy")
+__go(locals())
\ No newline at end of file
diff --git a/lib/sqlalchemy/cextension/processors.c b/lib/sqlalchemy/cextension/processors.c
index 4e82ffc6b..d56817763 100644
--- a/lib/sqlalchemy/cextension/processors.c
+++ b/lib/sqlalchemy/cextension/processors.c
@@ -1,7 +1,7 @@
/*
processors.c
-Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
+Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,13 +10,15 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
#include <datetime.h>
+#define MODULE_NAME "cprocessors"
+#define MODULE_DOC "Module containing C versions of data processing functions."
+
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#endif
-
static PyObject *
int_to_boolean(PyObject *self, PyObject *arg)
{
@@ -26,7 +28,12 @@ int_to_boolean(PyObject *self, PyObject *arg)
if (arg == Py_None)
Py_RETURN_NONE;
+
+#if PY_MAJOR_VERSION >= 3
+ l = PyLong_AsLong(arg);
+#else
l = PyInt_AsLong(arg);
+#endif
if (l == 0) {
res = Py_False;
} else if (l == 1) {
@@ -65,23 +72,48 @@ to_float(PyObject *self, PyObject *arg)
static PyObject *
str_to_datetime(PyObject *self, PyObject *arg)
{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+ PyObject *err_bytes;
+#endif
const char *str;
+ int numparsed;
unsigned int year, month, day, hour, minute, second, microsecond = 0;
PyObject *err_repr;
if (arg == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(arg);
+ if (bytes == NULL)
+ str = NULL;
+ else
+ str = PyBytes_AS_STRING(bytes);
+#else
str = PyString_AsString(arg);
+#endif
if (str == NULL) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse datetime string '%.200s' "
+ "- value is not a string.",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse datetime string '%.200s' "
"- value is not a string.",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -92,15 +124,30 @@ str_to_datetime(PyObject *self, PyObject *arg)
not accept "2000-01-01 00:00:00.". I don't know which is better, but they
should be coherent.
*/
- if (sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
- &hour, &minute, &second, &microsecond) < 6) {
+ numparsed = sscanf(str, "%4u-%2u-%2u %2u:%2u:%2u.%6u", &year, &month, &day,
+ &hour, &minute, &second, &microsecond);
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(bytes);
+#endif
+ if (numparsed < 6) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse datetime string: %.200s",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse datetime string: %.200s",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -111,22 +158,47 @@ str_to_datetime(PyObject *self, PyObject *arg)
static PyObject *
str_to_time(PyObject *self, PyObject *arg)
{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+ PyObject *err_bytes;
+#endif
const char *str;
+ int numparsed;
unsigned int hour, minute, second, microsecond = 0;
PyObject *err_repr;
if (arg == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(arg);
+ if (bytes == NULL)
+ str = NULL;
+ else
+ str = PyBytes_AS_STRING(bytes);
+#else
str = PyString_AsString(arg);
+#endif
if (str == NULL) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse time string '%.200s' - value is not a string.",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse time string '%.200s' - value is not a string.",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -137,15 +209,30 @@ str_to_time(PyObject *self, PyObject *arg)
not accept "00:00:00.". I don't know which is better, but they should be
coherent.
*/
- if (sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
- &microsecond) < 3) {
+ numparsed = sscanf(str, "%2u:%2u:%2u.%6u", &hour, &minute, &second,
+ &microsecond);
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(bytes);
+#endif
+ if (numparsed < 3) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse time string: %.200s",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse time string: %.200s",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
@@ -155,34 +242,73 @@ str_to_time(PyObject *self, PyObject *arg)
static PyObject *
str_to_date(PyObject *self, PyObject *arg)
{
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+ PyObject *err_bytes;
+#endif
const char *str;
+ int numparsed;
unsigned int year, month, day;
PyObject *err_repr;
if (arg == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(arg);
+ if (bytes == NULL)
+ str = NULL;
+ else
+ str = PyBytes_AS_STRING(bytes);
+#else
str = PyString_AsString(arg);
+#endif
if (str == NULL) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse date string '%.200s' - value is not a string.",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse date string '%.200s' - value is not a string.",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
- if (sscanf(str, "%4u-%2u-%2u", &year, &month, &day) != 3) {
+ numparsed = sscanf(str, "%4u-%2u-%2u", &year, &month, &day);
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(bytes);
+#endif
+ if (numparsed != 3) {
err_repr = PyObject_Repr(arg);
if (err_repr == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(err_repr);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_ValueError,
+ "Couldn't parse date string: %.200s",
+ PyBytes_AS_STRING(err_bytes));
+ Py_DECREF(err_bytes);
+#else
PyErr_Format(
PyExc_ValueError,
"Couldn't parse date string: %.200s",
PyString_AsString(err_repr));
+#endif
Py_DECREF(err_repr);
return NULL;
}
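
[Editor's note, not part of the patch: every string-parsing hunk above applies the same Python 2/3 pattern. Python 3 has no PyString_AsString(), so the str argument is first encoded with PyUnicode_AsASCIIString(); the char* is borrowed from the resulting bytes object, which must stay alive until the pointer is no longer used and is then released. A minimal sketch with a hypothetical function name:

    #include <Python.h>

    /* Hypothetical helper illustrating the pattern: on Python 3 the
       bytes object owns the buffer, so it must outlive `str`. */
    static PyObject *
    example_parse(PyObject *self, PyObject *arg)
    {
        const char *str;
    #if PY_MAJOR_VERSION >= 3
        PyObject *bytes = PyUnicode_AsASCIIString(arg);  /* new reference */
        if (bytes == NULL)
            return NULL;
        str = PyBytes_AS_STRING(bytes);    /* borrowed from `bytes` */
    #else
        str = PyString_AsString(arg);      /* borrowed from `arg` */
        if (str == NULL)
            return NULL;
    #endif
        /* ... sscanf(str, ...) or other parsing here ... */
    #if PY_MAJOR_VERSION >= 3
        Py_DECREF(bytes);                  /* only after `str` is done */
    #endif
        Py_RETURN_NONE;
    }

This is also why the hunks introduce `numparsed`: holding the sscanf() result in a local lets the bytes object be released before the error branches run.]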
@@ -219,17 +345,35 @@ UnicodeResultProcessor_init(UnicodeResultProcessor *self, PyObject *args,
PyObject *encoding, *errors = NULL;
static char *kwlist[] = {"encoding", "errors", NULL};
+#if PY_MAJOR_VERSION >= 3
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "U|U:__init__", kwlist,
+ &encoding, &errors))
+ return -1;
+#else
if (!PyArg_ParseTupleAndKeywords(args, kwds, "S|S:__init__", kwlist,
&encoding, &errors))
return -1;
+#endif
+#if PY_MAJOR_VERSION >= 3
+ encoding = PyUnicode_AsASCIIString(encoding);
+#else
Py_INCREF(encoding);
+#endif
self->encoding = encoding;
if (errors) {
+#if PY_MAJOR_VERSION >= 3
+ errors = PyUnicode_AsASCIIString(errors);
+#else
Py_INCREF(errors);
+#endif
} else {
+#if PY_MAJOR_VERSION >= 3
+ errors = PyBytes_FromString("strict");
+#else
errors = PyString_FromString("strict");
+#endif
if (errors == NULL)
return -1;
}
@@ -248,11 +392,58 @@ UnicodeResultProcessor_process(UnicodeResultProcessor *self, PyObject *value)
if (value == Py_None)
Py_RETURN_NONE;
+#if PY_MAJOR_VERSION >= 3
+ if (PyBytes_AsStringAndSize(value, &str, &len))
+ return NULL;
+
+ encoding = PyBytes_AS_STRING(self->encoding);
+ errors = PyBytes_AS_STRING(self->errors);
+#else
+ if (PyString_AsStringAndSize(value, &str, &len))
+ return NULL;
+
+ encoding = PyString_AS_STRING(self->encoding);
+ errors = PyString_AS_STRING(self->errors);
+#endif
+
+ return PyUnicode_Decode(str, len, encoding, errors);
+}
+
+static PyObject *
+UnicodeResultProcessor_conditional_process(UnicodeResultProcessor *self, PyObject *value)
+{
+ const char *encoding, *errors;
+ char *str;
+ Py_ssize_t len;
+
+ if (value == Py_None)
+ Py_RETURN_NONE;
+
+#if PY_MAJOR_VERSION >= 3
+ if (PyUnicode_Check(value) == 1) {
+ Py_INCREF(value);
+ return value;
+ }
+
+ if (PyBytes_AsStringAndSize(value, &str, &len))
+ return NULL;
+
+ encoding = PyBytes_AS_STRING(self->encoding);
+ errors = PyBytes_AS_STRING(self->errors);
+#else
+
+ if (PyUnicode_Check(value) == 1) {
+ Py_INCREF(value);
+ return value;
+ }
+
if (PyString_AsStringAndSize(value, &str, &len))
return NULL;
+
encoding = PyString_AS_STRING(self->encoding);
errors = PyString_AS_STRING(self->errors);
+#endif
return PyUnicode_Decode(str, len, encoding, errors);
}
@@ -262,18 +453,23 @@ UnicodeResultProcessor_dealloc(UnicodeResultProcessor *self)
{
Py_XDECREF(self->encoding);
Py_XDECREF(self->errors);
+#if PY_MAJOR_VERSION >= 3
+ Py_TYPE(self)->tp_free((PyObject*)self);
+#else
self->ob_type->tp_free((PyObject*)self);
+#endif
}
static PyMethodDef UnicodeResultProcessor_methods[] = {
{"process", (PyCFunction)UnicodeResultProcessor_process, METH_O,
"The value processor itself."},
+ {"conditional_process", (PyCFunction)UnicodeResultProcessor_conditional_process, METH_O,
+ "Conditional version of the value processor."},
{NULL} /* Sentinel */
};
static PyTypeObject UnicodeResultProcessorType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
+ PyVarObject_HEAD_INIT(NULL, 0)
"sqlalchemy.cprocessors.UnicodeResultProcessor", /* tp_name */
sizeof(UnicodeResultProcessor), /* tp_basicsize */
0, /* tp_itemsize */
@@ -323,7 +519,11 @@ DecimalResultProcessor_init(DecimalResultProcessor *self, PyObject *args,
{
PyObject *type, *format;
+#if PY_MAJOR_VERSION >= 3
+ if (!PyArg_ParseTuple(args, "OU", &type, &format))
+#else
if (!PyArg_ParseTuple(args, "OS", &type, &format))
+#endif
return -1;
Py_INCREF(type);
@@ -343,11 +543,21 @@ DecimalResultProcessor_process(DecimalResultProcessor *self, PyObject *value)
if (value == Py_None)
Py_RETURN_NONE;
+ /* Decimal does not accept float values directly */
+ /* SQLite can also give us an integer here (see [ticket:2432]) */
+ /* XXX: starting with Python 3.1, we could use Decimal.from_float(f),
+ but the result wouldn't be the same */
+
args = PyTuple_Pack(1, value);
if (args == NULL)
return NULL;
+#if PY_MAJOR_VERSION >= 3
+ str = PyUnicode_Format(self->format, args);
+#else
str = PyString_Format(self->format, args);
+#endif
+
Py_DECREF(args);
if (str == NULL)
return NULL;
@@ -362,7 +572,11 @@ DecimalResultProcessor_dealloc(DecimalResultProcessor *self)
{
Py_XDECREF(self->type);
Py_XDECREF(self->format);
+#if PY_MAJOR_VERSION >= 3
+ Py_TYPE(self)->tp_free((PyObject*)self);
+#else
self->ob_type->tp_free((PyObject*)self);
+#endif
}
static PyMethodDef DecimalResultProcessor_methods[] = {
@@ -372,8 +586,7 @@ static PyMethodDef DecimalResultProcessor_methods[] = {
};
static PyTypeObject DecimalResultProcessorType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
+ PyVarObject_HEAD_INIT(NULL, 0)
"sqlalchemy.DecimalResultProcessor", /* tp_name */
sizeof(DecimalResultProcessor), /* tp_basicsize */
0, /* tp_itemsize */
@@ -413,11 +626,6 @@ static PyTypeObject DecimalResultProcessorType = {
0, /* tp_new */
};
-#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
-#define PyMODINIT_FUNC void
-#endif
-
-
static PyMethodDef module_methods[] = {
{"int_to_boolean", int_to_boolean, METH_O,
"Convert an integer to a boolean."},
@@ -434,23 +642,53 @@ static PyMethodDef module_methods[] = {
{NULL, NULL, 0, NULL} /* Sentinel */
};
+#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
+#define PyMODINIT_FUNC void
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ MODULE_NAME,
+ MODULE_DOC,
+ -1,
+ module_methods
+};
+
+#define INITERROR return NULL
+
+PyMODINIT_FUNC
+PyInit_cprocessors(void)
+
+#else
+
+#define INITERROR return
+
PyMODINIT_FUNC
initcprocessors(void)
+
+#endif
+
{
PyObject *m;
UnicodeResultProcessorType.tp_new = PyType_GenericNew;
if (PyType_Ready(&UnicodeResultProcessorType) < 0)
- return;
+ INITERROR;
DecimalResultProcessorType.tp_new = PyType_GenericNew;
if (PyType_Ready(&DecimalResultProcessorType) < 0)
- return;
+ INITERROR;
- m = Py_InitModule3("cprocessors", module_methods,
- "Module containing C versions of data processing functions.");
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
+#endif
if (m == NULL)
- return;
+ INITERROR;
PyDateTime_IMPORT;
@@ -461,5 +699,8 @@ initcprocessors(void)
Py_INCREF(&DecimalResultProcessorType);
PyModule_AddObject(m, "DecimalResultProcessor",
(PyObject *)&DecimalResultProcessorType);
-}
+#if PY_MAJOR_VERSION >= 3
+ return m;
+#endif
+}
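
[Editor's note, not part of the patch: this file and the two C files below converge on the same dual-version initialization layout. The function name, the INITERROR macro, and the module-creation call are switched on PY_MAJOR_VERSION while the function body is shared. Reduced to a skeleton, with a hypothetical module name `cexample`:

    #include <Python.h>

    #define MODULE_NAME "cexample"   /* hypothetical, for illustration */
    #define MODULE_DOC  "Skeleton of the shared 2/3 init layout."

    static PyMethodDef module_methods[] = {
        {NULL, NULL, 0, NULL}        /* sentinel; real entries go first */
    };

    #if PY_MAJOR_VERSION >= 3

    static struct PyModuleDef module_def = {
        PyModuleDef_HEAD_INIT,
        MODULE_NAME,
        MODULE_DOC,
        -1,                          /* no per-interpreter state */
        module_methods
    };

    #define INITERROR return NULL    /* Py3 init returns the module */

    PyMODINIT_FUNC
    PyInit_cexample(void)

    #else

    #define INITERROR return         /* Py2 init returns void */

    PyMODINIT_FUNC
    initcexample(void)

    #endif

    {
        PyObject *m;

    #if PY_MAJOR_VERSION >= 3
        m = PyModule_Create(&module_def);
    #else
        m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
    #endif
        if (m == NULL)
            INITERROR;               /* one error path for both versions */

    #if PY_MAJOR_VERSION >= 3
        return m;
    #endif
    }
]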
diff --git a/lib/sqlalchemy/cextension/resultproxy.c b/lib/sqlalchemy/cextension/resultproxy.c
index b70f9c271..218c7b807 100644
--- a/lib/sqlalchemy/cextension/resultproxy.c
+++ b/lib/sqlalchemy/cextension/resultproxy.c
@@ -1,7 +1,7 @@
/*
resultproxy.c
-Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
+Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2010-2011 Gaetan de Menten gdementen@gmail.com
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,6 +9,9 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
+#define MODULE_NAME "cresultproxy"
+#define MODULE_DOC "Module containing C versions of core ResultProxy classes."
+
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
@@ -150,7 +153,11 @@ BaseRowProxy_dealloc(BaseRowProxy *self)
Py_XDECREF(self->row);
Py_XDECREF(self->processors);
Py_XDECREF(self->keymap);
+#if PY_MAJOR_VERSION >= 3
+ Py_TYPE(self)->tp_free((PyObject *)self);
+#else
self->ob_type->tp_free((PyObject *)self);
+#endif
}
static PyObject *
@@ -245,14 +252,21 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
PyObject *processor, *value, *processed_value;
PyObject *row, *record, *result, *indexobject;
PyObject *exc_module, *exception, *cstr_obj;
+#if PY_MAJOR_VERSION >= 3
+ PyObject *bytes;
+#endif
char *cstr_key;
long index;
int key_fallback = 0;
int tuple_check = 0;
+#if PY_MAJOR_VERSION < 3
if (PyInt_CheckExact(key)) {
index = PyInt_AS_LONG(key);
- } else if (PyLong_CheckExact(key)) {
+ }
+#endif
+
+ if (PyLong_CheckExact(key)) {
index = PyLong_AsLong(key);
if ((index == -1) && PyErr_Occurred())
/* -1 can be either the actual value, or an error flag. */
@@ -305,7 +319,21 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
cstr_obj = PyObject_Str(key);
if (cstr_obj == NULL)
return NULL;
+
+/*
+ FIXME: raise encoding error exception (in both versions below)
+ if the key contains non-ascii chars, instead of an
+ InvalidRequestError without any message like in the
+ python version.
+*/
+#if PY_MAJOR_VERSION >= 3
+ bytes = PyUnicode_AsASCIIString(cstr_obj);
+ if (bytes == NULL)
+ return NULL;
+ cstr_key = PyBytes_AS_STRING(bytes);
+#else
cstr_key = PyString_AsString(cstr_obj);
+#endif
if (cstr_key == NULL) {
Py_DECREF(cstr_obj);
return NULL;
@@ -318,7 +346,11 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
return NULL;
}
+#if PY_MAJOR_VERSION >= 3
+ index = PyLong_AsLong(indexobject);
+#else
index = PyInt_AsLong(indexobject);
+#endif
if ((index == -1) && PyErr_Occurred())
/* -1 can be either the actual value, or an error flag. */
return NULL;
@@ -357,13 +389,23 @@ BaseRowProxy_subscript(BaseRowProxy *self, PyObject *key)
static PyObject *
BaseRowProxy_getitem(PyObject *self, Py_ssize_t i)
{
- return BaseRowProxy_subscript((BaseRowProxy*)self, PyInt_FromSsize_t(i));
+ PyObject *index;
+
+#if PY_MAJOR_VERSION >= 3
+ index = PyLong_FromSsize_t(i);
+#else
+ index = PyInt_FromSsize_t(i);
+#endif
+ return BaseRowProxy_subscript((BaseRowProxy*)self, index);
}
static PyObject *
BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)
{
PyObject *tmp;
+#if PY_MAJOR_VERSION >= 3
+ PyObject *err_bytes;
+#endif
if (!(tmp = PyObject_GenericGetAttr((PyObject *)self, name))) {
if (!PyErr_ExceptionMatches(PyExc_AttributeError))
@@ -375,11 +417,23 @@ BaseRowProxy_getattro(BaseRowProxy *self, PyObject *name)
tmp = BaseRowProxy_subscript(self, name);
if (tmp == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) {
+
+#if PY_MAJOR_VERSION >= 3
+ err_bytes = PyUnicode_AsASCIIString(name);
+ if (err_bytes == NULL)
+ return NULL;
+ PyErr_Format(
+ PyExc_AttributeError,
+ "Could not locate column in row for column '%.200s'",
+ PyBytes_AS_STRING(err_bytes)
+ );
+#else
PyErr_Format(
PyExc_AttributeError,
"Could not locate column in row for column '%.200s'",
PyString_AsString(name)
);
+#endif
return NULL;
}
return tmp;
@@ -565,8 +619,7 @@ static PyMappingMethods BaseRowProxy_as_mapping = {
};
static PyTypeObject BaseRowProxyType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
+ PyVarObject_HEAD_INIT(NULL, 0)
"sqlalchemy.cresultproxy.BaseRowProxy", /* tp_name */
sizeof(BaseRowProxy), /* tp_basicsize */
0, /* tp_itemsize */
@@ -606,34 +659,60 @@ static PyTypeObject BaseRowProxyType = {
0 /* tp_new */
};
+static PyMethodDef module_methods[] = {
+ {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
+ "reconstruct a RowProxy instance from its pickled form."},
+ {NULL, NULL, 0, NULL} /* Sentinel */
+};
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
#define PyMODINIT_FUNC void
#endif
-static PyMethodDef module_methods[] = {
- {"safe_rowproxy_reconstructor", safe_rowproxy_reconstructor, METH_VARARGS,
- "reconstruct a RowProxy instance from its pickled form."},
- {NULL, NULL, 0, NULL} /* Sentinel */
+#if PY_MAJOR_VERSION >= 3
+
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ MODULE_NAME,
+ MODULE_DOC,
+ -1,
+ module_methods
};
+#define INITERROR return NULL
+
+PyMODINIT_FUNC
+PyInit_cresultproxy(void)
+
+#else
+
+#define INITERROR return
+
PyMODINIT_FUNC
initcresultproxy(void)
+
+#endif
+
{
PyObject *m;
BaseRowProxyType.tp_new = PyType_GenericNew;
if (PyType_Ready(&BaseRowProxyType) < 0)
- return;
+ INITERROR;
- m = Py_InitModule3("cresultproxy", module_methods,
- "Module containing C versions of core ResultProxy classes.");
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
+#endif
if (m == NULL)
- return;
+ INITERROR;
Py_INCREF(&BaseRowProxyType);
PyModule_AddObject(m, "BaseRowProxy", (PyObject *)&BaseRowProxyType);
+#if PY_MAJOR_VERSION >= 3
+ return m;
+#endif
}
-
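
[Editor's note, not part of the patch: two smaller recurring changes in these C files follow from the Python 3 object layout. Type-object headers switch from PyObject_HEAD_INIT(NULL) plus an explicit ob_size slot to PyVarObject_HEAD_INIT(NULL, 0), which is available from Python 2.6 onward and expands correctly on both lines, and deallocators use Py_TYPE(self)->tp_free because ob_type is no longer a direct member on Python 3. A minimal sketch with a hypothetical type:

    #include <Python.h>

    typedef struct {
        PyObject_HEAD               /* no extra fields needed here */
    } Example;

    static void
    Example_dealloc(Example *self)
    {
        /* Py_TYPE() works on 2.6+ and 3.x; self->ob_type does not on 3.x */
        Py_TYPE(self)->tp_free((PyObject *)self);
    }

    static PyTypeObject ExampleType = {
        PyVarObject_HEAD_INIT(NULL, 0)  /* replaces HEAD_INIT + ob_size */
        "cexample.Example",             /* tp_name */
        sizeof(Example),                /* tp_basicsize */
        0,                              /* tp_itemsize */
        (destructor)Example_dealloc,    /* tp_dealloc */
        /* remaining slots zero-initialized */
    };
]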
diff --git a/lib/sqlalchemy/cextension/utils.c b/lib/sqlalchemy/cextension/utils.c
index 5928c4103..377ba8a8d 100644
--- a/lib/sqlalchemy/cextension/utils.c
+++ b/lib/sqlalchemy/cextension/utils.c
@@ -1,6 +1,6 @@
/*
utils.c
-Copyright (C) 2012-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+Copyright (C) 2012-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
This module is part of SQLAlchemy and is released under
the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,6 +8,9 @@ the MIT License: http://www.opensource.org/licenses/mit-license.php
#include <Python.h>
+#define MODULE_NAME "cutils"
+#define MODULE_DOC "Module containing C versions of utility functions."
+
/*
Given arguments from the calling form *multiparams, **params,
return a list of bind parameter structures, usually a list of
@@ -172,26 +175,51 @@ distill_params(PyObject *self, PyObject *args)
}
}
-#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
-#define PyMODINIT_FUNC void
-#endif
-
-
static PyMethodDef module_methods[] = {
{"_distill_params", distill_params, METH_VARARGS,
"Distill an execute() parameter structure."},
{NULL, NULL, 0, NULL} /* Sentinel */
};
+#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
+#define PyMODINIT_FUNC void
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ MODULE_NAME,
+ MODULE_DOC,
+ -1,
+ module_methods
+ };
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+PyMODINIT_FUNC
+PyInit_cutils(void)
+#else
PyMODINIT_FUNC
initcutils(void)
+#endif
{
PyObject *m;
- m = Py_InitModule3("cutils", module_methods,
- "Internal utility functions.");
- if (m == NULL)
- return;
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3(MODULE_NAME, module_methods, MODULE_DOC);
+#endif
+#if PY_MAJOR_VERSION >= 3
+ if (m == NULL)
+ return NULL;
+ return m;
+#else
+ if (m == NULL)
+ return;
+#endif
}
diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py
index 26156a403..761024fe7 100644
--- a/lib/sqlalchemy/connectors/__init__.py
+++ b/lib/sqlalchemy/connectors/__init__.py
@@ -1,5 +1,5 @@
# connectors/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py
index ebdcd2758..e5562a25e 100644
--- a/lib/sqlalchemy/connectors/mxodbc.py
+++ b/lib/sqlalchemy/connectors/mxodbc.py
@@ -1,5 +1,5 @@
# connectors/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/mysqldb.py b/lib/sqlalchemy/connectors/mysqldb.py
index 5f4b3e4d3..33e59218b 100644
--- a/lib/sqlalchemy/connectors/mysqldb.py
+++ b/lib/sqlalchemy/connectors/mysqldb.py
@@ -1,3 +1,9 @@
+# connectors/mysqldb.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Define behaviors common to MySQLdb dialects.
Currently includes MySQL and Drizzle.
@@ -56,6 +62,7 @@ class MySQLDBConnector(Connector):
# is overridden when pymysql is used
return __import__('MySQLdb')
+
def do_executemany(self, cursor, statement, parameters, context=None):
rowcount = cursor.executemany(statement, parameters)
if context is not None:
diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py
index 6b4e3036d..284de288a 100644
--- a/lib/sqlalchemy/connectors/pyodbc.py
+++ b/lib/sqlalchemy/connectors/pyodbc.py
@@ -1,5 +1,5 @@
# connectors/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py
index 593449151..e0bbc5734 100644
--- a/lib/sqlalchemy/connectors/zxJDBC.py
+++ b/lib/sqlalchemy/connectors/zxJDBC.py
@@ -1,5 +1,5 @@
# connectors/zxJDBC.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/databases/__init__.py b/lib/sqlalchemy/databases/__init__.py
index e6821b009..915eefa4a 100644
--- a/lib/sqlalchemy/databases/__init__.py
+++ b/lib/sqlalchemy/databases/__init__.py
@@ -1,5 +1,5 @@
# databases/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -15,7 +15,6 @@ from ..dialects.mysql import base as mysql
from ..dialects.drizzle import base as drizzle
from ..dialects.oracle import base as oracle
from ..dialects.firebird import base as firebird
-from ..dialects.informix import base as informix
from ..dialects.mssql import base as mssql
from ..dialects.sybase import base as sybase
@@ -23,7 +22,6 @@ from ..dialects.sybase import base as sybase
__all__ = (
'drizzle',
'firebird',
- 'informix',
'mssql',
'mysql',
'postgresql',
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index 7f5d34707..974d4f787 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -1,5 +1,5 @@
# dialects/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,7 +7,6 @@
__all__ = (
'drizzle',
'firebird',
-# 'informix',
'mssql',
'mysql',
'oracle',
diff --git a/lib/sqlalchemy/dialects/drizzle/base.py b/lib/sqlalchemy/dialects/drizzle/base.py
index efad13549..b5addb422 100644
--- a/lib/sqlalchemy/dialects/drizzle/base.py
+++ b/lib/sqlalchemy/dialects/drizzle/base.py
@@ -1,5 +1,5 @@
# drizzle/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com>
#
# This module is part of SQLAlchemy and is released under
@@ -417,6 +417,7 @@ class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
pass
+@log.class_logger
class DrizzleDialect(mysql_dialect.MySQLDialect):
"""Details of the Drizzle dialect.
@@ -495,4 +496,3 @@ class DrizzleDialect(mysql_dialect.MySQLDialect):
self._backslash_escapes = False
-log.class_logger(DrizzleDialect)
diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py
index e57457a39..094ac3e83 100644
--- a/lib/sqlalchemy/dialects/firebird/__init__.py
+++ b/lib/sqlalchemy/dialects/firebird/__init__.py
@@ -1,5 +1,5 @@
# firebird/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index dcaa68f4e..21db57b68 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -1,5 +1,5 @@
# firebird/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -359,6 +359,7 @@ class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
"""Install Firebird specific reserved words."""
reserved_words = RESERVED_WORDS
+ illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(['_'])
def __init__(self, dialect):
super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
@@ -401,6 +402,8 @@ class FBDialect(default.DefaultDialect):
colspecs = colspecs
ischema_names = ischema_names
+ construct_arguments = []
+
# defaults to dialect ver. 3,
# will be autodetected off upon
# first connect
@@ -474,18 +477,34 @@ class FBDialect(default.DefaultDialect):
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
+ # there are two queries commonly mentioned for this.
+ # this one, using view_blr, is at the Firebird FAQ among other places:
+ # http://www.firebirdfaq.org/faq174/
s = """
- SELECT DISTINCT rdb$relation_name
- FROM rdb$relation_fields
- WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
+ select rdb$relation_name
+ from rdb$relations
+ where rdb$view_blr is null
+ and (rdb$system_flag is null or rdb$system_flag = 0);
"""
+
+ # the other query is this one. It's not clear if there's really
+ # any difference between these two. This link:
+ # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
+ # states them as interchangeable. Some discussion at [ticket:2898]
+ # SELECT DISTINCT rdb$relation_name
+ # FROM rdb$relation_fields
+ # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
+
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
+ # see http://www.firebirdfaq.org/faq174/
s = """
- SELECT distinct rdb$view_name
- FROM rdb$view_relations
+ select rdb$relation_name
+ from rdb$relations
+ where rdb$view_blr is not null
+ and (rdb$system_flag is null or rdb$system_flag = 0);
"""
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@@ -700,7 +719,7 @@ class FBDialect(default.DefaultDialect):
ic.rdb$index_name
WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
AND rdb$relation_constraints.rdb$constraint_type IS NULL
- ORDER BY index_name, field_name
+ ORDER BY index_name, ic.rdb$field_position
"""
c = connection.execute(qry, [self.denormalize_name(table_name)])
diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py
index 36b424d49..4d94ef0d5 100644
--- a/lib/sqlalchemy/dialects/firebird/fdb.py
+++ b/lib/sqlalchemy/dialects/firebird/fdb.py
@@ -1,5 +1,5 @@
# firebird/fdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index c8d8e986f..b8a83a07b 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -1,5 +1,5 @@
# firebird/kinterbasdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -42,7 +42,7 @@ from re import match
import decimal
-class _FBNumeric_kinterbasdb(sqltypes.Numeric):
+class _kinterbasdb_numeric(object):
def bind_processor(self, dialect):
def process(value):
if isinstance(value, decimal.Decimal):
@@ -51,6 +51,12 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric):
return value
return process
+class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
+ pass
+
+class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
+ pass
+
class FBExecutionContext_kinterbasdb(FBExecutionContext):
@property
@@ -74,6 +80,7 @@ class FBDialect_kinterbasdb(FBDialect):
FBDialect.colspecs,
{
sqltypes.Numeric: _FBNumeric_kinterbasdb,
+ sqltypes.Float: _FBFloat_kinterbasdb,
}
)
diff --git a/lib/sqlalchemy/dialects/informix/__init__.py b/lib/sqlalchemy/dialects/informix/__init__.py
deleted file mode 100644
index a55277c9f..000000000
--- a/lib/sqlalchemy/dialects/informix/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# informix/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.informix import base, informixdb
-
-base.dialect = informixdb.dialect
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
deleted file mode 100644
index e13ea8819..000000000
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ /dev/null
@@ -1,590 +0,0 @@
-# informix/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-# coding: gbk
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-.. dialect:: informix
- :name: Informix
-
-.. note::
-
- The Informix dialect functions on current SQLAlchemy versions
- but is not regularly tested, and may have many issues and
- caveats not currently handled.
-
-"""
-
-
-import datetime
-
-from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler, text
-from sqlalchemy.engine import default, reflection
-from sqlalchemy import types as sqltypes
-from functools import reduce
-
-RESERVED_WORDS = set(
- ["abs", "absolute", "access", "access_method", "acos", "active", "add",
- "address", "add_months", "admin", "after", "aggregate", "alignment",
- "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append",
- "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach",
- "attributes", "audit", "authentication", "authid", "authorization",
- "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode",
- "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash",
- "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl",
- "avoid_star_join", "avoid_subqf", "based", "before", "begin",
- "between", "bigint", "bigserial", "binary", "bitand", "bitandnot",
- "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both",
- "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call",
- "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char",
- "character", "character_length", "char_length", "check", "class",
- "class_origin", "client", "clob", "clobdir", "close", "cluster",
- "clustersize", "cobol", "codeset", "collation", "collection",
- "column", "columns", "commit", "committed", "commutator", "component",
- "components", "concat", "concurrent", "connect", "connection",
- "connection_name", "connect_by_iscycle", "connect_by_isleaf",
- "connect_by_rootconst", "constraint", "constraints", "constructor",
- "context", "continue", "copy", "cos", "costfunc", "count", "crcols",
- "create", "cross", "current", "current_role", "currval", "cursor",
- "cycle", "database", "datafiles", "dataskip", "date", "datetime",
- "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm",
- "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec",
- "decimal", "declare", "decode", "decrypt_binary", "decrypt_char",
- "dec_t", "default", "default_role", "deferred", "deferred_prepare",
- "define", "delay", "delete", "deleting", "delimited", "delimiter",
- "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics",
- "directives", "dirty", "disable", "disabled", "disconnect", "disk",
- "distinct", "distributebinary", "distributesreferences",
- "distributions", "document", "domain", "donotdistribute", "dormant",
- "double", "drop", "dtime_t", "each", "elif", "else", "enabled",
- "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum",
- "environment", "error", "escape", "exception", "exclusive", "exec",
- "execute", "executeanywhere", "exemption", "exists", "exit", "exp",
- "explain", "explicit", "express", "expression", "extdirectives",
- "extend", "extent", "external", "fact", "false", "far", "fetch",
- "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first",
- "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for",
- "force", "forced", "force_ddl_exec", "foreach", "foreign", "format",
- "format_units", "fortran", "found", "fraction", "fragment",
- "fragments", "free", "from", "full", "function", "general", "get",
- "gethint", "global", "go", "goto", "grant", "greaterthan",
- "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr",
- "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray",
- "idslbacreadset", "idslbacreadtree", "idslbacrules",
- "idslbacwritearray", "idslbacwriteset", "idslbacwritetree",
- "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table",
- "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate",
- "implicit", "implicit_pdq", "in", "inactive", "increment", "index",
- "indexes", "index_all", "index_sj", "indicator", "informix", "init",
- "initcap", "inline", "inner", "inout", "insert", "inserting", "instead",
- "int", "int8", "integ", "integer", "internal", "internallength",
- "interval", "into", "intrvl_t", "is", "iscanonical", "isolation",
- "item", "iterator", "java", "join", "keep", "key", "label", "labeleq",
- "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub",
- "labeltostring", "language", "last", "last_day", "leading", "left",
- "length", "lessthan", "lessthanorequal", "let", "level", "like",
- "limit", "list", "listing", "load", "local", "locator", "lock", "locks",
- "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile",
- "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches",
- "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium",
- "memory", "memory_resident", "merge", "message_length", "message_text",
- "middle", "min", "minute", "minvalue", "mod", "mode", "moderate",
- "modify", "module", "money", "month", "months_between", "mounting",
- "multiset", "multi_index", "name", "nchar", "negator", "new", "next",
- "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue",
- "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder",
- "normal", "not", "notemplatearg", "notequal", "null", "nullif",
- "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar",
- "nvl", "octet_length", "of", "off", "old", "on", "online", "only",
- "opaque", "opclass", "open", "optcompind", "optical", "optimization",
- "option", "or", "order", "ordered", "out", "outer", "output",
- "override", "page", "parallelizable", "parameter", "partition",
- "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos",
- "pipe", "pli", "pload", "policy", "pow", "power", "precision",
- "prepare", "previous", "primary", "prior", "private", "privileges",
- "procedure", "properties", "public", "put", "raise", "range", "raw",
- "read", "real", "recordend", "references", "referencing", "register",
- "rejectfile", "relative", "release", "remainder", "rename",
- "reoptimization", "repeatable", "replace", "replication", "reserve",
- "resolution", "resource", "restart", "restrict", "resume", "retain",
- "retainupdatelocks", "return", "returned_sqlstate", "returning",
- "returns", "reuse", "revoke", "right", "robin", "role", "rollback",
- "rollforward", "root", "round", "routine", "row", "rowid", "rowids",
- "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples",
- "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp",
- "seclabel_by_name", "seclabel_to_char", "second", "secondary",
- "section", "secured", "security", "selconst", "select", "selecting",
- "selfunc", "selfuncargs", "sequence", "serial", "serial8",
- "serializable", "serveruuid", "server_name", "session", "set",
- "setsessionauth", "share", "short", "siblings", "signed", "sin",
- "sitename", "size", "skall", "skinhibit", "skip", "skshow",
- "smallfloat", "smallint", "some", "specific", "sql", "sqlcode",
- "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt",
- "stability", "stack", "standard", "start", "star_join", "statchange",
- "statement", "static", "statistics", "statlevel", "status", "stdev",
- "step", "stop", "storage", "store", "strategies", "string",
- "stringtolabel", "struct", "style", "subclass_origin", "substr",
- "substring", "sum", "support", "sync", "synonym", "sysdate",
- "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table",
- "tables", "tan", "task", "temp", "template", "test", "text", "then",
- "time", "timeout", "to", "today", "to_char", "to_date",
- "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing",
- "transaction", "transition", "tree", "trigger", "triggers", "trim",
- "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid",
- "typename", "typeof", "uid", "uncommitted", "under", "union",
- "unique", "units", "unknown", "unload", "unlock", "unsigned",
- "update", "updating", "upon", "upper", "usage", "use",
- "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf",
- "using", "value", "values", "var", "varchar", "variable", "variance",
- "variant", "varying", "vercols", "view", "violations", "void",
- "volatile", "wait", "warning", "weekday", "when", "whenever", "where",
- "while", "with", "without", "work", "write", "writedown", "writeup",
- "xadatasource", "xid", "xload", "xunload", "year"
- ])
-
-
-class InfoDateTime(sqltypes.DateTime):
-
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
-
-class InfoTime(sqltypes.Time):
-
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.time()
- else:
- return value
- return process
-
-colspecs = {
- sqltypes.DateTime: InfoDateTime,
- sqltypes.TIMESTAMP: InfoDateTime,
- sqltypes.Time: InfoTime,
-}
-
-
-ischema_names = {
- 0: sqltypes.CHAR, # CHAR
- 1: sqltypes.SMALLINT, # SMALLINT
- 2: sqltypes.INTEGER, # INT
- 3: sqltypes.FLOAT, # Float
- 3: sqltypes.Float, # SmallFloat
- 5: sqltypes.DECIMAL, # DECIMAL
- 6: sqltypes.Integer, # Serial
- 7: sqltypes.DATE, # DATE
- 8: sqltypes.Numeric, # MONEY
- 10: sqltypes.DATETIME, # DATETIME
- 11: sqltypes.LargeBinary, # BYTE
- 12: sqltypes.TEXT, # TEXT
- 13: sqltypes.VARCHAR, # VARCHAR
- 15: sqltypes.NCHAR, # NCHAR
- 16: sqltypes.NVARCHAR, # NVARCHAR
- 17: sqltypes.Integer, # INT8
- 18: sqltypes.Integer, # Serial8
- 43: sqltypes.String, # LVARCHAR
- -1: sqltypes.BLOB, # BLOB
- -1: sqltypes.CLOB, # CLOB
-}
-
-
-class InfoTypeCompiler(compiler.GenericTypeCompiler):
- def visit_DATETIME(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_TIME(self, type_):
- return "DATETIME HOUR TO SECOND"
-
- def visit_TIMESTAMP(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_large_binary(self, type_):
- return "BYTE"
-
- def visit_boolean(self, type_):
- return "SMALLINT"
-
-
-class InfoSQLCompiler(compiler.SQLCompiler):
-
- def default_from(self):
- return " from systables where tabname = 'systables' "
-
- def get_select_precolumns(self, select):
- s = ""
- if select._offset:
- s += "SKIP %s " % select._offset
- if select._limit:
- s += "FIRST %s " % select._limit
- s += select._distinct and "DISTINCT " or ""
- return s
-
- def visit_select(self, select, asfrom=False, parens=True, **kw):
- text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw)
- if asfrom and parens and self.dialect.server_version_info < (11,):
- #assuming that 11 version doesn't need this, not tested
- return "table(multiset" + text + ")"
- else:
- return text
-
- def limit_clause(self, select):
- return ""
-
- def visit_function(self, func, **kw):
- if func.name.lower() == 'current_date':
- return "today"
- elif func.name.lower() == 'current_time':
- return "CURRENT HOUR TO SECOND"
- elif func.name.lower() in ('current_timestamp', 'now'):
- return "CURRENT YEAR TO SECOND"
- else:
- return compiler.SQLCompiler.visit_function(self, func, **kw)
-
- def visit_mod_binary(self, binary, operator, **kw):
- return "MOD(%s, %s)" % (self.process(binary.left, **kw),
- self.process(binary.right, **kw))
-
-
-class InfoDDLCompiler(compiler.DDLCompiler):
-
- def visit_add_constraint(self, create):
- preparer = self.preparer
- return "ALTER TABLE %s ADD CONSTRAINT %s" % (
- self.preparer.format_table(create.element.table),
- self.process(create.element)
- )
-
- def get_column_specification(self, column, **kw):
- colspec = self.preparer.format_column(column)
- first = None
- if column.primary_key and column.autoincrement:
- try:
- first = [c for c in column.table.primary_key.columns
- if (c.autoincrement and
- isinstance(c.type, sqltypes.Integer) and
- not c.foreign_keys)].pop(0)
- except IndexError:
- pass
-
- if column is first:
- colspec += " SERIAL"
- else:
- colspec += " " + self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
-
- return colspec
-
- def get_column_default_string(self, column):
- if (isinstance(column.server_default, schema.DefaultClause) and
- isinstance(column.server_default.arg, util.string_types)):
- if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
- return self.sql_compiler.process(text(column.server_default.arg))
-
- return super(InfoDDLCompiler, self).get_column_default_string(column)
-
- ### Informix wants the constraint name at the end, hence this ist c&p from sql/compiler.py
- def visit_primary_key_constraint(self, constraint):
- if len(constraint) == 0:
- return ''
- text = "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
- for c in constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
- return text
-
- def visit_foreign_key_constraint(self, constraint):
- preparer = self.dialect.identifier_preparer
- remote_table = list(constraint._elements.values())[0].column.table
- text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
- for f in constraint._elements.values()),
- preparer.format_table(remote_table),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
- for f in constraint._elements.values())
- )
- text += self.define_constraint_cascades(constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s " % \
- preparer.format_constraint(constraint)
- return text
-
- def visit_unique_constraint(self, constraint):
- text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
- return text
-
-
-class InformixIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = RESERVED_WORDS
-
-
-class InformixDialect(default.DefaultDialect):
- name = 'informix'
-
- max_identifier_length = 128 # adjusts at runtime based on server version
-
- type_compiler = InfoTypeCompiler
- statement_compiler = InfoSQLCompiler
- ddl_compiler = InfoDDLCompiler
- colspecs = colspecs
- ischema_names = ischema_names
- preparer = InformixIdentifierPreparer
- default_paramstyle = 'qmark'
-
- def initialize(self, connection):
- super(InformixDialect, self).initialize(connection)
-
- # http://www.querix.com/support/knowledge-base/error_number_message/error_200
- if self.server_version_info < (9, 2):
- self.max_identifier_length = 18
- else:
- self.max_identifier_length = 128
-
- def _get_table_names(self, connection, schema, type, **kw):
- schema = schema or self.default_schema_name
- s = "select tabname, owner from systables where owner=? and tabtype=?"
- return [row[0] for row in connection.execute(s, schema, type)]
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'T', **kw)
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'V', **kw)
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = "select owner from systables"
- return [row[0] for row in connection.execute(s)]
-
- def has_table(self, connection, table_name, schema=None):
- schema = schema or self.default_schema_name
- cursor = connection.execute(
- """select tabname from systables where tabname=? and owner=?""",
- table_name, schema)
- return cursor.first() is not None
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select colname, coltype, collength, t3.default, t1.colno from
- syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
- and t3.tabid = t2.tabid and t3.colno = t1.colno
- order by t1.colno""", table_name, schema)
-
- pk_constraint = self.get_pk_constraint(connection, table_name, schema, **kw)
- primary_cols = pk_constraint['constrained_columns']
-
- columns = []
- rows = c.fetchall()
- for name, colattr, collength, default, colno in rows:
- name = name.lower()
-
- autoincrement = False
- primary_key = False
-
- if name in primary_cols:
- primary_key = True
-
- # in 7.31, coltype = 0x000
- # ^^-- column type
- # ^-- 1 not null, 0 null
- not_nullable, coltype = divmod(colattr, 256)
- if coltype not in (0, 13) and default:
- default = default.split()[-1]
-
- if coltype == 6: # Serial, mark as autoincrement
- autoincrement = True
-
- if coltype == 0 or coltype == 13: # char, varchar
- coltype = ischema_names[coltype](collength)
- if default:
- default = "'%s'" % default
- elif coltype == 5: # decimal
- precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
- if scale == 255:
- scale = 0
- coltype = sqltypes.Numeric(precision, scale)
- else:
- try:
- coltype = ischema_names[coltype]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (coltype, name))
- coltype = sqltypes.NULLTYPE
-
- column_info = dict(name=name, type=coltype, nullable=not not_nullable,
- default=default, autoincrement=autoincrement,
- primary_key=primary_key)
- columns.append(column_info)
- return columns
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- schema_sel = schema or self.default_schema_name
- c = connection.execute(
- """select t1.constrname as cons_name,
- t4.colname as local_column, t7.tabname as remote_table,
- t6.colname as remote_column, t7.owner as remote_owner
- from sysconstraints as t1 , systables as t2 ,
- sysindexes as t3 , syscolumns as t4 ,
- sysreferences as t5 , syscolumns as t6 , systables as t7 ,
- sysconstraints as t8 , sysindexes as t9
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
- and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
- t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
- t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
- and t5.constrid = t1.constrid and t8.constrid = t5.primary
- and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
- t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
- t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname =
- t8.idxname
- and t7.tabid = t5.ptabid""", table_name, schema_sel)
-
- def fkey_rec():
- return {
- 'name': None,
- 'constrained_columns': [],
- 'referred_schema': None,
- 'referred_table': None,
- 'referred_columns': []
- }
-
- fkeys = util.defaultdict(fkey_rec)
-
- rows = c.fetchall()
- for cons_name, local_column, \
- remote_table, remote_column, remote_owner in rows:
-
- rec = fkeys[cons_name]
- rec['name'] = cons_name
- local_cols, remote_cols = \
- rec['constrained_columns'], rec['referred_columns']
-
- if not rec['referred_table']:
- rec['referred_table'] = remote_table
- if schema is not None:
- rec['referred_schema'] = remote_owner
-
- if local_column not in local_cols:
- local_cols.append(local_column)
- if remote_column not in remote_cols:
- remote_cols.append(remote_column)
-
- return list(fkeys.values())
-
- @reflection.cache
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
-
- # Select the column positions from sysindexes for sysconstraints
- data = connection.execute(
- """select t2.*
- from systables as t1, sysindexes as t2, sysconstraints as t3
- where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
- and t2.idxname=t3.idxname and t3.constrtype='P'""",
- table_name, schema
- ).fetchall()
-
- colpositions = set()
-
- for row in data:
- colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)])
- colpositions |= colpos
-
- if not len(colpositions):
- return {'constrained_columns': [], 'name': None}
-
- # Select the column names using the columnpositions
- # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
- place_holder = ','.join('?' * len(colpositions))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colpositions
- ).fetchall()
-
- cols = reduce(lambda x, y: list(x) + list(y), c, [])
- return {'constrained_columns': cols, 'name': None}
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema, **kw):
- # TODO: schema...
- c = connection.execute(
- """select t1.*
- from sysindexes as t1 , systables as t2
- where t1.tabid = t2.tabid and t2.tabname=?""",
- table_name)
-
- indexes = []
- for row in c.fetchall():
- colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)]
- colnames = [x for x in colnames if x]
- place_holder = ','.join('?' * len(colnames))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colnames
- ).fetchall()
- c = reduce(lambda x, y: list(x) + list(y), c, [])
- indexes.append({
- 'name': row.idxname,
- 'unique': row.idxtype.lower() == 'u',
- 'column_names': c
- })
- return indexes
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select t1.viewtext
- from sysviews as t1 , systables as t2
- where t1.tabid=t2.tabid and t2.tabname=?
- and t2.owner=? order by seqno""",
- view_name, schema).fetchall()
-
- return ''.join([row[0] for row in c])
-
- def _get_default_schema_name(self, connection):
- return connection.execute('select CURRENT_ROLE from systables').scalar()
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
deleted file mode 100644
index f2f0d3e80..000000000
--- a/lib/sqlalchemy/dialects/informix/informixdb.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# informix/informixdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-
-.. dialect:: informix+informixdb
- :name: informixdb
- :dbapi: informixdb
- :connectstring: informix+informixdb://user:password@host/dbname
- :url: http://informixdb.sourceforge.net/
-
-"""
-
-import re
-
-from sqlalchemy.dialects.informix.base import InformixDialect
-from sqlalchemy.engine import default
-
-VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
-
-
-class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
-
- def post_exec(self):
- if self.isinsert:
- self._lastrowid = self.cursor.sqlerrd[1]
-
- def get_lastrowid(self):
- return self._lastrowid
-
-
-class InformixDialect_informixdb(InformixDialect):
- driver = 'informixdb'
- execution_ctx_cls = InformixExecutionContext_informixdb
-
- @classmethod
- def dbapi(cls):
- return __import__('informixdb')
-
- def create_connect_args(self, url):
- if url.host:
- dsn = '%s@%s' % (url.database, url.host)
- else:
- dsn = url.database
-
- if url.username:
- opt = {'user': url.username, 'password': url.password}
- else:
- opt = {}
-
- return ([dsn], opt)
-
- def _get_server_version_info(self, connection):
- # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
- v = VERSION_RE.split(connection.connection.dbms_version)
- return (int(v[1]), int(v[2]), v[3])
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.OperationalError):
- return 'closed the connection' in str(e) \
- or 'connection not open' in str(e)
- else:
- return False
-
-
-dialect = InformixDialect_informixdb
diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py
index 0b81d6df9..7a2dfa60b 100644
--- a/lib/sqlalchemy/dialects/mssql/__init__.py
+++ b/lib/sqlalchemy/dialects/mssql/__init__.py
@@ -1,5 +1,5 @@
# mssql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index 167b4e807..95cf42423 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -1,5 +1,5 @@
# mssql/adodbapi.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 7621f4aab..522cb5ce3 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1,5 +1,5 @@
# mssql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -103,22 +103,49 @@ for these types will be issued as DATETIME.
.. _mssql_indexes:
-MSSQL-Specific Index Options
------------------------------
-
-The MSSQL dialect supports special options for :class:`.Index`.
+Clustered Index Support
+-----------------------
-CLUSTERED
-^^^^^^^^^^
+The MSSQL dialect supports clustered indexes (and primary keys) via the
+``mssql_clustered`` option. This option is available to :class:`.Index`,
+:class:`.UniqueConstraint`, and :class:`.PrimaryKeyConstraint`.
-The ``mssql_clustered`` option adds the CLUSTERED keyword to the index::
+To generate a clustered index::
Index("my_index", table.c.x, mssql_clustered=True)
-would render the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``
+which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``.
.. versionadded:: 0.8
+To generate a clustered primary key, use::
+
+ Table('my_table', metadata,
+ Column('x', ...),
+ Column('y', ...),
+ PrimaryKeyConstraint("x", "y", mssql_clustered=True))
+
+which will render the table, for example, as::
+
+ CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY CLUSTERED (x, y))
+
+Similarly, we can generate a clustered unique constraint using::
+
+ Table('my_table', metadata,
+ Column('x', ...),
+ Column('y', ...),
+ PrimaryKeyConstraint("x"),
+ UniqueConstraint("y", mssql_clustered=True),
+ )
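+
+which we would expect to render, for example, as (a sketch, not verified
+against an actual server)::
+
+    CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NULL, PRIMARY KEY (x), UNIQUE CLUSTERED (y))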
+
+ .. versionadded:: 0.9.2
+
+MSSQL-Specific Index Options
+-----------------------------
+
+In addition to clustering, the MSSQL dialect supports other special options
+for :class:`.Index`.
+
INCLUDE
^^^^^^^
@@ -991,7 +1018,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
text += "UNIQUE "
# handle clustering option
- if index.kwargs.get("mssql_clustered"):
+ if index.dialect_options['mssql']['clustered']:
text += "CLUSTERED "
text += "INDEX %s ON %s (%s)" \
@@ -1001,29 +1028,61 @@ class MSDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table),
', '.join(
self.sql_compiler.process(expr,
- include_table=False) for
+ include_table=False, literal_binds=True) for
expr in index.expressions)
)
# handle other included columns
- if index.kwargs.get("mssql_include"):
+ if index.dialect_options['mssql']['include']:
inclusions = [index.table.c[col]
if isinstance(col, util.string_types) else col
- for col in index.kwargs["mssql_include"]]
+ for col in index.dialect_options['mssql']['include']]
text += " INCLUDE (%s)" \
- % ', '.join([preparer.quote(c.name, c.quote)
+ % ', '.join([preparer.quote(c.name)
for c in inclusions])
return text
def visit_drop_index(self, drop):
- return "\nDROP INDEX %s.%s" % (
- self.preparer.quote_identifier(drop.element.table.name),
- self._prepared_index_name(drop.element,
- include_schema=True)
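+        # renders SQL Server's newer "DROP INDEX <index> ON <table>" form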
+ return "\nDROP INDEX %s ON %s" % (
+ self._prepared_index_name(drop.element, include_schema=False),
+ self.preparer.format_table(drop.element.table)
)
+ def visit_primary_key_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = ""
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % \
+ self.preparer.format_constraint(constraint)
+ text += "PRIMARY KEY "
+
+ if constraint.dialect_options['mssql']['clustered']:
+ text += "CLUSTERED "
+
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+ return text
+
+ def visit_unique_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = ""
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % \
+ self.preparer.format_constraint(constraint)
+ text += "UNIQUE "
+
+ if constraint.dialect_options['mssql']['clustered']:
+ text += "CLUSTERED "
+
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+ return text
class MSIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = RESERVED_WORDS
@@ -1035,7 +1094,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer):
def _escape_identifier(self, value):
return value
- def quote_schema(self, schema, force=True):
+ def quote_schema(self, schema, force=None):
"""Prepare a quoted table and schema name."""
result = '.'.join([self.quote(x, force) for x in schema.split('.')])
return result
@@ -1105,6 +1164,19 @@ class MSDialect(default.DefaultDialect):
type_compiler = MSTypeCompiler
preparer = MSIdentifierPreparer
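+    # dialect-specific keyword arguments accepted by these constructs,
+    # with their default values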
+ construct_arguments = [
+ (sa_schema.PrimaryKeyConstraint, {
+ "clustered": False
+ }),
+ (sa_schema.UniqueConstraint, {
+ "clustered": False
+ }),
+ (sa_schema.Index, {
+ "clustered": False,
+ "include": None
+ })
+ ]
+
def __init__(self,
query_timeout=None,
use_scope_identity=True,
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 7ac1b703e..26e70f7f0 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,5 +1,5 @@
# mssql/information_schema.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index ddf31100c..5b686c47a 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -1,5 +1,5 @@
# mssql/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b916612fb..021219cb9 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -1,5 +1,5 @@
# mssql/pymssql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -86,6 +86,7 @@ class MSDialect_pymssql(MSDialect):
def is_disconnect(self, e, connection, cursor):
for msg in (
"Adaptive Server connection timed out",
+ "Net-Lib error during Connection reset by peer",
"message 20003", # connection timeout
"Error 10054",
"Not connected to any MS SQL server",
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 5a359d179..8c43eb8a1 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -1,5 +1,5 @@
# mssql/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -116,8 +116,8 @@ from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal
+class _ms_numeric_pyodbc(object):
-class _MSNumeric_pyodbc(sqltypes.Numeric):
"""Turns Decimals with adjusted() < 0 or > 7 into strings.
The routines here are needed for older pyodbc versions
@@ -127,7 +127,7 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
def bind_processor(self, dialect):
- super_process = super(_MSNumeric_pyodbc, self).\
+ super_process = super(_ms_numeric_pyodbc, self).\
bind_processor(dialect)
if not dialect._need_decimal_fix:
@@ -180,6 +180,11 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
[str(s) for s in _int][0:value.adjusted() + 1]))
return result
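+# concrete types: the shared mixin supplies the Decimal-to-string bind processing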
+class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
+ pass
+
+class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
+ pass
class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False
@@ -238,7 +243,8 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
colspecs = util.update_copy(
MSDialect.colspecs,
{
- sqltypes.Numeric: _MSNumeric_pyodbc
+ sqltypes.Numeric: _MSNumeric_pyodbc,
+ sqltypes.Float: _MSFloat_pyodbc
}
)
diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
index 9bf31e20c..706eef3a4 100644
--- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
@@ -1,5 +1,5 @@
# mssql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index 2bb636ff3..4eb8cc6d2 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -1,5 +1,5 @@
# mysql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index d5e33c802..e45f6ecd8 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -1,5 +1,5 @@
# mysql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -20,6 +20,8 @@ example, they won't work in SQLAlchemy either.
See the official MySQL documentation for detailed information about features
supported in any given server release.
+.. _mysql_connection_timeouts:
+
Connection Timeouts
-------------------
@@ -263,6 +265,41 @@ http://dev.mysql.com/doc/refman/5.0/en/create-index.html
http://dev.mysql.com/doc/refman/5.0/en/create-table.html
+.. _mysql_foreign_keys:
+
+MySQL Foreign Key Options
+-------------------------
+
+MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY",
+or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with
+:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of these keywords being
+rendered in a DDL expression, which will then raise an error on MySQL.
+In order to use these keywords on a foreign key while having them ignored
+on a MySQL backend, use a custom compile rule::
+
+ from sqlalchemy.ext.compiler import compiles
+ from sqlalchemy.schema import ForeignKeyConstraint
+
+ @compiles(ForeignKeyConstraint, "mysql")
+ def process(element, compiler, **kw):
+ element.deferrable = element.initially = None
+ return compiler.visit_foreign_key_constraint(element, **kw)
+
+.. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores
+ the ``deferrable`` or ``initially`` keyword arguments of :class:`.ForeignKeyConstraint`
+ and :class:`.ForeignKey`.
+
+The "MATCH" keyword is in fact more insidious, and is explicitly disallowed
+by SQLAlchemy in conjunction with the MySQL backend. This argument is silently
+ignored by MySQL, but in addition has the effect of ON UPDATE and ON DELETE options
+also being ignored by the backend. Therefore MATCH should never be used with the
+MySQL backend; as is the case with DEFERRABLE and INITIALLY, custom compilation
+rules can be used to correct a MySQL ForeignKeyConstraint at DDL definition time.
+
+.. versionadded:: 0.9.0 - the MySQL backend will raise a :class:`.CompileError`
+ when the ``match`` keyword is used with :class:`.ForeignKeyConstraint`
+ or :class:`.ForeignKey`.
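+
+As a sketch, mirroring the rule shown above (``element.match`` is the flag
+consulted by the dialect's MATCH check)::
+
+    from sqlalchemy.ext.compiler import compiles
+    from sqlalchemy.schema import ForeignKeyConstraint
+
+    @compiles(ForeignKeyConstraint, "mysql")
+    def process(element, compiler, **kw):
+        # clear all three options MySQL cannot honor before rendering DDL
+        element.deferrable = element.initially = element.match = None
+        return compiler.visit_foreign_key_constraint(element, **kw)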
+
"""
import datetime
@@ -318,10 +355,20 @@ RESERVED_WORDS = set(
'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use',
'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary',
'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with',
+
'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0
+
'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1
+
'accessible', 'linear', 'master_ssl_verify_server_cert', 'range',
'read_only', 'read_write', # 5.1
+
+ 'general', 'ignore_server_ids', 'master_heartbeat_period', 'maxvalue',
+ 'resignal', 'signal', 'slow', # 5.5
+
+ 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot',
+ 'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6
+
])
AUTOCOMMIT_RE = re.compile(
@@ -333,13 +380,21 @@ SET_RE = re.compile(
class _NumericType(object):
- """Base for MySQL numeric types."""
+ """Base for MySQL numeric types.
+
+ This is the base both for NUMERIC as well as INTEGER, hence
+ it's a mixin.
+
+ """
def __init__(self, unsigned=False, zerofill=False, **kw):
self.unsigned = unsigned
self.zerofill = zerofill
super(_NumericType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_NumericType, sqltypes.Numeric])
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
@@ -351,22 +406,27 @@ class _FloatType(_NumericType, sqltypes.Float):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether.")
-
super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_FloatType, _NumericType, sqltypes.Float])
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
self.display_width = display_width
super(_IntegerType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_IntegerType, _NumericType, sqltypes.Integer])
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
def __init__(self, charset=None, collation=None,
- ascii=False, binary=False,
+ ascii=False, binary=False, unicode=False,
national=False, **kw):
self.charset = charset
@@ -374,16 +434,14 @@ class _StringType(sqltypes.String):
kw.setdefault('collation', kw.pop('collate', collation))
self.ascii = ascii
- # We have to munge the 'unicode' param strictly as a dict
- # otherwise 2to3 will turn it into str.
- self.__dict__['unicode'] = kw.get('unicode', False)
- # sqltypes.String does not accept the 'unicode' arg at all.
- if 'unicode' in kw:
- del kw['unicode']
+ self.unicode = unicode
self.binary = binary
self.national = national
super(_StringType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_StringType, sqltypes.String])
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
@@ -443,6 +501,14 @@ class DOUBLE(_FloatType):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DOUBLE.
+ .. note::
+
+ The :class:`.DOUBLE` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits. Specify
+ either ``scale=n`` or ``decimal_return_scale=n`` in order to change
+ this scale, or ``asdecimal=False`` to return values directly as
+ Python floating points.
+
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -468,6 +534,14 @@ class REAL(_FloatType, sqltypes.REAL):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a REAL.
+ .. note::
+
+ The :class:`.REAL` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits. Specify
+ either ``scale=n`` or ``decimal_return_scale=n`` in order to change
+ this scale, or ``asdecimal=False`` to return values directly as
+ Python floating points.
+
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -902,6 +976,25 @@ class CHAR(_StringType, sqltypes.CHAR):
"""
super(CHAR, self).__init__(length=length, **kwargs)
+ @classmethod
+ def _adapt_string_for_cast(self, type_):
+ # copy the given string type into a CHAR
+ # for the purposes of rendering a CAST expression
+ type_ = sqltypes.to_instance(type_)
+ if isinstance(type_, sqltypes.CHAR):
+ return type_
+ elif isinstance(type_, _StringType):
+ return CHAR(
+ length=type_.length,
+ charset=type_.charset,
+ collation=type_.collation,
+ ascii=type_.ascii,
+ binary=type_.binary,
+ unicode=type_.unicode,
+ national=False # not supported in CAST
+ )
+ else:
+ return CHAR(length=type_.length)
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
@@ -972,8 +1065,49 @@ class LONGBLOB(sqltypes._Binary):
__visit_name__ = 'LONGBLOB'
+class _EnumeratedValues(_StringType):
+ def _init_values(self, values, kw):
+ self.quoting = kw.pop('quoting', 'auto')
-class ENUM(sqltypes.Enum, _StringType):
+ if self.quoting == 'auto' and len(values):
+ # What quoting character are we using?
+ q = None
+ for e in values:
+ if len(e) == 0:
+ self.quoting = 'unquoted'
+ break
+ elif q is None:
+ q = e[0]
+
+ if len(e) == 1 or e[0] != q or e[-1] != q:
+ self.quoting = 'unquoted'
+ break
+ else:
+ self.quoting = 'quoted'
+
+ if self.quoting == 'quoted':
+ util.warn_deprecated(
+ 'Manually quoting %s value literals is deprecated. Supply '
+ 'unquoted values and use the quoting= option in cases of '
+ 'ambiguity.' % self.__class__.__name__)
+
+ values = self._strip_values(values)
+
+ self._enumerated_values = values
+ length = max([len(v) for v in values] + [0])
+ return values, length
+
+ @classmethod
+ def _strip_values(cls, values):
+ strip_values = []
+ for a in values:
+ if a[0:1] == '"' or a[0:1] == "'":
+ # strip enclosing quotes and unquote interior
+ a = a[1:-1].replace(a[0] * 2, a[0])
+ strip_values.append(a)
+ return strip_values
+
+class ENUM(sqltypes.Enum, _EnumeratedValues):
"""MySQL ENUM type."""
__visit_name__ = 'ENUM'
@@ -981,9 +1115,9 @@ class ENUM(sqltypes.Enum, _StringType):
def __init__(self, *enums, **kw):
"""Construct an ENUM.
- Example:
+ E.g.::
- Column('myenum', MSEnum("foo", "bar", "baz"))
+ Column('myenum', ENUM("foo", "bar", "baz"))
:param enums: The range of valid values for this ENUM. Values will be
quoted when generating the schema according to the quoting flag (see
@@ -1027,33 +1161,8 @@ class ENUM(sqltypes.Enum, _StringType):
literals for you. This is a transitional option.
"""
- self.quoting = kw.pop('quoting', 'auto')
-
- if self.quoting == 'auto' and len(enums):
- # What quoting character are we using?
- q = None
- for e in enums:
- if len(e) == 0:
- self.quoting = 'unquoted'
- break
- elif q is None:
- q = e[0]
-
- if e[0] != q or e[-1] != q:
- self.quoting = 'unquoted'
- break
- else:
- self.quoting = 'quoted'
-
- if self.quoting == 'quoted':
- util.warn_deprecated(
- 'Manually quoting ENUM value literals is deprecated. Supply '
- 'unquoted values and use the quoting= option in cases of '
- 'ambiguity.')
- enums = self._strip_enums(enums)
-
+ values, length = self._init_values(enums, kw)
self.strict = kw.pop('strict', False)
- length = max([len(v) for v in enums] + [0])
kw.pop('metadata', None)
kw.pop('schema', None)
kw.pop('name', None)
@@ -1061,17 +1170,11 @@ class ENUM(sqltypes.Enum, _StringType):
kw.pop('native_enum', None)
kw.pop('inherit_schema', None)
_StringType.__init__(self, length=length, **kw)
- sqltypes.Enum.__init__(self, *enums)
+ sqltypes.Enum.__init__(self, *values)
- @classmethod
- def _strip_enums(cls, enums):
- strip_enums = []
- for a in enums:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_enums.append(a)
- return strip_enums
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[ENUM, _StringType, sqltypes.Enum])
def bind_processor(self, dialect):
super_convert = super(ENUM, self).bind_processor(dialect)
@@ -1091,7 +1194,7 @@ class ENUM(sqltypes.Enum, _StringType):
return sqltypes.Enum.adapt(self, impltype, **kw)
-class SET(_StringType):
+class SET(_EnumeratedValues):
"""MySQL SET type."""
__visit_name__ = 'SET'
@@ -1099,15 +1202,16 @@ class SET(_StringType):
def __init__(self, *values, **kw):
"""Construct a SET.
- Example::
+ E.g.::
- Column('myset', MSSet("'foo'", "'bar'", "'baz'"))
+ Column('myset', SET("foo", "bar", "baz"))
:param values: The range of valid values for this SET. Values will be
- used exactly as they appear when generating schemas. Strings must
- be quoted, as in the example above. Single-quotes are suggested for
- ANSI compatibility and are required for portability to servers with
- ANSI_QUOTES enabled.
+ quoted when generating the schema according to the quoting flag (see
+ below).
+
+ .. versionchanged:: 0.9.0 quoting is applied automatically to
+ :class:`.mysql.SET` in the same way as for :class:`.mysql.ENUM`.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
@@ -1126,18 +1230,27 @@ class SET(_StringType):
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
- """
- self._ddl_values = values
+ :param quoting: Defaults to 'auto': automatically determine enum value
+ quoting. If all enum values are surrounded by the same quoting
+ character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
- strip_values = []
- for a in values:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_values.append(a)
+ 'quoted': values in enums are already quoted, they will be used
+ directly when generating the schema - this usage is deprecated.
+
+ 'unquoted': values in enums are not quoted, they will be escaped and
+ surrounded by single quotes when generating the schema.
+
+ Previous versions of this type always required manually quoted
+ values to be supplied; future versions will always quote the string
+ literals for you. This is a transitional option.
+
+ .. versionadded:: 0.9.0
+
+ """
+ values, length = self._init_values(values, kw)
+ self.values = tuple(values)
- self.values = strip_values
- kw.setdefault('length', max([len(v) for v in strip_values] + [0]))
+ kw.setdefault('length', length)
super(SET, self).__init__(**kw)
def result_processor(self, dialect, coltype):
@@ -1209,6 +1322,9 @@ MSFloat = FLOAT
MSInteger = INTEGER
colspecs = {
+ _IntegerType: _IntegerType,
+ _NumericType: _NumericType,
+ _FloatType: _FloatType,
sqltypes.Numeric: NUMERIC,
sqltypes.Float: FLOAT,
sqltypes.Time: TIME,
@@ -1300,14 +1416,9 @@ class MySQLCompiler(compiler.SQLCompiler):
elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime,
sqltypes.Date, sqltypes.Time)):
return self.dialect.type_compiler.process(type_)
- elif isinstance(type_, sqltypes.Text):
- return 'CHAR'
- elif (isinstance(type_, sqltypes.String) and not
- isinstance(type_, (ENUM, SET))):
- if getattr(type_, 'length'):
- return 'CHAR(%s)' % type_.length
- else:
- return 'CHAR'
+ elif isinstance(type_, sqltypes.String) and not isinstance(type_, (ENUM, SET)):
+ adapted = CHAR._adapt_string_for_cast(type_)
+ return self.dialect.type_compiler.process(adapted)
elif isinstance(type_, sqltypes._Binary):
return 'BINARY'
elif isinstance(type_, sqltypes.NUMERIC):
@@ -1359,10 +1470,10 @@ class MySQLCompiler(compiler.SQLCompiler):
self.process(join.onclause, **kwargs)))
def for_update_clause(self, select):
- if select.for_update == 'read':
- return ' LOCK IN SHARE MODE'
+ if select._for_update_arg.read:
+ return " LOCK IN SHARE MODE"
else:
- return super(MySQLCompiler, self).for_update_clause(select)
+ return " FOR UPDATE"
def limit_clause(self, select):
# MySQL supports:
@@ -1426,9 +1537,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string = super(
MySQLDDLCompiler, self).create_table_constraints(table)
- engine_key = '%s_engine' % self.dialect.name
- is_innodb = engine_key in table.kwargs and \
- table.kwargs[engine_key].lower() == 'innodb'
+ # why self.dialect.name and not 'mysql'? because of drizzle
+ is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \
+ table.dialect_options[self.dialect.name]['engine'].lower() == 'innodb'
auto_inc_column = table._autoincrement_column
@@ -1439,7 +1550,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string += ", \n\t"
constraint_string += "KEY %s (%s)" % (
self.preparer.quote(
- "idx_autoinc_%s" % auto_inc_column.name, None
+ "idx_autoinc_%s" % auto_inc_column.name
),
self.preparer.format_column(auto_inc_column)
)
@@ -1511,7 +1622,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
self._verify_index_table(index)
preparer = self.preparer
table = preparer.format_table(index.table)
- columns = [self.sql_compiler.process(expr, include_table=False)
+ columns = [self.sql_compiler.process(expr, include_table=False,
+ literal_binds=True)
for expr in index.expressions]
name = self._prepared_index_name(index)
@@ -1521,8 +1633,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
text += "UNIQUE "
text += "INDEX %s ON %s " % (name, table)
- if 'mysql_length' in index.kwargs:
- length = index.kwargs['mysql_length']
+ length = index.dialect_options['mysql']['length']
+ if length is not None:
if isinstance(length, dict):
# length value can be a (column_name --> integer value) mapping
@@ -1543,19 +1655,18 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
columns = ', '.join(columns)
text += '(%s)' % columns
- if 'mysql_using' in index.kwargs:
- using = index.kwargs['mysql_using']
- text += " USING %s" % (preparer.quote(using, index.quote))
+ using = index.dialect_options['mysql']['using']
+ if using is not None:
+ text += " USING %s" % (preparer.quote(using))
return text
def visit_primary_key_constraint(self, constraint):
text = super(MySQLDDLCompiler, self).\
visit_primary_key_constraint(constraint)
- if "mysql_using" in constraint.kwargs:
- using = constraint.kwargs['mysql_using']
- text += " USING %s" % (
- self.preparer.quote(using, constraint.quote))
+ using = constraint.dialect_options['mysql']['using']
+ if using:
+ text += " USING %s" % (self.preparer.quote(using))
return text
def visit_drop_index(self, drop):
@@ -1584,7 +1695,11 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
(self.preparer.format_table(constraint.table),
qual, const)
- def define_constraint_deferrability(self, constraint):
+ def define_constraint_match(self, constraint):
+ if constraint.match is not None:
+ raise exc.CompileError(
+ "MySQL ignores the 'MATCH' keyword while at the same time "
+ "causes ON UPDATE/ON DELETE clauses to be ignored.")
return ""
class MySQLTypeCompiler(compiler.GenericTypeCompiler):
@@ -1818,7 +1933,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
if not type_.native_enum:
return super(MySQLTypeCompiler, self).visit_enum(type_)
else:
- return self.visit_ENUM(type_)
+ return self._visit_enumerated_values("ENUM", type_, type_.enums)
def visit_BLOB(self, type_):
if type_.length:
@@ -1835,16 +1950,21 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_LONGBLOB(self, type_):
return "LONGBLOB"
- def visit_ENUM(self, type_):
+ def _visit_enumerated_values(self, name, type_, enumerated_values):
quoted_enums = []
- for e in type_.enums:
+ for e in enumerated_values:
quoted_enums.append("'%s'" % e.replace("'", "''"))
- return self._extend_string(type_, {}, "ENUM(%s)" %
- ",".join(quoted_enums))
+ return self._extend_string(type_, {}, "%s(%s)" % (
+ name, ",".join(quoted_enums))
+ )
+
+ def visit_ENUM(self, type_):
+ return self._visit_enumerated_values("ENUM", type_,
+ type_._enumerated_values)
def visit_SET(self, type_):
- return self._extend_string(type_, {}, "SET(%s)" %
- ",".join(type_._ddl_values))
+ return self._visit_enumerated_values("SET", type_,
+ type_._enumerated_values)
def visit_BOOLEAN(self, type):
return "BOOL"
@@ -1871,6 +1991,7 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
return tuple([self.quote_identifier(i) for i in ids if i is not None])
+@log.class_logger
class MySQLDialect(default.DefaultDialect):
"""Details of the MySQL dialect. Not used directly in application code."""
@@ -1902,6 +2023,22 @@ class MySQLDialect(default.DefaultDialect):
_backslash_escapes = True
_server_ansiquotes = False
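+    # accepted dialect-specific arguments; the Table entry ("*": None)
+    # accepts any "mysql_*" keyword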
+ construct_arguments = [
+ (sa_schema.Table, {
+ "*": None
+ }),
+ (sql.Update, {
+ "limit": None
+ }),
+ (sa_schema.PrimaryKeyConstraint, {
+ "using": None
+ }),
+ (sa_schema.Index, {
+ "using": None,
+ "length": None,
+ })
+ ]
+
def __init__(self, isolation_level=None, **kwargs):
kwargs.pop('use_ansiquotes', None) # legacy
default.DefaultDialect.__init__(self, **kwargs)
@@ -2058,7 +2195,6 @@ class MySQLDialect(default.DefaultDialect):
rs.close()
def initialize(self, connection):
- default.DefaultDialect.initialize(self, connection)
self._connection_charset = self._detect_charset(connection)
self._detect_ansiquotes(connection)
if self._server_ansiquotes:
@@ -2067,6 +2203,8 @@ class MySQLDialect(default.DefaultDialect):
self.identifier_preparer = self.preparer(self,
server_ansiquotes=self._server_ansiquotes)
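+        # run the base initialize only after the ANSI_QUOTES-aware
+        # identifier preparer is in place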
+ default.DefaultDialect.initialize(self, connection)
+
@property
def _supports_cast(self):
return self.server_version_info is None or \
@@ -2163,7 +2301,7 @@ class MySQLDialect(default.DefaultDialect):
ref_names = spec['foreign']
con_kw = {}
- for opt in ('name', 'onupdate', 'ondelete'):
+ for opt in ('onupdate', 'ondelete'):
if spec.get(opt, False):
con_kw[opt] = spec[opt]
@@ -2336,6 +2474,7 @@ class MySQLDialect(default.DefaultDialect):
# as of MySQL 5.0.1
self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode
+
def _show_create_table(self, connection, table, charset=None,
full_name=None):
"""Run SHOW CREATE TABLE for a ``Table``."""
@@ -2394,6 +2533,7 @@ class ReflectedState(object):
self.constraints = []
+@log.class_logger
class MySQLTableDefinitionParser(object):
"""Parses the results of a SHOW CREATE TABLE statement."""
@@ -2558,8 +2698,8 @@ class MySQLTableDefinitionParser(object):
if spec.get(kw, False):
type_kw[kw] = spec[kw]
- if type_ == 'enum':
- type_args = ENUM._strip_enums(type_args)
+ if issubclass(col_type, _EnumeratedValues):
+ type_args = _EnumeratedValues._strip_values(type_args)
type_instance = col_type(*type_args, **type_kw)
@@ -2733,7 +2873,7 @@ class MySQLTableDefinitionParser(object):
#
# unique constraints come back as KEYs
kw = quotes.copy()
- kw['on'] = 'RESTRICT|CASCASDE|SET NULL|NOACTION'
+ kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
self._re_constraint = _re_compile(
r' '
r'CONSTRAINT +'
@@ -2796,8 +2936,6 @@ class MySQLTableDefinitionParser(object):
_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
'PASSWORD', 'CONNECTION')
-log.class_logger(MySQLTableDefinitionParser)
-log.class_logger(MySQLDialect)
class _DecodingRowProxy(object):
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index deb2de449..e81a79b85 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -1,5 +1,5 @@
# mysql/cymysql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -37,7 +37,9 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = 'cymysql'
description_encoding = None
- supports_sane_rowcount = False
+ supports_sane_rowcount = True
+ supports_sane_multi_rowcount = False
+ supports_unicode_statements = True
colspecs = util.update_copy(
MySQLDialect.colspecs,
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index c479e25e0..13203fce3 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -1,5 +1,5 @@
# mysql/gaerdbms.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index b1906d3b9..b6e7c75fb 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -1,5 +1,5 @@
# mysql/mysqlconnector.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -79,12 +79,13 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
+
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
- opts['buffered'] = True
- opts['raise_on_warnings'] = True
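+        # setdefault() so that URL query arguments may override these defaults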
+ opts.setdefault('buffered', True)
+ opts.setdefault('raise_on_warnings', True)
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 60e68e5ee..84e8299d5 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -1,5 +1,5 @@
# mysql/mysqldb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -56,7 +56,8 @@ from ...connectors.mysqldb import (
MySQLDBIdentifierPreparer,
MySQLDBConnector
)
-
+from .base import TEXT
+from ... import sql
class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
pass
@@ -75,4 +76,27 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer_mysqldb
+ def _check_unicode_returns(self, connection):
+ # work around issue fixed in
+ # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
+ # specific issue w/ the utf8_bin collation and unicode returns
+
+ has_utf8_bin = connection.scalar(
+ "show collation where %s = 'utf8' and %s = 'utf8_bin'"
+ % (
+ self.identifier_preparer.quote("Charset"),
+ self.identifier_preparer.quote("Collation")
+ ))
+ if has_utf8_bin:
+ additional_tests = [
+ sql.collate(sql.cast(
+ sql.literal_column(
+ "'test collated returns'"),
+ TEXT(charset='utf8')), "utf8_bin")
+ ]
+ else:
+ additional_tests = []
+ return super(MySQLDBConnector, self)._check_unicode_returns(
+ connection, additional_tests)
+
dialect = MySQLDialect_mysqldb
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index 77370f91d..e6b50f335 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -1,5 +1,5 @@
# mysql/oursql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index ba48017ac..74de09c4d 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -1,5 +1,5 @@
# mysql/pymysql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index af3579665..e60e39cea 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -1,5 +1,5 @@
# mysql/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 20f2e7359..b5fcfbdaf 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -1,5 +1,5 @@
# mysql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py
index 576790707..070e387d0 100644
--- a/lib/sqlalchemy/dialects/oracle/__init__.py
+++ b/lib/sqlalchemy/dialects/oracle/__init__.py
@@ -1,5 +1,5 @@
# oracle/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 272bd1740..218a7ccfc 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -1,5 +1,5 @@
# oracle/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,12 +16,12 @@ Connect Arguments
The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
affect the behavior of the dialect regardless of driver in use.
-* *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
+* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins.
-* *optimize_limits* - defaults to ``False``. see the section on LIMIT/OFFSET.
+* ``optimize_limits`` - defaults to ``False``. See the section on LIMIT/OFFSET.
-* *use_binds_for_limits* - defaults to ``True``. see the section on LIMIT/OFFSET.
+* ``use_binds_for_limits`` - defaults to ``True``. See the section on LIMIT/OFFSET.
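+
+For example, a sketch passing these flags (the values shown are purely
+illustrative)::
+
+    engine = create_engine("oracle://scott:tiger@dsn", use_ansi=False, optimize_limits=True)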
Auto Increment Behavior
-----------------------
@@ -99,6 +99,41 @@ http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset with
a window function.
+RETURNING Support
+-----------------
+
+The Oracle database supports a limited form of RETURNING, in order to retrieve result
+sets of matched rows from INSERT, UPDATE and DELETE statements. Oracle's
+RETURNING..INTO syntax only supports one row being returned, as it relies upon
+OUT parameters in order to function. In addition, supported DBAPIs have further
+limitations (see :ref:`cx_oracle_returning`).
+
+SQLAlchemy's "implicit returning" feature, which employs RETURNING within an INSERT
+and sometimes an UPDATE statement in order to fetch newly generated primary key values
+and other SQL defaults and expressions, is normally enabled on the Oracle
+backend. By default, "implicit returning" typically only fetches the value of a
+single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment
+a sequence within an INSERT statement and get the value back at the same time.
+To disable this feature across the board, specify ``implicit_returning=False`` to
+:func:`.create_engine`::
+
+ engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)
+
+Implicit returning can also be disabled on a table-by-table basis as a table option::
+
+ # Core Table
+ my_table = Table("my_table", metadata, ..., implicit_returning=False)
+
+
+ # declarative
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+ __table_args__ = {"implicit_returning": False}
+
+.. seealso::
+
+ :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on implicit returning.
+
ON UPDATE CASCADE
-----------------
@@ -133,9 +168,10 @@ Synonym/DBLINK Reflection
-------------------------
When using reflection with Table objects, the dialect can optionally search for tables
-indicated by synonyms that reference DBLINK-ed tables by passing the flag
-oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK
-is not in use this flag should be left off.
+indicated by synonyms, either in local or remote schemas or accessed over DBLINK,
+by passing the flag ``oracle_resolve_synonyms=True`` as a
+keyword argument to the Table construct. If synonyms are not in use
+this flag should be left off.
"""
@@ -145,7 +181,7 @@ from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
-from sqlalchemy import types as sqltypes
+from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, \
BLOB, CLOB, TIMESTAMP, FLOAT
@@ -362,7 +398,9 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
return self._visit_varchar(type_, '', '')
def _visit_varchar(self, type_, n, num):
- if not n and self.dialect._supports_char_length:
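+        # no length given: render the bare type name with no parenthesized length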
+ if not type_.length:
+ return "%(n)sVARCHAR%(two)s" % {'two': num, 'n': n}
+ elif not n and self.dialect._supports_char_length:
varchar = "VARCHAR%(two)s(%(length)s CHAR)"
return varchar % {'length': type_.length, 'two': num}
else:
@@ -521,7 +559,6 @@ class OracleCompiler(compiler.SQLCompiler):
return self.process(alias.original, **kwargs)
def returning_clause(self, stmt, returning_cols):
-
columns = []
binds = []
for i, column in enumerate(expression._select_iterables(returning_cols)):
@@ -595,7 +632,7 @@ class OracleCompiler(compiler.SQLCompiler):
# If needed, add the ora_rn, and wrap again with offset.
if select._offset is None:
- limitselect.for_update = select.for_update
+ limitselect._for_update_arg = select._for_update_arg
select = limitselect
else:
limitselect = limitselect.column(
@@ -614,7 +651,7 @@ class OracleCompiler(compiler.SQLCompiler):
offsetselect.append_whereclause(
sql.literal_column("ora_rn") > offset_value)
- offsetselect.for_update = select.for_update
+ offsetselect._for_update_arg = select._for_update_arg
select = offsetselect
kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
@@ -626,10 +663,19 @@ class OracleCompiler(compiler.SQLCompiler):
def for_update_clause(self, select):
if self.is_subquery():
return ""
- elif select.for_update == "nowait":
- return " FOR UPDATE NOWAIT"
- else:
- return super(OracleCompiler, self).for_update_clause(select)
+
+ tmp = ' FOR UPDATE'
+
+ if select._for_update_arg.of:
+ tmp += ' OF ' + ', '.join(
+ self.process(elem) for elem in
+ select._for_update_arg.of
+ )
+
+ if select._for_update_arg.nowait:
+ tmp += " NOWAIT"
+
+ return tmp
class OracleDDLCompiler(compiler.DDLCompiler):
@@ -708,6 +754,10 @@ class OracleDialect(default.DefaultDialect):
reflection_options = ('oracle_resolve_synonyms', )
+ construct_arguments = [
+ (sa_schema.Table, {"resolve_synonyms": False})
+ ]
+
def __init__(self,
use_ansi=True,
optimize_limits=False,
@@ -800,14 +850,15 @@ class OracleDialect(default.DefaultDialect):
returns the actual name, owner, dblink name, and synonym name if found.
"""
- q = "SELECT owner, table_owner, table_name, db_link, synonym_name FROM all_synonyms WHERE "
+ q = "SELECT owner, table_owner, table_name, db_link, "\
+ "synonym_name FROM all_synonyms WHERE "
clauses = []
params = {}
if desired_synonym:
clauses.append("synonym_name = :synonym_name")
params['synonym_name'] = desired_synonym
if desired_owner:
- clauses.append("table_owner = :desired_owner")
+ clauses.append("owner = :desired_owner")
params['desired_owner'] = desired_owner
if desired_table:
clauses.append("table_name = :tname")
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index e013799db..599eb21a3 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -1,5 +1,5 @@
# oracle/cx_oracle.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -17,11 +17,11 @@ Additional Connect Arguments
When connecting with ``dbname`` present, the host, port, and dbname tokens are
converted to a TNS name using
-the cx_oracle :func:`makedsn()` function. Otherwise, the host token is taken
+the cx_oracle ``makedsn()`` function. Otherwise, the host token is taken
directly as a TNS name.
Additional arguments which may be specified either as query string arguments
-on the URL, or as keyword arguments to :func:`~sqlalchemy.create_engine()` are:
+on the URL, or as keyword arguments to :func:`.create_engine()` are:
* allow_twophase - enable two-phase transactions. Defaults to ``True``.
@@ -65,6 +65,27 @@ of the encoding to be used.
Note that this behavior is disabled when Oracle 8 is detected, as it has been
observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.
+.. _cx_oracle_returning:
+
+RETURNING Support
+-----------------
+
+cx_oracle supports a limited subset of Oracle's already limited RETURNING support.
+Results can typically be guaranteed for at most one column being returned;
+this is the usual case in which SQLAlchemy uses RETURNING, to get just the value of a
+primary-key-associated sequence. Additional column expressions will
+cause problems in a non-deterministic way, due to cx_oracle's lack of support for
+the OCI_DATA_AT_EXEC API, which is required for more complex RETURNING scenarios.
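+
+As with the Oracle dialect generally, implicit RETURNING can be disabled
+entirely if these restrictions are problematic; a minimal sketch, reusing the
+illustrative connection URL from earlier in this document::
+
+    engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)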
+
+.. seealso::
+
+ http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 - OCI documentation for RETURNING
+
+ http://sourceforge.net/mailarchive/message.php?msg_id=31338136 - cx_oracle developer commentary
+
LOB Objects
-----------
@@ -75,7 +96,7 @@ like result.fetchmany() and result.fetchall(). This means that by default, LOB
objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
cursor is broken.
-To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_engine()`.
+To disable this processing, pass ``auto_convert_lobs=False`` to :func:`.create_engine()`.
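+
+For example, a minimal sketch (the connection URL is illustrative)::
+
+    engine = create_engine("oracle://scott:tiger@dsn", auto_convert_lobs=False)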
Two Phase Transaction Support
-----------------------------
@@ -108,7 +129,7 @@ the application can make one of several choices:
* For ad-hoc two-phase operations without disabling pooling, the DBAPI
connection in use can be evicted from the connection pool using the
- :class:`.Connection.detach` method.
+ :meth:`.Connection.detach` method.
.. versionchanged:: 0.8.0b2,0.7.10
Support for cx_oracle prepared transactions has been implemented
@@ -211,10 +232,7 @@ class _OracleNumeric(sqltypes.Numeric):
if dialect.supports_native_decimal:
if self.asdecimal:
- if self.scale is None:
- fstring = "%.10f"
- else:
- fstring = "%%.%df" % self.scale
+ fstring = "%%.%df" % self._effective_decimal_return_scale
def to_decimal(value):
if value is None:
@@ -362,7 +380,8 @@ class _OracleRowid(oracle.ROWID):
class OracleCompiler_cx_oracle(OracleCompiler):
- def bindparam_string(self, name, quote=None, **kw):
+ def bindparam_string(self, name, **kw):
+ quote = getattr(name, 'quote', None)
if quote is True or quote is not False and \
self.preparer._bindparam_requires_quotes(name):
quoted_name = '"%s"' % name
@@ -506,7 +525,6 @@ class ReturningResultProxy(_result.FullyBufferedResultProxy):
def _cursor_description(self):
returning = self.context.compiled.returning
-
return [
("ret_%d" % i, None)
for i, col in enumerate(returning)
@@ -730,9 +748,6 @@ class OracleDialect_cx_oracle(OracleDialect):
255,
outconverter=self._detect_decimal,
arraysize=cursor.arraysize)
- # allow all strings to come back natively as Unicode
- elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
- return cursor.var(util.text_type, size, cursor.arraysize)
def on_connect(conn):
conn.outputtypehandler = output_type_handler
@@ -805,8 +820,9 @@ class OracleDialect_cx_oracle(OracleDialect):
# ORA-03113: end-of-file on communication channel
# ORA-03135: connection lost contact
# ORA-01033: ORACLE initialization or shutdown in progress
+ # ORA-02396: exceeded maximum idle time, please connect again
# TODO: Others ?
- return error.code in (28, 3114, 3113, 3135, 1033)
+ return error.code in (28, 3114, 3113, 3135, 1033, 2396)
else:
return False
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index ad53b89a1..710645b23 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -1,5 +1,5 @@
# oracle/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py
index 82d1a39c2..6ed7e18bc 100644
--- a/lib/sqlalchemy/dialects/postgres.py
+++ b/lib/sqlalchemy/dialects/postgres.py
@@ -1,5 +1,5 @@
# dialects/postgres.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py
index 408b67846..180e9fc7e 100644
--- a/lib/sqlalchemy/dialects/postgresql/__init__.py
+++ b/lib/sqlalchemy/dialects/postgresql/__init__.py
@@ -1,5 +1,5 @@
# postgresql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -11,9 +11,11 @@ base.dialect = psycopg2.dialect
from .base import \
INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
INET, CIDR, UUID, BIT, MACADDR, DOUBLE_PRECISION, TIMESTAMP, TIME, \
- DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All
+ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
+ TSVECTOR
from .constraints import ExcludeConstraint
from .hstore import HSTORE, hstore
+from .json import JSON, JSONElement
from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
TSTZRANGE
@@ -23,5 +25,5 @@ __all__ = (
'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
- 'TSRANGE', 'TSTZRANGE'
+ 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONElement'
)
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 6ccf7190e..11bd3830d 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -1,5 +1,5 @@
# postgresql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -131,6 +131,44 @@ use the :meth:`._UpdateBase.returning` method on a per-statement basis::
where(table.c.name=='foo')
print result.fetchall()
+.. _postgresql_match:
+
+Full Text Search
+----------------
+
+SQLAlchemy makes available the Postgresql ``@@`` operator via the
+:meth:`.ColumnElement.match` method on any textual column expression.
+On a Postgresql dialect, an expression like the following::
+
+ select([sometable.c.text.match("search string")])
+
+will emit the following to the database::
+
+ SELECT text @@ to_tsquery('search string') FROM table
+
+The Postgresql text search functions such as ``to_tsquery()``
+and ``to_tsvector()`` are available
+explicitly using the standard :attr:`.func` construct. For example::
+
+ select([
+ func.to_tsvector('fat cats ate rats').match('cat & rat')
+ ])
+
+Emits the equivalent of::
+
+ SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat')
+
+The :class:`.postgresql.TSVECTOR` type can be used to render an explicit CAST::
+
+ from sqlalchemy.dialects.postgresql import TSVECTOR
+ from sqlalchemy import select, cast
+ select([cast("some text", TSVECTOR)])
+
+produces a statement equivalent to::
+
+ SELECT CAST('some text' AS TSVECTOR) AS anon_1
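+
+The :meth:`.ColumnElement.match` method may also be used within a WHERE
+clause; a minimal sketch against the hypothetical ``sometable`` from above::
+
+    select([sometable]).where(sometable.c.text.match("search string"))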
+
+
FROM ONLY ...
------------------------
@@ -210,7 +248,7 @@ import re
from ... import sql, schema, exc, util
from ...engine import default, reflection
-from ...sql import compiler, expression, util as sql_util, operators
+from ...sql import compiler, expression, operators
from ... import types as sqltypes
try:
@@ -230,7 +268,7 @@ RESERVED_WORDS = set(
"default", "deferrable", "desc", "distinct", "do", "else", "end",
"except", "false", "fetch", "for", "foreign", "from", "grant", "group",
"having", "in", "initially", "intersect", "into", "leading", "limit",
- "localtime", "localtimestamp", "new", "not", "null", "off", "offset",
+ "localtime", "localtimestamp", "new", "not", "null", "of", "off", "offset",
"old", "on", "only", "or", "order", "placing", "primary", "references",
"returning", "select", "session_user", "some", "symmetric", "table",
"then", "to", "trailing", "true", "union", "unique", "user", "using",
@@ -368,6 +406,23 @@ class UUID(sqltypes.TypeEngine):
PGUuid = UUID
+class TSVECTOR(sqltypes.TypeEngine):
+ """The :class:`.postgresql.TSVECTOR` type implements the Postgresql
+ text search type TSVECTOR.
+
+ It can be used to do full text queries on natural language
+ documents.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`postgresql_match`
+
+ """
+ __visit_name__ = 'TSVECTOR'
+
+
class _Slice(expression.ColumnElement):
__visit_name__ = 'slice'
@@ -913,6 +968,7 @@ ischema_names = {
'interval': INTERVAL,
'interval year to month': INTERVAL,
'interval day to second': INTERVAL,
+ 'tsvector': TSVECTOR
}
@@ -954,25 +1010,30 @@ class PGCompiler(compiler.SQLCompiler):
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
+
return '%s ILIKE %s' % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT ILIKE %s' % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def render_literal_value(self, value, type_):
value = super(PGCompiler, self).render_literal_value(value, type_)
- # TODO: need to inspect "standard_conforming_strings"
+
if self.dialect._backslash_escapes:
value = value.replace('\\', '\\\\')
return value
@@ -1009,14 +1070,25 @@ class PGCompiler(compiler.SQLCompiler):
return ""
def for_update_clause(self, select):
- if select.for_update == 'nowait':
- return " FOR UPDATE NOWAIT"
- elif select.for_update == 'read':
- return " FOR SHARE"
- elif select.for_update == 'read_nowait':
- return " FOR SHARE NOWAIT"
+
+ if select._for_update_arg.read:
+ tmp = " FOR SHARE"
else:
- return super(PGCompiler, self).for_update_clause(select)
+ tmp = " FOR UPDATE"
+
+ if select._for_update_arg.of:
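+            # FOR UPDATE OF accepts table names; translate any Column
+            # objects in the OF list to their parent tables, de-duplicated
+            # while preserving order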
+ tables = util.OrderedSet(
+ c.table if isinstance(c, expression.ColumnClause)
+ else c for c in select._for_update_arg.of)
+ tmp += " OF " + ", ".join(
+ self.process(table, ashint=True)
+ for table in tables
+ )
+
+ if select._for_update_arg.nowait:
+ tmp += " NOWAIT"
+
+ return tmp
def returning_clause(self, stmt, returning_cols):
@@ -1039,12 +1111,15 @@ class PGCompiler(compiler.SQLCompiler):
class PGDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
+
colspec = self.preparer.format_column(column)
impl_type = column.type.dialect_impl(self.dialect)
if column.primary_key and \
column is column.table._autoincrement_column and \
- not isinstance(impl_type, sqltypes.SmallInteger) and \
(
+ self.dialect.supports_smallserial or
+ not isinstance(impl_type, sqltypes.SmallInteger)
+ ) and (
column.default is None or
(
isinstance(column.default, schema.Sequence) and
@@ -1052,6 +1127,8 @@ class PGDDLCompiler(compiler.DDLCompiler):
)):
if isinstance(impl_type, sqltypes.BigInteger):
colspec += " BIGSERIAL"
+ elif isinstance(impl_type, sqltypes.SmallInteger):
+ colspec += " SMALLSERIAL"
else:
colspec += " SERIAL"
else:
@@ -1069,7 +1146,9 @@ class PGDDLCompiler(compiler.DDLCompiler):
return "CREATE TYPE %s AS ENUM (%s)" % (
self.preparer.format_type(type_),
- ",".join("'%s'" % e for e in type_.enums)
+ ", ".join(
+ self.sql_compiler.process(sql.literal(e), literal_binds=True)
+ for e in type_.enums)
)
def visit_drop_enum_type(self, drop):
@@ -1092,31 +1171,29 @@ class PGDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table)
)
- if 'postgresql_using' in index.kwargs:
- using = index.kwargs['postgresql_using']
- text += "USING %s " % preparer.quote(using, index.quote)
+ using = index.dialect_options['postgresql']['using']
+ if using:
+ text += "USING %s " % preparer.quote(using)
- ops = index.kwargs.get('postgresql_ops', {})
+ ops = index.dialect_options["postgresql"]["ops"]
text += "(%s)" \
% (
', '.join([
- self.sql_compiler.process(expr, include_table=False) +
-
-
+ self.sql_compiler.process(
+ expr.self_group()
+ if not isinstance(expr, expression.ColumnClause)
+ else expr,
+ include_table=False, literal_binds=True) +
(c.key in ops and (' ' + ops[c.key]) or '')
-
-
for expr, c in zip(index.expressions, index.columns)])
)
- if 'postgresql_where' in index.kwargs:
- whereclause = index.kwargs['postgresql_where']
- else:
- whereclause = None
+ whereclause = index.dialect_options["postgresql"]["where"]
if whereclause is not None:
- whereclause = sql_util.expression_as_ddl(whereclause)
- where_compiled = self.sql_compiler.process(whereclause)
+ where_compiled = self.sql_compiler.process(
+ whereclause, include_table=False,
+ literal_binds=True)
text += " WHERE " + where_compiled
return text
@@ -1128,16 +1205,20 @@ class PGDDLCompiler(compiler.DDLCompiler):
elements = []
for c in constraint.columns:
op = constraint.operators[c.name]
- elements.append(self.preparer.quote(c.name, c.quote)+' WITH '+op)
+ elements.append(self.preparer.quote(c.name) + ' WITH '+op)
text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements))
if constraint.where is not None:
- sqltext = sql_util.expression_as_ddl(constraint.where)
- text += ' WHERE (%s)' % self.sql_compiler.process(sqltext)
+ text += ' WHERE (%s)' % self.sql_compiler.process(
+ constraint.where,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
class PGTypeCompiler(compiler.GenericTypeCompiler):
+    def visit_TSVECTOR(self, type_):
+ return "TSVECTOR"
+
def visit_INET(self, type_):
return "INET"
@@ -1162,6 +1243,9 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
def visit_HSTORE(self, type_):
return "HSTORE"
+ def visit_JSON(self, type_):
+ return "JSON"
+
def visit_INT4RANGE(self, type_):
return "INT4RANGE"
@@ -1250,9 +1334,9 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
if not type_.name:
raise exc.CompileError("Postgresql ENUM type requires a name.")
- name = self.quote(type_.name, type_.quote)
+ name = self.quote(type_.name)
if not self.omit_schema and use_schema and type_.schema is not None:
- name = self.quote_schema(type_.schema, type_.quote) + "." + name
+ name = self.quote_schema(type_.schema) + "." + name
return name
@@ -1328,6 +1412,7 @@ class PGDialect(default.DefaultDialect):
supports_native_enum = True
supports_native_boolean = True
+ supports_smallserial = True
supports_sequences = True
sequences_optional = True
@@ -1349,12 +1434,22 @@ class PGDialect(default.DefaultDialect):
inspector = PGInspector
isolation_level = None
- # TODO: need to inspect "standard_conforming_strings"
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": {}
+ })
+ ]
+
_backslash_escapes = True
- def __init__(self, isolation_level=None, **kwargs):
+ def __init__(self, isolation_level=None, json_serializer=None,
+ json_deserializer=None, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
self.isolation_level = isolation_level
+ self._json_deserializer = json_deserializer
+ self._json_serializer = json_serializer
def initialize(self, connection):
super(PGDialect, self).initialize(connection)
@@ -1368,6 +1463,13 @@ class PGDialect(default.DefaultDialect):
# psycopg2, others may have placed ENUM here as well
self.colspecs.pop(ENUM, None)
+ # http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689
+ self.supports_smallserial = self.server_version_info >= (9, 2)
+
+ self._backslash_escapes = connection.scalar(
+ "show standard_conforming_strings"
+ ) == 'off'
+
def on_connect(self):
if self.isolation_level is not None:
def connect(conn):
@@ -1515,12 +1617,6 @@ class PGDialect(default.DefaultDialect):
return bool(cursor.first())
def has_type(self, connection, type_name, schema=None):
- bindparams = [
- sql.bindparam('typname',
- util.text_type(type_name), type_=sqltypes.Unicode),
- sql.bindparam('nspname',
- util.text_type(schema), type_=sqltypes.Unicode),
- ]
if schema is not None:
query = """
SELECT EXISTS (
@@ -1530,6 +1626,7 @@ class PGDialect(default.DefaultDialect):
AND n.nspname = :nspname
)
"""
+ query = sql.text(query)
else:
query = """
SELECT EXISTS (
@@ -1538,13 +1635,23 @@ class PGDialect(default.DefaultDialect):
AND pg_type_is_visible(t.oid)
)
"""
- cursor = connection.execute(sql.text(query, bindparams=bindparams))
+ query = sql.text(query)
+ query = query.bindparams(
+ sql.bindparam('typname',
+ util.text_type(type_name), type_=sqltypes.Unicode),
+ )
+ if schema is not None:
+ query = query.bindparams(
+ sql.bindparam('nspname',
+ util.text_type(schema), type_=sqltypes.Unicode),
+ )
+ cursor = connection.execute(query)
return bool(cursor.scalar())
def _get_server_version_info(self, connection):
v = connection.execute("select version()").scalar()
m = re.match(
- '(?:PostgreSQL|EnterpriseDB) '
+ '.*(?:PostgreSQL|EnterpriseDB) '
'(\d+)\.(\d+)(?:\.(\d+))?(?:\.\d+)?(?:devel)?',
v)
if not m:
@@ -1578,12 +1685,10 @@ class PGDialect(default.DefaultDialect):
table_name = util.text_type(table_name)
if schema is not None:
schema = util.text_type(schema)
- s = sql.text(query, bindparams=[
- sql.bindparam('table_name', type_=sqltypes.Unicode),
- sql.bindparam('schema', type_=sqltypes.Unicode)
- ],
- typemap={'oid': sqltypes.Integer}
- )
+ s = sql.text(query).bindparams(table_name=sqltypes.Unicode)
+ s = s.columns(oid=sqltypes.Integer)
+ if schema:
+ s = s.bindparams(sql.bindparam('schema', type_=sqltypes.Unicode))
c = connection.execute(s, table_name=table_name, schema=schema)
table_oid = c.scalar()
if table_oid is None:
@@ -1675,8 +1780,7 @@ class PGDialect(default.DefaultDialect):
SQL_COLS = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
- (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid)
- for 128)
+ (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid)
FROM pg_catalog.pg_attrdef d
WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
AND a.atthasdef)
@@ -1883,6 +1987,15 @@ class PGDialect(default.DefaultDialect):
n.oid = c.relnamespace
ORDER BY 1
"""
+ # http://www.postgresql.org/docs/9.0/static/sql-createtable.html
+ FK_REGEX = re.compile(
+ r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)'
+ r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?'
+ r'[\s]?(ON UPDATE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(ON DELETE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?'
+ r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?'
+ )
t = sql.text(FK_SQL, typemap={
'conname': sqltypes.Unicode,
@@ -1890,15 +2003,18 @@ class PGDialect(default.DefaultDialect):
c = connection.execute(t, table=table_oid)
fkeys = []
for conname, condef, conschema in c.fetchall():
- m = re.search('FOREIGN KEY \((.*?)\) REFERENCES '
- '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups()
+ m = re.search(FK_REGEX, condef).groups()
constrained_columns, referred_schema, \
- referred_table, referred_columns = m
+ referred_table, referred_columns, \
+ _, match, _, onupdate, _, ondelete, \
+ deferrable, _, initially = m
+ if deferrable is not None:
+ deferrable = True if deferrable == 'DEFERRABLE' else False
constrained_columns = [preparer._unquote_identifier(x)
for x in re.split(r'\s*,\s*', constrained_columns)]
if referred_schema:
- referred_schema =\
+ referred_schema = \
preparer._unquote_identifier(referred_schema)
elif schema is not None and schema == conschema:
# no schema was returned by pg_get_constraintdef(). This
@@ -1916,7 +2032,14 @@ class PGDialect(default.DefaultDialect):
'constrained_columns': constrained_columns,
'referred_schema': referred_schema,
'referred_table': referred_table,
- 'referred_columns': referred_columns
+ 'referred_columns': referred_columns,
+ 'options': {
+ 'onupdate': onupdate,
+ 'ondelete': ondelete,
+ 'deferrable': deferrable,
+ 'initially': initially,
+ 'match': match
+ }
}
fkeys.append(fkey_d)
return fkeys
@@ -1926,11 +2049,14 @@ class PGDialect(default.DefaultDialect):
table_oid = self.get_table_oid(connection, table_name, schema,
info_cache=kw.get('info_cache'))
+ # cast indkey as varchar since it's an int2vector,
+ # returned as a list by some drivers such as pypostgresql
+
IDX_SQL = """
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, ix.indkey
+ a.attname, a.attnum, ix.indkey::varchar
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/constraints.py
index 5b8bbe643..f45cef1a2 100644
--- a/lib/sqlalchemy/dialects/postgresql/constraints.py
+++ b/lib/sqlalchemy/dialects/postgresql/constraints.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,7 +39,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
:param using:
Optional string. If set, emit USING <index_method> when issuing DDL
for this constraint. Defaults to 'gist'.
-
+
:param where:
Optional string. If set, emit WHERE <predicate> when issuing DDL
for this constraint.
@@ -60,7 +60,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
where = kw.get('where')
if where:
self.where = expression._literal_as_text(where)
-
+
def copy(self, **kw):
elements = [(col, self.operators[col])
for col in self.columns.keys()]
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index c645e25d2..76562088d 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -1,5 +1,5 @@
# postgresql/hstore.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -144,8 +144,10 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
For usage with the SQLAlchemy ORM, it may be desirable to combine
the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary
now part of the :mod:`sqlalchemy.ext.mutable`
- extension. This extension will allow in-place changes to dictionary
- values to be detected by the unit of work::
+ extension. This extension will allow "in-place" changes to the
+ dictionary, e.g. the addition of new keys or the replacement or removal
+ of existing keys, to produce events which will be detected
+ by the unit of work::
from sqlalchemy.ext.mutable import MutableDict
@@ -163,6 +165,11 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
session.commit()
+ When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
+ will not be alerted to any changes to the contents of an existing dictionary,
+ unless that dictionary value is re-assigned to the HSTORE attribute itself,
+ thus generating a change event.
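+
+    For example, a minimal sketch (``my_object`` is assumed to be a mapped
+    instance with an HSTORE-typed ``data`` attribute)::
+
+        d = dict(my_object.data)
+        d['new_key'] = 'new value'
+        my_object.data = d  # full reassignment generates a change event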
+
.. versionadded:: 0.8
.. seealso::
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
new file mode 100644
index 000000000..2e29185e8
--- /dev/null
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -0,0 +1,199 @@
+# postgresql/json.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import absolute_import
+
+import json
+
+from .base import ischema_names
+from ... import types as sqltypes
+from ...sql.operators import custom_op
+from ... import sql
+from ...sql import elements
+from ... import util
+
+__all__ = ('JSON', 'JSONElement')
+
+
+class JSONElement(elements.BinaryExpression):
+ """Represents accessing an element of a :class:`.JSON` value.
+
+ The :class:`.JSONElement` is produced whenever using the Python index
+ operator on an expression that has the type :class:`.JSON`::
+
+ expr = mytable.c.json_data['some_key']
+
+ The expression typically compiles to a JSON access such as ``col -> key``.
+ Modifiers are then available for typing behavior, including :meth:`.JSONElement.cast`
+ and :attr:`.JSONElement.astext`.
+
+ """
+ def __init__(self, left, right, astext=False, opstring=None, result_type=None):
+ self._astext = astext
+ if opstring is None:
+ if hasattr(right, '__iter__') and \
+ not isinstance(right, util.string_types):
+ opstring = "#>"
+ right = "{%s}" % (", ".join(util.text_type(elem) for elem in right))
+ else:
+ opstring = "->"
+
+ self._json_opstring = opstring
+ operator = custom_op(opstring, precedence=5)
+ right = left._check_literal(left, operator, right)
+ super(JSONElement, self).__init__(left, right, operator, type_=result_type)
+
+ @property
+ def astext(self):
+ """Convert this :class:`.JSONElement` to use the 'astext' operator
+ when evaluated.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].astext])
+
+ .. seealso::
+
+ :meth:`.JSONElement.cast`
+
+ """
+ if self._astext:
+ return self
+ else:
+ return JSONElement(
+ self.left,
+ self.right,
+ astext=True,
+ opstring=self._json_opstring + ">",
+ result_type=sqltypes.String(convert_unicode=True)
+ )
+
+ def cast(self, type_):
+ """Convert this :class:`.JSONElement` to apply both the 'astext' operator
+        as well as an explicit type cast when evaluated.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].cast(Integer)])
+
+ .. seealso::
+
+ :attr:`.JSONElement.astext`
+
+ """
+ if not self._astext:
+ return self.astext.cast(type_)
+ else:
+ return sql.cast(self, type_)
+
+
+class JSON(sqltypes.TypeEngine):
+ """Represent the Postgresql JSON type.
+
+ The :class:`.JSON` type stores arbitrary JSON format data, e.g.::
+
+ data_table = Table('data_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', JSON)
+ )
+
+ with engine.connect() as conn:
+ conn.execute(
+ data_table.insert(),
+ data = {"key1": "value1", "key2": "value2"}
+ )
+
+ :class:`.JSON` provides several operations:
+
+ * Index operations::
+
+ data_table.c.data['some key']
+
+ * Index operations returning text (required for text comparison)::
+
+ data_table.c.data['some key'].astext == 'some value'
+
+ * Index operations with a built-in CAST call::
+
+ data_table.c.data['some key'].cast(Integer) == 5
+
+ * Path index operations::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')]
+
+ * Path index operations returning text (required for text comparison)::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value'
+
+ Index operations return an instance of :class:`.JSONElement`, which represents
+ an expression such as ``column -> index``. This element then defines
+ methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast`
+ for setting up type behavior.
+
+ The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect
+ in-place mutations to the structure. In order to detect these, the
+ :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
+    allow "in-place" changes to the data structure to produce events which
+ will be detected by the unit of work. See the example at :class:`.HSTORE`
+ for a simple example involving a dictionary.
+
+ Custom serializers and deserializers are specified at the dialect level,
+    that is, using :func:`.create_engine`.  The reason for this is that when
+ using psycopg2, the DBAPI only allows serializers at the per-cursor
+ or per-connection level. E.g.::
+
+ engine = create_engine("postgresql://scott:tiger@localhost/test",
+ json_serializer=my_serialize_fn,
+ json_deserializer=my_deserialize_fn
+ )
+
+ When using the psycopg2 dialect, the json_deserializer is registered
+ against the database using ``psycopg2.extras.register_default_json``.
+
+ .. versionadded:: 0.9
+
+ """
+
+ __visit_name__ = 'JSON'
+
+ class comparator_factory(sqltypes.Concatenable.Comparator):
+ """Define comparison operations for :class:`.JSON`."""
+
+ def __getitem__(self, other):
+ """Get the value at a given key."""
+
+ return JSONElement(self.expr, other)
+
+ def _adapt_expression(self, op, other_comparator):
+ if isinstance(op, custom_op):
+ if op.opstring == '->':
+ return op, sqltypes.Text
+ return sqltypes.Concatenable.Comparator.\
+ _adapt_expression(self, op, other_comparator)
+
+ def bind_processor(self, dialect):
+ json_serializer = dialect._json_serializer or json.dumps
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ return json_serializer(value).encode(encoding)
+ else:
+ def process(value):
+ return json_serializer(value)
+ return process
+
+ def result_processor(self, dialect, coltype):
+ json_deserializer = dialect._json_deserializer or json.loads
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ return json_deserializer(value.decode(encoding))
+ else:
+ def process(value):
+ return json_deserializer(value)
+ return process
+
+
+ischema_names['json'] = JSON
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 0e503746c..bc73f9757 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -1,5 +1,5 @@
# postgresql/pg8000.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,7 +39,9 @@ class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 02eda094e..e9f64f829 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -1,5 +1,5 @@
# postgresql/psycopg2.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -179,6 +179,7 @@ from .base import PGDialect, PGCompiler, \
ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
_INT_TYPES
from .hstore import HSTORE
+from .json import JSON
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@@ -191,7 +192,9 @@ class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
@@ -210,23 +213,13 @@ class _PGNumeric(sqltypes.Numeric):
class _PGEnum(ENUM):
- def __init__(self, *arg, **kw):
- super(_PGEnum, self).__init__(*arg, **kw)
- if util.py2k:
- if self.convert_unicode:
- self.convert_unicode = "force"
-
-
-class _PGArray(ARRAY):
- def __init__(self, *arg, **kw):
- super(_PGArray, self).__init__(*arg, **kw)
- if util.py2k:
- # FIXME: this check won't work for setups that
- # have convert_unicode only on their create_engine().
- if isinstance(self.item_type, sqltypes.String) and \
- self.item_type.convert_unicode:
- self.item_type.convert_unicode = "force"
-
+ def result_processor(self, dialect, coltype):
+ if util.py2k and self.convert_unicode is True:
+ # we can't easily use PG's extensions here because
+ # the OID is on the fly, and we need to give it a python
+ # function anyway - not really worth it.
+ self.convert_unicode = "force_nocheck"
+ return super(_PGEnum, self).result_processor(dialect, coltype)
class _PGHStore(HSTORE):
def bind_processor(self, dialect):
@@ -241,6 +234,15 @@ class _PGHStore(HSTORE):
else:
return super(_PGHStore, self).result_processor(dialect, coltype)
+
+class _PGJSON(JSON):
+
+ def result_processor(self, dialect, coltype):
+ if dialect._has_native_json:
+ return None
+ else:
+ return super(_PGJSON, self).result_processor(dialect, coltype)
+
# When we're handed literal SQL, ensure it's a SELECT-query. Since
# 8.3, combining cursors and "FOR UPDATE" has been fine.
SERVER_SIDE_CURSOR_RE = re.compile(
@@ -325,6 +327,7 @@ class PGDialect_psycopg2(PGDialect):
psycopg2_version = (0, 0)
_has_native_hstore = False
+ _has_native_json = False
colspecs = util.update_copy(
PGDialect.colspecs,
@@ -332,8 +335,8 @@ class PGDialect_psycopg2(PGDialect):
sqltypes.Numeric: _PGNumeric,
ENUM: _PGEnum, # needs force_unicode
sqltypes.Enum: _PGEnum, # needs force_unicode
- ARRAY: _PGArray, # needs force_unicode
HSTORE: _PGHStore,
+ JSON: _PGJSON
}
)
@@ -361,6 +364,7 @@ class PGDialect_psycopg2(PGDialect):
self._has_native_hstore = self.use_native_hstore and \
self._hstore_oids(connection.connection) \
is not None
+ self._has_native_json = self.psycopg2_version >= (2, 5)
@classmethod
def dbapi(cls):
@@ -369,7 +373,7 @@ class PGDialect_psycopg2(PGDialect):
@util.memoized_property
def _isolation_lookup(self):
- extensions = __import__('psycopg2.extensions').extensions
+ from psycopg2 import extensions
return {
'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
@@ -407,6 +411,7 @@ class PGDialect_psycopg2(PGDialect):
if self.dbapi and self.use_native_unicode:
def on_connect(conn):
extensions.register_type(extensions.UNICODE, conn)
+ extensions.register_type(extensions.UNICODEARRAY, conn)
fns.append(on_connect)
if self.dbapi and self.use_native_hstore:
@@ -423,6 +428,11 @@ class PGDialect_psycopg2(PGDialect):
array_oid=array_oid)
fns.append(on_connect)
+ if self.dbapi and self._json_deserializer:
+ def on_connect(conn):
+ extras.register_default_json(conn, loads=self._json_deserializer)
+ fns.append(on_connect)
+
if fns:
def on_connect(conn):
for fn in fns:
diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
index 289bef114..f030d2c1b 100644
--- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
@@ -1,5 +1,5 @@
# postgresql/pypostgresql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py
index d03f948a7..57b0c4c30 100644
--- a/lib/sqlalchemy/dialects/postgresql/ranges.py
+++ b/lib/sqlalchemy/dialects/postgresql/ranges.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
index 583afc23f..67e7d53e6 100644
--- a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
@@ -1,5 +1,5 @@
# postgresql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py
index 0d06160ae..a9b23575b 100644
--- a/lib/sqlalchemy/dialects/sqlite/__init__.py
+++ b/lib/sqlalchemy/dialects/sqlite/__init__.py
@@ -1,5 +1,5 @@
# sqlite/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index fb7d968be..579a61046 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -1,5 +1,5 @@
# sqlite/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -130,14 +130,14 @@ for new connections through the usage of events::
import datetime
import re
-from sqlalchemy import sql, exc
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import types as sqltypes
-from sqlalchemy import util
-from sqlalchemy.sql import compiler
-from sqlalchemy import processors
+from ... import sql, exc
+from ...engine import default, reflection
+from ... import types as sqltypes, schema as sa_schema
+from ... import util
+from ...sql import compiler
+from ... import processors
-from sqlalchemy.types import BIGINT, BLOB, BOOLEAN, CHAR,\
+from ...types import BIGINT, BLOB, BOOLEAN, CHAR,\
DECIMAL, FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT,\
TIMESTAMP, VARCHAR
@@ -160,6 +160,13 @@ class _DateTimeMixin(object):
kw["regexp"] = self._reg
return util.constructor_copy(self, cls, **kw)
+ def literal_processor(self, dialect):
+ bp = self.bind_processor(dialect)
+ def process(value):
+ return "'%s'" % bp(value)
+ return process
+
+
class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"""Represent a Python datetime object in SQLite using a string.
@@ -211,6 +218,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"%(hour)02d:%(minute)02d:%(second)02d"
)
+
def bind_processor(self, dialect):
datetime_datetime = datetime.datetime
datetime_date = datetime.date
@@ -491,7 +499,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
colspec += " NOT NULL"
if (column.primary_key and
- column.table.kwargs.get('sqlite_autoincrement', False) and
+ column.table.dialect_options['sqlite']['autoincrement'] and
len(column.table.primary_key.columns) == 1 and
issubclass(column.type._type_affinity, sqltypes.Integer) and
not column.foreign_keys):
@@ -506,7 +514,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
if len(constraint.columns) == 1:
c = list(constraint)[0]
if c.primary_key and \
- c.table.kwargs.get('sqlite_autoincrement', False) and \
+ c.table.dialect_options['sqlite']['autoincrement'] and \
issubclass(c.type._type_affinity, sqltypes.Integer) and \
not c.foreign_keys:
return None
@@ -615,6 +623,12 @@ class SQLiteDialect(default.DefaultDialect):
supports_cast = True
supports_default_values = True
+ construct_arguments = [
+ (sa_schema.Table, {
+ "autoincrement": False
+ })
+ ]
+
_broken_fk_pragma_quotes = False
def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index ad0dd5292..b53f4d4a0 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -1,5 +1,5 @@
# sqlite/pysqlite.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -97,6 +97,8 @@ or result processing. Execution of "func.current_date()" will return a string.
"func.current_timestamp()" is registered as returning a DATETIME type in
SQLAlchemy, so this function still receives SQLAlchemy-level result processing.
+.. _pysqlite_threading_pooling:
+
Threading/Pooling Behavior
---------------------------
@@ -160,8 +162,8 @@ Using Temporary Tables with SQLite
Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
-the temporary table should continue to remain after :meth:`.commit` or
-:meth:`.rollback` is called, a pool which maintains a single connection must
+the temporary table should continue to remain after :meth:`.Session.commit` or
+:meth:`.Session.rollback` is called, a pool which maintains a single connection must
be used. Use :class:`.SingletonThreadPool` if the scope is only needed
within the current thread, or :class:`.StaticPool` if scope is needed within
multiple threads for this case::
diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py
index f61352ceb..85f9dd9c9 100644
--- a/lib/sqlalchemy/dialects/sybase/__init__.py
+++ b/lib/sqlalchemy/dialects/sybase/__init__.py
@@ -1,5 +1,5 @@
# sybase/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 1c42d4846..501270778 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -1,5 +1,5 @@
# sybase/base.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
@@ -440,6 +440,8 @@ class SybaseDialect(default.DefaultDialect):
preparer = SybaseIdentifierPreparer
inspector = SybaseInspector
+ construct_arguments = []
+
def _get_default_schema_name(self, connection):
return connection.scalar(
text("SELECT user_name() as user_name",
diff --git a/lib/sqlalchemy/dialects/sybase/mxodbc.py b/lib/sqlalchemy/dialects/sybase/mxodbc.py
index 829132bdc..f14d1c420 100644
--- a/lib/sqlalchemy/dialects/sybase/mxodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/mxodbc.py
@@ -1,5 +1,5 @@
# sybase/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 283c60da3..f773e5a6d 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -1,5 +1,5 @@
# sybase/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py
index 8961ce8ef..664bd9ac0 100644
--- a/lib/sqlalchemy/dialects/sybase/pysybase.py
+++ b/lib/sqlalchemy/dialects/sybase/pysybase.py
@@ -1,5 +1,5 @@
# sybase/pysybase.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py
index 0a5a96784..890c76645 100644
--- a/lib/sqlalchemy/engine/__init__.py
+++ b/lib/sqlalchemy/engine/__init__.py
@@ -1,5 +1,5 @@
# engine/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -50,14 +50,13 @@ url.py
within a URL.
"""
-# not sure what this was used for
-#import sqlalchemy.databases
-
from .interfaces import (
- Compiled,
Connectable,
Dialect,
ExecutionContext,
+
+ # backwards compat
+ Compiled,
TypeCompiler
)
@@ -83,8 +82,12 @@ from .util import (
connection_memoize
)
+
from . import util, strategies
+# backwards compat
+from ..sql import ddl
+
default_strategy = 'plain'
@@ -345,10 +348,13 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
arguments.
"""
- opts = util._coerce_config(configuration, prefix)
- opts.update(kwargs)
- url = opts.pop('url')
- return create_engine(url, **opts)
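+    # e.g. given {'sqlalchemy.url': 'postgresql://u:p@host/db',
+    # 'sqlalchemy.echo': 'true'}, strip the prefix from matching keys;
+    # '_coerce_config' signals create_engine() to coerce string-based
+    # values such as 'true' to their expected types (the keys shown
+    # here are illustrative)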
+ options = dict((key[len(prefix):], configuration[key])
+ for key in configuration
+ if key.startswith(prefix))
+ options['_coerce_config'] = True
+ options.update(kwargs)
+ url = options.pop('url')
+ return create_engine(url, **options)
__all__ = (
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index f69bd3d4b..1f2b7a3e5 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1,5 +1,5 @@
# engine/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -11,8 +11,8 @@ from __future__ import with_statement
import sys
-from .. import exc, schema, util, log, interfaces
-from ..sql import expression, util as sql_util
+from .. import exc, util, log, interfaces
+from ..sql import expression, util as sql_util, schema, ddl
from .interfaces import Connectable, Compiled
from .util import _distill_params
import contextlib
@@ -303,20 +303,40 @@ class Connection(Connectable):
def invalidate(self, exception=None):
"""Invalidate the underlying DBAPI connection associated with
- this Connection.
+ this :class:`.Connection`.
- The underlying DB-API connection is literally closed (if
+ The underlying DBAPI connection is literally closed (if
possible), and is discarded. Its source connection pool will
typically lazily create a new connection to replace it.
- Upon the next usage, this Connection will attempt to reconnect
- to the pool with a new connection.
+ Upon the next use (where "use" typically means using the
+ :meth:`.Connection.execute` method or similar),
+ this :class:`.Connection` will attempt to
+ procure a new DBAPI connection using the services of the
+        :class:`.Pool` as a source of connectivity (e.g. a "reconnection").
+
+ If a transaction was in progress (e.g. the
+        :meth:`.Connection.begin` method has been called) when the
+        :meth:`.Connection.invalidate` method is called, at the DBAPI
+ level all state associated with this transaction is lost, as
+ the DBAPI connection is closed. The :class:`.Connection`
+ will not allow a reconnection to proceed until the :class:`.Transaction`
+ object is ended, by calling the :meth:`.Transaction.rollback`
+ method; until that point, any attempt at continuing to use the
+ :class:`.Connection` will raise an
+ :class:`~sqlalchemy.exc.InvalidRequestError`.
+ This is to prevent applications from accidentally
+        continuing ongoing transactional operations despite the
+ fact that the transaction has been lost due to an
+ invalidation.
+
+        The :meth:`.Connection.invalidate` method, just like auto-invalidation,
+        will, at the connection pool level, invoke the :meth:`.PoolEvents.invalidate`
+        event.
- Transactions in progress remain in an "opened" state (even though the
- actual transaction is gone); these must be explicitly rolled back
- before a reconnect on this Connection can proceed. This is to prevent
- applications from accidentally continuing their transactional
- operations in a non-transactional state.
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
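+
+        A minimal usage sketch (error handling elided; assumes the
+        invalidation is performed explicitly, rather than in response
+        to a disconnect)::
+
+            trans = conn.begin()
+            conn.invalidate()
+
+            # continuing to use the connection raises InvalidRequestError
+            # until the transaction is explicitly ended:
+            trans.rollback()
+
+            # the next use procures a new DBAPI connection
+            conn.execute("select 1")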
"""
if self.invalidated:
@@ -403,7 +423,6 @@ class Connection(Connectable):
See also :meth:`.Connection.begin`,
:meth:`.Connection.begin_twophase`.
"""
-
if self.__transaction is None:
self.__transaction = RootTransaction(self)
else:
@@ -450,7 +469,7 @@ class Connection(Connectable):
return self.__transaction is not None
- def _begin_impl(self):
+ def _begin_impl(self, transaction):
if self._echo:
self.engine.logger.info("BEGIN (implicit)")
@@ -459,6 +478,8 @@ class Connection(Connectable):
try:
self.engine.dialect.do_begin(self.connection)
+ if self.connection._reset_agent is None:
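+                # mark this transaction as the connection's "reset agent";
+                # the connection pool will defer its reset-on-return
+                # handling to this transaction instead of emitting an
+                # additional rollback of its own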
+ self.connection._reset_agent = transaction
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
@@ -471,9 +492,12 @@ class Connection(Connectable):
self.engine.logger.info("ROLLBACK")
try:
self.engine.dialect.do_rollback(self.connection)
- self.__transaction = None
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
else:
self.__transaction = None
@@ -485,9 +509,12 @@ class Connection(Connectable):
self.engine.logger.info("COMMIT")
try:
self.engine.dialect.do_commit(self.connection)
- self.__transaction = None
except Exception as e:
self._handle_dbapi_exception(e, None, None, None, None)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
def _savepoint_impl(self, name=None):
if self._has_events:
@@ -516,14 +543,17 @@ class Connection(Connectable):
self.engine.dialect.do_release_savepoint(self, name)
self.__transaction = context
- def _begin_twophase_impl(self, xid):
+ def _begin_twophase_impl(self, transaction):
if self._echo:
self.engine.logger.info("BEGIN TWOPHASE (implicit)")
if self._has_events:
- self.dispatch.begin_twophase(self, xid)
+ self.dispatch.begin_twophase(self, transaction.xid)
if self._still_open_and_connection_is_valid:
- self.engine.dialect.do_begin_twophase(self, xid)
+ self.engine.dialect.do_begin_twophase(self, transaction.xid)
+
+ if self.connection._reset_agent is None:
+ self.connection._reset_agent = transaction
def _prepare_twophase_impl(self, xid):
if self._has_events:
@@ -539,8 +569,14 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
- self.__transaction = None
+ try:
+ self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
+ else:
+ self.__transaction = None
def _commit_twophase_impl(self, xid, is_prepared):
if self._has_events:
@@ -548,8 +584,14 @@ class Connection(Connectable):
if self._still_open_and_connection_is_valid:
assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
- self.__transaction = None
+ try:
+ self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
+ finally:
+ if self.connection._reset_agent is self.__transaction:
+ self.connection._reset_agent = None
+ self.__transaction = None
+ else:
+ self.__transaction = None
def _autorollback(self):
if not self.in_transaction():
@@ -581,6 +623,8 @@ class Connection(Connectable):
else:
if not self.__branch:
conn.close()
+ if conn._reset_agent is self.__transaction:
+ conn._reset_agent = None
del self.__connection
self.__can_reconnect = False
self.__transaction = None
@@ -652,17 +696,16 @@ class Connection(Connectable):
DBAPI-agnostic way, use the :func:`~.expression.text` construct.
"""
- for c in type(object).__mro__:
- if c in Connection.executors:
- return Connection.executors[c](
- self,
- object,
- multiparams,
- params)
- else:
+ if isinstance(object, util.string_types[0]):
+ return self._execute_text(object, multiparams, params)
+ try:
+ meth = object._execute_on_connection
+ except AttributeError:
raise exc.InvalidRequestError(
"Unexecutable object type: %s" %
type(object))
+ else:
+ return meth(self, multiparams, params)
def _execute_function(self, func, multiparams, params):
"""Execute a sql.FunctionElement object."""
@@ -825,7 +868,7 @@ class Connection(Connectable):
context = constructor(dialect, self, conn, *args)
except Exception as e:
self._handle_dbapi_exception(e,
- str(statement), parameters,
+ util.text_type(statement), parameters,
None, None)
if context.compiled:
@@ -898,6 +941,11 @@ class Connection(Connectable):
elif not context._is_explicit_returning:
result.close(_autoclose_connection=False)
result._metadata = None
+ elif context.isupdate and context._is_implicit_returning:
+ context._fetch_implicit_update_returning(result)
+ result.close(_autoclose_connection=False)
+ result._metadata = None
+
elif result._metadata is None:
# no results, get rowcount
# (which requires open cursor on some drivers
@@ -1033,16 +1081,6 @@ class Connection(Connectable):
if self.should_close_with_result:
self.close()
- # poor man's multimethod/generic function thingy
- executors = {
- expression.FunctionElement: _execute_function,
- expression.ClauseElement: _execute_clauseelement,
- Compiled: _execute_compiled,
- schema.SchemaItem: _execute_default,
- schema.DDLElement: _execute_ddl,
- util.string_types[0]: _execute_text
- }
-
def default_schema_name(self):
return self.engine.dialect.get_default_schema_name(self)
@@ -1210,7 +1248,7 @@ class Transaction(object):
class RootTransaction(Transaction):
def __init__(self, connection):
super(RootTransaction, self).__init__(connection, None)
- self.connection._begin_impl()
+ self.connection._begin_impl(self)
def _do_rollback(self):
if self.is_active:
@@ -1259,7 +1297,7 @@ class TwoPhaseTransaction(Transaction):
super(TwoPhaseTransaction, self).__init__(connection, None)
self._is_prepared = False
self.xid = xid
- self.connection._begin_twophase_impl(self.xid)
+ self.connection._begin_twophase_impl(self)
def prepare(self):
"""Prepare this :class:`.TwoPhaseTransaction`.
@@ -1423,7 +1461,7 @@ class Engine(Connectable, log.Identified):
echo = log.echo_property()
def __repr__(self):
- return 'Engine(%s)' % str(self.url)
+ return 'Engine(%r)' % self.url
def dispose(self):
"""Dispose of the connection pool used by this :class:`.Engine`.
@@ -1667,6 +1705,17 @@ class Engine(Connectable, log.Identified):
return self.dialect.get_table_names(conn, schema)
def has_table(self, table_name, schema=None):
+ """Return True if the given backend has a table of the given name.
+
+ .. seealso::
+
+ :ref:`metadata_reflection_inspector` - detailed schema inspection using
+ the :class:`.Inspector` interface.
+
+ :class:`.quoted_name` - used to pass quoting information along
+ with a schema identifier.
+
+ """
return self.run_callable(self.dialect.has_table, table_name, schema)
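For reference, a quick usage sketch of the documented method against an in-memory SQLite database::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    engine.execute("CREATE TABLE some_table (id INTEGER)")
    assert engine.has_table("some_table")
    assert not engine.has_table("other_table")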
def raw_connection(self):
diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py
deleted file mode 100644
index 6daa9be6b..000000000
--- a/lib/sqlalchemy/engine/ddl.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# engine/ddl.py
-# Copyright (C) 2009-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Routines to handle CREATE/DROP workflow."""
-
-from .. import schema
-from ..sql import util as sql_util
-
-
-class DDLBase(schema.SchemaVisitor):
- def __init__(self, connection):
- self.connection = connection
-
-
-class SchemaGenerator(DDLBase):
-
- def __init__(self, dialect, connection, checkfirst=False,
- tables=None, **kwargs):
- super(SchemaGenerator, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
- self.memo = {}
-
- def _can_create_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or \
- not self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_create_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- (
- (not self.dialect.sequences_optional or
- not sequence.optional) and
- (
- not self.checkfirst or
- not self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema)
- )
- )
-
- def visit_metadata(self, metadata):
- if self.tables is not None:
- tables = self.tables
- else:
- tables = list(metadata.tables.values())
- collection = [t for t in sql_util.sort_tables(tables)
- if self._can_create_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
- if s.column is None and self._can_create_sequence(s)]
-
- metadata.dispatch.before_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for seq in seq_coll:
- self.traverse_single(seq, create_ok=True)
-
- for table in collection:
- self.traverse_single(table, create_ok=True)
-
- metadata.dispatch.after_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_table(self, table, create_ok=False):
- if not create_ok and not self._can_create_table(table):
- return
-
- table.dispatch.before_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.CreateTable(table))
-
- if hasattr(table, 'indexes'):
- for index in table.indexes:
- self.traverse_single(index)
-
- table.dispatch.after_create(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_sequence(self, sequence, create_ok=False):
- if not create_ok and not self._can_create_sequence(sequence):
- return
- self.connection.execute(schema.CreateSequence(sequence))
-
- def visit_index(self, index):
- self.connection.execute(schema.CreateIndex(index))
-
-
-class SchemaDropper(DDLBase):
-
- def __init__(self, dialect, connection, checkfirst=False,
- tables=None, **kwargs):
- super(SchemaDropper, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
- self.memo = {}
-
- def visit_metadata(self, metadata):
- if self.tables is not None:
- tables = self.tables
- else:
- tables = list(metadata.tables.values())
-
- collection = [
- t
- for t in reversed(sql_util.sort_tables(tables))
- if self._can_drop_table(t)
- ]
-
- seq_coll = [
- s
- for s in metadata._sequences.values()
- if s.column is None and self._can_drop_sequence(s)
- ]
-
- metadata.dispatch.before_drop(
- metadata, self.connection, tables=collection,
- checkfirst=self.checkfirst, _ddl_runner=self)
-
- for table in collection:
- self.traverse_single(table, drop_ok=True)
-
- for seq in seq_coll:
- self.traverse_single(seq, drop_ok=True)
-
- metadata.dispatch.after_drop(
- metadata, self.connection, tables=collection,
- checkfirst=self.checkfirst, _ddl_runner=self)
-
- def _can_drop_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_drop_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- ((not self.dialect.sequences_optional or
- not sequence.optional) and
- (not self.checkfirst or
- self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema))
- )
-
- def visit_index(self, index):
- self.connection.execute(schema.DropIndex(index))
-
- def visit_table(self, table, drop_ok=False):
- if not drop_ok and not self._can_drop_table(table):
- return
-
- table.dispatch.before_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.DropTable(table))
-
- table.dispatch.after_drop(table, self.connection,
- checkfirst=self.checkfirst,
- _ddl_runner=self)
-
- def visit_sequence(self, sequence, drop_ok=False):
- if not drop_ok and not self._can_drop_sequence(sequence):
- return
- self.connection.execute(schema.DropSequence(sequence))
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 3e8e96a42..ed975b8cf 100644
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -1,5 +1,5 @@
# engine/default.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,7 +16,8 @@ import re
import random
from . import reflection, interfaces, result
from ..sql import compiler, expression
-from .. import exc, types as sqltypes, util, pool, processors
+from .. import types as sqltypes
+from .. import exc, util, pool, processors
import codecs
import weakref
from .. import event
@@ -26,6 +27,7 @@ AUTOCOMMIT_REGEXP = re.compile(
re.I | re.UNICODE)
+
class DefaultDialect(interfaces.Dialect):
"""Default implementation of Dialect"""
@@ -57,6 +59,18 @@ class DefaultDialect(interfaces.Dialect):
supports_simple_order_by_label = True
+ engine_config_types = util.immutabledict([
+ ('convert_unicode', util.bool_or_str('force')),
+ ('pool_timeout', int),
+ ('echo', util.bool_or_str('debug')),
+ ('echo_pool', util.bool_or_str('debug')),
+ ('pool_recycle', int),
+ ('pool_size', int),
+ ('max_overflow', int),
+ ('pool_threadlocal', bool),
+ ('use_native_unicode', bool),
+ ])
+
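These per-dialect coercions replace the module-level ``_coerce_config()`` helper removed from ``engine/util.py`` later in this diff; they are applied when ``create_engine()`` is invoked with the ``_coerce_config`` flag, e.g. via ``engine_from_config()``. A small usage sketch::

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite://",
        "sqlalchemy.echo": "true",          # coerced via bool_or_str('debug')
        "sqlalchemy.pool_recycle": "3600",  # coerced to int
    }
    engine = engine_from_config(config, prefix="sqlalchemy.")
    print(engine.echo)  # True, not the string "true"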
# if the NUMERIC type
# returns decimal.Decimal.
# *not* the FLOAT type however.
@@ -97,6 +111,33 @@ class DefaultDialect(interfaces.Dialect):
server_version_info = None
+ construct_arguments = None
+ """Optional set of argument specifiers for various SQLAlchemy
+ constructs, typically schema items.
+
+ To implement, establish as a series of tuples, as in::
+
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": None
+ })
+ ]
+
+ If the above construct is established on the Postgresql dialect,
+ the ``Index`` construct will now accept additional keyword arguments
+ such as ``postgresql_using``, ``postgresql_where``, etc. Any kind of
+ ``postgresql_XYZ`` argument not corresponding to the above template will
+ be rejected with an ``ArgumentError``, for all those SQLAlchemy constructs
+ which implement the :class:`.DialectKWArgs` class.
+
+ The default is ``None``; older dialects which don't implement the argument
+ will have the old behavior of un-validated kwargs to schema/SQL constructs.
+
+ """
+
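For example, with the template above declared by the Postgresql dialect, the following namespaced keyword argument is accepted and validated; per the docstring, a ``postgresql_xyz`` argument not in the template would be rejected (0.9 API)::

    from sqlalchemy import MetaData, Table, Column, Integer, Index

    meta = MetaData()
    data = Table("data", meta, Column("x", Integer))

    # validated against the dialect's construct_arguments entry
    Index("ix_data_x", data.c.x, postgresql_where=data.c.x > 5)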
# indicates symbol names are
# UPPERCASEd if they are case insensitive
# within the database.
@@ -111,6 +152,7 @@ class DefaultDialect(interfaces.Dialect):
implicit_returning=None,
supports_right_nested_joins=None,
case_sensitive=True,
+ supports_native_boolean=None,
label_length=None, **kwargs):
if not getattr(self, 'ported_sqla_06', True):
@@ -136,7 +178,8 @@ class DefaultDialect(interfaces.Dialect):
self.type_compiler = self.type_compiler(self)
if supports_right_nested_joins is not None:
self.supports_right_nested_joins = supports_right_nested_joins
-
+ if supports_native_boolean is not None:
+ self.supports_native_boolean = supports_native_boolean
self.case_sensitive = case_sensitive
if label_length and label_length > self.max_identifier_length:
@@ -159,6 +202,8 @@ class DefaultDialect(interfaces.Dialect):
self._encoder = codecs.getencoder(self.encoding)
self._decoder = processors.to_unicode_processor_factory(self.encoding)
+
+
@util.memoized_property
def _type_memos(self):
return weakref.WeakKeyDictionary()
@@ -191,6 +236,10 @@ class DefaultDialect(interfaces.Dialect):
self.returns_unicode_strings = self._check_unicode_returns(connection)
+ if self.description_encoding is not None and \
+ self._check_unicode_description(connection):
+ self._description_decoder = self.description_encoding = None
+
self.do_rollback(connection.connection)
def on_connect(self):
@@ -207,46 +256,78 @@ class DefaultDialect(interfaces.Dialect):
"""
return None
- def _check_unicode_returns(self, connection):
+ def _check_unicode_returns(self, connection, additional_tests=None):
if util.py2k and not self.supports_unicode_statements:
cast_to = util.binary_type
else:
cast_to = util.text_type
- def check_unicode(formatstr, type_):
- cursor = connection.connection.cursor()
+ if self.positional:
+ parameters = self.execute_sequence_format()
+ else:
+ parameters = {}
+
+ def check_unicode(test):
+ statement = cast_to(expression.select([test]).compile(dialect=self))
try:
- try:
- cursor.execute(
- cast_to(
- expression.select(
- [expression.cast(
- expression.literal_column(
- "'test %s returns'" % formatstr),
- type_)
- ]).compile(dialect=self)
- )
- )
- row = cursor.fetchone()
-
- return isinstance(row[0], util.text_type)
- except self.dbapi.Error as de:
- util.warn("Exception attempting to "
- "detect unicode returns: %r" % de)
- return False
- finally:
+ cursor = connection.connection.cursor()
+ connection._cursor_execute(cursor, statement, parameters)
+ row = cursor.fetchone()
cursor.close()
+ except exc.DBAPIError as de:
+ # note that _cursor_execute() will have closed the cursor
+ # if an exception is thrown.
+ util.warn("Exception attempting to "
+ "detect unicode returns: %r" % de)
+ return False
+ else:
+ return isinstance(row[0], util.text_type)
+
+ tests = [
+ # detect plain VARCHAR
+ expression.cast(
+ expression.literal_column("'test plain returns'"),
+ sqltypes.VARCHAR(60)
+ ),
+ # detect if there's an NVARCHAR type with different behavior available
+ expression.cast(
+ expression.literal_column("'test unicode returns'"),
+ sqltypes.Unicode(60)
+ ),
+ ]
+
+ if additional_tests:
+ tests += additional_tests
+
+ results = set([check_unicode(test) for test in tests])
+
+ if results.issuperset([True, False]):
+ return "conditional"
+ else:
+ return results == set([True])
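The decision at the end, in isolation: each probe yields a boolean, and a mixed result set means the DBAPI returns unicode only for explicit ``Unicode`` types. A standalone restatement of that logic::

    def classify(results):
        results = set(results)
        if results.issuperset([True, False]):
            return "conditional"
        return results == set([True])

    assert classify([True, True]) is True
    assert classify([True, False]) == "conditional"
    assert classify([False, False]) is False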
- # detect plain VARCHAR
- unicode_for_varchar = check_unicode("plain", sqltypes.VARCHAR(60))
-
- # detect if there's an NVARCHAR type with different behavior available
- unicode_for_unicode = check_unicode("unicode", sqltypes.Unicode(60))
+ def _check_unicode_description(self, connection):
+ # all DBAPIs on Py2K return cursor.description as encoded bytes
+ # (until pypy2.1beta2 with sqlite), so let's just check it -
+ # it's likely others will start doing this too in Py2K.
- if unicode_for_unicode and not unicode_for_varchar:
- return "conditional"
+ if util.py2k and not self.supports_unicode_statements:
+ cast_to = util.binary_type
else:
- return unicode_for_varchar
+ cast_to = util.text_type
+
+ cursor = connection.connection.cursor()
+ try:
+ cursor.execute(
+ cast_to(
+ expression.select([
+ expression.literal_column("'x'").label("some_label")
+ ]).compile(dialect=self)
+ )
+ )
+ return isinstance(cursor.description[0][0], util.text_type)
+ finally:
+ cursor.close()
def type_descriptor(self, typeobj):
"""Provide a database-specific :class:`.TypeEngine` object, given
@@ -259,8 +340,7 @@ class DefaultDialect(interfaces.Dialect):
"""
return sqltypes.adapt_type(typeobj, self.colspecs)
- def reflecttable(self, connection, table, include_columns,
- exclude_columns=None):
+ def reflecttable(self, connection, table, include_columns, exclude_columns):
insp = reflection.Inspector.from_engine(connection)
return insp.reflecttable(table, include_columns, exclude_columns)
@@ -368,6 +448,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
statement = None
postfetch_cols = None
prefetch_cols = None
+ returning_cols = None
_is_implicit_returning = False
_is_explicit_returning = False
@@ -464,6 +545,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
if self.isinsert or self.isupdate:
self.postfetch_cols = self.compiled.postfetch
self.prefetch_cols = self.compiled.prefetch
+ self.returning_cols = self.compiled.returning
self.__process_defaults()
processors = compiled._bind_processors
@@ -722,6 +804,11 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
ipk.append(row[c])
self.inserted_primary_key = ipk
+ self.returned_defaults = row
+
+ def _fetch_implicit_update_returning(self, resultproxy):
+ row = resultproxy.fetchone()
+ self.returned_defaults = row
def lastrow_has_defaults(self):
return (self.isinsert or self.isupdate) and \
@@ -808,6 +895,8 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
and generate inserted_primary_key collection.
"""
+ key_getter = self.compiled._key_getters_for_crud_column[2]
+
if self.executemany:
if len(self.compiled.prefetch):
scalar_defaults = {}
@@ -831,7 +920,7 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
else:
val = self.get_update_default(c)
if val is not None:
- param[c.key] = val
+ param[key_getter(c)] = val
del self.current_parameters
else:
self.current_parameters = compiled_parameters = \
@@ -844,12 +933,12 @@ class DefaultExecutionContext(interfaces.ExecutionContext):
val = self.get_update_default(c)
if val is not None:
- compiled_parameters[c.key] = val
+ compiled_parameters[key_getter(c)] = val
del self.current_parameters
if self.isinsert:
self.inserted_primary_key = [
- self.compiled_parameters[0].get(c.key, None)
+ self.compiled_parameters[0].get(key_getter(c), None)
for c in self.compiled.\
statement.table.primary_key
]
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index 750aa2fcd..5c44933e8 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1,13 +1,15 @@
# engine/interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define core interfaces used by the engine system."""
-from .. import util, event, events
+from .. import util, event
+# backwards compat
+from ..sql.compiler import Compiled, TypeCompiler
class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
@@ -191,19 +193,21 @@ class Dialect(object):
pass
- def reflecttable(self, connection, table, include_columns=None):
+ def reflecttable(self, connection, table, include_columns, exclude_columns):
"""Load table description from the database.
Given a :class:`.Connection` and a
:class:`~sqlalchemy.schema.Table` object, reflect its columns and
- properties from the database. If include_columns (a list or
- set) is specified, limit the autoload to the given column
- names.
+ properties from the database.
- The default implementation uses the
- :class:`~sqlalchemy.engine.reflection.Inspector` interface to
- provide the output, building upon the granular table/column/
- constraint etc. methods of :class:`.Dialect`.
+ The implementation of this method is provided by
+ :meth:`.DefaultDialect.reflecttable`, which makes use of
+ :class:`.Inspector` to retrieve column information.
+
+ Dialects should **not** seek to implement this method, and should
+ instead implement individual schema inspection operations such as
+ :meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`,
+ etc.
"""
@@ -246,7 +250,7 @@ class Dialect(object):
Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
- instead implement this method directly.
+ instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
"""
@@ -338,7 +342,7 @@ class Dialect(object):
raise NotImplementedError()
- def get_unique_constraints(self, table_name, schema=None, **kw):
+ def get_unique_constraints(self, connection, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
Given a string `table_name` and an optional string `schema`, return
@@ -769,110 +773,6 @@ class ExecutionContext(object):
raise NotImplementedError()
-class Compiled(object):
- """Represent a compiled SQL or DDL expression.
-
- The ``__str__`` method of the ``Compiled`` object should produce
- the actual text of the statement. ``Compiled`` objects are
- specific to their underlying database dialect, and also may
- or may not be specific to the columns referenced within a
- particular set of bind parameters. In no case should the
- ``Compiled`` object be dependent on the actual values of those
- bind parameters, even though it may reference those values as
- defaults.
- """
-
- def __init__(self, dialect, statement, bind=None,
- compile_kwargs=util.immutabledict()):
- """Construct a new ``Compiled`` object.
-
- :param dialect: ``Dialect`` to compile against.
-
- :param statement: ``ClauseElement`` to be compiled.
-
- :param bind: Optional Engine or Connection to compile this
- statement against.
-
- :param compile_kwargs: additional kwargs that will be
- passed to the initial call to :meth:`.Compiled.process`.
-
- .. versionadded:: 0.8
-
- """
-
- self.dialect = dialect
- self.bind = bind
- if statement is not None:
- self.statement = statement
- self.can_execute = statement.supports_execution
- self.string = self.process(self.statement, **compile_kwargs)
-
- @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
- "within the constructor.")
- def compile(self):
- """Produce the internal string representation of this element."""
- pass
-
- @property
- def sql_compiler(self):
- """Return a Compiled that is capable of processing SQL expressions.
-
- If this compiler is one, it would likely just return 'self'.
-
- """
-
- raise NotImplementedError()
-
- def process(self, obj, **kwargs):
- return obj._compiler_dispatch(self, **kwargs)
-
- def __str__(self):
- """Return the string text of the generated SQL or DDL."""
-
- return self.string or ''
-
- def construct_params(self, params=None):
- """Return the bind params for this compiled object.
-
- :param params: a dict of string/object pairs whose values will
- override bind values compiled in to the
- statement.
- """
-
- raise NotImplementedError()
-
- @property
- def params(self):
- """Return the bind params for this compiled object."""
- return self.construct_params()
-
- def execute(self, *multiparams, **params):
- """Execute this compiled object."""
-
- e = self.bind
- if e is None:
- raise exc.UnboundExecutionError(
- "This Compiled object is not bound to any Engine "
- "or Connection.")
- return e._execute_compiled(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Execute this compiled object and return the result's
- scalar value."""
-
- return self.execute(*multiparams, **params).scalar()
-
-
-class TypeCompiler(object):
- """Produces DDL specification for TypeEngine objects."""
-
- def __init__(self, dialect):
- self.dialect = dialect
-
- def process(self, type_):
- return type_._compiler_dispatch(self)
-
-
class Connectable(object):
"""Interface for an object which supports execution of SQL constructs.
@@ -884,8 +784,6 @@ class Connectable(object):
"""
- dispatch = event.dispatcher(events.ConnectionEvents)
-
def connect(self, **kwargs):
"""Return a :class:`.Connection` object.
@@ -914,7 +812,8 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.create`, "
":meth:`.Index.create`, :meth:`.MetaData.create_all`")
def create(self, entity, **kwargs):
- """Emit CREATE statements for the given schema entity."""
+ """Emit CREATE statements for the given schema entity.
+ """
raise NotImplementedError()
@@ -923,7 +822,8 @@ class Connectable(object):
"object directly, i.e. :meth:`.Table.drop`, "
":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
def drop(self, entity, **kwargs):
- """Emit DROP statements for the given schema entity."""
+ """Emit DROP statements for the given schema entity.
+ """
raise NotImplementedError()
diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py
index 50b3f774c..45f100518 100644
--- a/lib/sqlalchemy/engine/reflection.py
+++ b/lib/sqlalchemy/engine/reflection.py
@@ -1,5 +1,5 @@
# engine/reflection.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -25,9 +25,9 @@ methods such as get_table_names, get_columns, etc.
"""
from .. import exc, sql
-from .. import schema as sa_schema
+from ..sql import schema as sa_schema
from .. import util
-from ..types import TypeEngine
+from ..sql.type_api import TypeEngine
from ..util import deprecated
from ..util import topological
from .. import inspection
@@ -161,7 +161,7 @@ class Inspector(object):
"""Return all table names in referred to within a particular schema.
The names are expected to be real tables only, not views.
- Views are instead returned using the :meth:`.get_view_names`
+ Views are instead returned using the :meth:`.Inspector.get_view_names`
method.
@@ -169,7 +169,7 @@ class Inspector(object):
database's default schema is
used, else the named schema is searched. If the database does not
support named schemas, behavior is undefined if ``schema`` is not
- passed as ``None``.
+ passed as ``None``. For special quoting, use :class:`.quoted_name`.
:param order_by: Optional, may be the string "foreign_key" to sort
the result on foreign key dependencies.
@@ -206,6 +206,13 @@ class Inspector(object):
This currently includes some options that apply to MySQL tables.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
if hasattr(self.dialect, 'get_table_options'):
return self.dialect.get_table_options(
@@ -217,6 +224,8 @@ class Inspector(object):
"""Return all view names in `schema`.
:param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
"""
return self.dialect.get_view_names(self.bind, schema,
@@ -226,6 +235,8 @@ class Inspector(object):
"""Return definition for `view_name`.
:param schema: Optional, retrieve names from a non-default schema.
+ For special quoting, use :class:`.quoted_name`.
+
"""
return self.dialect.get_view_definition(
@@ -251,6 +262,14 @@ class Inspector(object):
attrs
dict containing optional column attributes
+
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
col_defs = self.dialect.get_columns(self.bind, table_name, schema,
@@ -288,6 +307,13 @@ class Inspector(object):
name
optional name of the primary key constraint.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_pk_constraint(self.bind, table_name, schema,
info_cache=self.info_cache,
@@ -315,6 +341,13 @@ class Inspector(object):
name
optional name of the foreign key constraint.
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_foreign_keys(self.bind, table_name, schema,
@@ -336,6 +369,13 @@ class Inspector(object):
unique
boolean
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
"""
return self.dialect.get_indexes(self.bind, table_name,
@@ -354,7 +394,14 @@ class Inspector(object):
column_names
list of column names in order
- .. versionadded:: 0.9.0
+ :param table_name: string name of the table. For special quoting,
+ use :class:`.quoted_name`.
+
+ :param schema: string schema name; if omitted, uses the default schema
+ of the database connection. For special quoting,
+ use :class:`.quoted_name`.
+
+ .. versionadded:: 0.8.4
"""
@@ -384,24 +431,25 @@ class Inspector(object):
"""
dialect = self.bind.dialect
- # table attributes we might need.
- reflection_options = dict(
- (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)
-
schema = table.schema
table_name = table.name
- # apply table options
- tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
- if tbl_opts:
- table.kwargs.update(tbl_opts)
+ # get table-level arguments that are specifically
+ # intended for reflection, e.g. oracle_resolve_synonyms.
+ # these are unconditionally passed to related Table
+ # objects
+ reflection_options = dict(
+ (k, table.dialect_kwargs.get(k))
+ for k in dialect.reflection_options
+ if k in table.dialect_kwargs
+ )
- # table.kwargs will need to be passed to each reflection method. Make
- # sure keywords are strings.
- tblkw = table.kwargs.copy()
- for (k, v) in list(tblkw.items()):
- del tblkw[k]
- tblkw[str(k)] = v
+ # reflect table options, like mysql_engine
+ tbl_opts = self.get_table_options(table_name, schema, **table.dialect_kwargs)
+ if tbl_opts:
+ # add additional kwargs to the Table if the dialect
+ # returned them
+ table._validate_dialect_kwargs(tbl_opts)
if util.py2k:
if isinstance(schema, str):
@@ -409,10 +457,13 @@ class Inspector(object):
if isinstance(table_name, str):
table_name = table_name.decode(dialect.encoding)
- # columns
found_table = False
- for col_d in self.get_columns(table_name, schema, **tblkw):
+ cols_by_orig_name = {}
+
+ for col_d in self.get_columns(table_name, schema, **table.dialect_kwargs):
found_table = True
+ orig_name = col_d['name']
+
table.dispatch.column_reflect(self, table, col_d)
name = col_d['name']
@@ -422,12 +473,12 @@ class Inspector(object):
continue
coltype = col_d['type']
- col_kw = {
- 'nullable': col_d['nullable'],
- }
- for k in ('autoincrement', 'quote', 'info', 'key'):
- if k in col_d:
- col_kw[k] = col_d[k]
+
+ col_kw = dict(
+ (k, col_d[k])
+ for k in ['nullable', 'autoincrement', 'quote', 'info', 'key']
+ if k in col_d
+ )
colargs = []
if col_d.get('default') is not None:
@@ -441,7 +492,7 @@ class Inspector(object):
)
if 'sequence' in col_d:
- # TODO: mssql, maxdb and sybase are using this.
+ # TODO: mssql and sybase are using this.
seq = col_d['sequence']
sequence = sa_schema.Sequence(seq['name'], 1, 1)
if 'start' in seq:
@@ -450,37 +501,41 @@ class Inspector(object):
sequence.increment = seq['increment']
colargs.append(sequence)
- col = sa_schema.Column(name, coltype, *colargs, **col_kw)
+ cols_by_orig_name[orig_name] = col = \
+ sa_schema.Column(name, coltype, *colargs, **col_kw)
+
+ if col.key in table.primary_key:
+ col.primary_key = True
table.append_column(col)
if not found_table:
raise exc.NoSuchTableError(table.name)
- # Primary keys
- pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
+ pk_cons = self.get_pk_constraint(table_name, schema, **table.dialect_kwargs)
if pk_cons:
pk_cols = [
- table.c[pk]
+ cols_by_orig_name[pk]
for pk in pk_cons['constrained_columns']
- if pk in table.c and pk not in exclude_columns
- ]
- pk_cols += [
- pk
- for pk in table.primary_key
- if pk.key in exclude_columns
+ if pk in cols_by_orig_name and pk not in exclude_columns
]
- primary_key_constraint = sa_schema.PrimaryKeyConstraint(
- name=pk_cons.get('name'),
- *pk_cols
- )
- table.append_constraint(primary_key_constraint)
+ # update pk constraint name
+ table.primary_key.name = pk_cons.get('name')
+
+ # tell the PKConstraint to re-initialize
+ # its column collection
+ table.primary_key._reload(pk_cols)
- # Foreign keys
- fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
+ fkeys = self.get_foreign_keys(table_name, schema, **table.dialect_kwargs)
for fkey_d in fkeys:
conname = fkey_d['name']
- constrained_columns = fkey_d['constrained_columns']
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ constrained_columns = [
+ cols_by_orig_name[c].key
+ if c in cols_by_orig_name else c
+ for c in fkey_d['constrained_columns']
+ ]
if exclude_columns and set(constrained_columns).intersection(
exclude_columns):
continue
@@ -504,9 +559,14 @@ class Inspector(object):
)
for column in referred_columns:
refspec.append(".".join([referred_table, column]))
+ if 'options' in fkey_d:
+ options = fkey_d['options']
+ else:
+ options = {}
table.append_constraint(
sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
- conname, link_to_name=True))
+ conname, link_to_name=True,
+ **options))
# Indexes
indexes = self.get_indexes(table_name, schema)
for index_d in indexes:
@@ -520,5 +580,11 @@ class Inspector(object):
"Omitting %s KEY for (%s), key covers omitted columns." %
(flavor, ', '.join(columns)))
continue
- sa_schema.Index(name, *[table.columns[c] for c in columns],
+ # look for columns by orig name in cols_by_orig_name,
+ # but support columns that are in-Python only as fallback
+ sa_schema.Index(name, *[
+ cols_by_orig_name[c] if c in cols_by_orig_name
+ else table.c[c]
+ for c in columns
+ ],
**dict(unique=unique))
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index 65ce3b742..f9e0ca0d2 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -1,5 +1,5 @@
# engine/result.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,8 +9,8 @@ and :class:`.RowProxy."""
-from .. import exc, types, util
-from ..sql import expression
+from .. import exc, util
+from ..sql import expression, sqltypes
import collections
# This reconstructor is necessary so that pickles with the C extension or
@@ -125,8 +125,11 @@ class RowProxy(BaseRowProxy):
__hash__ = None
+ def __lt__(self, other):
+ return tuple(self) < tuple(other)
+
def __eq__(self, other):
- return other is self or other == tuple(self)
+ return other is self or tuple(other) == tuple(self)
def __ne__(self, other):
return not self.__eq__(other)
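With ``__lt__`` added, result rows become orderable and compare against plain tuples in both directions; e.g.::

    from sqlalchemy import create_engine

    engine = create_engine("sqlite://")
    rows = engine.execute("SELECT 2 AS a UNION SELECT 1").fetchall()
    assert sorted(rows) == [(1,), (2,)]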
@@ -205,10 +208,10 @@ class ResultMetaData(object):
else colname.lower()]
except KeyError:
name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
else:
name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
+ colname, None, typemap.get(coltype, sqltypes.NULLTYPE)
processor = context.get_result_processor(type_, colname, coltype)
@@ -621,6 +624,24 @@ class ResultProxy(object):
else:
return self.context.compiled_parameters[0]
+ @property
+ def returned_defaults(self):
+ """Return the values of default columns that were fetched using
+ the :meth:`.ValuesBase.return_defaults` feature.
+
+ The value is an instance of :class:`.RowProxy`, or ``None``
+ if :meth:`.ValuesBase.return_defaults` was not used or if the
+ backend does not support RETURNING.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`.ValuesBase.return_defaults`
+
+ """
+ return self.context.returned_defaults
+
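A usage sketch of the new accessor (SQLite shown only so the example is runnable; per the docstring above, the value may be ``None`` on a backend without RETURNING)::

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, String, text)

    engine = create_engine("sqlite://")
    meta = MetaData()
    t = Table("t", meta,
              Column("id", Integer, primary_key=True),
              Column("name", String),
              Column("status", String, server_default=text("'new'")))
    meta.create_all(engine)

    conn = engine.connect()
    result = conn.execute(t.insert().values(name="x").return_defaults())
    print(result.returned_defaults)  # RowProxy of fetched defaults, or None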
def lastrow_has_defaults(self):
"""Return ``lastrow_has_defaults()`` from the underlying
:class:`.ExecutionContext`.
diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py
index ab9d370a3..f6c064033 100644
--- a/lib/sqlalchemy/engine/strategies.py
+++ b/lib/sqlalchemy/engine/strategies.py
@@ -1,5 +1,5 @@
# engine/strategies.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -49,18 +49,27 @@ class DefaultEngineStrategy(EngineStrategy):
dialect_cls = u.get_dialect()
+ if kwargs.pop('_coerce_config', False):
+ def pop_kwarg(key, default=None):
+ value = kwargs.pop(key, default)
+ if key in dialect_cls.engine_config_types:
+ value = dialect_cls.engine_config_types[key](value)
+ return value
+ else:
+ pop_kwarg = kwargs.pop
+
dialect_args = {}
# consume dialect arguments from kwargs
for k in util.get_cls_kwargs(dialect_cls):
if k in kwargs:
- dialect_args[k] = kwargs.pop(k)
+ dialect_args[k] = pop_kwarg(k)
dbapi = kwargs.pop('module', None)
if dbapi is None:
dbapi_args = {}
for k in util.get_func_kwargs(dialect_cls.dbapi):
if k in kwargs:
- dbapi_args[k] = kwargs.pop(k)
+ dbapi_args[k] = pop_kwarg(k)
dbapi = dialect_cls.dbapi(**dbapi_args)
dialect_args['dbapi'] = dbapi
@@ -70,15 +79,15 @@ class DefaultEngineStrategy(EngineStrategy):
# assemble connection arguments
(cargs, cparams) = dialect.create_connect_args(u)
- cparams.update(kwargs.pop('connect_args', {}))
+ cparams.update(pop_kwarg('connect_args', {}))
# look for existing pool or create
- pool = kwargs.pop('pool', None)
+ pool = pop_kwarg('pool', None)
if pool is None:
def connect():
try:
return dialect.connect(*cargs, **cparams)
- except Exception as e:
+ except dialect.dbapi.Error as e:
invalidated = dialect.is_disconnect(e, None, None)
util.raise_from_cause(
exc.DBAPIError.instance(None, None,
@@ -87,9 +96,9 @@ class DefaultEngineStrategy(EngineStrategy):
)
)
- creator = kwargs.pop('creator', connect)
+ creator = pop_kwarg('creator', connect)
- poolclass = kwargs.pop('poolclass', None)
+ poolclass = pop_kwarg('poolclass', None)
if poolclass is None:
poolclass = dialect_cls.get_pool_class(u)
pool_args = {}
@@ -106,7 +115,7 @@ class DefaultEngineStrategy(EngineStrategy):
for k in util.get_cls_kwargs(poolclass):
tk = translate.get(k, k)
if tk in kwargs:
- pool_args[k] = kwargs.pop(tk)
+ pool_args[k] = pop_kwarg(tk)
pool = poolclass(creator, **pool_args)
else:
if isinstance(pool, poollib._DBProxy):
@@ -119,7 +128,7 @@ class DefaultEngineStrategy(EngineStrategy):
engine_args = {}
for k in util.get_cls_kwargs(engineclass):
if k in kwargs:
- engine_args[k] = kwargs.pop(k)
+ engine_args[k] = pop_kwarg(k)
_initialize = kwargs.pop('_initialize', True)
diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py
index d4aeafd6f..ae647a78e 100644
--- a/lib/sqlalchemy/engine/threadlocal.py
+++ b/lib/sqlalchemy/engine/threadlocal.py
@@ -1,5 +1,5 @@
# engine/threadlocal.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py
index ed5729eea..78ac06187 100644
--- a/lib/sqlalchemy/engine/url.py
+++ b/lib/sqlalchemy/engine/url.py
@@ -1,5 +1,5 @@
# engine/url.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,6 +16,7 @@ be used directly and is also accepted directly by ``create_engine()``.
import re
from .. import exc, util
from . import Dialect
+from ..dialects import registry
class URL(object):
@@ -23,8 +24,8 @@ class URL(object):
Represent the components of a URL used to connect to a database.
This object is suitable to be passed directly to a
- ``create_engine()`` call. The fields of the URL are parsed from a
- string by the ``module-level make_url()`` function. the string
+ :func:`~sqlalchemy.create_engine` call. The fields of the URL are parsed from a
+ string by the :func:`.make_url` function. The string
format of the URL is an RFC-1738-style string.
All initialization parameters are available as public attributes.
@@ -61,15 +62,19 @@ class URL(object):
self.database = database
self.query = query or {}
- def __str__(self):
+ def __to_string__(self, hide_password=True):
s = self.drivername + "://"
if self.username is not None:
- s += self.username
+ s += _rfc_1738_quote(self.username)
if self.password is not None:
- s += ':' + util.quote_plus(self.password)
+ s += ':' + ('***' if hide_password
+ else _rfc_1738_quote(self.password))
s += "@"
if self.host is not None:
- s += self.host
+ if ':' in self.host:
+ s += "[%s]" % self.host
+ else:
+ s += self.host
if self.port is not None:
s += ':' + str(self.port)
if self.database is not None:
@@ -80,6 +85,12 @@ class URL(object):
s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
return s
+ def __str__(self):
+ return self.__to_string__(hide_password=False)
+
+ def __repr__(self):
+ return self.__to_string__()
+
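Together with the ``Engine.__repr__`` change earlier in this diff, this keeps passwords out of logs and tracebacks while ``str()`` remains lossless::

    from sqlalchemy.engine import url

    u = url.make_url("postgresql://scott:tiger@localhost/test")
    print(repr(u))  # postgresql://scott:***@localhost/test
    print(str(u))   # postgresql://scott:tiger@localhost/test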
def __hash__(self):
return hash(str(self))
@@ -102,7 +113,6 @@ class URL(object):
name = self.drivername
else:
name = self.drivername.replace('+', '.')
- from sqlalchemy.dialects import registry
cls = registry.load(name)
# check for legacy dialects that
# would return a module with 'dialect' as the
@@ -160,10 +170,13 @@ def _parse_rfc1738_args(name):
(?P<name>[\w\+]+)://
(?:
(?P<username>[^:/]*)
- (?::(?P<password>[^/]*))?
+ (?::(?P<password>.*))?
@)?
(?:
- (?P<host>[^/:]*)
+ (?:
+ \[(?P<ipv6host>[^/]+)\] |
+ (?P<ipv4host>[^/:]+)
+ )?
(?::(?P<port>[^/]*))?
)?
(?:/(?P<database>.*))?
@@ -182,10 +195,15 @@ def _parse_rfc1738_args(name):
query = None
components['query'] = query
+ if components['username'] is not None:
+ components['username'] = _rfc_1738_unquote(components['username'])
+
if components['password'] is not None:
- components['password'] = \
- util.unquote_plus(components['password'])
+ components['password'] = _rfc_1738_unquote(components['password'])
+ ipv4host = components.pop('ipv4host')
+ ipv6host = components.pop('ipv6host')
+ components['host'] = ipv4host or ipv6host
name = components.pop('name')
return URL(name, **components)
else:
@@ -193,6 +211,12 @@ def _parse_rfc1738_args(name):
"Could not parse rfc1738 URL from string '%s'" % name)
+def _rfc_1738_quote(text):
+ return re.sub(r'[:@/]', lambda m: "%%%X" % ord(m.group(0)), text)
+
+def _rfc_1738_unquote(text):
+ return util.unquote(text)
+
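The effect of the new quoting helpers and the revised host pattern above: reserved characters in credentials round-trip when percent-encoded, and IPv6 literals are written in brackets::

    from sqlalchemy.engine import url

    u = url.make_url("postgresql://user:p%40ss@[2001:db8::1]:5432/db")
    print(u.password)  # p@ss
    print(u.host)      # 2001:db8::1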
def _parse_keyvalue_args(name):
m = re.match(r'(\w+)://(.*)', name)
if m is not None:
diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py
index e56452751..6c0644be4 100644
--- a/lib/sqlalchemy/engine/util.py
+++ b/lib/sqlalchemy/engine/util.py
@@ -1,33 +1,11 @@
# engine/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .. import util
-
-def _coerce_config(configuration, prefix):
- """Convert configuration values to expected types."""
-
- options = dict((key[len(prefix):], configuration[key])
- for key in configuration
- if key.startswith(prefix))
- for option, type_ in (
- ('convert_unicode', util.bool_or_str('force')),
- ('pool_timeout', int),
- ('echo', util.bool_or_str('debug')),
- ('echo_pool', util.bool_or_str('debug')),
- ('pool_recycle', int),
- ('pool_size', int),
- ('max_overflow', int),
- ('pool_threadlocal', bool),
- ('use_native_unicode', bool),
- ):
- util.coerce_kw_type(options, option, type_)
- return options
-
-
def connection_memoize(key):
"""Decorator, memoize a function in a connection.info stash.
diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py
deleted file mode 100644
index 64ae49976..000000000
--- a/lib/sqlalchemy/event.py
+++ /dev/null
@@ -1,735 +0,0 @@
-# sqlalchemy/event.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Base event API."""
-
-from __future__ import absolute_import
-
-from . import util, exc
-from itertools import chain
-import weakref
-
-CANCEL = util.symbol('CANCEL')
-NO_RETVAL = util.symbol('NO_RETVAL')
-
-
-def listen(target, identifier, fn, *args, **kw):
- """Register a listener function for the given target.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.schema import UniqueConstraint
-
- def unique_constraint_name(const, table):
- const.name = "uq_%s_%s" % (
- table.name,
- list(const.columns)[0].name
- )
- event.listen(
- UniqueConstraint,
- "after_parent_attach",
- unique_constraint_name)
-
- """
-
- for evt_cls in _registrars[identifier]:
- tgt = evt_cls._accept_with(target)
- if tgt is not None:
- tgt.dispatch._listen(tgt, identifier, fn, *args, **kw)
- return
- raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
- (identifier, target))
-
-
-def listens_for(target, identifier, *args, **kw):
- """Decorate a function as a listener for the given target + identifier.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.schema import UniqueConstraint
-
- @event.listens_for(UniqueConstraint, "after_parent_attach")
- def unique_constraint_name(const, table):
- const.name = "uq_%s_%s" % (
- table.name,
- list(const.columns)[0].name
- )
- """
- def decorate(fn):
- listen(target, identifier, fn, *args, **kw)
- return fn
- return decorate
-
-
-def remove(target, identifier, fn):
- """Remove an event listener.
-
- Note that some event removals, particularly for those event dispatchers
- which create wrapper functions and secondary even listeners, may not yet
- be supported.
-
- """
- for evt_cls in _registrars[identifier]:
- for tgt in evt_cls._accept_with(target):
- tgt.dispatch._remove(identifier, tgt, fn)
- return
-
-def _legacy_signature(since, argnames, converter=None):
- def leg(fn):
- if not hasattr(fn, '_legacy_signatures'):
- fn._legacy_signatures = []
- fn._legacy_signatures.append((since, argnames, converter))
- return fn
- return leg
-
-
-_registrars = util.defaultdict(list)
-
-
-def _is_event_name(name):
- return not name.startswith('_') and name != 'dispatch'
-
-
-class _UnpickleDispatch(object):
- """Serializable callable that re-generates an instance of
- :class:`_Dispatch` given a particular :class:`.Events` subclass.
-
- """
- def __call__(self, _parent_cls):
- for cls in _parent_cls.__mro__:
- if 'dispatch' in cls.__dict__:
- return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
- else:
- raise AttributeError("No class with a 'dispatch' member present.")
-
-
-class _Dispatch(object):
- """Mirror the event listening definitions of an Events class with
- listener collections.
-
- Classes which define a "dispatch" member will return a
- non-instantiated :class:`._Dispatch` subclass when the member
- is accessed at the class level. When the "dispatch" member is
- accessed at the instance level of its owner, an instance
- of the :class:`._Dispatch` class is returned.
-
- A :class:`._Dispatch` class is generated for each :class:`.Events`
- class defined, by the :func:`._create_dispatcher_class` function.
- The original :class:`.Events` classes remain untouched.
- This decouples the construction of :class:`.Events` subclasses from
- the implementation used by the event internals, and allows
- inspecting tools like Sphinx to work in an unsurprising
- way against the public API.
-
- """
-
- def __init__(self, _parent_cls):
- self._parent_cls = _parent_cls
-
- def _join(self, other):
- """Create a 'join' of this :class:`._Dispatch` and another.
-
- This new dispatcher will dispatch events to both
- :class:`._Dispatch` objects.
-
- Once constructed, the joined dispatch will respond to new events
- added to this dispatcher, but may not be aware of events
- added to the other dispatcher after creation of the join. This is
- currently for performance reasons so that both dispatchers need
- not be "evaluated" fully on each call.
-
- """
- if '_joined_dispatch_cls' not in self.__class__.__dict__:
- cls = type(
- "Joined%s" % self.__class__.__name__,
- (_JoinedDispatcher, self.__class__), {}
- )
- for ls in _event_descriptors(self):
- setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))
-
- self.__class__._joined_dispatch_cls = cls
- return self._joined_dispatch_cls(self, other)
-
- def __reduce__(self):
- return _UnpickleDispatch(), (self._parent_cls, )
-
- def _update(self, other, only_propagate=True):
- """Populate from the listeners in another :class:`_Dispatch`
- object."""
-
- for ls in _event_descriptors(other):
- getattr(self, ls.name).\
- for_modify(self)._update(ls, only_propagate=only_propagate)
-
- @util.hybridmethod
- def _clear(self):
- for attr in dir(self):
- if _is_event_name(attr):
- getattr(self, attr).for_modify(self).clear()
-
-
-def _event_descriptors(target):
- return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
-
-
-class _EventMeta(type):
- """Intercept new Event subclasses and create
- associated _Dispatch classes."""
-
- def __init__(cls, classname, bases, dict_):
- _create_dispatcher_class(cls, classname, bases, dict_)
- return type.__init__(cls, classname, bases, dict_)
-
-
-def _create_dispatcher_class(cls, classname, bases, dict_):
- """Create a :class:`._Dispatch` class corresponding to an
- :class:`.Events` class."""
-
- # there's all kinds of ways to do this,
- # i.e. make a Dispatch class that shares the '_listen' method
- # of the Event class, this is the straight monkeypatch.
- dispatch_base = getattr(cls, 'dispatch', _Dispatch)
- cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
- (dispatch_base, ), {})
- dispatch_cls._listen = cls._listen
-
- for k in dict_:
- if _is_event_name(k):
- setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k]))
- _registrars[k].append(cls)
-
-
-def _remove_dispatcher(cls):
- for k in dir(cls):
- if _is_event_name(k):
- _registrars[k].remove(cls)
- if not _registrars[k]:
- del _registrars[k]
-
-class Events(util.with_metaclass(_EventMeta, object)):
- """Define event listening functions for a particular target type."""
-
- @classmethod
- def _accept_with(cls, target):
- # Mapper, ClassManager, Session override this to
- # also accept classes, scoped_sessions, sessionmakers, etc.
- if hasattr(target, 'dispatch') and (
- isinstance(target.dispatch, cls.dispatch) or \
- isinstance(target.dispatch, type) and \
- issubclass(target.dispatch, cls.dispatch)
- ):
- return target
- else:
- return None
-
- @classmethod
- def _listen(cls, target, identifier, fn, propagate=False, insert=False,
- named=False):
- dispatch_descriptor = getattr(target.dispatch, identifier)
- fn = dispatch_descriptor._adjust_fn_spec(fn, named)
-
- if insert:
- dispatch_descriptor.\
- for_modify(target.dispatch).insert(fn, target, propagate)
- else:
- dispatch_descriptor.\
- for_modify(target.dispatch).append(fn, target, propagate)
-
- @classmethod
- def _remove(cls, target, identifier, fn):
- getattr(target.dispatch, identifier).remove(fn, target)
-
- @classmethod
- def _clear(cls):
- cls.dispatch._clear()
-
-
-class _DispatchDescriptor(object):
- """Class-level attributes on :class:`._Dispatch` classes."""
-
- def __init__(self, parent_dispatch_cls, fn):
- self.__name__ = fn.__name__
- argspec = util.inspect_getargspec(fn)
- self.arg_names = argspec.args[1:]
- self.has_kw = bool(argspec.keywords)
- self.legacy_signatures = list(reversed(
- sorted(
- getattr(fn, '_legacy_signatures', []),
- key=lambda s: s[0]
- )
- ))
- self.__doc__ = fn.__doc__ = self._augment_fn_docs(parent_dispatch_cls, fn)
-
- self._clslevel = weakref.WeakKeyDictionary()
- self._empty_listeners = weakref.WeakKeyDictionary()
-
- def _adjust_fn_spec(self, fn, named):
- argspec = util.get_callable_argspec(fn, no_self=True)
- if named:
- fn = self._wrap_fn_for_kw(fn)
- fn = self._wrap_fn_for_legacy(fn, argspec)
- return fn
-
- def _wrap_fn_for_kw(self, fn):
- def wrap_kw(*args, **kw):
- argdict = dict(zip(self.arg_names, args))
- argdict.update(kw)
- return fn(**argdict)
- return wrap_kw
-
- def _wrap_fn_for_legacy(self, fn, argspec):
- for since, argnames, conv in self.legacy_signatures:
- if argnames[-1] == "**kw":
- has_kw = True
- argnames = argnames[0:-1]
- else:
- has_kw = False
-
- if len(argnames) == len(argspec.args) \
- and has_kw is bool(argspec.keywords):
-
- if conv:
- assert not has_kw
- def wrap_leg(*args):
- return fn(*conv(*args))
- else:
- def wrap_leg(*args, **kw):
- argdict = dict(zip(self.arg_names, args))
- args = [argdict[name] for name in argnames]
- if has_kw:
- return fn(*args, **kw)
- else:
- return fn(*args)
- return wrap_leg
- else:
- return fn
-
- def _indent(self, text, indent):
- return "\n".join(
- indent + line
- for line in text.split("\n")
- )
-
- def _standard_listen_example(self, sample_target, fn):
- example_kw_arg = self._indent(
- "\n".join(
- "%(arg)s = kw['%(arg)s']" % {"arg": arg}
- for arg in self.arg_names[0:2]
- ),
- " ")
- if self.legacy_signatures:
- current_since = max(since for since, args, conv in self.legacy_signatures)
- else:
- current_since = None
- text = (
- "from sqlalchemy import event\n\n"
- "# standard decorator style%(current_since)s\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
- "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "\n # ... (event handling logic) ...\n"
- )
-
- if len(self.arg_names) > 2:
- text += (
-
- "\n# named argument style (new in 0.9)\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n"
- "def receive_%(event_name)s(**kw):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "%(example_kw_arg)s\n"
- "\n # ... (event handling logic) ...\n"
- )
-
- text %= {
- "current_since": " (arguments as of %s)" %
- current_since if current_since else "",
- "event_name": fn.__name__,
- "has_kw_arguments": " **kw" if self.has_kw else "",
- "named_event_arguments": ", ".join(self.arg_names),
- "example_kw_arg": example_kw_arg,
- "sample_target": sample_target
- }
- return text
-
- def _legacy_listen_examples(self, sample_target, fn):
- text = ""
- for since, args, conv in self.legacy_signatures:
- text += (
- "\n# legacy calling style (pre-%(since)s)\n"
- "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
- "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
- " \"listen for the '%(event_name)s' event\"\n"
- "\n # ... (event handling logic) ...\n" % {
- "since": since,
- "event_name": fn.__name__,
- "has_kw_arguments": " **kw" if self.has_kw else "",
- "named_event_arguments": ", ".join(args),
- "sample_target": sample_target
- }
- )
- return text
-
- def _version_signature_changes(self):
- since, args, conv = self.legacy_signatures[0]
- return (
- "\n.. versionchanged:: %(since)s\n"
- " The ``%(event_name)s`` event now accepts the \n"
- " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
- " Listener functions which accept the previous argument \n"
- " signature(s) listed above will be automatically \n"
- " adapted to the new signature." % {
- "since": since,
- "event_name": self.__name__,
- "named_event_arguments": ", ".join(self.arg_names),
- "has_kw_arguments": ", **kw" if self.has_kw else ""
- }
- )
-
- def _augment_fn_docs(self, parent_dispatch_cls, fn):
- header = ".. container:: event_signatures\n\n"\
- " Example argument forms::\n"\
- "\n"
-
- sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
- text = (
- header +
- self._indent(
- self._standard_listen_example(sample_target, fn),
- " " * 8)
- )
- if self.legacy_signatures:
- text += self._indent(
- self._legacy_listen_examples(sample_target, fn),
- " " * 8)
-
- text += self._version_signature_changes()
-
- return util.inject_docstring_text(fn.__doc__,
- text,
- 1
- )
-
- def _contains(self, cls, evt):
- return cls in self._clslevel and \
- evt in self._clslevel[cls]
-
- def insert(self, obj, target, propagate):
- assert isinstance(target, type), \
- "Class-level Event targets must be classes."
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- if cls is not target and cls not in self._clslevel:
- self.update_subclass(cls)
- else:
- if cls not in self._clslevel:
- self._clslevel[cls] = []
- self._clslevel[cls].insert(0, obj)
-
- def append(self, obj, target, propagate):
- assert isinstance(target, type), \
- "Class-level Event targets must be classes."
-
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- if cls is not target and cls not in self._clslevel:
- self.update_subclass(cls)
- else:
- if cls not in self._clslevel:
- self._clslevel[cls] = []
- self._clslevel[cls].append(obj)
-
- def update_subclass(self, target):
- if target not in self._clslevel:
- self._clslevel[target] = []
- clslevel = self._clslevel[target]
- for cls in target.__mro__[1:]:
- if cls in self._clslevel:
- clslevel.extend([
- fn for fn
- in self._clslevel[cls]
- if fn not in clslevel
- ])
-
- def remove(self, obj, target):
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- if cls in self._clslevel:
- self._clslevel[cls].remove(obj)
-
- def clear(self):
- """Clear all class level listeners"""
-
- for dispatcher in self._clslevel.values():
- dispatcher[:] = []
-
- def for_modify(self, obj):
- """Return an event collection which can be modified.
-
- For _DispatchDescriptor at the class level of
- a dispatcher, this returns self.
-
- """
- return self
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- elif obj._parent_cls in self._empty_listeners:
- ret = self._empty_listeners[obj._parent_cls]
- else:
- self._empty_listeners[obj._parent_cls] = ret = \
- _EmptyListener(self, obj._parent_cls)
- # assigning it to __dict__ means
- # memoized for fast re-access. but more memory.
- obj.__dict__[self.__name__] = ret
- return ret
-
-class _HasParentDispatchDescriptor(object):
- def _adjust_fn_spec(self, fn, named):
- return self.parent._adjust_fn_spec(fn, named)
-
-class _EmptyListener(_HasParentDispatchDescriptor):
- """Serves as a class-level interface to the events
- served by a _DispatchDescriptor, when there are no
- instance-level events present.
-
- Is replaced by _ListenerCollection when instance-level
- events are added.
-
- """
- def __init__(self, parent, target_cls):
- if target_cls not in parent._clslevel:
- parent.update_subclass(target_cls)
- self.parent = parent # _DispatchDescriptor
- self.parent_listeners = parent._clslevel[target_cls]
- self.name = parent.__name__
- self.propagate = frozenset()
- self.listeners = ()
-
-
- def for_modify(self, obj):
- """Return an event collection which can be modified.
-
- For _EmptyListener at the instance level of
- a dispatcher, this generates a new
- _ListenerCollection, applies it to the instance,
- and returns it.
-
- """
- result = _ListenerCollection(self.parent, obj._parent_cls)
- if obj.__dict__[self.name] is self:
- obj.__dict__[self.name] = result
- return result
-
- def _needs_modify(self, *args, **kw):
- raise NotImplementedError("need to call for_modify()")
-
- exec_once = insert = append = remove = clear = _needs_modify
-
- def __call__(self, *args, **kw):
- """Execute this event."""
-
- for fn in self.parent_listeners:
- fn(*args, **kw)
-
- def __len__(self):
- return len(self.parent_listeners)
-
- def __iter__(self):
- return iter(self.parent_listeners)
-
- def __bool__(self):
- return bool(self.parent_listeners)
-
- __nonzero__ = __bool__
-
-
-class _CompoundListener(_HasParentDispatchDescriptor):
- _exec_once = False
-
- def exec_once(self, *args, **kw):
- """Execute this event, but only if it has not been
- executed already for this collection."""
-
- if not self._exec_once:
- self(*args, **kw)
- self._exec_once = True
-
- # I'm not entirely thrilled about the overhead here,
- # but this allows class-level listeners to be added
- # at any point.
- #
-    # In the absence of instance-level listeners,
- # we stay with the _EmptyListener object when called
- # at the instance level.
-
- def __call__(self, *args, **kw):
- """Execute this event."""
-
- for fn in self.parent_listeners:
- fn(*args, **kw)
- for fn in self.listeners:
- fn(*args, **kw)
-
- def __len__(self):
- return len(self.parent_listeners) + len(self.listeners)
-
- def __iter__(self):
- return chain(self.parent_listeners, self.listeners)
-
- def __bool__(self):
- return bool(self.listeners or self.parent_listeners)
-
- __nonzero__ = __bool__
-
-class _ListenerCollection(_CompoundListener):
- """Instance-level attributes on instances of :class:`._Dispatch`.
-
- Represents a collection of listeners.
-
- As of 0.7.9, _ListenerCollection is only first
- created via the _EmptyListener.for_modify() method.
-
- """
-
- def __init__(self, parent, target_cls):
- if target_cls not in parent._clslevel:
- parent.update_subclass(target_cls)
- self.parent_listeners = parent._clslevel[target_cls]
- self.parent = parent
- self.name = parent.__name__
- self.listeners = []
- self.propagate = set()
-
- def for_modify(self, obj):
- """Return an event collection which can be modified.
-
- For _ListenerCollection at the instance level of
- a dispatcher, this returns self.
-
- """
- return self
-
- def _update(self, other, only_propagate=True):
- """Populate from the listeners in another :class:`_Dispatch`
- object."""
-
- existing_listeners = self.listeners
- existing_listener_set = set(existing_listeners)
- self.propagate.update(other.propagate)
- existing_listeners.extend([l for l
- in other.listeners
- if l not in existing_listener_set
- and not only_propagate or l in self.propagate
- ])
-
- def insert(self, obj, target, propagate):
- if obj not in self.listeners:
- self.listeners.insert(0, obj)
- if propagate:
- self.propagate.add(obj)
-
- def append(self, obj, target, propagate):
- if obj not in self.listeners:
- self.listeners.append(obj)
- if propagate:
- self.propagate.add(obj)
-
- def remove(self, obj, target):
- if obj in self.listeners:
- self.listeners.remove(obj)
- self.propagate.discard(obj)
-
- def clear(self):
- self.listeners[:] = []
- self.propagate.clear()
-
-
-class _JoinedDispatcher(object):
- """Represent a connection between two _Dispatch objects."""
-
- def __init__(self, local, parent):
- self.local = local
- self.parent = parent
- self._parent_cls = local._parent_cls
-
-
-class _JoinedDispatchDescriptor(object):
- def __init__(self, name):
- self.name = name
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- else:
- obj.__dict__[self.name] = ret = _JoinedListener(
- obj.parent, self.name,
- getattr(obj.local, self.name)
- )
- return ret
-
-
-class _JoinedListener(_CompoundListener):
- _exec_once = False
-
- def __init__(self, parent, name, local):
- self.parent = parent
- self.name = name
- self.local = local
- self.parent_listeners = self.local
-
- # fix .listeners for the parent. This means
- # new events added to the parent won't be picked
- # up here. Alternatively, the listeners can
- # be via @property to just return getattr(self.parent, self.name)
- # each time. less performant.
- self.listeners = list(getattr(self.parent, self.name))
-
- def _adjust_fn_spec(self, fn, named):
- return self.local._adjust_fn_spec(fn, named)
-
- def for_modify(self, obj):
- self.local = self.parent_listeners = self.local.for_modify(obj)
- return self
-
- def insert(self, obj, target, propagate):
- self.local.insert(obj, target, propagate)
-
- def append(self, obj, target, propagate):
- self.local.append(obj, target, propagate)
-
- def remove(self, obj, target):
- self.local.remove(obj, target)
-
- def clear(self):
- raise NotImplementedError()
-
-
-class dispatcher(object):
- """Descriptor used by target classes to
- deliver the _Dispatch class at the class level
- and produce new _Dispatch instances for target
- instances.
-
- """
- def __init__(self, events):
- self.dispatch_cls = events.dispatch
- self.events = events
-
- def __get__(self, obj, cls):
- if obj is None:
- return self.dispatch_cls
- obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
- return disp
diff --git a/lib/sqlalchemy/event/__init__.py b/lib/sqlalchemy/event/__init__.py
new file mode 100644
index 000000000..b43bf9bfa
--- /dev/null
+++ b/lib/sqlalchemy/event/__init__.py
@@ -0,0 +1,10 @@
+# event/__init__.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains
+from .base import Events, dispatcher
+from .attr import RefCollection
+from .legacy import _legacy_signature
diff --git a/lib/sqlalchemy/event/api.py b/lib/sqlalchemy/event/api.py
new file mode 100644
index 000000000..20e74d90e
--- /dev/null
+++ b/lib/sqlalchemy/event/api.py
@@ -0,0 +1,107 @@
+# event/api.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Public API functions for the event system.
+
+"""
+from __future__ import absolute_import
+
+from .. import util, exc
+from .base import _registrars
+from .registry import _EventKey
+
+CANCEL = util.symbol('CANCEL')
+NO_RETVAL = util.symbol('NO_RETVAL')
+
+
+def _event_key(target, identifier, fn):
+ for evt_cls in _registrars[identifier]:
+ tgt = evt_cls._accept_with(target)
+ if tgt is not None:
+ return _EventKey(target, identifier, fn, tgt)
+ else:
+ raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
+ (identifier, target))
+
+def listen(target, identifier, fn, *args, **kw):
+ """Register a listener function for the given target.
+
+ e.g.::
+
+ from sqlalchemy import event
+ from sqlalchemy.schema import UniqueConstraint
+
+ def unique_constraint_name(const, table):
+ const.name = "uq_%s_%s" % (
+ table.name,
+ list(const.columns)[0].name
+ )
+ event.listen(
+ UniqueConstraint,
+ "after_parent_attach",
+ unique_constraint_name)
+
+ """
+
+ _event_key(target, identifier, fn).listen(*args, **kw)
+
+
+def listens_for(target, identifier, *args, **kw):
+ """Decorate a function as a listener for the given target + identifier.
+
+ e.g.::
+
+ from sqlalchemy import event
+ from sqlalchemy.schema import UniqueConstraint
+
+ @event.listens_for(UniqueConstraint, "after_parent_attach")
+ def unique_constraint_name(const, table):
+ const.name = "uq_%s_%s" % (
+ table.name,
+ list(const.columns)[0].name
+ )
+ """
+ def decorate(fn):
+ listen(target, identifier, fn, *args, **kw)
+ return fn
+ return decorate
+
+
+def remove(target, identifier, fn):
+ """Remove an event listener.
+
+ The arguments here should match exactly those which were sent to
+ :func:`.listen`; all the event registration which proceeded as a result
+ of this call will be reverted by calling :func:`.remove` with the same
+ arguments.
+
+ e.g.::
+
+ # if a function was registered like this...
+ @event.listens_for(SomeMappedClass, "before_insert", propagate=True)
+ def my_listener_function(*arg):
+ pass
+
+ # ... it's removed like this
+ event.remove(SomeMappedClass, "before_insert", my_listener_function)
+
+ Above, the listener function associated with ``SomeMappedClass`` was also
+ propagated to subclasses of ``SomeMappedClass``; the :func:`.remove` function
+ will revert all of these operations.
+
+ .. versionadded:: 0.9.0
+
+ """
+ _event_key(target, identifier, fn).remove()
+
+def contains(target, identifier, fn):
+ """Return True if the given target/ident/fn is set up to listen.
+
+ .. versionadded:: 0.9.0
+
+ """
+
+ return _event_key(target, identifier, fn).contains()
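
# A minimal sketch of the full public round trip, assuming an in-memory
# SQLite engine; the listener name is our own invention.  listen(),
# contains() and remove() all resolve to the same _EventKey, which is
# what makes removal by the original arguments possible.
from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")

def my_on_connect(dbapi_connection, connection_record):
    print("new DBAPI connection")

event.listen(engine, "connect", my_on_connect)
assert event.contains(engine, "connect", my_on_connect)

event.remove(engine, "connect", my_on_connect)
assert not event.contains(engine, "connect", my_on_connect)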
diff --git a/lib/sqlalchemy/event/attr.py b/lib/sqlalchemy/event/attr.py
new file mode 100644
index 000000000..3f8947546
--- /dev/null
+++ b/lib/sqlalchemy/event/attr.py
@@ -0,0 +1,376 @@
+# event/attr.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Attribute implementation for _Dispatch classes.
+
+The various listener targets for a particular event class are represented
+as attributes, which refer to collections of listeners to be fired off.
+These collections can exist at the class level as well as at the instance
+level. An event is fired off using code like this::
+
+ some_object.dispatch.first_connect(arg1, arg2)
+
+Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
+``first_connect`` is typically an instance of ``_ListenerCollection``
+if event listeners are present, or ``_EmptyListener`` if none are present.
+
+The attribute mechanics here spend effort trying to ensure listener functions
+are available with a minimum of function call overhead, that unnecessary
+objects aren't created (i.e. many empty per-instance listener collections),
+as well as that everything is garbage collectable when owning references are
+lost. Other features such as "propagation" of listener functions across
+many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
+as well as support for subclass propagation (e.g. events assigned to
+``Pool`` vs. ``QueuePool``) are all implemented here.
+
+"""
+
+from __future__ import absolute_import
+
+from .. import util
+from . import registry
+from . import legacy
+from itertools import chain
+import weakref
+
+class RefCollection(object):
+ @util.memoized_property
+ def ref(self):
+ return weakref.ref(self, registry._collection_gced)
+
+class _DispatchDescriptor(RefCollection):
+ """Class-level attributes on :class:`._Dispatch` classes."""
+
+ def __init__(self, parent_dispatch_cls, fn):
+ self.__name__ = fn.__name__
+ argspec = util.inspect_getargspec(fn)
+ self.arg_names = argspec.args[1:]
+ self.has_kw = bool(argspec.keywords)
+ self.legacy_signatures = list(reversed(
+ sorted(
+ getattr(fn, '_legacy_signatures', []),
+ key=lambda s: s[0]
+ )
+ ))
+ self.__doc__ = fn.__doc__ = legacy._augment_fn_docs(
+ self, parent_dispatch_cls, fn)
+
+ self._clslevel = weakref.WeakKeyDictionary()
+ self._empty_listeners = weakref.WeakKeyDictionary()
+
+ def _adjust_fn_spec(self, fn, named):
+ if named:
+ fn = self._wrap_fn_for_kw(fn)
+ if self.legacy_signatures:
+ try:
+ argspec = util.get_callable_argspec(fn, no_self=True)
+ except ValueError:
+ pass
+ else:
+ fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
+ return fn
+
+ def _wrap_fn_for_kw(self, fn):
+ def wrap_kw(*args, **kw):
+ argdict = dict(zip(self.arg_names, args))
+ argdict.update(kw)
+ return fn(**argdict)
+ return wrap_kw
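
# Standalone sketch (not part of the module) of what the wrapper above
# does: positional event arguments are zipped against the declared
# argument names and handed to the listener as keywords.  The names here
# are illustrative only.
arg_names = ['dbapi_connection', 'connection_record']

def wrap_fn_for_kw(fn):
    def wrap_kw(*args, **kw):
        argdict = dict(zip(arg_names, args))
        argdict.update(kw)
        return fn(**argdict)
    return wrap_kw

@wrap_fn_for_kw
def receive_connect(**kw):
    print(sorted(kw))

receive_connect("a-dbapi-connection", "a-record")
# ['connection_record', 'dbapi_connection']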
+
+
+ def insert(self, event_key, propagate):
+ target = event_key.dispatch_target
+ assert isinstance(target, type), \
+ "Class-level Event targets must be classes."
+ stack = [target]
+ while stack:
+ cls = stack.pop(0)
+ stack.extend(cls.__subclasses__())
+ if cls is not target and cls not in self._clslevel:
+ self.update_subclass(cls)
+ else:
+ if cls not in self._clslevel:
+ self._clslevel[cls] = []
+ self._clslevel[cls].insert(0, event_key._listen_fn)
+ registry._stored_in_collection(event_key, self)
+
+ def append(self, event_key, propagate):
+ target = event_key.dispatch_target
+ assert isinstance(target, type), \
+ "Class-level Event targets must be classes."
+
+ stack = [target]
+ while stack:
+ cls = stack.pop(0)
+ stack.extend(cls.__subclasses__())
+ if cls is not target and cls not in self._clslevel:
+ self.update_subclass(cls)
+ else:
+ if cls not in self._clslevel:
+ self._clslevel[cls] = []
+ self._clslevel[cls].append(event_key._listen_fn)
+ registry._stored_in_collection(event_key, self)
+
+ def update_subclass(self, target):
+ if target not in self._clslevel:
+ self._clslevel[target] = []
+ clslevel = self._clslevel[target]
+ for cls in target.__mro__[1:]:
+ if cls in self._clslevel:
+ clslevel.extend([
+ fn for fn
+ in self._clslevel[cls]
+ if fn not in clslevel
+ ])
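
# Simplified sketch of the traversal above: update_subclass() walks the
# target's __mro__ so that a subclass first seen after listeners were
# attached to its base still inherits those listeners.  Pool/QueuePool
# stand in for any event-target hierarchy.
_clslevel = {}

class Pool(object): pass
class QueuePool(Pool): pass

_clslevel[Pool] = ['listener_a']

def update_subclass(target):
    clslevel = _clslevel.setdefault(target, [])
    for cls in target.__mro__[1:]:
        for fn in _clslevel.get(cls, ()):
            if fn not in clslevel:
                clslevel.append(fn)

update_subclass(QueuePool)
print(_clslevel[QueuePool])   # ['listener_a']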
+
+ def remove(self, event_key):
+ target = event_key.dispatch_target
+ stack = [target]
+ while stack:
+ cls = stack.pop(0)
+ stack.extend(cls.__subclasses__())
+ if cls in self._clslevel:
+ self._clslevel[cls].remove(event_key.fn)
+ registry._removed_from_collection(event_key, self)
+
+ def clear(self):
+ """Clear all class level listeners"""
+
+ to_clear = set()
+ for dispatcher in self._clslevel.values():
+ to_clear.update(dispatcher)
+ dispatcher[:] = []
+ registry._clear(self, to_clear)
+
+ def for_modify(self, obj):
+ """Return an event collection which can be modified.
+
+ For _DispatchDescriptor at the class level of
+ a dispatcher, this returns self.
+
+ """
+ return self
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ elif obj._parent_cls in self._empty_listeners:
+ ret = self._empty_listeners[obj._parent_cls]
+ else:
+ self._empty_listeners[obj._parent_cls] = ret = \
+ _EmptyListener(self, obj._parent_cls)
+            # assigning it to __dict__ memoizes it for fast re-access,
+            # at the cost of extra memory per instance.
+ obj.__dict__[self.__name__] = ret
+ return ret
+
+class _HasParentDispatchDescriptor(object):
+ def _adjust_fn_spec(self, fn, named):
+ return self.parent._adjust_fn_spec(fn, named)
+
+class _EmptyListener(_HasParentDispatchDescriptor):
+ """Serves as a class-level interface to the events
+ served by a _DispatchDescriptor, when there are no
+ instance-level events present.
+
+ Is replaced by _ListenerCollection when instance-level
+ events are added.
+
+ """
+ def __init__(self, parent, target_cls):
+ if target_cls not in parent._clslevel:
+ parent.update_subclass(target_cls)
+ self.parent = parent # _DispatchDescriptor
+ self.parent_listeners = parent._clslevel[target_cls]
+ self.name = parent.__name__
+ self.propagate = frozenset()
+ self.listeners = ()
+
+
+ def for_modify(self, obj):
+ """Return an event collection which can be modified.
+
+ For _EmptyListener at the instance level of
+ a dispatcher, this generates a new
+ _ListenerCollection, applies it to the instance,
+ and returns it.
+
+ """
+ result = _ListenerCollection(self.parent, obj._parent_cls)
+ if obj.__dict__[self.name] is self:
+ obj.__dict__[self.name] = result
+ return result
+
+ def _needs_modify(self, *args, **kw):
+ raise NotImplementedError("need to call for_modify()")
+
+ exec_once = insert = append = remove = clear = _needs_modify
+
+ def __call__(self, *args, **kw):
+ """Execute this event."""
+
+ for fn in self.parent_listeners:
+ fn(*args, **kw)
+
+ def __len__(self):
+ return len(self.parent_listeners)
+
+ def __iter__(self):
+ return iter(self.parent_listeners)
+
+ def __bool__(self):
+ return bool(self.parent_listeners)
+
+ __nonzero__ = __bool__
+
+
+class _CompoundListener(_HasParentDispatchDescriptor):
+ _exec_once = False
+
+ def exec_once(self, *args, **kw):
+ """Execute this event, but only if it has not been
+ executed already for this collection."""
+
+ if not self._exec_once:
+ self(*args, **kw)
+ self._exec_once = True
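
# Trimmed-down sketch of the exec_once latch above: the collection fires
# at most once, no matter how many times exec_once() is invoked.
class OnceCollection(object):
    _exec_once = False

    def __call__(self):
        print("fired")

    def exec_once(self):
        if not self._exec_once:
            self()
            self._exec_once = True

c = OnceCollection()
c.exec_once()   # fired
c.exec_once()   # no-op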
+
+ def __call__(self, *args, **kw):
+ """Execute this event."""
+
+ for fn in self.parent_listeners:
+ fn(*args, **kw)
+ for fn in self.listeners:
+ fn(*args, **kw)
+
+ def __len__(self):
+ return len(self.parent_listeners) + len(self.listeners)
+
+ def __iter__(self):
+ return chain(self.parent_listeners, self.listeners)
+
+ def __bool__(self):
+ return bool(self.listeners or self.parent_listeners)
+
+ __nonzero__ = __bool__
+
+class _ListenerCollection(RefCollection, _CompoundListener):
+ """Instance-level attributes on instances of :class:`._Dispatch`.
+
+ Represents a collection of listeners.
+
+ As of 0.7.9, _ListenerCollection is only first
+ created via the _EmptyListener.for_modify() method.
+
+ """
+
+ def __init__(self, parent, target_cls):
+ if target_cls not in parent._clslevel:
+ parent.update_subclass(target_cls)
+ self.parent_listeners = parent._clslevel[target_cls]
+ self.parent = parent
+ self.name = parent.__name__
+ self.listeners = []
+ self.propagate = set()
+
+ def for_modify(self, obj):
+ """Return an event collection which can be modified.
+
+ For _ListenerCollection at the instance level of
+ a dispatcher, this returns self.
+
+ """
+ return self
+
+ def _update(self, other, only_propagate=True):
+ """Populate from the listeners in another :class:`_Dispatch`
+ object."""
+
+ existing_listeners = self.listeners
+ existing_listener_set = set(existing_listeners)
+ self.propagate.update(other.propagate)
+ other_listeners = [l for l
+ in other.listeners
+ if l not in existing_listener_set
+ and not only_propagate or l in self.propagate
+ ]
+
+ existing_listeners.extend(other_listeners)
+
+ to_associate = other.propagate.union(other_listeners)
+ registry._stored_in_collection_multi(self, other, to_associate)
+
+ def insert(self, event_key, propagate):
+ if event_key._listen_fn not in self.listeners:
+ event_key.prepend_to_list(self, self.listeners)
+ if propagate:
+ self.propagate.add(event_key._listen_fn)
+
+ def append(self, event_key, propagate):
+ if event_key._listen_fn not in self.listeners:
+ event_key.append_to_list(self, self.listeners)
+ if propagate:
+ self.propagate.add(event_key._listen_fn)
+
+ def remove(self, event_key):
+ self.listeners.remove(event_key._listen_fn)
+ self.propagate.discard(event_key._listen_fn)
+ registry._removed_from_collection(event_key, self)
+
+ def clear(self):
+ registry._clear(self, self.listeners)
+ self.propagate.clear()
+ self.listeners[:] = []
+
+
+class _JoinedDispatchDescriptor(object):
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self
+ else:
+ obj.__dict__[self.name] = ret = _JoinedListener(
+ obj.parent, self.name,
+ getattr(obj.local, self.name)
+ )
+ return ret
+
+
+class _JoinedListener(_CompoundListener):
+ _exec_once = False
+
+ def __init__(self, parent, name, local):
+ self.parent = parent
+ self.name = name
+ self.local = local
+ self.parent_listeners = self.local
+
+ @property
+ def listeners(self):
+ return getattr(self.parent, self.name)
+
+ def _adjust_fn_spec(self, fn, named):
+ return self.local._adjust_fn_spec(fn, named)
+
+ def for_modify(self, obj):
+ self.local = self.parent_listeners = self.local.for_modify(obj)
+ return self
+
+ def insert(self, event_key, propagate):
+ self.local.insert(event_key, propagate)
+
+ def append(self, event_key, propagate):
+ self.local.append(event_key, propagate)
+
+ def remove(self, event_key):
+ self.local.remove(event_key)
+
+ def clear(self):
+ raise NotImplementedError()
+
+
diff --git a/lib/sqlalchemy/event/base.py b/lib/sqlalchemy/event/base.py
new file mode 100644
index 000000000..5c8d92cb3
--- /dev/null
+++ b/lib/sqlalchemy/event/base.py
@@ -0,0 +1,217 @@
+# event/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Base implementation classes.
+
+The public-facing ``Events`` serves as the base class for an event interface;
+it's public attributes represent different kinds of events. These attributes
+are mirrored onto a ``_Dispatch`` class, which serves as a container for
+collections of listener functions. These collections are represented both
+at the class level of a particular ``_Dispatch`` class as well as within
+instances of ``_Dispatch``.
+
+"""
+from __future__ import absolute_import
+
+from .. import util
+from .attr import _JoinedDispatchDescriptor, _EmptyListener, _DispatchDescriptor
+
+_registrars = util.defaultdict(list)
+
+
+def _is_event_name(name):
+ return not name.startswith('_') and name != 'dispatch'
+
+
+class _UnpickleDispatch(object):
+ """Serializable callable that re-generates an instance of
+ :class:`_Dispatch` given a particular :class:`.Events` subclass.
+
+ """
+ def __call__(self, _parent_cls):
+ for cls in _parent_cls.__mro__:
+ if 'dispatch' in cls.__dict__:
+ return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
+ else:
+ raise AttributeError("No class with a 'dispatch' member present.")
+
+
+class _Dispatch(object):
+ """Mirror the event listening definitions of an Events class with
+ listener collections.
+
+ Classes which define a "dispatch" member will return a
+ non-instantiated :class:`._Dispatch` subclass when the member
+ is accessed at the class level. When the "dispatch" member is
+ accessed at the instance level of its owner, an instance
+ of the :class:`._Dispatch` class is returned.
+
+ A :class:`._Dispatch` class is generated for each :class:`.Events`
+ class defined, by the :func:`._create_dispatcher_class` function.
+ The original :class:`.Events` classes remain untouched.
+ This decouples the construction of :class:`.Events` subclasses from
+ the implementation used by the event internals, and allows
+ inspecting tools like Sphinx to work in an unsurprising
+ way against the public API.
+
+ """
+
+ _events = None
+ """reference the :class:`.Events` class which this
+ :class:`._Dispatch` is created for."""
+
+ def __init__(self, _parent_cls):
+ self._parent_cls = _parent_cls
+
+ @util.classproperty
+ def _listen(cls):
+ return cls._events._listen
+
+ def _join(self, other):
+ """Create a 'join' of this :class:`._Dispatch` and another.
+
+ This new dispatcher will dispatch events to both
+ :class:`._Dispatch` objects.
+
+ """
+ if '_joined_dispatch_cls' not in self.__class__.__dict__:
+ cls = type(
+ "Joined%s" % self.__class__.__name__,
+ (_JoinedDispatcher, self.__class__), {}
+ )
+ for ls in _event_descriptors(self):
+ setattr(cls, ls.name, _JoinedDispatchDescriptor(ls.name))
+
+ self.__class__._joined_dispatch_cls = cls
+ return self._joined_dispatch_cls(self, other)
+
+ def __reduce__(self):
+ return _UnpickleDispatch(), (self._parent_cls, )
+
+ def _update(self, other, only_propagate=True):
+ """Populate from the listeners in another :class:`_Dispatch`
+ object."""
+
+ for ls in _event_descriptors(other):
+ if isinstance(ls, _EmptyListener):
+ continue
+ getattr(self, ls.name).\
+ for_modify(self)._update(ls, only_propagate=only_propagate)
+
+ @util.hybridmethod
+ def _clear(self):
+ for attr in dir(self):
+ if _is_event_name(attr):
+ getattr(self, attr).for_modify(self).clear()
+
+
+def _event_descriptors(target):
+ return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
+
+
+class _EventMeta(type):
+ """Intercept new Event subclasses and create
+ associated _Dispatch classes."""
+
+ def __init__(cls, classname, bases, dict_):
+ _create_dispatcher_class(cls, classname, bases, dict_)
+ return type.__init__(cls, classname, bases, dict_)
+
+
+def _create_dispatcher_class(cls, classname, bases, dict_):
+ """Create a :class:`._Dispatch` class corresponding to an
+ :class:`.Events` class."""
+
+ # there's all kinds of ways to do this,
+ # i.e. make a Dispatch class that shares the '_listen' method
+ # of the Event class, this is the straight monkeypatch.
+ dispatch_base = getattr(cls, 'dispatch', _Dispatch)
+ dispatch_cls = type("%sDispatch" % classname,
+ (dispatch_base, ), {})
+ cls._set_dispatch(cls, dispatch_cls)
+
+ for k in dict_:
+ if _is_event_name(k):
+ setattr(dispatch_cls, k, _DispatchDescriptor(cls, dict_[k]))
+ _registrars[k].append(cls)
+
+ if getattr(cls, '_dispatch_target', None):
+ cls._dispatch_target.dispatch = dispatcher(cls)
+
+
+def _remove_dispatcher(cls):
+ for k in dir(cls):
+ if _is_event_name(k):
+ _registrars[k].remove(cls)
+ if not _registrars[k]:
+ del _registrars[k]
+
+class Events(util.with_metaclass(_EventMeta, object)):
+ """Define event listening functions for a particular target type."""
+
+ @staticmethod
+ def _set_dispatch(cls, dispatch_cls):
+ # this allows an Events subclass to define additional utility
+ # methods made available to the target via
+ # "self.dispatch._events.<utilitymethod>"
+        # @staticmethod to allow easy "super" calls while in a metaclass
+ # constructor.
+ cls.dispatch = dispatch_cls
+ dispatch_cls._events = cls
+
+
+ @classmethod
+ def _accept_with(cls, target):
+ # Mapper, ClassManager, Session override this to
+ # also accept classes, scoped_sessions, sessionmakers, etc.
+ if hasattr(target, 'dispatch') and (
+ isinstance(target.dispatch, cls.dispatch) or \
+ isinstance(target.dispatch, type) and \
+ issubclass(target.dispatch, cls.dispatch)
+ ):
+ return target
+ else:
+ return None
+
+ @classmethod
+ def _listen(cls, event_key, propagate=False, insert=False, named=False):
+ event_key.base_listen(propagate=propagate, insert=insert, named=named)
+
+ @classmethod
+ def _remove(cls, event_key):
+ event_key.remove()
+
+ @classmethod
+ def _clear(cls):
+ cls.dispatch._clear()
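
# Hypothetical end-to-end sketch: with _EventMeta in place, declaring a
# new event interface is just a matter of subclassing Events.  MyTarget,
# MyEvents and "before_work" are invented names, not part of SQLAlchemy.
from sqlalchemy import event
from sqlalchemy.event import Events

class MyTarget(object):
    """Some class of ours that should accept events."""

class MyEvents(Events):
    _target_class_doc = "SomeMyTarget"
    _dispatch_target = MyTarget

    def before_work(self, target, amount):
        """Fired before ``target`` performs work."""

@event.listens_for(MyTarget, "before_work")
def receive_before_work(target, amount):
    print("about to do %d units" % amount)

t = MyTarget()
t.dispatch.before_work(t, 5)   # about to do 5 units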
+
+
+class _JoinedDispatcher(object):
+ """Represent a connection between two _Dispatch objects."""
+
+ def __init__(self, local, parent):
+ self.local = local
+ self.parent = parent
+ self._parent_cls = local._parent_cls
+
+
+class dispatcher(object):
+ """Descriptor used by target classes to
+ deliver the _Dispatch class at the class level
+ and produce new _Dispatch instances for target
+ instances.
+
+ """
+ def __init__(self, events):
+ self.dispatch_cls = events.dispatch
+ self.events = events
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ return self.dispatch_cls
+ obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
+ return disp
+
diff --git a/lib/sqlalchemy/event/legacy.py b/lib/sqlalchemy/event/legacy.py
new file mode 100644
index 000000000..d8a66674d
--- /dev/null
+++ b/lib/sqlalchemy/event/legacy.py
@@ -0,0 +1,156 @@
+# event/legacy.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Routines to handle adaption of legacy call signatures,
+generation of deprecation notes and docstrings.
+
+"""
+
+from .. import util
+
+def _legacy_signature(since, argnames, converter=None):
+ def leg(fn):
+ if not hasattr(fn, '_legacy_signatures'):
+ fn._legacy_signatures = []
+ fn._legacy_signatures.append((since, argnames, converter))
+ return fn
+ return leg
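
# Sketch of the decorator in use: it only records metadata on the
# function; _DispatchDescriptor later reads _legacy_signatures to build
# the adapting wrappers and the generated docstring notes below.
from sqlalchemy.event.legacy import _legacy_signature

@_legacy_signature("0.9", ["dbapi_con", "con_record"])
def connect(dbapi_connection, connection_record):
    pass

print(connect._legacy_signatures)
# [('0.9', ['dbapi_con', 'con_record'], None)]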
+
+def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
+ for since, argnames, conv in dispatch_descriptor.legacy_signatures:
+ if argnames[-1] == "**kw":
+ has_kw = True
+ argnames = argnames[0:-1]
+ else:
+ has_kw = False
+
+ if len(argnames) == len(argspec.args) \
+ and has_kw is bool(argspec.keywords):
+
+ if conv:
+ assert not has_kw
+ def wrap_leg(*args):
+ return fn(*conv(*args))
+ else:
+ def wrap_leg(*args, **kw):
+ argdict = dict(zip(dispatch_descriptor.arg_names, args))
+ args = [argdict[name] for name in argnames]
+ if has_kw:
+ return fn(*args, **kw)
+ else:
+ return fn(*args)
+ return wrap_leg
+ else:
+ return fn
+
+def _indent(text, indent):
+ return "\n".join(
+ indent + line
+ for line in text.split("\n")
+ )
+
+def _standard_listen_example(dispatch_descriptor, sample_target, fn):
+ example_kw_arg = _indent(
+ "\n".join(
+ "%(arg)s = kw['%(arg)s']" % {"arg": arg}
+ for arg in dispatch_descriptor.arg_names[0:2]
+ ),
+ " ")
+ if dispatch_descriptor.legacy_signatures:
+ current_since = max(since for since, args, conv
+ in dispatch_descriptor.legacy_signatures)
+ else:
+ current_since = None
+ text = (
+ "from sqlalchemy import event\n\n"
+ "# standard decorator style%(current_since)s\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
+ "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "\n # ... (event handling logic) ...\n"
+ )
+
+ if len(dispatch_descriptor.arg_names) > 3:
+ text += (
+
+ "\n# named argument style (new in 0.9)\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s', named=True)\n"
+ "def receive_%(event_name)s(**kw):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "%(example_kw_arg)s\n"
+ "\n # ... (event handling logic) ...\n"
+ )
+
+ text %= {
+ "current_since": " (arguments as of %s)" %
+ current_since if current_since else "",
+ "event_name": fn.__name__,
+ "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
+ "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+ "example_kw_arg": example_kw_arg,
+ "sample_target": sample_target
+ }
+ return text
+
+def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
+ text = ""
+ for since, args, conv in dispatch_descriptor.legacy_signatures:
+ text += (
+ "\n# legacy calling style (pre-%(since)s)\n"
+ "@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
+ "def receive_%(event_name)s(%(named_event_arguments)s%(has_kw_arguments)s):\n"
+ " \"listen for the '%(event_name)s' event\"\n"
+ "\n # ... (event handling logic) ...\n" % {
+ "since": since,
+ "event_name": fn.__name__,
+ "has_kw_arguments": " **kw" if dispatch_descriptor.has_kw else "",
+ "named_event_arguments": ", ".join(args),
+ "sample_target": sample_target
+ }
+ )
+ return text
+
+def _version_signature_changes(dispatch_descriptor):
+ since, args, conv = dispatch_descriptor.legacy_signatures[0]
+ return (
+ "\n.. versionchanged:: %(since)s\n"
+ " The ``%(event_name)s`` event now accepts the \n"
+ " arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
+ " Listener functions which accept the previous argument \n"
+ " signature(s) listed above will be automatically \n"
+ " adapted to the new signature." % {
+ "since": since,
+ "event_name": dispatch_descriptor.__name__,
+ "named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
+ "has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
+ }
+ )
+
+def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
+ header = ".. container:: event_signatures\n\n"\
+ " Example argument forms::\n"\
+ "\n"
+
+ sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
+ text = (
+ header +
+ _indent(
+ _standard_listen_example(
+ dispatch_descriptor, sample_target, fn),
+ " " * 8)
+ )
+ if dispatch_descriptor.legacy_signatures:
+ text += _indent(
+ _legacy_listen_examples(
+ dispatch_descriptor, sample_target, fn),
+ " " * 8)
+
+ text += _version_signature_changes(dispatch_descriptor)
+
+ return util.inject_docstring_text(fn.__doc__,
+ text,
+ 1
+ )
diff --git a/lib/sqlalchemy/event/registry.py b/lib/sqlalchemy/event/registry.py
new file mode 100644
index 000000000..7710ff2d2
--- /dev/null
+++ b/lib/sqlalchemy/event/registry.py
@@ -0,0 +1,236 @@
+# event/registry.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Provides managed registration services on behalf of :func:`.listen`
+arguments.
+
+By "managed registration", we mean that event listening functions and
+other objects can be added to various collections in such a way that their
+membership in all those collections can be revoked at once, based on
+an equivalent :class:`._EventKey`.
+
+"""
+
+from __future__ import absolute_import
+
+import weakref
+import collections
+import types
+from .. import exc
+
+
+_key_to_collection = collections.defaultdict(dict)
+"""
+Given an original listen() argument, can locate all
+listener collections and the listener fn contained
+
+(target, identifier, fn) -> {
+ ref(listenercollection) -> ref(listener_fn)
+ ref(listenercollection) -> ref(listener_fn)
+ ref(listenercollection) -> ref(listener_fn)
+ }
+"""
+
+_collection_to_key = collections.defaultdict(dict)
+"""
+Given a _ListenerCollection or _DispatchDescriptor, can locate
+all the original listen() arguments and the listener fn contained
+
+ref(listenercollection) -> {
+ ref(listener_fn) -> (target, identifier, fn),
+ ref(listener_fn) -> (target, identifier, fn),
+ ref(listener_fn) -> (target, identifier, fn),
+ }
+"""
+
+def _collection_gced(ref):
+ # defaultdict, so can't get a KeyError
+ if not _collection_to_key or ref not in _collection_to_key:
+ return
+ listener_to_key = _collection_to_key.pop(ref)
+ for key in listener_to_key.values():
+ if key in _key_to_collection:
+ # defaultdict, so can't get a KeyError
+ dispatch_reg = _key_to_collection[key]
+ dispatch_reg.pop(ref)
+ if not dispatch_reg:
+ _key_to_collection.pop(key)
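
# The hook above is driven by the standard weakref callback protocol:
# the registry holds weakref.ref(collection, _collection_gced), so when
# a listener collection is garbage collected its registry entries are
# pruned.  The mechanism in isolation:
import weakref

class Collection(object):
    pass

def collection_gced(ref):
    print("collection went away; prune registry entries")

c = Collection()
ref = weakref.ref(c, collection_gced)  # callback fires on collection
del c                                  # on CPython this prints immediately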
+
+def _stored_in_collection(event_key, owner):
+ key = event_key._key
+
+ dispatch_reg = _key_to_collection[key]
+
+ owner_ref = owner.ref
+ listen_ref = weakref.ref(event_key._listen_fn)
+
+ if owner_ref in dispatch_reg:
+ assert dispatch_reg[owner_ref] == listen_ref
+ else:
+ dispatch_reg[owner_ref] = listen_ref
+
+ listener_to_key = _collection_to_key[owner_ref]
+ listener_to_key[listen_ref] = key
+
+def _removed_from_collection(event_key, owner):
+ key = event_key._key
+
+ dispatch_reg = _key_to_collection[key]
+
+ listen_ref = weakref.ref(event_key._listen_fn)
+
+ owner_ref = owner.ref
+ dispatch_reg.pop(owner_ref, None)
+ if not dispatch_reg:
+ del _key_to_collection[key]
+
+ if owner_ref in _collection_to_key:
+ listener_to_key = _collection_to_key[owner_ref]
+ listener_to_key.pop(listen_ref)
+
+def _stored_in_collection_multi(newowner, oldowner, elements):
+ if not elements:
+ return
+
+ oldowner = oldowner.ref
+ newowner = newowner.ref
+
+ old_listener_to_key = _collection_to_key[oldowner]
+ new_listener_to_key = _collection_to_key[newowner]
+
+ for listen_fn in elements:
+ listen_ref = weakref.ref(listen_fn)
+ key = old_listener_to_key[listen_ref]
+ dispatch_reg = _key_to_collection[key]
+ if newowner in dispatch_reg:
+ assert dispatch_reg[newowner] == listen_ref
+ else:
+ dispatch_reg[newowner] = listen_ref
+
+ new_listener_to_key[listen_ref] = key
+
+def _clear(owner, elements):
+ if not elements:
+ return
+
+ owner = owner.ref
+ listener_to_key = _collection_to_key[owner]
+ for listen_fn in elements:
+ listen_ref = weakref.ref(listen_fn)
+ key = listener_to_key[listen_ref]
+ dispatch_reg = _key_to_collection[key]
+ dispatch_reg.pop(owner, None)
+
+ if not dispatch_reg:
+ del _key_to_collection[key]
+
+
+class _EventKey(object):
+ """Represent :func:`.listen` arguments.
+ """
+
+
+ def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None):
+ self.target = target
+ self.identifier = identifier
+ self.fn = fn
+ if isinstance(fn, types.MethodType):
+ self.fn_key = id(fn.__func__), id(fn.__self__)
+ else:
+ self.fn_key = id(fn)
+ self.fn_wrap = _fn_wrap
+ self.dispatch_target = dispatch_target
+
+ @property
+ def _key(self):
+ return (id(self.target), self.identifier, self.fn_key)
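
# Why fn_key treats bound methods specially: a new bound-method object
# is created on every attribute access, so id(fn) alone would not be a
# stable key; the (id(__func__), id(__self__)) pair is.
class Target(object):
    def listener(self):
        pass

t = Target()
print(t.listener is t.listener)   # False: new bound method each access

key1 = (id(t.listener.__func__), id(t.listener.__self__))
key2 = (id(t.listener.__func__), id(t.listener.__self__))
print(key1 == key2)               # True: stable identity key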
+
+ def with_wrapper(self, fn_wrap):
+ if fn_wrap is self._listen_fn:
+ return self
+ else:
+ return _EventKey(
+ self.target,
+ self.identifier,
+ self.fn,
+ self.dispatch_target,
+ _fn_wrap=fn_wrap
+ )
+
+ def with_dispatch_target(self, dispatch_target):
+ if dispatch_target is self.dispatch_target:
+ return self
+ else:
+ return _EventKey(
+ self.target,
+ self.identifier,
+ self.fn,
+ dispatch_target,
+ _fn_wrap=self.fn_wrap
+ )
+
+ def listen(self, *args, **kw):
+ self.dispatch_target.dispatch._listen(self, *args, **kw)
+
+ def remove(self):
+ key = self._key
+
+ if key not in _key_to_collection:
+ raise exc.InvalidRequestError(
+ "No listeners found for event %s / %r / %s " %
+ (self.target, self.identifier, self.fn)
+ )
+ dispatch_reg = _key_to_collection.pop(key)
+
+ for collection_ref, listener_ref in dispatch_reg.items():
+ collection = collection_ref()
+ listener_fn = listener_ref()
+ if collection is not None and listener_fn is not None:
+ collection.remove(self.with_wrapper(listener_fn))
+
+ def contains(self):
+ """Return True if this event key is registered to listen.
+ """
+ return self._key in _key_to_collection
+
+ def base_listen(self, propagate=False, insert=False,
+ named=False):
+
+ target, identifier, fn = \
+ self.dispatch_target, self.identifier, self._listen_fn
+
+ dispatch_descriptor = getattr(target.dispatch, identifier)
+
+ fn = dispatch_descriptor._adjust_fn_spec(fn, named)
+ self = self.with_wrapper(fn)
+
+ if insert:
+ dispatch_descriptor.\
+ for_modify(target.dispatch).insert(self, propagate)
+ else:
+ dispatch_descriptor.\
+ for_modify(target.dispatch).append(self, propagate)
+
+ @property
+ def _listen_fn(self):
+ return self.fn_wrap or self.fn
+
+ def append_value_to_list(self, owner, list_, value):
+ _stored_in_collection(self, owner)
+ list_.append(value)
+
+ def append_to_list(self, owner, list_):
+ _stored_in_collection(self, owner)
+ list_.append(self._listen_fn)
+
+ def remove_from_list(self, owner, list_):
+ _removed_from_collection(self, owner)
+ list_.remove(self._listen_fn)
+
+ def prepend_to_list(self, owner, list_):
+ _stored_in_collection(self, owner)
+ list_.insert(0, self._listen_fn)
+
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 4fb997b9c..9f05c8b5b 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -1,20 +1,20 @@
# sqlalchemy/events.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core event interfaces."""
-from . import event, exc, util
-engine = util.importlater('sqlalchemy', 'engine')
-pool = util.importlater('sqlalchemy', 'pool')
-
+from . import event, exc
+from .pool import Pool
+from .engine import Connectable, Engine
+from .sql.base import SchemaEventTarget
class DDLEvents(event.Events):
"""
Define event listeners for schema objects,
- that is, :class:`.SchemaItem` and :class:`.SchemaEvent`
+ that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget`
subclasses, including :class:`.MetaData`, :class:`.Table`,
:class:`.Column`.
@@ -71,6 +71,7 @@ class DDLEvents(event.Events):
"""
_target_class_doc = "SomeSchemaClassOrObject"
+ _dispatch_target = SchemaEventTarget
def before_create(self, target, connection, **kw):
"""Called before CREATE statments are emitted.
@@ -219,25 +220,6 @@ class DDLEvents(event.Events):
"""
-class SchemaEventTarget(object):
- """Base class for elements that are the targets of :class:`.DDLEvents`
- events.
-
- This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
-
- """
- dispatch = event.dispatcher(DDLEvents)
-
- def _set_parent(self, parent):
- """Associate with this SchemaEvent's parent object."""
-
- raise NotImplementedError()
-
- def _set_parent_with_dispatch(self, parent):
- self.dispatch.before_parent_attach(self, parent)
- self._set_parent(parent)
- self.dispatch.after_parent_attach(self, parent)
-
class PoolEvents(event.Events):
"""Available events for :class:`.Pool`.
@@ -269,62 +251,74 @@ class PoolEvents(event.Events):
"""
_target_class_doc = "SomeEngineOrPool"
+ _dispatch_target = Pool
@classmethod
def _accept_with(cls, target):
if isinstance(target, type):
- if issubclass(target, engine.Engine):
- return pool.Pool
- elif issubclass(target, pool.Pool):
+ if issubclass(target, Engine):
+ return Pool
+ elif issubclass(target, Pool):
return target
- elif isinstance(target, engine.Engine):
+ elif isinstance(target, Engine):
return target.pool
else:
return target
def connect(self, dbapi_connection, connection_record):
- """Called once for each new DB-API connection or Pool's ``creator()``.
+ """Called at the moment a particular DBAPI connection is first
+ created for a given :class:`.Pool`.
+
+ This event allows one to capture the point directly after which
+ the DBAPI module-level ``.connect()`` method has been used in order
+ to produce a new DBAPI connection.
- :param dbapi_con:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
"""
def first_connect(self, dbapi_connection, connection_record):
- """Called exactly once for the first DB-API connection.
+ """Called exactly once for the first time a DBAPI connection is
+ checked out from a particular :class:`.Pool`.
- :param dbapi_con:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
+ The rationale for :meth:`.PoolEvents.first_connect` is to determine
+ information about a particular series of database connections based
+ on the settings used for all connections. Since a particular
+ :class:`.Pool` refers to a single "creator" function (which in terms
+ of a :class:`.Engine` refers to the URL and connection options used),
+ it is typically valid to make observations about a single connection
+        that can be safely assumed to be valid for all subsequent connections,
+ such as the database version, the server and client encoding settings,
+ collation settings, and many others.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param dbapi_connection: a DBAPI connection.
+
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
"""
def checkout(self, dbapi_connection, connection_record, connection_proxy):
"""Called when a connection is retrieved from the Pool.
- :param dbapi_con:
- A raw DB-API connection
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
- :param con_proxy:
- The ``_ConnectionFairy`` which manages the connection for the span of
- the current checkout.
+ :param connection_proxy: the :class:`._ConnectionFairy` object which
+ will proxy the public interface of the DBAPI connection for the lifespan
+ of the checkout.
If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
connection will be disposed and a fresh connection retrieved.
Processing of all checkout listeners will abort and restart
using the new connection.
- .. seealso:: :meth:`.ConnectionEvents.connect` - a similar event
+ .. seealso:: :meth:`.ConnectionEvents.engine_connect` - a similar event
which occurs upon creation of a new :class:`.Connection`.
"""
@@ -336,15 +330,14 @@ class PoolEvents(event.Events):
connection has been invalidated. ``checkin`` will not be called
for detached connections. (They do not return to the pool.)
- :param dbapi_con:
- A raw DB-API connection
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
"""
- def reset(self, dbapi_con, con_record):
+    def reset(self, dbapi_connection, connection_record):
"""Called before the "reset" action occurs for a pooled connection.
This event represents
@@ -358,11 +351,10 @@ class PoolEvents(event.Events):
the :meth:`.PoolEvents.checkin` event is called, except in those
cases where the connection is discarded immediately after reset.
- :param dbapi_con:
- A raw DB-API connection
+ :param dbapi_connection: a DBAPI connection.
- :param con_record:
- The ``_ConnectionRecord`` that persistently manages the connection
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
.. versionadded:: 0.8
@@ -374,6 +366,30 @@ class PoolEvents(event.Events):
"""
+ def invalidate(self, dbapi_connection, connection_record, exception):
+ """Called when a DBAPI connection is to be "invalidated".
+
+ This event is called any time the :meth:`._ConnectionRecord.invalidate`
+ method is invoked, either from API usage or via "auto-invalidation".
+    The event occurs before a final attempt to call ``.close()`` on the
+    connection is made.
+
+ :param dbapi_connection: a DBAPI connection.
+
+ :param connection_record: the :class:`._ConnectionRecord` managing the
+ DBAPI connection.
+
+ :param exception: the exception object corresponding to the reason
+ for this invalidation, if any. May be ``None``.
+
+ .. versionadded:: 0.9.2 Added support for connection invalidation
+ listening.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
+ """
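
# A sketch of a listener for the new hook; the engine URL is arbitrary.
# Registering on the Engine routes the listener to its underlying Pool.
from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")

@event.listens_for(engine, "invalidate")
def receive_invalidate(dbapi_connection, connection_record, exception):
    # log why the pool is discarding this connection
    print("connection invalidated: %r" % (exception,))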
class ConnectionEvents(event.Events):
@@ -448,9 +464,14 @@ class ConnectionEvents(event.Events):
"""
_target_class_doc = "SomeEngine"
+ _dispatch_target = Connectable
+
@classmethod
- def _listen(cls, target, identifier, fn, retval=False):
+ def _listen(cls, event_key, retval=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
target._has_events = True
if not retval:
@@ -479,7 +500,7 @@ class ConnectionEvents(event.Events):
"'before_cursor_execute' engine "
"event listeners accept the 'retval=True' "
"argument.")
- event.Events._listen(target, identifier, fn)
+ event_key.with_wrapper(fn).base_listen()
def before_execute(self, conn, clauseelement, multiparams, params):
"""Intercept high level execute() events, receiving uncompiled
@@ -680,7 +701,7 @@ class ConnectionEvents(event.Events):
Note that this method is not called when a new :class:`.Connection`
is produced which is inheriting execution options from its parent
:class:`.Engine`; to intercept this condition, use the
- :meth:`.ConnectionEvents.connect` event.
+ :meth:`.ConnectionEvents.engine_connect` event.
:param conn: The newly copied :class:`.Connection` object
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index cfd1e2bc7..68e517e26 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -1,14 +1,14 @@
# sqlalchemy/exc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Exceptions used with SQLAlchemy.
-The base exception class is :class:`.SQLAlchemyError`. Exceptions which are
+The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are
raised as a result of DBAPI exceptions are all subclasses of
-:class:`.DBAPIError`.
+:exc:`.DBAPIError`.
"""
@@ -26,6 +26,9 @@ class ArgumentError(SQLAlchemyError):
"""
+class NoSuchModuleError(ArgumentError):
+ """Raised when a dynamically-loaded module (usually a database dialect)
+ of a particular name cannot be located."""
class NoForeignKeysError(ArgumentError):
"""Raised when no foreign keys can be located between two selectables
@@ -169,7 +172,7 @@ class UnboundExecutionError(InvalidRequestError):
class DontWrapMixin(object):
"""A mixin class which, when applied to a user-defined Exception class,
- will not be wrapped inside of :class:`.StatementError` if the error is
+ will not be wrapped inside of :exc:`.StatementError` if the error is
emitted within the process of executing a statement.
E.g.::
@@ -187,10 +190,6 @@ class DontWrapMixin(object):
raise MyCustomException("invalid!")
"""
-import sys
-if sys.version_info < (2, 5):
- class DontWrapMixin:
- pass
# Moved to orm.exc; compatibility definition installed by orm import until 0.6
UnmappedColumnError = None
@@ -223,6 +222,10 @@ class StatementError(SQLAlchemyError):
self.statement = statement
self.params = params
self.orig = orig
+ self.detail = []
+
+ def add_detail(self, msg):
+ self.detail.append(msg)
def __reduce__(self):
return self.__class__, (self.args[0], self.statement,
@@ -231,8 +234,13 @@ class StatementError(SQLAlchemyError):
def __str__(self):
from sqlalchemy.sql import util
params_repr = util._repr_params(self.params, 10)
- return ' '.join((SQLAlchemyError.__str__(self),
- repr(self.statement), repr(params_repr)))
+
+ return ' '.join([
+ "(%s)" % det for det in self.detail
+ ] + [
+ SQLAlchemyError.__str__(self),
+ repr(self.statement), repr(params_repr)
+ ])
def __unicode__(self):
return self.__str__()
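
# Sketch of the new add_detail() hook added above: each detail string is
# rendered in parentheses ahead of the base message.
from sqlalchemy import exc

err = exc.StatementError(
    "could not execute", "SELECT ?", ("x",), Exception("boom"))
err.add_detail("are you sure the table exists?")
print(str(err))
# roughly: (are you sure the table exists?) could not execute 'SELECT ?' ('x',)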
diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py
index 0efc37bd5..1d77acaa7 100644
--- a/lib/sqlalchemy/ext/__init__.py
+++ b/lib/sqlalchemy/ext/__init__.py
@@ -1,5 +1,5 @@
# ext/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py
index fca2f0008..e62958b49 100644
--- a/lib/sqlalchemy/ext/associationproxy.py
+++ b/lib/sqlalchemy/ext/associationproxy.py
@@ -1,5 +1,5 @@
# ext/associationproxy.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -242,7 +242,11 @@ class AssociationProxy(interfaces._InspectionAttr):
return self
if self.scalar:
- return self._scalar_get(getattr(obj, self.target_collection))
+ target = getattr(obj, self.target_collection)
+ if target is not None:
+ return self._scalar_get(target)
+ else:
+ return None
else:
try:
# If the owning instance is reborn (orm session resurrect,
diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py
new file mode 100644
index 000000000..7a1512f6a
--- /dev/null
+++ b/lib/sqlalchemy/ext/automap.py
@@ -0,0 +1,840 @@
+# ext/automap.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Define an extension to the :mod:`sqlalchemy.ext.declarative` system
+which automatically generates mapped classes and relationships from a database
+schema, typically though not necessarily one which is reflected.
+
+.. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`.
+
+.. note::
+
+ The :mod:`sqlalchemy.ext.automap` extension should be considered
+   **experimental** as of 0.9.1.  Feature set and API stability are
+   not guaranteed at this time.
+
+It is hoped that the :class:`.AutomapBase` system provides a quick
+and modernized solution to the problem that the very famous
+`SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_
+also tries to solve, that of generating a quick and rudimentary object
+model from an existing database on the fly. By addressing the issue strictly
+at the mapper configuration level, and integrating fully with existing
+Declarative class techniques, :class:`.AutomapBase` seeks to provide
+a well-integrated approach to the issue of expediently auto-generating ad-hoc
+mappings.
+
+
+Basic Use
+=========
+
+The simplest usage is to reflect an existing database into a new model.
+We create a new :class:`.AutomapBase` class in a similar manner as to how
+we create a declarative base class, using :func:`.automap_base`.
+We then call :meth:`.AutomapBase.prepare` on the resulting base class,
+asking it to reflect the schema and produce mappings::
+
+ from sqlalchemy.ext.automap import automap_base
+ from sqlalchemy.orm import Session
+ from sqlalchemy import create_engine
+
+ Base = automap_base()
+
+ # engine, suppose it has two tables 'user' and 'address' set up
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ # reflect the tables
+ Base.prepare(engine, reflect=True)
+
+ # mapped classes are now created with names by default
+ # matching that of the table name.
+ User = Base.classes.user
+ Address = Base.classes.address
+
+ session = Session(engine)
+
+ # rudimentary relationships are produced
+    u1 = User(name="foo")
+    session.add(Address(email_address="foo@bar.com", user=u1))
+    session.commit()
+
+    # collection-based relationships are by default named "<classname>_collection"
+    print (u1.address_collection)
+
+Above, calling :meth:`.AutomapBase.prepare` while passing along the
+:paramref:`.AutomapBase.prepare.reflect` parameter indicates that the
+:meth:`.MetaData.reflect` method will be called on this declarative base
+class's :class:`.MetaData` collection; then, each viable
+:class:`.Table` within the :class:`.MetaData` will get a new mapped class
+generated automatically. The :class:`.ForeignKeyConstraint` objects which
+link the various tables together will be used to produce new, bidirectional
+:func:`.relationship` objects between classes. The classes and relationships
+follow along a default naming scheme that we can customize. At this point,
+our basic mapping consisting of related ``User`` and ``Address`` classes is ready
+to use in the traditional way.
+
+Generating Mappings from an Existing MetaData
+=============================================
+
+We can pass a pre-declared :class:`.MetaData` object to :func:`.automap_base`.
+This object can be constructed in any way, including programmatically, from
+a serialized file, or from itself being reflected using :meth:`.MetaData.reflect`.
+Below we illustrate a combination of reflection and explicit table declaration::
+
+ from sqlalchemy import create_engine, MetaData, Table, Column, ForeignKey
+ engine = create_engine("sqlite:///mydatabase.db")
+
+ # produce our own MetaData object
+ metadata = MetaData()
+
+ # we can reflect it ourselves from a database, using options
+ # such as 'only' to limit what tables we look at...
+ metadata.reflect(engine, only=['user', 'address'])
+
+ # ... or just define our own Table objects with it (or combine both)
+ Table('user_order', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('user_id', ForeignKey('user.id'))
+ )
+
+ # we can then produce a set of mappings from this MetaData.
+ Base = automap_base(metadata=metadata)
+
+ # calling prepare() just sets up mapped classes and relationships.
+ Base.prepare()
+
+ # mapped classes are ready
+ User, Address, Order = Base.classes.user, Base.classes.address, Base.classes.user_order
+
+Specifying Classes Explicitly
+=============================
+
+The :mod:`.sqlalchemy.ext.automap` extension allows classes to be defined
+explicitly, in a way similar to that of the :class:`.DeferredReflection` class.
+Classes that extend from :class:`.AutomapBase` act like regular declarative
+classes, but are not immediately mapped after their construction, and are instead
+mapped when we call :meth:`.AutomapBase.prepare`. The :meth:`.AutomapBase.prepare`
+method will make use of the classes we've established, matching them to tables
+based on each class's ``__tablename__``. If our schema contains tables ``user``
+and ``address``, we can define one or both of the classes to be used::
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy.orm import relationship, Session
+    from sqlalchemy import create_engine, Column, String
+
+    # automap base
+    Base = automap_base()
+
+    # pre-declare User for the 'user' table
+    class User(Base):
+        __tablename__ = 'user'
+
+        # override schema elements like Columns
+        user_name = Column('name', String)
+
+        # override relationships too, if desired.
+        # we must use the same name that automap would use for the relationship,
+        # and also must refer to the class name that automap will generate
+        # for "address"
+        address_collection = relationship("address", collection_class=set)
+
+    # reflect
+    engine = create_engine("sqlite:///mydatabase.db")
+    Base.prepare(engine, reflect=True)
+
+    session = Session(engine)
+
+    # we still have Address generated from the tablename "address",
+    # but User is the same as Base.classes.User now
+
+    Address = Base.classes.address
+
+    u1 = session.query(User).first()
+    print (u1.address_collection)
+
+    # the backref is still there:
+    a1 = session.query(Address).first()
+    print (a1.user)
+
+Above, one of the more intricate details is that we have overridden
+one of the :func:`.relationship` objects that automap would have created.
+To do this, we needed to make sure the names match up with what automap
+would normally generate: the relationship name must be ``User.address_collection``,
+and the class referred to, from automap's perspective, is named
+``address``, even though we refer to it as ``Address`` within our own usage
+of this class.
+
+Overriding Naming Schemes
+=========================
+
+:mod:`.sqlalchemy.ext.automap` is tasked with producing mapped classes and
+relationship names based on a schema, which means it has decision points in how
+these names are determined. These three decision points are provided using
+functions which can be passed to the :meth:`.AutomapBase.prepare` method, and
+are known as :func:`.classname_for_table`,
+:func:`.name_for_scalar_relationship`,
+and :func:`.name_for_collection_relationship`. Any or all of these
+functions can be provided, as in the example below, where we use a "camel case"
+scheme for class names and a "pluralizer" for collection names using the
+`Inflect <https://pypi.python.org/pypi/inflect>`_ package::
+
+    import re
+    import inflect
+
+    def camelize_classname(base, tablename, table):
+        "Produce a 'camelized' class name, e.g. "\\
+        "'words_and_underscores' -> 'WordsAndUnderscores'"
+
+        return str(tablename[0].upper() + \\
+                re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:]))
+
+    _pluralizer = inflect.engine()
+    def pluralize_collection(base, local_cls, referred_cls, constraint):
+        "Produce an 'uncamelized', 'pluralized' class name, e.g. "\\
+        "'SomeTerm' -> 'some_terms'"
+
+        referred_name = referred_cls.__name__
+        uncamelized = referred_name[0].lower() + \\
+            re.sub(r'[A-Z]',
+                   lambda m: "_%s" % m.group(0).lower(),
+                   referred_name[1:])
+        pluralized = _pluralizer.plural(uncamelized)
+        return pluralized
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import create_engine
+
+    Base = automap_base()
+
+    engine = create_engine("sqlite:///mydatabase.db")
+
+    Base.prepare(engine, reflect=True,
+                 classname_for_table=camelize_classname,
+                 name_for_collection_relationship=pluralize_collection
+                 )
+
+From the above mapping, we would now have classes ``User`` and ``Address``,
+where the collection from ``User`` to ``Address`` is called ``User.addresses``::
+
+ User, Address = Base.classes.User, Base.classes.Address
+
+ u1 = User(addresses=[Address(email="foo@bar.com")])
+
+Relationship Detection
+======================
+
+The vast majority of what automap accomplishes is the generation of
+:func:`.relationship` structures based on foreign keys. The mechanism
+by which this works for many-to-one and one-to-many relationships is as follows:
+
+1. A given :class:`.Table`, known to be mapped to a particular class,
+ is examined for :class:`.ForeignKeyConstraint` objects.
+
+2. From each :class:`.ForeignKeyConstraint`, the remote :class:`.Table`
+   object is matched up to the class to which it is to be mapped,
+   if any, else it is skipped.
+
+3. As the :class:`.ForeignKeyConstraint` we are examining corresponds to a
+   reference from the immediate mapped class,
+ the relationship will be set up as a many-to-one referring to the referred class;
+ a corresponding one-to-many backref will be created on the referred class referring
+ to this class.
+
+4. The names of the relationships are determined using the
+ :paramref:`.AutomapBase.prepare.name_for_scalar_relationship` and
+ :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+   callable functions. It is important to note that the default relationship
+   naming derives the name from **the actual class name**. If you've
+   given a particular class an explicit name by declaring it, or specified an
+   alternate class naming scheme, that's the name from which the relationship
+   name will be derived (see the short sketch following this list).
+
+5. The classes are inspected for an existing mapped property matching these
+ names. If one is detected on one side, but none on the other side, :class:`.AutomapBase`
+ attempts to create a relationship on the missing side, then uses the
+ :paramref:`.relationship.back_populates` parameter in order to point
+ the new relationship to the other side.
+
+6. In the usual case where no relationship is on either side,
+ :meth:`.AutomapBase.prepare` produces a :func:`.relationship` on the "many-to-one"
+ side and matches it to the other using the :paramref:`.relationship.backref`
+ parameter.
+
+7. Production of the :func:`.relationship` and optionally the :func:`.backref`
+ is handed off to the :paramref:`.AutomapBase.prepare.generate_relationship`
+ function, which can be supplied by the end-user in order to augment
+ the arguments passed to :func:`.relationship` or :func:`.backref` or to
+ make use of custom implementations of these functions.
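+
+As a brief illustration of point 4, here is a minimal sketch that renames
+the scalar side of each relationship via the
+:paramref:`.AutomapBase.prepare.name_for_scalar_relationship` hook; the
+``related_`` prefix is purely illustrative, and the ``user`` / ``address``
+schema from the earlier examples is assumed::
+
+    def related_scalar_name(base, local_cls, referred_cls, constraint):
+        # e.g. produces "related_user" for the Address->User reference
+        return "related_" + referred_cls.__name__.lower()
+
+    Base.prepare(engine, reflect=True,
+                 name_for_scalar_relationship=related_scalar_name)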
+
+Custom Relationship Arguments
+-----------------------------
+
+The :paramref:`.AutomapBase.prepare.generate_relationship` hook can be used
+to add parameters to relationships. For most cases, we can make use of the
+existing :func:`.automap.generate_relationship` function to return
+the object, after augmenting the given keyword dictionary with our own
+arguments.
+
+Below is an illustration of how to send
+:paramref:`.relationship.cascade` and
+:paramref:`.relationship.passive_deletes`
+options along to all one-to-many relationships::
+
+    from sqlalchemy.ext.automap import generate_relationship
+    from sqlalchemy.orm import interfaces
+
+    def _gen_relationship(base, direction, return_fn,
+                          attrname, local_cls, referred_cls, **kw):
+        if direction is interfaces.ONETOMANY:
+            kw['cascade'] = 'all, delete-orphan'
+            kw['passive_deletes'] = True
+        # make use of the built-in function to actually return
+        # the result.
+        return generate_relationship(base, direction, return_fn,
+                                     attrname, local_cls, referred_cls, **kw)
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import create_engine
+
+    # automap base
+    Base = automap_base()
+
+    engine = create_engine("sqlite:///mydatabase.db")
+    Base.prepare(engine, reflect=True,
+                 generate_relationship=_gen_relationship)
+
+Many-to-Many Relationships
+--------------------------
+
+:mod:`.sqlalchemy.ext.automap` will generate many-to-many relationships, e.g.
+those which contain a ``secondary`` argument. The process for producing these
+is as follows (a brief sketch follows the list):
+
+1. A given :class:`.Table` is examined for :class:`.ForeignKeyConstraint` objects,
+ before any mapped class has been assigned to it.
+
+2. If the table contains exactly two :class:`.ForeignKeyConstraint`
+ objects, and all columns within this table are members of these two
+ :class:`.ForeignKeyConstraint` objects, the table is assumed to be a
+ "secondary" table, and will **not be mapped directly**.
+
+3. The two (or one, for self-referential) external tables to which the
+   :class:`.Table` refers are matched to the classes to which they will be
+   mapped, if any.
+
+4. If mapped classes for both sides are located, a many-to-many bi-directional
+ :func:`.relationship` / :func:`.backref` pair is created between the two
+ classes.
+
+5. The override logic for many-to-many works the same as that of
+   one-to-many/many-to-one; the :func:`.generate_relationship` function is
+   called upon to generate the structures, and existing attributes will be
+   maintained.
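+
+Below is a minimal sketch of this process; the ``user`` / ``keyword`` /
+``user_keyword`` names are purely illustrative::
+
+    from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
+    from sqlalchemy.ext.automap import automap_base
+
+    metadata = MetaData()
+    Table('user', metadata,
+          Column('id', Integer, primary_key=True),
+          Column('name', String))
+    Table('keyword', metadata,
+          Column('id', Integer, primary_key=True),
+          Column('word', String))
+
+    # every column of 'user_keyword' is a member of one of its two
+    # ForeignKeyConstraints, so it is treated as a "secondary" table
+    # and is not mapped directly
+    Table('user_keyword', metadata,
+          Column('user_id', ForeignKey('user.id'), primary_key=True),
+          Column('keyword_id', ForeignKey('keyword.id'), primary_key=True))
+
+    Base = automap_base(metadata=metadata)
+    Base.prepare()
+
+    User, Keyword = Base.classes.user, Base.classes.keyword
+
+    # bidirectional many-to-many collections are generated:
+    # User.keyword_collection and Keyword.user_collection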
+
+Using Automap with Explicit Declarations
+========================================
+
+As noted previously, automap has no dependency on reflection, and can make
+use of any collection of :class:`.Table` objects within a :class:`.MetaData`
+collection. From this, it follows that automap can also be used to
+generate missing relationships, given an otherwise complete model that fully
+defines table metadata::
+
+    from sqlalchemy.ext.automap import automap_base
+    from sqlalchemy import Column, Integer, String, ForeignKey
+
+    Base = automap_base()
+
+    class User(Base):
+        __tablename__ = 'user'
+
+        id = Column(Integer, primary_key=True)
+        name = Column(String)
+
+    class Address(Base):
+        __tablename__ = 'address'
+
+        id = Column(Integer, primary_key=True)
+        email = Column(String)
+        user_id = Column(ForeignKey('user.id'))
+
+    # produce relationships
+    Base.prepare()
+
+    # mapping is complete, with "address_collection" and
+    # "user" relationships
+    a1 = Address(email='u1')
+    a2 = Address(email='u2')
+    u1 = User(address_collection=[a1, a2])
+    assert a1.user is u1
+
+Above, given mostly complete ``User`` and ``Address`` mappings, the
+:class:`.ForeignKey` which we defined on ``Address.user_id`` allowed a
+bidirectional relationship pair ``Address.user`` and ``User.address_collection``
+to be generated on the mapped classes.
+
+Note that when subclassing :class:`.AutomapBase`, the call to
+:meth:`.AutomapBase.prepare` is required; if it is not called, the classes
+we've declared remain in an un-mapped state.
+
+
+"""
+from .declarative import declarative_base as _declarative_base
+from .declarative.base import _DeferredMapperConfig
+from ..sql import and_
+from ..schema import ForeignKeyConstraint
+from ..orm import relationship, backref, interfaces
+from .. import util
+
+
+def classname_for_table(base, tablename, table):
+ """Return the class name that should be used, given the name
+ of a table.
+
+ The default implementation is::
+
+ return str(tablename)
+
+ Alternate implementations can be specified using the
+ :paramref:`.AutomapBase.prepare.classname_for_table`
+ parameter.
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param tablename: string name of the :class:`.Table`.
+
+ :param table: the :class:`.Table` object itself.
+
+ :return: a string class name.
+
+ .. note::
+
+ In Python 2, the string used for the class name **must** be a non-Unicode
+ object, e.g. a ``str()`` object. The ``.name`` attribute of
+ :class:`.Table` is typically a Python unicode subclass, so the ``str()``
+ function should be applied to this name, after accounting for any non-ASCII
+ characters.
+
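+    For example, a sketch (purely illustrative) that strips non-ASCII
+    characters and coerces the result to ``str()``::
+
+        def asciified_classname(base, tablename, table):
+            # drop any non-ASCII characters, then coerce to str()
+            return str(tablename.encode('ascii', 'ignore').decode('ascii'))
+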
+ """
+ return str(tablename)
+
+def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
+ """Return the attribute name that should be used to refer from one
+ class to another, for a scalar object reference.
+
+ The default implementation is::
+
+ return referred_cls.__name__.lower()
+
+ Alternate implementations can be specified using the
+ :paramref:`.AutomapBase.prepare.name_for_scalar_relationship`
+ parameter.
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param local_cls: the class to be mapped on the local side.
+
+    :param referred_cls: the class to be mapped on the referred side.
+
+ :param constraint: the :class:`.ForeignKeyConstraint` that is being
+ inspected to produce this relationship.
+
+ """
+ return referred_cls.__name__.lower()
+
+def name_for_collection_relationship(base, local_cls, referred_cls, constraint):
+ """Return the attribute name that should be used to refer from one
+ class to another, for a collection reference.
+
+ The default implementation is::
+
+ return referred_cls.__name__.lower() + "_collection"
+
+ Alternate implementations
+ can be specified using the :paramref:`.AutomapBase.prepare.name_for_collection_relationship`
+ parameter.
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+ :param local_cls: the class to be mapped on the local side.
+
+ :param referred_cls: the class to be mapped on the referring side.
+
+ :param constraint: the :class:`.ForeignKeyConstraint` that is being
+ inspected to produce this relationship.
+
+ """
+ return referred_cls.__name__.lower() + "_collection"
+
+def generate_relationship(base, direction, return_fn,
+                          attrname, local_cls, referred_cls, **kw):
+ """Generate a :func:`.relationship` or :func:`.backref` on behalf of two
+ mapped classes.
+
+ An alternate implementation of this function can be specified using the
+ :paramref:`.AutomapBase.prepare.generate_relationship` parameter.
+
+ The default implementation of this function is as follows::
+
+        if return_fn is backref:
+            return return_fn(attrname, **kw)
+        elif return_fn is relationship:
+            return return_fn(referred_cls, **kw)
+        else:
+            raise TypeError("Unknown relationship function: %s" % return_fn)
+
+ :param base: the :class:`.AutomapBase` class doing the prepare.
+
+    :param direction: indicates the "direction" of the relationship; this will
+      be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`.
+
+ :param return_fn: the function that is used by default to create the
+ relationship. This will be either :func:`.relationship` or :func:`.backref`.
+ The :func:`.backref` function's result will be used to produce a new
+ :func:`.relationship` in a second step, so it is critical that user-defined
+ implementations correctly differentiate between the two functions, if
+ a custom relationship function is being used.
+
+    :param attrname: the attribute name to which this relationship is being assigned.
+ If the value of :paramref:`.generate_relationship.return_fn` is the
+ :func:`.backref` function, then this name is the name that is being
+ assigned to the backref.
+
+ :param local_cls: the "local" class to which this relationship or backref
+ will be locally present.
+
+    :param referred_cls: the "referred" class to which the relationship or
+      backref refers.
+
+ :param \**kw: all additional keyword arguments are passed along to the
+ function.
+
+ :return: a :func:`.relationship` or :func:`.backref` construct, as dictated
+ by the :paramref:`.generate_relationship.return_fn` parameter.
+
+ """
+ if return_fn is backref:
+ return return_fn(attrname, **kw)
+ elif return_fn is relationship:
+ return return_fn(referred_cls, **kw)
+ else:
+ raise TypeError("Unknown relationship function: %s" % return_fn)
+
+class AutomapBase(object):
+ """Base class for an "automap" schema.
+
+ The :class:`.AutomapBase` class can be compared to the "declarative base"
+ class that is produced by the :func:`.declarative.declarative_base`
+ function. In practice, the :class:`.AutomapBase` class is always used
+ as a mixin along with an actual declarative base.
+
+    A new subclassable :class:`.AutomapBase` is typically instantiated
+ using the :func:`.automap_base` function.
+
+ .. seealso::
+
+ :ref:`automap_toplevel`
+
+ """
+ __abstract__ = True
+
+ classes = None
+ """An instance of :class:`.util.Properties` containing classes.
+
+ This object behaves much like the ``.c`` collection on a table. Classes
+ are present under the name they were given, e.g.::
+
+ Base = automap_base()
+ Base.prepare(engine=some_engine, reflect=True)
+
+ User, Address = Base.classes.User, Base.classes.Address
+
+ """
+
+ @classmethod
+ def prepare(cls,
+ engine=None,
+ reflect=False,
+ classname_for_table=classname_for_table,
+ collection_class=list,
+ name_for_scalar_relationship=name_for_scalar_relationship,
+ name_for_collection_relationship=name_for_collection_relationship,
+ generate_relationship=generate_relationship):
+
+ """Extract mapped classes and relationships from the :class:`.MetaData` and
+ perform mappings.
+
+ :param engine: an :class:`.Engine` or :class:`.Connection` with which
+ to perform schema reflection, if specified.
+ If the :paramref:`.AutomapBase.prepare.reflect` argument is False, this
+ object is not used.
+
+ :param reflect: if True, the :meth:`.MetaData.reflect` method is called
+ on the :class:`.MetaData` associated with this :class:`.AutomapBase`.
+      The :class:`.Engine` passed via :paramref:`.AutomapBase.prepare.engine` will
+      be used to perform the reflection if present; otherwise, the
+      :class:`.MetaData` should already be bound to some engine, else the
+      operation will fail.
+
+ :param classname_for_table: callable function which will be used to
+ produce new class names, given a table name. Defaults to
+ :func:`.classname_for_table`.
+
+ :param name_for_scalar_relationship: callable function which will be used
+ to produce relationship names for scalar relationships. Defaults to
+ :func:`.name_for_scalar_relationship`.
+
+ :param name_for_collection_relationship: callable function which will be used
+ to produce relationship names for collection-oriented relationships. Defaults to
+ :func:`.name_for_collection_relationship`.
+
+ :param generate_relationship: callable function which will be used to
+ actually generate :func:`.relationship` and :func:`.backref` constructs.
+ Defaults to :func:`.generate_relationship`.
+
+ :param collection_class: the Python collection class that will be used
+ when a new :func:`.relationship` object is created that represents a
+ collection. Defaults to ``list``.
+
+ """
+ if reflect:
+ cls.metadata.reflect(
+ engine,
+ extend_existing=True,
+ autoload_replace=False
+ )
+
+ table_to_map_config = dict(
+ (m.local_table, m)
+ for m in _DeferredMapperConfig.classes_for_base(cls)
+ )
+
+ many_to_many = []
+
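+        # scan all tables: many-to-many "secondary" candidates are set
+        # aside; tables without a primary key are skipped; remaining
+        # tables not already associated with a declared class get a
+        # newly generated mapped class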
+ for table in cls.metadata.tables.values():
+ lcl_m2m, rem_m2m, m2m_const = _is_many_to_many(cls, table)
+ if lcl_m2m is not None:
+ many_to_many.append((lcl_m2m, rem_m2m, m2m_const, table))
+ elif not table.primary_key:
+ continue
+ elif table not in table_to_map_config:
+ mapped_cls = type(
+ classname_for_table(cls, table.name, table),
+ (cls, ),
+ {"__table__": table}
+ )
+ map_config = _DeferredMapperConfig.config_for_cls(mapped_cls)
+ cls.classes[map_config.cls.__name__] = mapped_cls
+ table_to_map_config[table] = map_config
+
+ for map_config in table_to_map_config.values():
+ _relationships_for_fks(cls,
+ map_config,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
+
+ for lcl_m2m, rem_m2m, m2m_const, table in many_to_many:
+ _m2m_relationship(cls, lcl_m2m, rem_m2m, m2m_const, table,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship)
+ for map_config in table_to_map_config.values():
+ map_config.map()
+
+
+ _sa_decl_prepare = True
+ """Indicate that the mapping of classes should be deferred.
+
+ The presence of this attribute name indicates to declarative
+ that the call to mapper() should not occur immediately; instead,
+ information about the table and attributes to be mapped are gathered
+ into an internal structure called _DeferredMapperConfig. These
+ objects can be collected later using classes_for_base(), additional
+ mapping decisions can be made, and then the map() method will actually
+ apply the mapping.
+
+    The only real reason this deferral is needed is to support primary key
+    columns that aren't reflected yet when the class is declared; everything
+    else can theoretically be added to the mapper later. However,
+    _DeferredMapperConfig is a nice interface in any case, as it exists at
+    the otherwise not usually exposed point at which declarative has the
+    class and the Table but hasn't yet called mapper().
+
+ """
+
+def automap_base(declarative_base=None, **kw):
+ """Produce a declarative automap base.
+
+ This function produces a new base class that is a product of the
+ :class:`.AutomapBase` class as well a declarative base produced by
+ :func:`.declarative.declarative_base`.
+
+ All parameters other than ``declarative_base`` are keyword arguments
+ that are passed directly to the :func:`.declarative.declarative_base`
+ function.
+
+ :param declarative_base: an existing class produced by
+ :func:`.declarative.declarative_base`. When this is passed, the function
+ no longer invokes :func:`.declarative.declarative_base` itself, and all other
+ keyword arguments are ignored.
+
+ :param \**kw: keyword arguments are passed along to
+ :func:`.declarative.declarative_base`.
+
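+    For example, a brief sketch passing an existing declarative base
+    (``SomeBase`` here is purely illustrative)::
+
+        from sqlalchemy.ext.declarative import declarative_base
+        from sqlalchemy.ext.automap import automap_base
+
+        SomeBase = declarative_base()
+
+        # SomeBase is re-used in place; its MetaData collection is the
+        # one that prepare() will work against
+        Base = automap_base(SomeBase)
+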
+ """
+ if declarative_base is None:
+ Base = _declarative_base(**kw)
+ else:
+ Base = declarative_base
+
+ return type(
+ Base.__name__,
+ (AutomapBase, Base,),
+ {"__abstract__": True, "classes": util.Properties({})}
+ )
+
+def _is_many_to_many(automap_base, table):
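+    # detect a many-to-many "secondary" table: exactly two
+    # ForeignKeyConstraints whose columns together account for every
+    # column of the table; returns (left referred table, right referred
+    # table, constraints), else (None, None, None)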
+ fk_constraints = [const for const in table.constraints
+ if isinstance(const, ForeignKeyConstraint)]
+ if len(fk_constraints) != 2:
+ return None, None, None
+
+ cols = sum(
+ [[fk.parent for fk in fk_constraint.elements]
+ for fk_constraint in fk_constraints], [])
+
+ if set(cols) != set(table.c):
+ return None, None, None
+
+ return (
+ fk_constraints[0].elements[0].column.table,
+ fk_constraints[1].elements[0].column.table,
+ fk_constraints
+ )
+
+def _relationships_for_fks(automap_base, map_config, table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
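+    # for each ForeignKeyConstraint on the mapped table, produce a
+    # many-to-one relationship() to the referred class plus a
+    # one-to-many reverse, honoring any attributes the user has
+    # already declared under those names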
+ local_table = map_config.local_table
+ local_cls = map_config.cls
+
+ for constraint in local_table.constraints:
+ if isinstance(constraint, ForeignKeyConstraint):
+ fks = constraint.elements
+ referred_table = fks[0].column.table
+ referred_cfg = table_to_map_config.get(referred_table, None)
+ if referred_cfg is None:
+ continue
+ referred_cls = referred_cfg.cls
+
+ relationship_name = name_for_scalar_relationship(
+ automap_base,
+ local_cls,
+ referred_cls, constraint)
+ backref_name = name_for_collection_relationship(
+ automap_base,
+ referred_cls,
+ local_cls,
+ constraint
+ )
+
+ create_backref = backref_name not in referred_cfg.properties
+
+ if relationship_name not in map_config.properties:
+ if create_backref:
+ backref_obj = generate_relationship(automap_base,
+ interfaces.ONETOMANY, backref,
+ backref_name, referred_cls, local_cls,
+ collection_class=collection_class)
+ else:
+ backref_obj = None
+ map_config.properties[relationship_name] = \
+ generate_relationship(automap_base,
+ interfaces.MANYTOONE,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ foreign_keys=[fk.parent for fk in constraint.elements],
+ backref=backref_obj,
+ remote_side=[fk.column for fk in constraint.elements]
+ )
+ if not create_backref:
+ referred_cfg.properties[backref_name].back_populates = relationship_name
+ elif create_backref:
+ referred_cfg.properties[backref_name] = \
+ generate_relationship(automap_base,
+ interfaces.ONETOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ foreign_keys=[fk.parent for fk in constraint.elements],
+ back_populates=relationship_name,
+ collection_class=collection_class)
+ map_config.properties[relationship_name].back_populates = backref_name
+
+def _m2m_relationship(automap_base, lcl_m2m, rem_m2m, m2m_const, table,
+ table_to_map_config,
+ collection_class,
+ name_for_scalar_relationship,
+ name_for_collection_relationship,
+ generate_relationship):
+
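+    # produce a bidirectional many-to-many relationship()/backref() pair
+    # across the "secondary" table, honoring any attributes already
+    # declared on either class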
+ map_config = table_to_map_config.get(lcl_m2m, None)
+ referred_cfg = table_to_map_config.get(rem_m2m, None)
+ if map_config is None or referred_cfg is None:
+ return
+
+ local_cls = map_config.cls
+ referred_cls = referred_cfg.cls
+
+ relationship_name = name_for_collection_relationship(
+ automap_base,
+ local_cls,
+ referred_cls, m2m_const[0])
+ backref_name = name_for_collection_relationship(
+ automap_base,
+ referred_cls,
+ local_cls,
+ m2m_const[1]
+ )
+
+ create_backref = backref_name not in referred_cfg.properties
+
+ if relationship_name not in map_config.properties:
+ if create_backref:
+ backref_obj = generate_relationship(automap_base,
+ interfaces.MANYTOMANY,
+ backref,
+ backref_name,
+ referred_cls, local_cls,
+ collection_class=collection_class
+ )
+ else:
+ backref_obj = None
+ map_config.properties[relationship_name] = \
+ generate_relationship(automap_base,
+ interfaces.MANYTOMANY,
+ relationship,
+ relationship_name,
+ local_cls, referred_cls,
+ secondary=table,
+ primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
+ secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
+ backref=backref_obj,
+ collection_class=collection_class
+ )
+ if not create_backref:
+ referred_cfg.properties[backref_name].back_populates = relationship_name
+ elif create_backref:
+ referred_cfg.properties[backref_name] = \
+ generate_relationship(automap_base,
+ interfaces.MANYTOMANY,
+ relationship,
+ backref_name,
+ referred_cls, local_cls,
+ secondary=table,
+ primaryjoin=and_(fk.column == fk.parent for fk in m2m_const[1].elements),
+ secondaryjoin=and_(fk.column == fk.parent for fk in m2m_const[0].elements),
+ back_populates=relationship_name,
+ collection_class=collection_class)
+ map_config.properties[relationship_name].back_populates = backref_name
diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py
index 703475de7..5dde74e09 100644
--- a/lib/sqlalchemy/ext/compiler.py
+++ b/lib/sqlalchemy/ext/compiler.py
@@ -1,5 +1,5 @@
# ext/compiler.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -238,7 +238,7 @@ A synopsis is as follows:
class timestamp(ColumnElement):
type = TIMESTAMP()
-* :class:`~sqlalchemy.sql.expression.FunctionElement` - This is a hybrid of a
+* :class:`~sqlalchemy.sql.functions.FunctionElement` - This is a hybrid of a
``ColumnElement`` and a "from clause" like object, and represents a SQL
function or stored procedure type of call. Since most databases support
statements along the line of "SELECT FROM <some function>"
diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py
index f8c685da0..0ee4e33fd 100644
--- a/lib/sqlalchemy/ext/declarative/__init__.py
+++ b/lib/sqlalchemy/ext/declarative/__init__.py
@@ -1,5 +1,5 @@
# ext/declarative/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -897,11 +897,57 @@ reference a common target class via many-to-one::
__tablename__ = 'target'
id = Column(Integer, primary_key=True)
+Using Advanced Relationship Arguments (e.g. ``primaryjoin``, etc.)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
:func:`~sqlalchemy.orm.relationship` definitions which require explicit
-primaryjoin, order_by etc. expressions should use the string forms
-for these arguments, so that they are evaluated as late as possible.
-To reference the mixin class in these expressions, use the given ``cls``
-to get its name::
+primaryjoin, order_by etc. expressions should in all but the most
+simplistic cases use **late bound** forms
+for these arguments, meaning either the string form or a lambda.
+The reason for this is that the related :class:`.Column` objects which are to
+be configured using ``@declared_attr`` are not available to another
+``@declared_attr`` attribute; while the methods will work and return new
+:class:`.Column` objects, those are not the :class:`.Column` objects that
+Declarative will be using as it calls the methods on its own, thus using
+*different* :class:`.Column` objects.
+
+The canonical example is the primaryjoin condition that depends upon
+another mixed-in column::
+
+ class RefTargetMixin(object):
+ @declared_attr
+ def target_id(cls):
+ return Column('target_id', ForeignKey('target.id'))
+
+ @declared_attr
+ def target(cls):
+ return relationship(Target,
+ primaryjoin=Target.id==cls.target_id # this is *incorrect*
+ )
+
+Mapping a class using the above mixin, we will get an error like::
+
+ sqlalchemy.exc.InvalidRequestError: this ForeignKey's parent column is not
+ yet associated with a Table.
+
+This is because the ``target_id`` :class:`.Column` we've called upon in our ``target()``
+method is not the same :class:`.Column` that declarative is actually going to map
+to our table.
+
+The condition above is resolved using a lambda::
+
+ class RefTargetMixin(object):
+ @declared_attr
+ def target_id(cls):
+ return Column('target_id', ForeignKey('target.id'))
+
+ @declared_attr
+ def target(cls):
+ return relationship(Target,
+ primaryjoin=lambda: Target.id==cls.target_id
+ )
+
+or alternatively, the string form (which ultimately generates a lambda)::
class RefTargetMixin(object):
@declared_attr
@@ -1238,7 +1284,7 @@ Sessions
Note that ``declarative`` does nothing special with sessions, and is
only intended as an easier way to configure mappers and
:class:`~sqlalchemy.schema.Table` objects. A typical application
-setup using :class:`~sqlalchemy.orm.scoped_session` might look like::
+setup using :class:`~sqlalchemy.orm.scoping.scoped_session` might look like::
engine = create_engine('postgresql://scott:tiger@localhost/test')
Session = scoped_session(sessionmaker(autocommit=False,
@@ -1254,7 +1300,7 @@ Mapped instances then make usage of
from .api import declarative_base, synonym_for, comparable_using, \
instrument_declarative, ConcreteBase, AbstractConcreteBase, \
DeclarativeMeta, DeferredReflection, has_inherited_table,\
- declared_attr
+ declared_attr, as_declarative
__all__ = ['declarative_base', 'synonym_for', 'has_inherited_table',
diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py
index 2f222f682..2418c6e50 100644
--- a/lib/sqlalchemy/ext/declarative/api.py
+++ b/lib/sqlalchemy/ext/declarative/api.py
@@ -1,5 +1,5 @@
# ext/declarative/api.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -9,14 +9,17 @@
from ...schema import Table, MetaData
from ...orm import synonym as _orm_synonym, mapper,\
comparable_property,\
- interfaces
-from ...orm.util import polymorphic_union, _mapper_or_none
+ interfaces, properties
+from ...orm.util import polymorphic_union
+from ...orm.base import _mapper_or_none
+from ...util import compat
from ... import exc
import weakref
from .base import _as_declarative, \
_declarative_constructor,\
- _MapperConfig, _add_attribute
+ _DeferredMapperConfig, _add_attribute
+from .clsregistry import _class_resolver
def instrument_declarative(cls, registry, metadata):
@@ -173,16 +176,16 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
of the class.
:param bind: An optional
- :class:`~sqlalchemy.engine.base.Connectable`, will be assigned
- the ``bind`` attribute on the :class:`~sqlalchemy.MetaData`
+ :class:`~sqlalchemy.engine.Connectable`, will be assigned
+ the ``bind`` attribute on the :class:`~sqlalchemy.schema.MetaData`
instance.
:param metadata:
- An optional :class:`~sqlalchemy.MetaData` instance. All
+ An optional :class:`~sqlalchemy.schema.MetaData` instance. All
:class:`~sqlalchemy.schema.Table` objects implicitly declared by
subclasses of the base will share this MetaData. A MetaData instance
will be created if none is provided. The
- :class:`~sqlalchemy.MetaData` instance will be available via the
+ :class:`~sqlalchemy.schema.MetaData` instance will be available via the
`metadata` attribute of the generated declarative base class.
:param mapper:
@@ -218,6 +221,10 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
compatible callable to use as the meta type of the generated
declarative base class.
+ .. seealso::
+
+ :func:`.as_declarative`
+
"""
lcl_metadata = metadata or MetaData()
if bind:
@@ -237,6 +244,42 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
return metaclass(name, bases, class_dict)
+def as_declarative(**kw):
+ """
+ Class decorator for :func:`.declarative_base`.
+
+ Provides a syntactical shortcut to the ``cls`` argument
+ sent to :func:`.declarative_base`, allowing the base class
+ to be converted in-place to a "declarative" base::
+
+ from sqlalchemy.ext.declarative import as_declarative
+
+ @as_declarative()
+ class Base(object):
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+ id = Column(Integer, primary_key=True)
+
+ class MyMappedClass(Base):
+ # ...
+
+ All keyword arguments passed to :func:`.as_declarative` are passed
+ along to :func:`.declarative_base`.
+
+ .. versionadded:: 0.8.3
+
+ .. seealso::
+
+ :func:`.declarative_base`
+
+ """
+ def decorate(cls):
+ kw['cls'] = cls
+ kw['name'] = cls.__name__
+ return declarative_base(**kw)
+
+ return decorate
class ConcreteBase(object):
"""A helper class for 'concrete' declarative mappings.
@@ -245,7 +288,7 @@ class ConcreteBase(object):
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
- a hook for the :func:`.MapperEvents.after_configured` event.
+ a hook for the :meth:`.after_configured` event.
:class:`.ConcreteBase` produces a mapped
table for the class itself. Compare to :class:`.AbstractConcreteBase`,
@@ -300,7 +343,7 @@ class AbstractConcreteBase(ConcreteBase):
function automatically, against all tables mapped as a subclass
to this class. The function is called via the
``__declare_last__()`` function, which is essentially
- a hook for the :func:`.MapperEvents.after_configured` event.
+ a hook for the :meth:`.after_configured` event.
:class:`.AbstractConcreteBase` does not produce a mapped
table for the class itself. Compare to :class:`.ConcreteBase`,
@@ -380,7 +423,7 @@ class DeferredReflection(object):
Above, ``MyClass`` is not yet mapped. After a series of
classes have been defined in the above fashion, all tables
can be reflected and mappings created using
- :meth:`.DeferredReflection.prepare`::
+ :meth:`.prepare`::
engine = create_engine("someengine://...")
DeferredReflection.prepare(engine)
@@ -424,11 +467,30 @@ class DeferredReflection(object):
def prepare(cls, engine):
"""Reflect all :class:`.Table` objects for all current
:class:`.DeferredReflection` subclasses"""
- to_map = [m for m in _MapperConfig.configs.values()
- if issubclass(m.cls, cls)]
+
+ to_map = _DeferredMapperConfig.classes_for_base(cls)
for thingy in to_map:
cls._sa_decl_prepare(thingy.local_table, engine)
thingy.map()
+ mapper = thingy.cls.__mapper__
+ metadata = mapper.class_.metadata
+ for rel in mapper._props.values():
+ if isinstance(rel, properties.RelationshipProperty) and \
+ rel.secondary is not None:
+ if isinstance(rel.secondary, Table):
+ cls._reflect_table(rel.secondary, engine)
+ elif isinstance(rel.secondary, _class_resolver):
+ rel.secondary._resolvers += (
+ cls._sa_deferred_table_resolver(engine, metadata),
+ )
+
+ @classmethod
+ def _sa_deferred_table_resolver(cls, engine, metadata):
+ def _resolve(key):
+ t1 = Table(key, metadata)
+ cls._reflect_table(t1, engine)
+ return t1
+ return _resolve
@classmethod
def _sa_decl_prepare(cls, local_table, engine):
@@ -437,10 +499,14 @@ class DeferredReflection(object):
# will fill in db-loaded columns
# into the existing Table object.
if local_table is not None:
- Table(local_table.name,
- local_table.metadata,
- extend_existing=True,
- autoload_replace=False,
- autoload=True,
- autoload_with=engine,
- schema=local_table.schema)
+ cls._reflect_table(local_table, engine)
+
+ @classmethod
+ def _reflect_table(cls, table, engine):
+ Table(table.name,
+ table.metadata,
+ extend_existing=True,
+ autoload_replace=False,
+ autoload=True,
+ autoload_with=engine,
+ schema=table.schema)
diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py
index 5a2b88db4..a764f126b 100644
--- a/lib/sqlalchemy/ext/declarative/base.py
+++ b/lib/sqlalchemy/ext/declarative/base.py
@@ -1,25 +1,27 @@
# ext/declarative/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Internal implementation for declarative."""
from ...schema import Table, Column
-from ...orm import mapper, class_mapper
+from ...orm import mapper, class_mapper, synonym
from ...orm.interfaces import MapperProperty
from ...orm.properties import ColumnProperty, CompositeProperty
-from ...orm.util import _is_mapped_class
+from ...orm.attributes import QueryableAttribute
+from ...orm.base import _is_mapped_class
from ... import util, exc
from ...sql import expression
from ... import event
from . import clsregistry
-
+import collections
+import weakref
def _declared_mapping_info(cls):
# deferred mapping
- if cls in _MapperConfig.configs:
- return _MapperConfig.configs[cls]
+ if _DeferredMapperConfig.has_cls(cls):
+ return _DeferredMapperConfig.config_for_cls(cls)
# regular mapping
elif _is_mapped_class(cls):
return class_mapper(cls, configure=False)
@@ -148,6 +150,15 @@ def _as_declarative(cls, classname, dict_):
if isinstance(value, declarative_props):
value = getattr(cls, k)
+ elif isinstance(value, QueryableAttribute) and \
+ value.class_ is not cls and \
+ value.key != k:
+ # detect a QueryableAttribute that's already mapped being
+ # assigned elsewhere in userland, turn into a synonym()
+ value = synonym(value.key)
+ setattr(cls, k, value)
+
+
if (isinstance(value, tuple) and len(value) == 1 and
isinstance(value[0], (Column, MapperProperty))):
util.warn("Ignoring declarative-like tuple value of attribute "
@@ -173,15 +184,19 @@ def _as_declarative(cls, classname, dict_):
# extract columns from the class dict
declared_columns = set()
+ name_to_prop_key = collections.defaultdict(set)
for key, c in list(our_stuff.items()):
if isinstance(c, (ColumnProperty, CompositeProperty)):
for col in c.columns:
if isinstance(col, Column) and \
col.table is None:
_undefer_column_name(key, col)
+ if not isinstance(c, CompositeProperty):
+ name_to_prop_key[col.name].add(key)
declared_columns.add(col)
elif isinstance(c, Column):
_undefer_column_name(key, c)
+ name_to_prop_key[c.name].add(key)
declared_columns.add(c)
# if the column is the same name as the key,
# remove it from the explicit properties dict.
@@ -190,6 +205,15 @@ def _as_declarative(cls, classname, dict_):
# in multi-column ColumnProperties.
if key == c.key:
del our_stuff[key]
+
+ for name, keys in name_to_prop_key.items():
+ if len(keys) > 1:
+ util.warn(
+ "On class %r, Column object %r named directly multiple times, "
+ "only one will be used: %s" %
+ (classname, name, (", ".join(sorted(keys))))
+ )
+
declared_columns = sorted(
declared_columns, key=lambda c: c._creation_order)
table = None
@@ -281,19 +305,24 @@ def _as_declarative(cls, classname, dict_):
inherited_mapped_table is not inherited_table:
inherited_mapped_table._refresh_for_new_column(c)
- mt = _MapperConfig(mapper_cls,
+ defer_map = hasattr(cls, '_sa_decl_prepare')
+ if defer_map:
+ cfg_cls = _DeferredMapperConfig
+ else:
+ cfg_cls = _MapperConfig
+ mt = cfg_cls(mapper_cls,
cls, table,
inherits,
declared_columns,
column_copies,
our_stuff,
mapper_args_fn)
- if not hasattr(cls, '_sa_decl_prepare'):
+ if not defer_map:
mt.map()
class _MapperConfig(object):
- configs = util.OrderedDict()
+
mapped_table = None
def __init__(self, mapper_cls,
@@ -311,7 +340,7 @@ class _MapperConfig(object):
self.mapper_args_fn = mapper_args_fn
self.declared_columns = declared_columns
self.column_copies = column_copies
- self.configs[cls] = self
+
def _prepare_mapper_arguments(self):
properties = self.properties
@@ -368,7 +397,6 @@ class _MapperConfig(object):
return result_mapper_args
def map(self):
- self.configs.pop(self.cls, None)
mapper_args = self._prepare_mapper_arguments()
self.cls.__mapper__ = self.mapper_cls(
self.cls,
@@ -376,6 +404,42 @@ class _MapperConfig(object):
**mapper_args
)
+class _DeferredMapperConfig(_MapperConfig):
+ _configs = util.OrderedDict()
+
+ @property
+ def cls(self):
+ return self._cls()
+
+ @cls.setter
+ def cls(self, class_):
+ self._cls = weakref.ref(class_, self._remove_config_cls)
+ self._configs[self._cls] = self
+
+ @classmethod
+ def _remove_config_cls(cls, ref):
+ cls._configs.pop(ref, None)
+
+ @classmethod
+ def has_cls(cls, class_):
+ # 2.6 fails on weakref if class_ is an old style class
+ return isinstance(class_, type) and \
+ weakref.ref(class_) in cls._configs
+
+ @classmethod
+ def config_for_cls(cls, class_):
+ return cls._configs[weakref.ref(class_)]
+
+
+ @classmethod
+ def classes_for_base(cls, base_cls):
+ return [m for m in cls._configs.values()
+ if issubclass(m.cls, base_cls)]
+
+ def map(self):
+ self._configs.pop(self._cls, None)
+ super(_DeferredMapperConfig, self).map()
+
def _add_attribute(cls, key, value):
"""add an attribute to an existing declarative class.
@@ -384,6 +448,7 @@ def _add_attribute(cls, key, value):
adds it to the Mapper, adds a column to the mapped Table, etc.
"""
+
if '__mapper__' in cls.__dict__:
if isinstance(value, Column):
_undefer_column_name(key, value)
@@ -400,6 +465,14 @@ def _add_attribute(cls, key, value):
key,
clsregistry._deferred_relationship(cls, value)
)
+ elif isinstance(value, QueryableAttribute) and value.key != key:
+ # detect a QueryableAttribute that's already mapped being
+ # assigned elsewhere in userland, turn into a synonym()
+ value = synonym(value.key)
+ cls.__mapper__.add_property(
+ key,
+ clsregistry._deferred_relationship(cls, value)
+ )
else:
type.__setattr__(cls, key, value)
else:
diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py
index a669e37f4..fda1cffb5 100644
--- a/lib/sqlalchemy/ext/declarative/clsregistry.py
+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py
@@ -1,5 +1,5 @@
# ext/declarative/clsregistry.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -225,47 +225,62 @@ def _determine_container(key, value):
return _GetColumns(value)
-def _resolver(cls, prop):
- def resolve_arg(arg):
- import sqlalchemy
- from sqlalchemy.orm import foreign, remote
-
- fallback = sqlalchemy.__dict__.copy()
- fallback.update({'foreign': foreign, 'remote': remote})
-
- def access_cls(key):
- if key in cls._decl_class_registry:
- return _determine_container(key, cls._decl_class_registry[key])
- elif key in cls.metadata.tables:
- return cls.metadata.tables[key]
- elif key in cls.metadata._schemas:
- return _GetTable(key, cls.metadata)
- elif '_sa_module_registry' in cls._decl_class_registry and \
- key in cls._decl_class_registry['_sa_module_registry']:
- registry = cls._decl_class_registry['_sa_module_registry']
- return registry.resolve_attr(key)
+class _class_resolver(object):
+ def __init__(self, cls, prop, fallback, arg):
+ self.cls = cls
+ self.prop = prop
+ self.arg = self._declarative_arg = arg
+ self.fallback = fallback
+ self._dict = util.PopulateDict(self._access_cls)
+ self._resolvers = ()
+
+ def _access_cls(self, key):
+ cls = self.cls
+ if key in cls._decl_class_registry:
+ return _determine_container(key, cls._decl_class_registry[key])
+ elif key in cls.metadata.tables:
+ return cls.metadata.tables[key]
+ elif key in cls.metadata._schemas:
+ return _GetTable(key, cls.metadata)
+ elif '_sa_module_registry' in cls._decl_class_registry and \
+ key in cls._decl_class_registry['_sa_module_registry']:
+ registry = cls._decl_class_registry['_sa_module_registry']
+ return registry.resolve_attr(key)
+ elif self._resolvers:
+ for resolv in self._resolvers:
+ value = resolv(key)
+ if value is not None:
+ return value
+
+ return self.fallback[key]
+
+ def __call__(self):
+ try:
+ x = eval(self.arg, globals(), self._dict)
+
+ if isinstance(x, _GetColumns):
+ return x.cls
else:
- return fallback[key]
+ return x
+ except NameError as n:
+ raise exc.InvalidRequestError(
+ "When initializing mapper %s, expression %r failed to "
+ "locate a name (%r). If this is a class name, consider "
+ "adding this relationship() to the %r class after "
+ "both dependent classes have been defined." %
+ (self.prop.parent, self.arg, n.args[0], self.cls)
+ )
- d = util.PopulateDict(access_cls)
- def return_cls():
- try:
- x = eval(arg, globals(), d)
+def _resolver(cls, prop):
+ import sqlalchemy
+ from sqlalchemy.orm import foreign, remote
- if isinstance(x, _GetColumns):
- return x.cls
- else:
- return x
- except NameError as n:
- raise exc.InvalidRequestError(
- "When initializing mapper %s, expression %r failed to "
- "locate a name (%r). If this is a class name, consider "
- "adding this relationship() to the %r class after "
- "both dependent classes have been defined." %
- (prop.parent, arg, n.args[0], cls)
- )
- return return_cls
+ fallback = sqlalchemy.__dict__.copy()
+ fallback.update({'foreign': foreign, 'remote': remote})
+
+ def resolve_arg(arg):
+ return _class_resolver(cls, prop, fallback, arg)
return resolve_arg
@@ -277,7 +292,7 @@ def _deferred_relationship(cls, prop):
for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
'secondary', '_user_defined_foreign_keys', 'remote_side'):
v = getattr(prop, attr)
- if isinstance(v, str):
+ if isinstance(v, util.string_types):
setattr(prop, attr, resolve_arg(v))
if prop.backref and isinstance(prop.backref, tuple):
diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py
index 95e264c3b..8b3f968dc 100644
--- a/lib/sqlalchemy/ext/horizontal_shard.py
+++ b/lib/sqlalchemy/ext/horizontal_shard.py
@@ -1,5 +1,5 @@
# ext/horizontal_shard.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py
index 59e5a74cb..576e0bd4e 100644
--- a/lib/sqlalchemy/ext/hybrid.py
+++ b/lib/sqlalchemy/ext/hybrid.py
@@ -1,5 +1,5 @@
# ext/hybrid.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -229,7 +229,7 @@ mapping which relates a ``User`` to a ``SavingsAccount``::
account = Account(owner=self)
else:
account = self.accounts[0]
- account.balance = balance
+ account.balance = value
@balance.expression
def balance(cls):
@@ -269,7 +269,7 @@ Correlated Subquery Relationship Hybrid
We can, of course, forego being dependent on the enclosing query's usage
of joins in favor of the correlated subquery, which can portably be packed
-into a single colunn expression. A correlated subquery is more portable, but
+into a single column expression. A correlated subquery is more portable, but
often performs more poorly at the SQL level. Using the same technique
illustrated at :ref:`mapper_column_property_sql_expressions`,
we can adjust our ``SavingsAccount`` example to aggregate the balances for
diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py
index bb44a492c..2cf36e9bd 100644
--- a/lib/sqlalchemy/ext/instrumentation.py
+++ b/lib/sqlalchemy/ext/instrumentation.py
@@ -22,7 +22,7 @@ see the example :ref:`examples_instrumentation`.
:mod:`sqlalchemy.orm.instrumentation` so that it
takes effect, including recognition of
``__sa_instrumentation_manager__`` on mapped classes, as
- well :attr:`.instrumentation_finders`
+ well :data:`.instrumentation_finders`
being used to determine class instrumentation resolution.
"""
@@ -31,7 +31,7 @@ from ..orm.instrumentation import (
ClassManager, InstrumentationFactory, _default_state_getter,
_default_dict_getter, _default_manager_getter
)
-from ..orm import attributes, collections
+from ..orm import attributes, collections, base as orm_base
from .. import util
from ..orm import exc as orm_exc
import weakref
@@ -399,9 +399,9 @@ def _install_lookups(lookups):
instance_state = lookups['instance_state']
instance_dict = lookups['instance_dict']
manager_of_class = lookups['manager_of_class']
- attributes.instance_state = \
+ orm_base.instance_state = attributes.instance_state = \
orm_instrumentation.instance_state = instance_state
- attributes.instance_dict = \
+ orm_base.instance_dict = attributes.instance_dict = \
orm_instrumentation.instance_dict = instance_dict
- attributes.manager_of_class = \
+ orm_base.manager_of_class = attributes.manager_of_class = \
orm_instrumentation.manager_of_class = manager_of_class
diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py
index d3133b1f5..82410031d 100644
--- a/lib/sqlalchemy/ext/mutable.py
+++ b/lib/sqlalchemy/ext/mutable.py
@@ -1,5 +1,5 @@
# ext/mutable.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,13 +7,9 @@
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
-The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy
-approach to in-place mutations of scalar values, established by the
-:class:`.types.MutableType` class as well as the ``mutable=True`` type flag,
-with a system that allows change events to be propagated from the value to
-the owning parent, thereby removing the need for the ORM to maintain copies
-of values as well as the very expensive requirement of scanning through all
-"mutable" values on each flush call, looking for changes.
+.. versionadded:: 0.7 :mod:`sqlalchemy.ext.mutable` replaces SQLAlchemy's
+ legacy approach to in-place mutations of scalar values; see
+ :ref:`07_migration_mutation_extension`.
.. _mutable_scalars:
@@ -182,7 +178,7 @@ callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
-that excludes the :meth:`~.MutableBase._parents` collection from the pickle
+that excludes the :meth:`~MutableBase._parents` collection from the pickle
stream::
class MyMutableType(Mutable):
@@ -332,7 +328,7 @@ Supporting Pickling
As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
-:meth:`.MutableBase._parents` attribute which isn't picklable. If we need to
+:meth:`MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
@@ -349,7 +345,7 @@ the minimal form of our ``Point`` class::
As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
-:meth:`.MutableBase._parents` collection is restored to all ``Point`` objects.
+:meth:`MutableBase._parents` collection is restored to all ``Point`` objects.
"""
from ..orm.attributes import flag_modified
@@ -542,7 +538,7 @@ class Mutable(MutableBase):
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
- of the particular :meth:`.Mutable` subclass to establish a global
+ of the particular :class:`.Mutable` subclass to establish a global
association.
.. warning::
@@ -595,7 +591,7 @@ def _setup_composite_listener():
issubclass(prop.composite_class, MutableComposite)):
prop.composite_class._listen_on_attribute(
getattr(class_, prop.key), False, class_)
- if not Mapper.dispatch.mapper_configured._contains(Mapper, _listen_for_type):
+ if not event.contains(Mapper, "mapper_configured", _listen_for_type):
event.listen(Mapper, 'mapper_configured', _listen_for_type)
_setup_composite_listener()
diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py
index 24d405e39..9310c6071 100644
--- a/lib/sqlalchemy/ext/orderinglist.py
+++ b/lib/sqlalchemy/ext/orderinglist.py
@@ -1,5 +1,5 @@
# ext/orderinglist.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
index 8abd1fdf3..388cd4048 100644
--- a/lib/sqlalchemy/ext/serializer.py
+++ b/lib/sqlalchemy/ext/serializer.py
@@ -1,5 +1,5 @@
# ext/serializer.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -58,7 +58,7 @@ from ..orm.interfaces import MapperProperty
from ..orm.attributes import QueryableAttribute
from .. import Table, Column
from ..engine import Engine
-from ..util import pickle, byte_buffer, b64encode, b64decode
+from ..util import pickle, byte_buffer, b64encode, b64decode, text_type
import re
@@ -80,9 +80,9 @@ def Serializer(*args, **kw):
id = "mapperprop:" + b64encode(pickle.dumps(obj.parent.class_)) + \
":" + obj.key
elif isinstance(obj, Table):
- id = "table:" + str(obj)
+ id = "table:" + text_type(obj.key)
elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id = "column:" + str(obj.table) + ":" + obj.key
+ id = "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
elif isinstance(obj, Session):
id = "session:"
elif isinstance(obj, Engine):
@@ -112,7 +112,7 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
return None
def persistent_load(id):
- m = our_ids.match(str(id))
+ m = our_ids.match(text_type(id))
if not m:
return None
else:
diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py
index 74218fa91..fe9e40555 100644
--- a/lib/sqlalchemy/inspection.py
+++ b/lib/sqlalchemy/inspection.py
@@ -1,5 +1,5 @@
# sqlalchemy/inspect.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,11 +39,11 @@ def inspect(subject, raiseerr=True):
The returned value in some cases may be the
same object as the one given, such as if a
- :class:`.orm.Mapper` object is passed. In other
+ :class:`.Mapper` object is passed. In other
cases, it will be an instance of the registered
inspection type for the given object, such as
- if a :class:`.engine.Engine` is passed, an
- :class:`.engine.Inspector` object is returned.
+ if an :class:`.engine.Engine` is passed, an
+ :class:`.Inspector` object is returned.
:param subject: the subject to be inspected.
:param raiseerr: When ``True``, if the given subject
@@ -87,5 +87,6 @@ def _inspects(*types):
return decorate
-def _self_inspects(*types):
- _inspects(*types)(True)
+def _self_inspects(cls):
+ _inspects(cls)(True)
+ return cls
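
With ``_self_inspects`` now acting as a class decorator, the public :func:`.inspect` behavior is unchanged; a quick sketch of the Engine case the docstring describes::

    from sqlalchemy import create_engine, inspect

    engine = create_engine("sqlite://")
    insp = inspect(engine)            # an Inspector, per the docstring
    print(insp.get_table_names())
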
diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py
index 2968176a7..ed50a6456 100644
--- a/lib/sqlalchemy/interfaces.py
+++ b/lib/sqlalchemy/interfaces.py
@@ -1,5 +1,5 @@
# sqlalchemy/interfaces.py
-# Copyright (C) 2007-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2007-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2007 Jason Kirtland jek@discorporate.us
#
# This module is part of SQLAlchemy and is released under
diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py
index 1bb5581de..935761d5f 100644
--- a/lib/sqlalchemy/log.py
+++ b/lib/sqlalchemy/log.py
@@ -1,5 +1,5 @@
# sqlalchemy/log.py
-# Copyright (C) 2006-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2006-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk
#
# This module is part of SQLAlchemy and is released under
@@ -38,17 +38,13 @@ def _add_default_handler(logger):
_logged_classes = set()
-def class_logger(cls, enable=False):
+def class_logger(cls):
logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
- if enable == 'debug':
- logger.setLevel(logging.DEBUG)
- elif enable == 'info':
- logger.setLevel(logging.INFO)
cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG)
cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO)
cls.logger = logger
_logged_classes.add(cls)
-
+ return cls
class Identified(object):
logging_name = None
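
Because ``class_logger()`` now returns the class, it can be applied as a decorator. A minimal sketch against this internal helper (``MyComponent`` is a hypothetical class)::

    import logging
    from sqlalchemy.log import class_logger

    logging.basicConfig(level=logging.DEBUG)

    @class_logger                 # valid now that the class is returned
    class MyComponent(object):
        pass

    # logger is named "<module>.MyComponent" by class_logger()
    MyComponent.logger.debug("configured")
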
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 1173d5d09..7825a70ac 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -1,5 +1,5 @@
# orm/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -24,11 +24,13 @@ from .mapper import (
from .interfaces import (
EXT_CONTINUE,
EXT_STOP,
- MapperExtension,
PropComparator,
+ )
+from .deprecated_interfaces import (
+ MapperExtension,
SessionExtension,
AttributeExtension,
- )
+)
from .util import (
aliased,
join,
@@ -39,14 +41,13 @@ from .util import (
with_parent,
with_polymorphic,
)
-from .properties import (
- ColumnProperty,
+from .properties import ColumnProperty
+from .relationships import RelationshipProperty
+from .descriptor_props import (
ComparableProperty,
CompositeProperty,
- RelationshipProperty,
- PropertyLoader,
SynonymProperty,
- )
+ )
from .relationships import (
foreign,
remote,
@@ -61,75 +62,10 @@ from .scoping import (
scoped_session
)
from . import mapper as mapperlib
-from . import strategies
-from .query import AliasOption, Query
-from ..sql import util as sql_util
-from .. import util as sa_util
-
-from . import interfaces
-
-# here, we can establish InstrumentationManager back
-# in sqlalchemy.orm and sqlalchemy.orm.interfaces, which
-# also re-establishes the extended instrumentation system.
-#from ..ext import instrumentation as _ext_instrumentation
-#InstrumentationManager = \
-# interfaces.InstrumentationManager = \
-# _ext_instrumentation.InstrumentationManager
-
-__all__ = (
- 'EXT_CONTINUE',
- 'EXT_STOP',
- 'MapperExtension',
- 'AttributeExtension',
- 'PropComparator',
- 'Query',
- 'Session',
- 'aliased',
- 'backref',
- 'class_mapper',
- 'clear_mappers',
- 'column_property',
- 'comparable_property',
- 'compile_mappers',
- 'configure_mappers',
- 'composite',
- 'contains_alias',
- 'contains_eager',
- 'create_session',
- 'defer',
- 'deferred',
- 'dynamic_loader',
- 'eagerload',
- 'eagerload_all',
- 'foreign',
- 'immediateload',
- 'join',
- 'joinedload',
- 'joinedload_all',
- 'lazyload',
- 'mapper',
- 'make_transient',
- 'noload',
- 'object_mapper',
- 'object_session',
- 'outerjoin',
- 'polymorphic_union',
- 'reconstructor',
- 'relationship',
- 'relation',
- 'remote',
- 'scoped_session',
- 'sessionmaker',
- 'subqueryload',
- 'subqueryload_all',
- 'synonym',
- 'undefer',
- 'undefer_group',
- 'validates',
- 'was_deleted',
- 'with_polymorphic'
- )
-
+from .query import AliasOption, Query, Bundle
+from ..util.langhelpers import public_factory
+from .. import util as _sa_util
+from . import strategies as _strategies
def create_session(bind=None, **kwargs):
"""Create a new :class:`.Session`
@@ -167,501 +103,7 @@ def create_session(bind=None, **kwargs):
kwargs.setdefault('expire_on_commit', False)
return Session(bind=bind, **kwargs)
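
A quick sketch of :func:`.create_session`; the ``expire_on_commit`` default is the one visible in the hunk above, the other legacy defaults follow the docstring::

    from sqlalchemy import create_engine
    from sqlalchemy.orm import create_session

    engine = create_engine("sqlite://")
    sess = create_session(bind=engine)
    assert sess.expire_on_commit is False   # the setdefault seen above
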
-
-def relationship(argument, secondary=None, **kwargs):
- """Provide a relationship of a primary Mapper to a secondary Mapper.
-
- This corresponds to a parent-child or associative table relationship. The
- constructed class is an instance of :class:`.RelationshipProperty`.
-
- A typical :func:`.relationship`, used in a classical mapping::
-
- mapper(Parent, properties={
- 'children': relationship(Child)
- })
-
- Some arguments accepted by :func:`.relationship` optionally accept a
- callable function, which when called produces the desired value.
- The callable is invoked by the parent :class:`.Mapper` at "mapper
- initialization" time, which happens only when mappers are first used, and
- is assumed to be after all mappings have been constructed. This can be
- used to resolve order-of-declaration and other dependency issues, such as
- if ``Child`` is declared below ``Parent`` in the same file::
-
- mapper(Parent, properties={
- "children":relationship(lambda: Child,
- order_by=lambda: Child.id)
- })
-
- When using the :ref:`declarative_toplevel` extension, the Declarative
- initializer allows string arguments to be passed to :func:`.relationship`.
- These string arguments are converted into callables that evaluate
- the string as Python code, using the Declarative
- class-registry as a namespace. This allows the lookup of related
- classes to be automatic via their string name, and removes the need to
- import related classes at all into the local module space::
-
- from sqlalchemy.ext.declarative import declarative_base
-
- Base = declarative_base()
-
- class Parent(Base):
- __tablename__ = 'parent'
- id = Column(Integer, primary_key=True)
- children = relationship("Child", order_by="Child.id")
-
- A full array of examples and reference documentation regarding
- :func:`.relationship` is at :ref:`relationship_config_toplevel`.
-
- :param argument:
- a mapped class, or actual :class:`.Mapper` instance, representing the
- target of the relationship.
-
- ``argument`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param secondary:
- for a many-to-many relationship, specifies the intermediary
- table, and is an instance of :class:`.Table`. The ``secondary`` keyword
- argument should generally only
- be used for a table that is not otherwise expressed in any class
-      mapping, unless this relationship is declared as ``viewonly``;
-      otherwise, conflicting persistence operations can occur.
-
- ``secondary`` may
- also be passed as a callable function which is evaluated at
- mapper initialization time.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- many-to-one reference should be loaded when replaced, if
- not already loaded. Normally, history tracking logic for
- simple many-to-ones only needs to be aware of the "new"
- value in order to perform a flush. This flag is available
- for applications that make use of
- :func:`.attributes.get_history` which also need to know
- the "previous" value of the attribute.
-
- :param backref:
- indicates the string name of a property to be placed on the related
- mapper's class that will handle this relationship in the other
- direction. The other property will be created automatically
- when the mappers are configured. Can also be passed as a
- :func:`backref` object to control the configuration of the
- new relationship.
-
- :param back_populates:
- Takes a string name and has the same meaning as ``backref``,
- except the complementing property is **not** created automatically,
- and instead must be configured explicitly on the other mapper. The
- complementing property should also indicate ``back_populates``
- to this relationship to ensure proper functioning.
-
- :param cascade:
- a comma-separated list of cascade rules which determines how
- Session operations should be "cascaded" from parent to child.
- This defaults to ``False``, which means the default cascade
- should be used. The default value is ``"save-update, merge"``.
-
- Available cascades are:
-
- * ``save-update`` - cascade the :meth:`.Session.add`
- operation. This cascade applies both to future and
- past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
- meaning new items added to a collection or scalar relationship
- get placed into the same session as that of the parent, and
- also applies to items which have been removed from this
- relationship but are still part of unflushed history.
-
- * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
- operation
-
- * ``expunge`` - cascade the :meth:`.Session.expunge`
- operation
-
- * ``delete`` - cascade the :meth:`.Session.delete`
- operation
-
- * ``delete-orphan`` - if an item of the child's type is
- detached from its parent, mark it for deletion.
-
- .. versionchanged:: 0.7
- This option does not prevent
- a new instance of the child object from being persisted
- without a parent to start with; to constrain against
- that case, ensure the child's foreign key column(s)
- is configured as NOT NULL
-
- * ``refresh-expire`` - cascade the :meth:`.Session.expire`
- and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
-
-    * ``all`` - shorthand for "save-update, merge, refresh-expire,
- expunge, delete"
-
- See the section :ref:`unitofwork_cascades` for more background
- on configuring cascades.
-
- :param cascade_backrefs=True:
- a boolean value indicating if the ``save-update`` cascade should
- operate along an assignment event intercepted by a backref.
- When set to ``False``,
- the attribute managed by this relationship will not cascade
- an incoming transient object into the session of a
- persistent parent, if the event is received via backref.
-
- That is::
-
- mapper(A, a_table, properties={
- 'bs':relationship(B, backref="a", cascade_backrefs=False)
- })
-
- If an ``A()`` is present in the session, assigning it to
- the "a" attribute on a transient ``B()`` will not place
- the ``B()`` into the session. To set the flag in the other
- direction, i.e. so that ``A().bs.append(B())`` won't add
- a transient ``A()`` into the session for a persistent ``B()``::
-
- mapper(A, a_table, properties={
- 'bs':relationship(B,
- backref=backref("a", cascade_backrefs=False)
- )
- })
-
- See the section :ref:`unitofwork_cascades` for more background
- on configuring cascades.
-
- :param collection_class:
-      a class or callable that returns a new list-holding object; it will
-      be used in place of a plain list for storing elements.
- Behavior of this attribute is described in detail at
- :ref:`custom_collections`.
-
- :param comparator_factory:
- a class which extends :class:`.RelationshipProperty.Comparator` which
- provides custom SQL clause generation for comparison operations.
-
- :param doc:
- docstring which will be applied to the resulting descriptor.
-
- :param extension:
- an :class:`.AttributeExtension` instance, or list of extensions,
- which will be prepended to the list of attribute listeners for
- the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- :param foreign_keys:
- a list of columns which are to be used as "foreign key" columns,
- or columns which refer to the value in a remote column, within the
- context of this :func:`.relationship` object's ``primaryjoin``
- condition. That is, if the ``primaryjoin`` condition of this
- :func:`.relationship` is ``a.id == b.a_id``, and the values in ``b.a_id``
- are required to be present in ``a.id``, then the "foreign key" column
- of this :func:`.relationship` is ``b.a_id``.
-
- In normal cases, the ``foreign_keys`` parameter is **not required.**
- :func:`.relationship` will **automatically** determine which columns
-    in the ``primaryjoin`` condition are to be considered "foreign key"
- columns based on those :class:`.Column` objects that specify
- :class:`.ForeignKey`, or are otherwise listed as referencing columns
- in a :class:`.ForeignKeyConstraint` construct. ``foreign_keys`` is only
- needed when:
-
- 1. There is more than one way to construct a join from the local
- table to the remote table, as there are multiple foreign key
- references present. Setting ``foreign_keys`` will limit the
- :func:`.relationship` to consider just those columns specified
- here as "foreign".
-
- .. versionchanged:: 0.8
- A multiple-foreign key join ambiguity can be resolved by
- setting the ``foreign_keys`` parameter alone, without the
- need to explicitly set ``primaryjoin`` as well.
-
- 2. The :class:`.Table` being mapped does not actually have
- :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
- constructs present, often because the table
- was reflected from a database that does not support foreign key
- reflection (MySQL MyISAM).
-
- 3. The ``primaryjoin`` argument is used to construct a non-standard
- join condition, which makes use of columns or expressions that do
- not normally refer to their "parent" column, such as a join condition
- expressed by a complex comparison using a SQL function.
-
- The :func:`.relationship` construct will raise informative error messages
- that suggest the use of the ``foreign_keys`` parameter when presented
- with an ambiguous condition. In typical cases, if :func:`.relationship`
- doesn't raise any exceptions, the ``foreign_keys`` parameter is usually
- not needed.
-
- ``foreign_keys`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- .. seealso::
-
- :ref:`relationship_foreign_keys`
-
- :ref:`relationship_custom_foreign`
-
- :func:`.foreign` - allows direct annotation of the "foreign" columns
- within a ``primaryjoin`` condition.
-
- .. versionadded:: 0.8
- The :func:`.foreign` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "foreign".
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param innerjoin=False:
- when ``True``, joined eager loads will use an inner join to join
- against related tables instead of an outer join. The purpose
- of this option is generally one of performance, as inner joins
- generally perform better than outer joins. Another reason can be
- the use of ``with_lockmode``, which does not support outer joins.
-
- This flag can be set to ``True`` when the relationship references an
- object via many-to-one using local foreign keys that are not nullable,
- or when the reference is one-to-one or a collection that is guaranteed
-      to have at least one entry.
-
- :param join_depth:
- when non-``None``, an integer value indicating how many levels
- deep "eager" loaders should join on a self-referring or cyclical
- relationship. The number counts how many times the same Mapper
- shall be present in the loading condition along a particular join
- branch. When left at its default of ``None``, eager loaders
-      will stop chaining when they encounter the same target mapper
- which is already higher up in the chain. This option applies
- both to joined- and subquery- eager loaders.
-
- :param lazy='select': specifies
- how the related items should be loaded. Default value is
- ``select``. Values include:
-
- * ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement, or identity map
- fetch for simple many-to-one references.
-
- * ``immediate`` - items should be loaded as the parents are loaded,
- using a separate SELECT statement, or identity map fetch for
- simple many-to-one references.
-
- .. versionadded:: 0.6.5
-
- * ``joined`` - items should be loaded "eagerly" in the same query as
- that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
- the join is "outer" or not is determined by the ``innerjoin``
- parameter.
-
- * ``subquery`` - items should be loaded "eagerly" as the parents are
- loaded, using one additional SQL statement, which issues a JOIN to a
- subquery of the original statement, for each collection requested.
-
- * ``noload`` - no loading should occur at any time. This is to
- support "write-only" attributes, or attributes which are
- populated in some manner specific to the application.
-
- * ``dynamic`` - the attribute will return a pre-configured
- :class:`~sqlalchemy.orm.query.Query` object for all read
- operations, onto which further filtering operations can be
- applied before iterating the results. See
- the section :ref:`dynamic_relationship` for more details.
-
- * True - a synonym for 'select'
-
- * False - a synonym for 'joined'
-
- * None - a synonym for 'noload'
-
- Detailed discussion of loader strategies is at :doc:`/orm/loading`.
-
- :param load_on_pending=False:
- Indicates loading behavior for transient or pending parent objects.
-
- .. versionchanged:: 0.8
- load_on_pending is superseded by
- :meth:`.Session.enable_relationship_loading`.
-
- When set to ``True``, causes the lazy-loader to
- issue a query for a parent object that is not persistent, meaning it has
- never been flushed. This may take effect for a pending object when
- autoflush is disabled, or for a transient object that has been
- "attached" to a :class:`.Session` but is not part of its pending
- collection.
-
- The load_on_pending flag does not improve behavior
- when the ORM is used normally - object references should be constructed
- at the object level, not at the foreign key level, so that they
- are present in an ordinary way before flush() proceeds. This flag
-      is not intended for general use.
-
- .. versionadded:: 0.6.5
-
- :param order_by:
- indicates the ordering that should be applied when loading these
- items. ``order_by`` is expected to refer to one of the :class:`.Column`
- objects to which the target class is mapped, or
- the attribute itself bound to the target class which refers
- to the column.
-
- ``order_by`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param passive_deletes=False:
- Indicates loading behavior during delete operations.
-
- A value of True indicates that unloaded child items should not
- be loaded during a delete operation on the parent. Normally,
- when a parent item is deleted, all child items are loaded so
- that they can either be marked as deleted, or have their
- foreign key to the parent set to NULL. Marking this flag as
- True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
- place which will handle updating/deleting child rows on the
- database side.
-
- Additionally, setting the flag to the string value 'all' will
- disable the "nulling out" of the child foreign keys, when there
- is no delete or delete-orphan cascade enabled. This is
- typically used when a triggering or error raise scenario is in
- place on the database side. Note that the foreign key
- attributes on in-session child objects will not be changed
- after a flush occurs so this is a very special use-case
- setting.
-
- :param passive_updates=True:
- Indicates loading and INSERT/UPDATE/DELETE behavior when the
- source of a foreign key value changes (i.e. an "on update"
- cascade), which are typically the primary key columns of the
- source row.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will
- handle propagation of an UPDATE from a source column to
- dependent rows. Note that with databases which enforce
- referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
- ON UPDATE CASCADE is required for this operation. The
- relationship() will update the value of the attribute on related
- items which are locally present in the session during a flush.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The relationship() will issue the
- appropriate UPDATE statements to the database in response to the
- change of a referenced key, and items locally present in the
- session during a flush will also be refreshed.
-
- This flag should probably be set to False if primary key changes
- are expected and the database in use doesn't support CASCADE
- (i.e. SQLite, MySQL MyISAM tables).
-
- Also see the passive_updates flag on ``mapper()``.
-
- A future SQLAlchemy release will provide a "detect" feature for
- this flag.
-
- :param post_update:
- this indicates that the relationship should be handled by a
- second UPDATE statement after an INSERT or before a
- DELETE. Currently, it also will issue an UPDATE after the
- instance was UPDATEd as well, although this technically should
- be improved. This flag is used to handle saving bi-directional
- dependencies between two individual rows (i.e. each row
- references the other), where it would otherwise be impossible to
- INSERT or DELETE both rows fully since one row exists before the
- other. Use this flag when a particular mapping arrangement will
- incur two rows that are dependent on each other, such as a table
- that has a one-to-many relationship to a set of child rows, and
- also has a column that references a single child row within that
- list (i.e. both tables contain a foreign key to each other). If
- a ``flush()`` operation returns an error that a "cyclical
- dependency" was detected, this is a cue that you might want to
- use ``post_update`` to "break" the cycle.
-
- :param primaryjoin:
- a SQL expression that will be used as the primary
- join of this child object against the parent object, or in a
- many-to-many relationship the join of the primary object to the
- association table. By default, this value is computed based on the
- foreign key relationships of the parent and child tables (or association
- table).
-
- ``primaryjoin`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param remote_side:
- used for self-referential relationships, indicates the column or
- list of columns that form the "remote side" of the relationship.
-
- ``remote_side`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- .. versionchanged:: 0.8
- The :func:`.remote` annotation can also be applied
- directly to the ``primaryjoin`` expression, which is an alternate,
- more specific system of describing which columns in a particular
- ``primaryjoin`` should be considered "remote".
-
- :param query_class:
- a :class:`.Query` subclass that will be used as the base of the
- "appender query" returned by a "dynamic" relationship, that
- is, a relationship that specifies ``lazy="dynamic"`` or was
- otherwise constructed using the :func:`.orm.dynamic_loader`
- function.
-
- :param secondaryjoin:
- a SQL expression that will be used as the join of
- an association table to the child object. By default, this value is
- computed based on the foreign key relationships of the association and
- child tables.
-
- ``secondaryjoin`` may also be passed as a callable function
- which is evaluated at mapper initialization time, and may be passed as a
- Python-evaluable string when using Declarative.
-
- :param single_parent=(True|False):
- when True, installs a validator which will prevent objects
- from being associated with more than one parent at a time.
- This is used for many-to-one or many-to-many relationships that
- should be treated either as one-to-one or one-to-many. Its
- usage is optional unless delete-orphan cascade is also
-      set on this relationship(), in which case it's required.
-
- :param uselist=(True|False):
- a boolean that indicates if this property should be loaded as a
- list or a scalar. In most cases, this value is determined
- automatically by ``relationship()``, based on the type and direction
- of the relationship - one to many forms a list, many to one
- forms a scalar, many to many is a list. If a scalar is desired
- where normally a list would be present, such as a bi-directional
- one-to-one relationship, set uselist to False.
-
- :param viewonly=False:
- when set to True, the relationship is used only for loading objects
- within the relationship, and has no effect on the unit-of-work
- flush process. Relationships with viewonly can specify any kind of
- join conditions to provide additional views of related objects
- onto a parent object. Note that the functionality of a viewonly
- relationship has its limits - complicated join conditions may
- not compile into eager or lazy loaders properly. If this is the
- case, use an alternative method.
-
- .. versionchanged:: 0.6
- :func:`relationship` was renamed from its previous name
- :func:`relation`.
-
- """
- return RelationshipProperty(argument, secondary=secondary, **kwargs)
-
+relationship = public_factory(RelationshipProperty, ".orm.relationship")
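
The ``public_factory()`` one-liner generates a module-level function whose signature and docstring come from :class:`.RelationshipProperty` itself; call sites are unchanged. A minimal sketch, assuming a mapped ``Child`` class exists elsewhere::

    from sqlalchemy.orm import relationship
    from sqlalchemy.orm.relationships import RelationshipProperty

    rel = relationship("Child", lazy="joined")
    assert isinstance(rel, RelationshipProperty)
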
def relation(*arg, **kw):
"""A synonym for :func:`relationship`."""
@@ -689,138 +131,8 @@ def dynamic_loader(argument, **kw):
return relationship(argument, **kw)
-def column_property(*cols, **kw):
- """Provide a column-level property for use with a Mapper.
-
- Column-based properties can normally be applied to the mapper's
- ``properties`` dictionary using the :class:`.Column` element directly.
- Use this function when the given column is not directly present within the
- mapper's selectable; examples include SQL expressions, functions, and
- scalar SELECT queries.
-
- Columns that aren't present in the mapper's selectable won't be persisted
- by the mapper and are effectively "read-only" attributes.
-
- :param \*cols:
- list of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. Normally, history tracking logic for
- simple non-primary-key scalar values only needs to be
- aware of the "new" value in order to perform a flush. This
- flag is available for applications that make use of
- :func:`.attributes.get_history` or :meth:`.Session.is_modified`
- which also need to know
- the "previous" value of the attribute.
-
- .. versionadded:: 0.6.6
-
- :param comparator_factory: a class which extends
- :class:`.ColumnProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param group:
- a group name for this property when marked as deferred.
-
- :param deferred:
- when True, the column property is "deferred", meaning that
- it does not load immediately, and is instead loaded when the
- attribute is first accessed on an instance. See also
- :func:`~sqlalchemy.orm.deferred`.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param expire_on_flush=True:
- Disable expiry on flush. A column_property() which refers
- to a SQL expression (and not a single table-bound column)
- is considered to be a "read only" property; populating it
- has no effect on the state of data, and it can only return
- database state. For this reason a column_property()'s value
- is expired whenever the parent object is involved in a
- flush, that is, has any kind of "dirty" state within a flush.
- Setting this parameter to ``False`` will have the effect of
- leaving any existing value present after the flush proceeds.
- Note however that the :class:`.Session` with default expiration
- settings still expires
-      all attributes after a :meth:`.Session.commit` call.
-
- .. versionadded:: 0.7.3
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param extension:
- an
- :class:`.AttributeExtension`
- instance, or list of extensions, which will be prepended
- to the list of attribute listeners for the resulting
- descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
-
- return ColumnProperty(*cols, **kw)
-
-
-def composite(class_, *cols, **kwargs):
- """Return a composite column-based property for use with a Mapper.
-
- See the mapping documentation section :ref:`mapper_composite` for a full
- usage example.
-
- The :class:`.MapperProperty` returned by :func:`.composite`
- is the :class:`.CompositeProperty`.
-
- :param class\_:
- The "composite type" class.
-
- :param \*cols:
- List of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. See the same flag on :func:`.column_property`.
-
- .. versionchanged:: 0.7
- This flag specifically becomes meaningful
- - previously it was a placeholder.
-
- :param group:
- A group name for this property when marked as deferred.
-
- :param deferred:
- When True, the column property is "deferred", meaning that it does not
- load immediately, and is instead loaded when the attribute is first
- accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
-
- :param comparator_factory: a class which extends
- :class:`.CompositeProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.MapperProperty.info` attribute of this object.
-
- .. versionadded:: 0.8
-
- :param extension:
- an :class:`.AttributeExtension` instance,
- or list of extensions, which will be prepended to the list of
- attribute listeners for the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
- return CompositeProperty(class_, *cols, **kwargs)
+column_property = public_factory(ColumnProperty, ".orm.column_property")
+composite = public_factory(CompositeProperty, ".orm.composite")
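
As with :func:`.relationship`, the ``column_property()`` and ``composite()`` call signatures are unchanged by the move to ``public_factory()``. A self-contained sketch of :func:`.composite` (``Point`` and ``Vertex`` are hypothetical)::

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import composite

    Base = declarative_base()

    class Point(object):                  # the "composite type" class
        def __init__(self, x, y):
            self.x, self.y = x, y
        def __composite_values__(self):
            return self.x, self.y

    class Vertex(Base):
        __tablename__ = 'vertex'
        id = Column(Integer, primary_key=True)
        x1 = Column(Integer)
        y1 = Column(Integer)
        start = composite(Point, x1, y1)  # same call as before
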
def backref(name, **kwargs):
@@ -836,488 +148,33 @@ def backref(name, **kwargs):
return (name, kwargs)
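
A short sketch of how the ``(name, kwargs)`` tuple returned by :func:`.backref` is consumed by :func:`.relationship`::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import backref, relationship

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # the "parent" attribute created on Child uses lazy="joined"
        children = relationship(
            "Child", backref=backref("parent", lazy="joined"))

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))
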
-def deferred(*columns, **kwargs):
- """Return a :class:`.DeferredColumnProperty`, which indicates this
-    object attribute should only be loaded from its corresponding
- table column when first accessed.
-
- Used with the "properties" dictionary sent to :func:`mapper`.
-
- See also:
-
- :ref:`deferred`
-
- """
- return ColumnProperty(deferred=True, *columns, **kwargs)
-
-
-def mapper(class_, local_table=None, *args, **params):
- """Return a new :class:`~.Mapper` object.
-
- This function is typically used behind the scenes
- via the Declarative extension. When using Declarative,
- many of the usual :func:`.mapper` arguments are handled
- by the Declarative extension itself, including ``class_``,
- ``local_table``, ``properties``, and ``inherits``.
- Other options are passed to :func:`.mapper` using
- the ``__mapper_args__`` class variable::
+def deferred(*columns, **kw):
+ """Indicate a column-based mapped attribute that by default will
+ not load unless accessed.
- class MyClass(Base):
- __tablename__ = 'my_table'
- id = Column(Integer, primary_key=True)
- type = Column(String(50))
- alt = Column("some_alt", Integer)
+ :param \*columns: columns to be mapped. This is typically a single
+ :class:`.Column` object, however a collection is supported in order
+      to map multiple columns under the same attribute.
- __mapper_args__ = {
- 'polymorphic_on' : type
- }
+ :param \**kw: additional keyword arguments passed to :class:`.ColumnProperty`.
+ .. seealso::
- Explicit use of :func:`.mapper`
- is often referred to as *classical mapping*. The above
- declarative example is equivalent in classical form to::
-
- my_table = Table("my_table", metadata,
- Column('id', Integer, primary_key=True),
- Column('type', String(50)),
- Column("some_alt", Integer)
- )
-
- class MyClass(object):
- pass
-
- mapper(MyClass, my_table,
- polymorphic_on=my_table.c.type,
- properties={
- 'alt':my_table.c.some_alt
- })
-
- See also:
-
- :ref:`classical_mapping` - discussion of direct usage of
- :func:`.mapper`
-
- :param class\_: The class to be mapped. When using Declarative,
- this argument is automatically passed as the declared class
- itself.
-
- :param local_table: The :class:`.Table` or other selectable
- to which the class is mapped. May be ``None`` if
- this mapper inherits from another mapper using single-table
- inheritance. When using Declarative, this argument is
- automatically passed by the extension, based on what
- is configured via the ``__table__`` argument or via the
- :class:`.Table` produced as a result of the ``__tablename__``
- and :class:`.Column` arguments present.
-
- :param always_refresh: If True, all query operations for this mapped
- class will overwrite all data within object instances that already
- exist within the session, erasing any in-memory changes with
- whatever information was loaded from the database. Usage of this
- flag is highly discouraged; as an alternative, see the method
- :meth:`.Query.populate_existing`.
-
- :param allow_partial_pks: Defaults to True. Indicates that a
- composite primary key with some NULL values should be considered as
- possibly existing within the database. This affects whether a
- mapper will assign an incoming row to an existing identity, as well
- as if :meth:`.Session.merge` will check the database first for a
- particular primary key value. A "partial primary key" can occur if
- one has mapped to an OUTER JOIN, for example.
-
- :param batch: Defaults to ``True``, indicating that save operations
- of multiple entities can be batched together for efficiency.
- Setting to False indicates
- that an instance will be fully saved before saving the next
- instance. This is used in the extremely rare case that a
- :class:`.MapperEvents` listener requires being called
- in between individual row persistence operations.
-
- :param column_prefix: A string which will be prepended
- to the mapped attribute name when :class:`.Column`
- objects are automatically assigned as attributes to the
- mapped class. Does not affect explicitly specified
- column-based properties.
-
- See the section :ref:`column_prefix` for an example.
-
- :param concrete: If True, indicates this mapper should use concrete
- table inheritance with its parent mapper.
-
- See the section :ref:`concrete_inheritance` for an example.
-
- :param exclude_properties: A list or set of string column names to
- be excluded from mapping.
-
- See :ref:`include_exclude_cols` for an example.
-
- :param extension: A :class:`.MapperExtension` instance or
- list of :class:`.MapperExtension` instances which will be applied
- to all operations by this :class:`.Mapper`. **Deprecated.**
- Please see :class:`.MapperEvents`.
-
- :param include_properties: An inclusive list or set of string column
- names to map.
-
- See :ref:`include_exclude_cols` for an example.
-
- :param inherits: A mapped class or the corresponding :class:`.Mapper`
- of one indicating a superclass to which this :class:`.Mapper`
- should *inherit* from. The mapped class here must be a subclass
- of the other mapper's class. When using Declarative, this argument
- is passed automatically as a result of the natural class
- hierarchy of the declared classes.
-
- See also:
-
- :ref:`inheritance_toplevel`
-
- :param inherit_condition: For joined table inheritance, a SQL
- expression which will
- define how the two tables are joined; defaults to a natural join
- between the two tables.
-
- :param inherit_foreign_keys: When ``inherit_condition`` is used and the
- columns present are missing a :class:`.ForeignKey` configuration,
- this parameter can be used to specify which columns are "foreign".
- In most cases can be left as ``None``.
-
- :param legacy_is_orphan: Boolean, defaults to ``False``.
- When ``True``, specifies that "legacy" orphan consideration
- is to be applied to objects mapped by this mapper, which means
- that a pending (that is, not persistent) object is auto-expunged
- from an owning :class:`.Session` only when it is de-associated
- from *all* parents that specify a ``delete-orphan`` cascade towards
- this mapper. The new default behavior is that the object is auto-expunged
-      when it is de-associated from *any* of its parents that specify
- ``delete-orphan`` cascade. This behavior is more consistent with
- that of a persistent object, and allows behavior to be consistent
- in more scenarios independently of whether or not an orphanable
- object has been flushed yet or not.
-
- See the change note and example at :ref:`legacy_is_orphan_addition`
- for more detail on this change.
-
- .. versionadded:: 0.8 - the consideration of a pending object as
- an "orphan" has been modified to more closely match the
- behavior as that of persistent objects, which is that the object
- is expunged from the :class:`.Session` as soon as it is
- de-associated from any of its orphan-enabled parents. Previously,
- the pending object would be expunged only if de-associated
- from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
- is added to :func:`.orm.mapper` which re-establishes the
- legacy behavior.
-
- :param non_primary: Specify that this :class:`.Mapper` is in addition
- to the "primary" mapper, that is, the one used for persistence.
- The :class:`.Mapper` created here may be used for ad-hoc
- mapping of the class to an alternate selectable, for loading
- only.
-
- The ``non_primary`` feature is rarely needed with modern
- usage.
-
- :param order_by: A single :class:`.Column` or list of :class:`.Column`
- objects for which selection operations should use as the default
- ordering for entities. By default mappers have no pre-defined
- ordering.
-
- :param passive_updates: Indicates UPDATE behavior of foreign key
- columns when a primary key column changes on a joined-table
- inheritance mapping. Defaults to ``True``.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will handle
- propagation of an UPDATE from a source column to dependent columns
- on joined-table rows.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The :class:`.Mapper` here will
- emit an UPDATE statement for the dependent columns during a
- primary key change.
-
- See also:
-
- :ref:`passive_updates` - description of a similar feature as
- used with :func:`.relationship`
-
- :param polymorphic_on: Specifies the column, attribute, or
- SQL expression used to determine the target class for an
- incoming row, when inheriting classes are present.
-
- This value is commonly a :class:`.Column` object that's
- present in the mapped :class:`.Table`::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
-
- __mapper_args__ = {
- "polymorphic_on":discriminator,
- "polymorphic_identity":"employee"
- }
-
- It may also be specified
- as a SQL expression, as in this example where we
- use the :func:`.case` construct to provide a conditional
- approach::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
-
- __mapper_args__ = {
- "polymorphic_on":case([
- (discriminator == "EN", "engineer"),
- (discriminator == "MA", "manager"),
- ], else_="employee"),
- "polymorphic_identity":"employee"
- }
-
- It may also refer to any attribute
- configured with :func:`.column_property`, or to the
- string name of one::
-
- class Employee(Base):
- __tablename__ = 'employee'
-
- id = Column(Integer, primary_key=True)
- discriminator = Column(String(50))
- employee_type = column_property(
- case([
- (discriminator == "EN", "engineer"),
- (discriminator == "MA", "manager"),
- ], else_="employee")
- )
-
- __mapper_args__ = {
- "polymorphic_on":employee_type,
- "polymorphic_identity":"employee"
- }
-
- .. versionchanged:: 0.7.4
- ``polymorphic_on`` may be specified as a SQL expression,
- or refer to any attribute configured with
- :func:`.column_property`, or to the string name of one.
-
- When setting ``polymorphic_on`` to reference an
- attribute or expression that's not present in the
- locally mapped :class:`.Table`, yet the value
- of the discriminator should be persisted to the database,
- the value of the
- discriminator is not automatically set on new
- instances; this must be handled by the user,
- either through manual means or via event listeners.
- A typical approach to establishing such a listener
- looks like::
-
- from sqlalchemy import event
- from sqlalchemy.orm import object_mapper
-
- @event.listens_for(Employee, "init", propagate=True)
- def set_identity(instance, *arg, **kw):
- mapper = object_mapper(instance)
- instance.discriminator = mapper.polymorphic_identity
-
- Where above, we assign the value of ``polymorphic_identity``
- for the mapped class to the ``discriminator`` attribute,
- thus persisting the value to the ``discriminator`` column
- in the database.
-
- See also:
-
- :ref:`inheritance_toplevel`
-
- :param polymorphic_identity: Specifies the value which
- identifies this particular class as returned by the
- column expression referred to by the ``polymorphic_on``
- setting. As rows are received, the value corresponding
- to the ``polymorphic_on`` column expression is compared
- to this value, indicating which subclass should
- be used for the newly reconstructed object.
-
- :param properties: A dictionary mapping the string names of object
- attributes to :class:`.MapperProperty` instances, which define the
- persistence behavior of that attribute. Note that :class:`.Column`
- objects present in
- the mapped :class:`.Table` are automatically placed into
- ``ColumnProperty`` instances upon mapping, unless overridden.
- When using Declarative, this argument is passed automatically,
- based on all those :class:`.MapperProperty` instances declared
- in the declared class body.
-
- :param primary_key: A list of :class:`.Column` objects which define the
- primary key to be used against this mapper's selectable unit.
- This is normally simply the primary key of the ``local_table``, but
- can be overridden here.
-
- :param version_id_col: A :class:`.Column`
- that will be used to keep a running version id of mapped entities
- in the database. This is used during save operations to ensure that
- no other thread or process has updated the instance during the
- lifetime of the entity, else a
- :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
- thrown. By default the column must be of :class:`.Integer` type,
- unless ``version_id_generator`` specifies a new generation
- algorithm.
-
- :param version_id_generator: A callable which defines the algorithm
- used to generate new version ids. Defaults to an integer
- generator. Can be replaced with one that generates timestamps,
- uuids, etc. e.g.::
-
- import uuid
-
- class MyClass(Base):
- __tablename__ = 'mytable'
- id = Column(Integer, primary_key=True)
- version_uuid = Column(String(32))
-
- __mapper_args__ = {
- 'version_id_col':version_uuid,
- 'version_id_generator':lambda version:uuid.uuid4().hex
- }
-
- The callable receives the current version identifier as its
- single argument.
-
- :param with_polymorphic: A tuple in the form ``(<classes>,
- <selectable>)`` indicating the default style of "polymorphic"
- loading, that is, which tables are queried at once. <classes> is
- any single or list of mappers and/or classes indicating the
- inherited classes that should be loaded at once. The special value
- ``'*'`` may be used to indicate all descending classes should be
- loaded immediately. The second tuple argument <selectable>
- indicates a selectable that will be used to query for multiple
- classes.
-
- See also:
-
- :ref:`concrete_inheritance` - typically uses ``with_polymorphic``
- to specify a UNION statement to select from.
-
- :ref:`with_polymorphic` - usage example of the related
- :meth:`.Query.with_polymorphic` method
+ :ref:`deferred`
"""
- return Mapper(class_, local_table, *args, **params)
-
+ return ColumnProperty(deferred=True, *columns, **kw)
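
A minimal sketch of the rewritten :func:`.deferred` in a Declarative mapping (``Photo`` is hypothetical)::

    from sqlalchemy import Column, Integer, LargeBinary
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import deferred

    Base = declarative_base()

    class Photo(Base):
        __tablename__ = 'photo'
        id = Column(Integer, primary_key=True)
        # loaded with a separate SELECT on first access, not up front
        data = deferred(Column(LargeBinary))
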
-def synonym(name, map_column=False, descriptor=None,
- comparator_factory=None, doc=None):
- """Denote an attribute name as a synonym to a mapped property.
- .. versionchanged:: 0.7
- :func:`.synonym` is superseded by the :mod:`~sqlalchemy.ext.hybrid`
- extension. See the documentation for hybrids
- at :ref:`hybrids_toplevel`.
+mapper = public_factory(Mapper, ".orm.mapper")
- Used with the ``properties`` dictionary sent to
- :func:`~sqlalchemy.orm.mapper`::
+synonym = public_factory(SynonymProperty, ".orm.synonym")
- class MyClass(object):
- def _get_status(self):
- return self._status
- def _set_status(self, value):
- self._status = value
- status = property(_get_status, _set_status)
-
- mapper(MyClass, sometable, properties={
- "status":synonym("_status", map_column=True)
- })
-
- Above, the ``status`` attribute of MyClass will produce
- expression behavior against the table column named ``status``,
- using the Python attribute ``_status`` on the mapped class
- to represent the underlying value.
-
- :param name: the name of the existing mapped property, which can be
- any other ``MapperProperty`` including column-based properties and
- relationships.
-
- :param map_column: if ``True``, an additional ``ColumnProperty`` is created
- on the mapper automatically, using the synonym's name as the keyname of
- the property, and the keyname of this ``synonym()`` as the name of the
- column to map.
-
- """
- return SynonymProperty(name, map_column=map_column,
- descriptor=descriptor,
- comparator_factory=comparator_factory,
- doc=doc)
+comparable_property = public_factory(ComparableProperty,
+ ".orm.comparable_property")
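
The classical-mapping example from the removed :func:`.synonym` docstring behaves the same way against the generated constructor::

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.orm import mapper, synonym

    metadata = MetaData()
    sometable = Table('some_table', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('status', String(50)))

    class MyClass(object):
        def _get_status(self):
            return self._status
        def _set_status(self, value):
            self._status = value
        status = property(_get_status, _set_status)

    # map_column=True maps the 'status' column under '_status', while
    # the 'status' synonym routes through the Python property
    mapper(MyClass, sometable, properties={
        "status": synonym("_status", map_column=True)
    })
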
-def comparable_property(comparator_factory, descriptor=None):
- """Provides a method of applying a :class:`.PropComparator`
- to any Python descriptor attribute.
-
- .. versionchanged:: 0.7
- :func:`.comparable_property` is superseded by
- the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
- at :ref:`hybrid_custom_comparators`.
-
- Allows any Python descriptor to behave like a SQL-enabled
- attribute when used at the class level in queries, allowing
- redefinition of expression operator behavior.
-
- In the example below we redefine :meth:`.PropComparator.operate`
- to wrap both sides of an expression in ``func.lower()`` to produce
- case-insensitive comparison::
-
- from sqlalchemy.orm import comparable_property
- from sqlalchemy.orm.interfaces import PropComparator
- from sqlalchemy.sql import func
- from sqlalchemy import Integer, String, Column
- from sqlalchemy.ext.declarative import declarative_base
-
- class CaseInsensitiveComparator(PropComparator):
- def __clause_element__(self):
- return self.prop
-
- def operate(self, op, other):
- return op(
- func.lower(self.__clause_element__()),
- func.lower(other)
- )
-
- Base = declarative_base()
-
- class SearchWord(Base):
- __tablename__ = 'search_word'
- id = Column(Integer, primary_key=True)
- word = Column(String)
- word_insensitive = comparable_property(lambda prop, mapper:
- CaseInsensitiveComparator(mapper.c.word, mapper)
- )
-
-
- A mapping like the above allows the ``word_insensitive`` attribute
- to render an expression like::
-
- >>> print SearchWord.word_insensitive == "Trucks"
- lower(search_word.word) = lower(:lower_1)
-
- :param comparator_factory:
- A PropComparator subclass or factory that defines operator behavior
- for this property.
-
- :param descriptor:
- Optional when used in a ``properties={}`` declaration. The Python
- descriptor or property to layer comparison behavior on top of.
-
- The like-named descriptor will be automatically retrieved from the
- mapped class if left blank in a ``properties`` declaration.
-
- """
- return ComparableProperty(comparator_factory, descriptor)
-
-
-@sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
+@_sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
"is renamed to :func:`.configure_mappers`")
def compile_mappers():
"""Initialize the inter-mapper relationships of all mappers that have
@@ -1359,107 +216,24 @@ def clear_mappers():
finally:
mapperlib._CONFIGURE_MUTEX.release()
-
-def joinedload(*keys, **kw):
- """Return a ``MapperOption`` that will convert the property of the given
-    name or series of mapped attributes into a joined eager load.
-
- .. versionchanged:: 0.6beta3
- This function is known as :func:`eagerload` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload` will remain available for the foreseeable
- future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- examples::
-
- # joined-load the "orders" collection on "User"
- query(User).options(joinedload(User.orders))
-
- # joined-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(joinedload(Order.items, Item.keywords))
-
- # to joined-load across both, use joinedload_all()
- query(Order).options(joinedload_all(Order.items, Item.keywords))
-
- # set the default strategy to be 'joined'
- query(Order).options(joinedload('*'))
-
- :func:`joinedload` also accepts a keyword argument `innerjoin=True` which
- indicates using an inner join instead of an outer::
-
- query(Order).options(joinedload(Order.user, innerjoin=True))
-
- .. note::
-
- The join created by :func:`joinedload` is anonymously aliased such that
- it **does not affect the query results**. An :meth:`.Query.order_by`
- or :meth:`.Query.filter` call **cannot** reference these aliased
- tables - so-called "user space" joins are constructed using
- :meth:`.Query.join`. The rationale for this is that
- :func:`joinedload` is only applied in order to affect how related
- objects or collections are loaded as an optimizing detail - it can be
- added or removed with no impact on actual results. See the section
- :ref:`zen_of_eager_loading` for a detailed description of how this is
- used, including how to use a single explicit JOIN for
- filtering/ordering and eager loading simultaneously.
-
- See also: :func:`subqueryload`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined'),
- strategies.EagerJoinOption(keys, innerjoin)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined')
-
-
-def joinedload_all(*keys, **kw):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
-    into a joined eager load.
-
- .. versionchanged:: 0.6beta3
- This function is known as :func:`eagerload_all` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload_all` will remain available for the
- foreseeable future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(joinedload_all('orders.items.keywords'))...
-
- will set all of ``orders``, ``orders.items``, and
- ``orders.items.keywords`` to load in one joined eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(joinedload_all(User.orders, Order.items, Item.keywords))
-
- The keyword arguments accept a flag `innerjoin=True|False` which will
- override the value of the `innerjoin` flag specified on the
- relationship().
-
- See also: :func:`subqueryload_all`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined', chained=True),
- strategies.EagerJoinOption(keys, innerjoin, chained=True)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined', chained=True)
-
+from . import strategy_options
+
+joinedload = strategy_options.joinedload._unbound_fn
+joinedload_all = strategy_options.joinedload._unbound_all_fn
+contains_eager = strategy_options.contains_eager._unbound_fn
+defer = strategy_options.defer._unbound_fn
+undefer = strategy_options.undefer._unbound_fn
+undefer_group = strategy_options.undefer_group._unbound_fn
+load_only = strategy_options.load_only._unbound_fn
+lazyload = strategy_options.lazyload._unbound_fn
+lazyload_all = strategy_options.lazyload_all._unbound_all_fn
+subqueryload = strategy_options.subqueryload._unbound_fn
+subqueryload_all = strategy_options.subqueryload_all._unbound_all_fn
+immediateload = strategy_options.immediateload._unbound_fn
+noload = strategy_options.noload._unbound_fn
+defaultload = strategy_options.defaultload._unbound_fn
+
+from .strategy_options import Load
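
All of the loader functions above now delegate to ``strategy_options``; the :class:`.Load` object builds the same options as an explicit per-entity chain. A runnable sketch under assumed minimal models::

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Load, Session, joinedload, relationship

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        orders = relationship("Order")

    class Order(Base):
        __tablename__ = 'orders'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('users.id'))

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    # module-level function, same call style as the removed versions
    session.query(User).options(joinedload(User.orders)).all()

    # the Load construct spells out the same option explicitly
    session.query(User).options(Load(User).joinedload("orders")).all()
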
def eagerload(*args, **kwargs):
"""A synonym for :func:`joinedload()`."""
@@ -1471,316 +245,23 @@ def eagerload_all(*args, **kwargs):
return joinedload_all(*args, **kwargs)
-def subqueryload(*keys):
- """Return a ``MapperOption`` that will convert the property
- of the given name or series of mapped attributes
-    into a subquery eager load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
- examples::
- # subquery-load the "orders" collection on "User"
- query(User).options(subqueryload(User.orders))
+contains_alias = public_factory(AliasOption, ".orm.contains_alias")
- # subquery-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(subqueryload(Order.items, Item.keywords))
- # to subquery-load across both, use subqueryload_all()
- query(Order).options(subqueryload_all(Order.items, Item.keywords))
- # set the default strategy to be 'subquery'
- query(Order).options(subqueryload('*'))
-
- See also: :func:`joinedload`, :func:`lazyload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery")
+def __go(lcls):
+ global __all__
+ from .. import util as sa_util
+ from . import dynamic
+ from . import events
+ import inspect as _inspect
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
-def subqueryload_all(*keys):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
- into a subquery eager load.
+ _sa_util.dependencies.resolve_all("sqlalchemy.orm")
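
A standalone sketch of the pattern ``__go()`` applies: compute ``__all__`` from the module namespace, skipping private names and submodules::

    import inspect as _inspect

    def _public_names(namespace):
        return sorted(
            name for name, obj in namespace.items()
            if not (name.startswith('_') or _inspect.ismodule(obj)))

    # at module bottom:  __all__ = _public_names(locals())
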
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(subqueryload_all('orders.items.keywords'))...
-
- will set all of ``orders``, ``orders.items``, and
- ``orders.items.keywords`` to load in one subquery eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(subqueryload_all(User.orders, Order.items,
- Item.keywords))
-
- See also: :func:`joinedload_all`, :func:`lazyload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
-
-
-def lazyload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True)
-
-
-def lazyload_all(*keys):
- """Return a ``MapperOption`` that will convert all the properties
- along the given dot-separated path or series of mapped attributes
- into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True, chained=True)
-
-
-def noload(*keys):
- """Return a ``MapperOption`` that will convert the property of the
- given name or series of mapped attributes into a non-load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`,
- :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=None)
-
-
-def immediateload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into an immediate load.
-
- The "immediate" load means the attribute will be fetched
- with a separate SELECT statement per parent in the
- same way as lazy loading - except the loader is guaranteed
- to be called at load time before the parent object
- is returned in the result.
-
- The normal behavior of lazy loading applies - if
- the relationship is a simple many-to-one, and the child
- object is already present in the :class:`.Session`,
- no SELECT statement will be emitted.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
-
- .. versionadded:: 0.6.5
-
- """
- return strategies.EagerLazyOption(keys, lazy='immediate')
-
-
-def contains_alias(alias):
- """Return a :class:`.MapperOption` that will indicate to the query that
- the main table has been aliased.
-
- This is used in the very rare case that :func:`.contains_eager`
- is being used in conjunction with a user-defined SELECT
- statement that aliases the parent table. E.g.::
-
- # define an aliased UNION called 'ulist'
- statement = users.select(users.c.user_id==7).\\
- union(users.select(users.c.user_id>7)).\\
- alias('ulist')
-
- # add on an eager load of "addresses"
- statement = statement.outerjoin(addresses).\\
- select().apply_labels()
-
- # create query, indicating "ulist" will be an
- # alias for the main table, "addresses"
- # property should be eager loaded
- query = session.query(User).options(
- contains_alias('ulist'),
- contains_eager('addresses'))
-
- # then get results via the statement
- results = query.from_statement(statement).all()
-
- :param alias: is the string name of an alias, or a
- :class:`~.sql.expression.Alias` object representing
- the alias.
-
- """
- return AliasOption(alias)
-
-
-def contains_eager(*keys, **kwargs):
- """Return a ``MapperOption`` that will indicate to the query that
- the given attribute should be eagerly loaded from columns currently
- in the query.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- The option is used in conjunction with an explicit join that loads
- the desired rows, i.e.::
-
- sess.query(Order).\\
- join(Order.user).\\
- options(contains_eager(Order.user))
-
- The above query would join from the ``Order`` entity to its related
- ``User`` entity, and the returned ``Order`` objects would have the
- ``Order.user`` attribute pre-populated.
-
- :func:`contains_eager` also accepts an `alias` argument, which is the
- string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
- construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
- the eagerly-loaded rows are to come from an aliased table::
-
- user_alias = aliased(User)
- sess.query(Order).\\
- join((user_alias, Order.user)).\\
- options(contains_eager(Order.user, alias=user_alias))
-
- See also :func:`eagerload` for the "automatic" version of this
- functionality.
-
- For additional examples of :func:`contains_eager` see
- :ref:`contains_eager`.
-
- """
- alias = kwargs.pop('alias', None)
- if kwargs:
- raise exc.ArgumentError(
- 'Invalid kwargs for contains_eager: %r' % list(kwargs.keys()))
- return strategies.EagerLazyOption(keys, lazy='joined',
- propagate_to_loaders=False, chained=True), \
- strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
-
-
-def defer(*key):
- """Return a :class:`.MapperOption` that will convert the column property
- of the given name into a deferred load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- from sqlalchemy.orm import defer
-
- query(MyClass).options(defer("attribute_one"),
- defer("attribute_two"))
-
- A class bound descriptor is also accepted::
-
- query(MyClass).options(
- defer(MyClass.attribute_one),
- defer(MyClass.attribute_two))
-
- A "path" can be specified onto a related or collection object using a
- dotted name. The :func:`.orm.defer` option will be applied to that object
- when loaded::
-
- query(MyClass).options(
- defer("related.attribute_one"),
- defer("related.attribute_two"))
-
- To specify a path via class, send multiple arguments::
-
- query(MyClass).options(
- defer(MyClass.related, MyOtherClass.attribute_one),
- defer(MyClass.related, MyOtherClass.attribute_two))
-
- See also:
-
- :ref:`deferred`
-
- :param \*key: A key representing an individual path. Multiple entries
- are accepted to allow a multiple-token path for a single target, not
- multiple targets.
-
- """
- return strategies.DeferredOption(key, defer=True)
-
-
-def undefer(*key):
- """Return a :class:`.MapperOption` that will convert the column property
- of the given name into a non-deferred (regular column) load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- from sqlalchemy.orm import undefer
-
- query(MyClass).options(
- undefer("attribute_one"),
- undefer("attribute_two"))
-
- A class bound descriptor is also accepted::
-
- query(MyClass).options(
- undefer(MyClass.attribute_one),
- undefer(MyClass.attribute_two))
-
- A "path" can be specified onto a related or collection object using a
- dotted name. The :func:`.orm.undefer` option will be applied to that
- object when loaded::
-
- query(MyClass).options(
- undefer("related.attribute_one"),
- undefer("related.attribute_two"))
-
- To specify a path via class, send multiple arguments::
-
- query(MyClass).options(
- undefer(MyClass.related, MyOtherClass.attribute_one),
- undefer(MyClass.related, MyOtherClass.attribute_two))
-
- See also:
-
- :func:`.orm.undefer_group` as a means to "undefer" a group
- of attributes at once.
-
- :ref:`deferred`
-
- :param \*key: A key representing an individual path. Multiple entries
- are accepted to allow a multiple-token path for a single target, not
- multiple targets.
-
- """
- return strategies.DeferredOption(key, defer=False)
-
-
-def undefer_group(name):
- """Return a :class:`.MapperOption` that will convert the given group of
- deferred column properties into a non-deferred (regular column) load.
-
- Used with :meth:`.Query.options`.
-
- e.g.::
-
- query(MyClass).options(undefer("group_one"))
-
- See also:
-
- :ref:`deferred`
-
- :param name: String name of the deferred group. This name is
- established using the "group" name to the :func:`.orm.deferred`
- configurational function.
-
- """
- return strategies.UndeferGroupOption(name)
+__go(locals())
-from sqlalchemy import util as _sa_util
-_sa_util.importlater.resolve_all()
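The functions re-exported above all derive from the new strategy_options module, which also introduces the chainable Load object. A minimal sketch of both calling styles, assuming the User/Order/Item mapped classes named in the removed docstrings (attribute names are illustrative):

    from sqlalchemy.orm import Session, Load, joinedload, subqueryload, defer

    session = Session()

    # ad-hoc functions, same call style as the removed implementations
    q = session.query(User).options(
        joinedload(User.orders),   # eager load via LEFT OUTER JOIN
        defer(User.name))          # leave the column unloaded until accessed

    # the new Load object chains options along a per-entity path
    q = session.query(Order).options(
        Load(Order).subqueryload(Order.items).defer(Item.description))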
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index 13c2cf256..e5f8550ab 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -1,5 +1,5 @@
# orm/attributes.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -14,109 +14,19 @@ defines a large part of the ORM's interactivity.
"""
import operator
-from operator import itemgetter
-
from .. import util, event, inspection
-from . import interfaces, collections, events, exc as orm_exc
-from .instrumentation import instance_state, instance_dict, manager_of_class
+from . import interfaces, collections, exc as orm_exc
-orm_util = util.importlater("sqlalchemy.orm", "util")
-
-PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
-"""Symbol returned by a loader callable or other attribute/history
-retrieval operation when a value could not be determined, based
-on loader callable flags.
-"""
-)
-
-ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
-"""Symbol returned by a loader callable to indicate the
-retrieved value, or values, were assigned to their attributes
-on the target object.
-""")
-
-ATTR_EMPTY = util.symbol('ATTR_EMPTY',
-"""Symbol used internally to indicate an attribute had no callable.
-""")
-
-NO_VALUE = util.symbol('NO_VALUE',
-"""Symbol which may be placed as the 'previous' value of an attribute,
-indicating no value was loaded for an attribute when it was modified,
-and flags indicated we were not to load it.
-"""
-)
-
-NEVER_SET = util.symbol('NEVER_SET',
-"""Symbol which may be placed as the 'previous' value of an attribute
-indicating that the attribute had not been assigned to previously.
-"""
-)
-
-NO_CHANGE = util.symbol("NO_CHANGE",
-"""No callables or SQL should be emitted on attribute access
-and no state should change""", canonical=0
-)
-
-CALLABLES_OK = util.symbol("CALLABLES_OK",
-"""Loader callables can be fired off if a value
-is not present.""", canonical=1
-)
-
-SQL_OK = util.symbol("SQL_OK",
-"""Loader callables can emit SQL at least on scalar value
-attributes.""", canonical=2)
-
-RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
-"""callables can use SQL to load related objects as well
-as scalar value attributes.
-""", canonical=4
-)
-
-INIT_OK = util.symbol("INIT_OK",
-"""Attributes should be initialized with a blank
-value (None or an empty collection) upon get, if no other
-value can be obtained.
-""", canonical=8
-)
-
-NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
-"""callables can be emitted if the parent is not persistent.""",
-canonical=16
-)
-
-LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
-"""callables should use committed values as primary/foreign keys during a load
-""", canonical=32
-)
-
-# pre-packaged sets of flags used as inputs
-PASSIVE_OFF = util.symbol("PASSIVE_OFF",
- "Callables can be emitted in all cases.",
- canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
- INIT_OK | CALLABLES_OK | SQL_OK)
-)
-PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
- """PASSIVE_OFF ^ INIT_OK""",
- canonical=PASSIVE_OFF ^ INIT_OK
-)
-PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
- "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
- canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
-)
-PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
- "PASSIVE_OFF ^ SQL_OK",
- canonical=PASSIVE_OFF ^ SQL_OK
-)
-PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
- "PASSIVE_OFF ^ RELATED_OBJECT_OK",
- canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
-)
-PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
- "PASSIVE_OFF ^ NON_PERSISTENT_OK",
- canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
-)
+from .base import instance_state, instance_dict, manager_of_class
+from .base import PASSIVE_NO_RESULT, ATTR_WAS_SET, ATTR_EMPTY, NO_VALUE,\
+ NEVER_SET, NO_CHANGE, CALLABLES_OK, SQL_OK, RELATED_OBJECT_OK,\
+ INIT_OK, NON_PERSISTENT_OK, LOAD_AGAINST_COMMITTED, PASSIVE_OFF,\
+ PASSIVE_RETURN_NEVER_SET, PASSIVE_NO_INITIALIZE, PASSIVE_NO_FETCH,\
+ PASSIVE_NO_FETCH_RELATED, PASSIVE_ONLY_PERSISTENT
+from .base import state_str, instance_str
+@inspection._self_inspects
class QueryableAttribute(interfaces._MappedAttribute,
interfaces._InspectionAttr,
interfaces.PropComparator):
@@ -159,9 +69,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
if key in base:
self.dispatch._update(base[key].dispatch)
- dispatch = event.dispatcher(events.AttributeEvents)
- dispatch.dispatch_cls._active_history = False
-
@util.memoized_property
def _supports_population(self):
return self.impl.supports_population
@@ -236,6 +143,18 @@ class QueryableAttribute(interfaces._MappedAttribute,
def __clause_element__(self):
return self.comparator.__clause_element__()
+ def _query_clause_element(self):
+ """like __clause_element__(), but called specifically
+ by :class:`.Query` to allow special behavior."""
+
+ return self.comparator._query_clause_element()
+
+ def adapt_to_entity(self, adapt_to_entity):
+ assert not self._of_type
+ return self.__class__(adapt_to_entity.entity, self.key, impl=self.impl,
+ comparator=self.comparator.adapt_to_entity(adapt_to_entity),
+ parententity=adapt_to_entity)
+
def of_type(self, cls):
return QueryableAttribute(
self.class_,
@@ -246,7 +165,7 @@ class QueryableAttribute(interfaces._MappedAttribute,
of_type=cls)
def label(self, name):
- return self.__clause_element__().label(name)
+ return self._query_clause_element().label(name)
def operate(self, op, *other, **kwargs):
return op(self.comparator, *other, **kwargs)
@@ -286,8 +205,6 @@ class QueryableAttribute(interfaces._MappedAttribute,
"""
return self.comparator.property
-inspection._self_inspects(QueryableAttribute)
-
class InstrumentedAttribute(QueryableAttribute):
"""Class bound instrumented attribute which adds basic
@@ -359,7 +276,7 @@ def create_proxied_attribute(descriptor):
return self._comparator
def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(self.class_, self.key, self.descriptor,
+ return self.__class__(adapt_to_entity.entity, self.key, self.descriptor,
self._comparator,
adapt_to_entity)
@@ -398,6 +315,53 @@ def create_proxied_attribute(descriptor):
from_instance=descriptor)
return Proxy
+OP_REMOVE = util.symbol("REMOVE")
+OP_APPEND = util.symbol("APPEND")
+OP_REPLACE = util.symbol("REPLACE")
+
+class Event(object):
+ """A token propagated throughout the course of a chain of attribute
+ events.
+
+ Serves as an indicator of the source of the event and also provides
+ a means of controlling propagation across a chain of attribute
+ operations.
+
+ The :class:`.Event` object is sent as the ``initiator`` argument
+ when dealing with the :meth:`.AttributeEvents.append`,
+ :meth:`.AttributeEvents.set`,
+ and :meth:`.AttributeEvents.remove` events.
+
+ The :class:`.Event` object is currently interpreted by the backref
+ event handlers, and is used to control the propagation of operations
+ across two mutually-dependent attributes.
+
+ .. versionadded:: 0.9.0
+
+ """
+
+ impl = None
+ """The :class:`.AttributeImpl` which is the current event initiator.
+ """
+
+ op = None
+ """The symbol :attr:`.OP_APPEND`, :attr:`.OP_REMOVE` or :attr:`.OP_REPLACE`,
+ indicating the source operation.
+
+ """
+
+ def __init__(self, attribute_impl, op):
+ self.impl = attribute_impl
+ self.op = op
+ self.parent_token = self.impl.parent_token
+
+
+ @property
+ def key(self):
+ return self.impl.key
+
+ def hasparent(self, state):
+ return self.impl.hasparent(state)
class AttributeImpl(object):
"""internal implementation for instrumented attributes."""
@@ -406,6 +370,7 @@ class AttributeImpl(object):
callable_, dispatch, trackparent=False, extension=None,
compare_function=None, active_history=False,
parent_token=None, expire_missing=True,
+ send_modified_events=True,
**kwargs):
"""Construct an AttributeImpl.
@@ -449,6 +414,10 @@ class AttributeImpl(object):
during state.expire_attributes(None), if no value is present
for this key.
+ send_modified_events
+ if False, the InstanceState._modified_event method will have no effect;
+ this means the attribute will never show up as changed in a
+ history entry.
"""
self.class_ = class_
self.key = key
@@ -456,6 +425,7 @@ class AttributeImpl(object):
self.dispatch = dispatch
self.trackparent = trackparent
self.parent_token = parent_token or self
+ self.send_modified_events = send_modified_events
if compare_function is None:
self.is_equal = operator.eq
else:
@@ -534,8 +504,8 @@ class AttributeImpl(object):
"but the parent record "
"has gone stale, can't be sure this "
"is the most recent parent." %
- (orm_util.state_str(state),
- orm_util.state_str(parent_state),
+ (state_str(state),
+ state_str(parent_state),
self.key))
return
@@ -588,7 +558,6 @@ class AttributeImpl(object):
def get(self, state, dict_, passive=PASSIVE_OFF):
"""Retrieve a value from the given object.
-
If a callable is assembled on this object's attribute, and
passive is False, the callable will be executed and the
resulting value will be set as the new value for this attribute.
@@ -683,19 +652,24 @@ class ScalarAttributeImpl(AttributeImpl):
old = dict_.get(self.key, NO_VALUE)
if self.dispatch.remove:
- self.fire_remove_event(state, dict_, old, None)
+ self.fire_remove_event(state, dict_, old, self._remove_token)
state._modified_event(dict_, self, old)
del dict_[self.key]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
- return History.from_scalar_attribute(
- self, state, dict_.get(self.key, NO_VALUE))
+ if self.key in dict_:
+ return History.from_scalar_attribute(self, state, dict_[self.key])
+ else:
+ if passive & INIT_OK:
+ passive ^= INIT_OK
+ current = self.get(state, dict_, passive=passive)
+ if current is PASSIVE_NO_RESULT:
+ return HISTORY_BLANK
+ else:
+ return History.from_scalar_attribute(self, state, current)
def set(self, state, dict_, value, initiator,
passive=PASSIVE_OFF, check_old=None, pop=False):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
if self.dispatch._active_history:
old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
else:
@@ -707,14 +681,26 @@ class ScalarAttributeImpl(AttributeImpl):
state._modified_event(dict_, self, old)
dict_[self.key] = value
+ @util.memoized_property
+ def _replace_token(self):
+ return Event(self, OP_REPLACE)
+
+ @util.memoized_property
+ def _append_token(self):
+ return Event(self, OP_REPLACE)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return Event(self, OP_REMOVE)
+
def fire_replace_event(self, state, dict_, value, previous, initiator):
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
+ value = fn(state, value, previous, initiator or self._replace_token)
return value
def fire_remove_event(self, state, dict_, value, initiator):
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
@property
def type(self):
@@ -736,7 +722,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
def delete(self, state, dict_):
old = self.get(state, dict_)
- self.fire_remove_event(state, dict_, old, self)
+ self.fire_remove_event(state, dict_, old, self._remove_token)
del dict_[self.key]
def get_history(self, state, dict_, passive=PASSIVE_OFF):
@@ -773,14 +759,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
passive=PASSIVE_OFF, check_old=None, pop=False):
"""Set a value on the given InstanceState.
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
-
"""
- if initiator and initiator.parent_token is self.parent_token:
- return
-
if self.dispatch._active_history:
old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
else:
@@ -794,19 +773,20 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
else:
raise ValueError(
"Object %s not associated with %s on attribute '%s'" % (
- orm_util.instance_str(check_old),
- orm_util.state_str(state),
+ instance_str(check_old),
+ state_str(state),
self.key
))
value = self.fire_replace_event(state, dict_, value, old, initiator)
dict_[self.key] = value
+
def fire_remove_event(self, state, dict_, value, initiator):
if self.trackparent and value is not None:
self.sethasparent(instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
state._modified_event(dict_, self, value)
@@ -818,7 +798,7 @@ class ScalarObjectAttributeImpl(ScalarAttributeImpl):
self.sethasparent(instance_state(previous), state, False)
for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
+ value = fn(state, value, previous, initiator or self._replace_token)
state._modified_event(dict_, self, previous)
@@ -902,9 +882,17 @@ class CollectionAttributeImpl(AttributeImpl):
return [(instance_state(o), o) for o in current]
+ @util.memoized_property
+ def _append_token(self):
+ return Event(self, OP_APPEND)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return Event(self, OP_REMOVE)
+
def fire_append_event(self, state, dict_, value, initiator):
for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
+ value = fn(state, value, initiator or self._append_token)
state._modified_event(dict_, self, NEVER_SET, True)
@@ -921,7 +909,7 @@ class CollectionAttributeImpl(AttributeImpl):
self.sethasparent(instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
state._modified_event(dict_, self, NEVER_SET, True)
@@ -948,8 +936,6 @@ class CollectionAttributeImpl(AttributeImpl):
self.key, state, self.collection_factory)
def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
collection = self.get_collection(state, dict_, passive=passive)
if collection is PASSIVE_NO_RESULT:
value = self.fire_append_event(state, dict_, value, initiator)
@@ -960,9 +946,6 @@ class CollectionAttributeImpl(AttributeImpl):
collection.append_with_event(value, initiator)
def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
collection = self.get_collection(state, state.dict, passive=passive)
if collection is PASSIVE_NO_RESULT:
self.fire_remove_event(state, dict_, value, initiator)
@@ -985,14 +968,8 @@ class CollectionAttributeImpl(AttributeImpl):
passive=PASSIVE_OFF, pop=False):
"""Set a value on the given object.
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
"""
- if initiator and initiator.parent_token is self.parent_token:
- return
-
self._set_iterable(
state, dict_, value,
lambda adapter, i: adapter.adapt_like_to_iterable(i))
@@ -1085,6 +1062,7 @@ def backref_listeners(attribute, key, uselist):
# use easily recognizable names for stack traces
parent_token = attribute.impl.parent_token
+ parent_impl = attribute.impl
def _acceptable_key_err(child_state, initiator, child_impl):
raise ValueError(
@@ -1092,7 +1070,7 @@ def backref_listeners(attribute, key, uselist):
'Passing object %s to attribute "%s" '
'triggers a modify event on attribute "%s" '
'via the backref "%s".' % (
- orm_util.state_str(child_state),
+ state_str(child_state),
initiator.parent_token,
child_impl.parent_token,
attribute.impl.parent_token
@@ -1108,10 +1086,14 @@ def backref_listeners(attribute, key, uselist):
old_state, old_dict = instance_state(oldchild),\
instance_dict(oldchild)
impl = old_state.manager[key].impl
- impl.pop(old_state,
- old_dict,
- state.obj(),
- initiator, passive=PASSIVE_NO_FETCH)
+
+ if initiator.impl is not impl or \
+ initiator.op not in (OP_REPLACE, OP_REMOVE):
+ impl.pop(old_state,
+ old_dict,
+ state.obj(),
+ parent_impl._append_token,
+ passive=PASSIVE_NO_FETCH)
if child is not None:
child_state, child_dict = instance_state(child),\
@@ -1120,12 +1102,14 @@ def backref_listeners(attribute, key, uselist):
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
_acceptable_key_err(state, initiator, child_impl)
- child_impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
+ elif initiator.impl is not child_impl or \
+ initiator.op not in (OP_APPEND, OP_REPLACE):
+ child_impl.append(
+ child_state,
+ child_dict,
+ state.obj(),
+ initiator,
+ passive=PASSIVE_NO_FETCH)
return child
def emit_backref_from_collection_append_event(state, child, initiator):
@@ -1139,7 +1123,9 @@ def backref_listeners(attribute, key, uselist):
if initiator.parent_token is not parent_token and \
initiator.parent_token is not child_impl.parent_token:
_acceptable_key_err(state, initiator, child_impl)
- child_impl.append(
+ elif initiator.impl is not child_impl or \
+ initiator.op not in (OP_APPEND, OP_REPLACE):
+ child_impl.append(
child_state,
child_dict,
state.obj(),
@@ -1152,10 +1138,9 @@ def backref_listeners(attribute, key, uselist):
child_state, child_dict = instance_state(child),\
instance_dict(child)
child_impl = child_state.manager[key].impl
- # can't think of a path that would produce an initiator
- # mismatch here, as it would require an existing collection
- # mismatch.
- child_impl.pop(
+ if initiator.impl is not child_impl or \
+ initiator.op not in (OP_REMOVE, OP_REPLACE):
+ child_impl.pop(
child_state,
child_dict,
state.obj(),
@@ -1268,7 +1253,7 @@ class History(History):
original = state.committed_state.get(attribute.key, _NO_HISTORY)
if original is _NO_HISTORY:
- if current is NO_VALUE:
+ if current is NEVER_SET:
return cls((), (), ())
else:
return cls((), [current], ())
@@ -1285,7 +1270,7 @@ class History(History):
deleted = ()
else:
deleted = [original]
- if current is NO_VALUE:
+ if current is NEVER_SET:
return cls((), (), deleted)
else:
return cls([current], (), deleted)
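The NO_VALUE/NEVER_SET adjustments above feed the public history interface. Roughly, for a pending object (again assuming an illustrative User mapping):

    from sqlalchemy import inspect

    u = User(name='ed')
    u.name = 'mary'
    hist = inspect(u).attrs.name.history
    # hist.added == ['mary']; hist.deleted stays empty, since the
    # prior value was never loaded from or flushed to the database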
diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py
new file mode 100644
index 000000000..577f9ff76
--- /dev/null
+++ b/lib/sqlalchemy/orm/base.py
@@ -0,0 +1,453 @@
+# orm/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Constants and rudimental functions used throughout the ORM.
+
+"""
+
+from .. import util, inspection, exc as sa_exc
+from ..sql import expression
+from . import exc
+import operator
+
+PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT',
+"""Symbol returned by a loader callable or other attribute/history
+retrieval operation when a value could not be determined, based
+on loader callable flags.
+"""
+)
+
+ATTR_WAS_SET = util.symbol('ATTR_WAS_SET',
+"""Symbol returned by a loader callable to indicate the
+retrieved value, or values, were assigned to their attributes
+on the target object.
+""")
+
+ATTR_EMPTY = util.symbol('ATTR_EMPTY',
+"""Symbol used internally to indicate an attribute had no callable.
+""")
+
+NO_VALUE = util.symbol('NO_VALUE',
+"""Symbol which may be placed as the 'previous' value of an attribute,
+indicating no value was loaded for an attribute when it was modified,
+and flags indicated we were not to load it.
+"""
+)
+
+NEVER_SET = util.symbol('NEVER_SET',
+"""Symbol which may be placed as the 'previous' value of an attribute
+indicating that the attribute had not been assigned to previously.
+"""
+)
+
+NO_CHANGE = util.symbol("NO_CHANGE",
+"""No callables or SQL should be emitted on attribute access
+and no state should change""", canonical=0
+)
+
+CALLABLES_OK = util.symbol("CALLABLES_OK",
+"""Loader callables can be fired off if a value
+is not present.""", canonical=1
+)
+
+SQL_OK = util.symbol("SQL_OK",
+"""Loader callables can emit SQL at least on scalar value
+attributes.""", canonical=2)
+
+RELATED_OBJECT_OK = util.symbol("RELATED_OBJECT_OK",
+"""callables can use SQL to load related objects as well
+as scalar value attributes.
+""", canonical=4
+)
+
+INIT_OK = util.symbol("INIT_OK",
+"""Attributes should be initialized with a blank
+value (None or an empty collection) upon get, if no other
+value can be obtained.
+""", canonical=8
+)
+
+NON_PERSISTENT_OK = util.symbol("NON_PERSISTENT_OK",
+"""callables can be emitted if the parent is not persistent.""",
+canonical=16
+)
+
+LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED",
+"""callables should use committed values as primary/foreign keys during a load
+""", canonical=32
+)
+
+# pre-packaged sets of flags used as inputs
+PASSIVE_OFF = util.symbol("PASSIVE_OFF",
+ "Callables can be emitted in all cases.",
+ canonical=(RELATED_OBJECT_OK | NON_PERSISTENT_OK |
+ INIT_OK | CALLABLES_OK | SQL_OK)
+)
+PASSIVE_RETURN_NEVER_SET = util.symbol("PASSIVE_RETURN_NEVER_SET",
+ """PASSIVE_OFF ^ INIT_OK""",
+ canonical=PASSIVE_OFF ^ INIT_OK
+)
+PASSIVE_NO_INITIALIZE = util.symbol("PASSIVE_NO_INITIALIZE",
+ "PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK",
+ canonical=PASSIVE_RETURN_NEVER_SET ^ CALLABLES_OK
+)
+PASSIVE_NO_FETCH = util.symbol("PASSIVE_NO_FETCH",
+ "PASSIVE_OFF ^ SQL_OK",
+ canonical=PASSIVE_OFF ^ SQL_OK
+)
+PASSIVE_NO_FETCH_RELATED = util.symbol("PASSIVE_NO_FETCH_RELATED",
+ "PASSIVE_OFF ^ RELATED_OBJECT_OK",
+ canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK
+)
+PASSIVE_ONLY_PERSISTENT = util.symbol("PASSIVE_ONLY_PERSISTENT",
+ "PASSIVE_OFF ^ NON_PERSISTENT_OK",
+ canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK
+)
+
+DEFAULT_MANAGER_ATTR = '_sa_class_manager'
+DEFAULT_STATE_ATTR = '_sa_instance_state'
+_INSTRUMENTOR = ('mapper', 'instrumentor')
+
+EXT_CONTINUE = util.symbol('EXT_CONTINUE')
+EXT_STOP = util.symbol('EXT_STOP')
+
+ONETOMANY = util.symbol('ONETOMANY',
+"""Indicates the one-to-many direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+MANYTOONE = util.symbol('MANYTOONE',
+"""Indicates the many-to-one direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+MANYTOMANY = util.symbol('MANYTOMANY',
+"""Indicates the many-to-many direction for a :func:`.relationship`.
+
+This symbol is typically used by the internals but may be exposed within
+certain API features.
+
+""")
+
+NOT_EXTENSION = util.symbol('NOT_EXTENSION',
+"""Symbol indicating an :class:`_InspectionAttr` that's
+ not part of sqlalchemy.ext.
+
+ Is assigned to the :attr:`._InspectionAttr.extension_type`
+    attribute.
+
+""")
+
+_none_set = frozenset([None])
+
+
+def _generative(*assertions):
+ """Mark a method as generative, e.g. method-chained."""
+
+ @util.decorator
+ def generate(fn, *args, **kw):
+ self = args[0]._clone()
+ for assertion in assertions:
+ assertion(self, fn.__name__)
+ fn(self, *args[1:], **kw)
+ return self
+ return generate
+
+
+# these can be replaced by sqlalchemy.ext.instrumentation
+# if augmented class instrumentation is enabled.
+def manager_of_class(cls):
+ return cls.__dict__.get(DEFAULT_MANAGER_ATTR, None)
+
+instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
+
+instance_dict = operator.attrgetter('__dict__')
+
+def instance_str(instance):
+ """Return a string describing an instance."""
+
+ return state_str(instance_state(instance))
+
+def state_str(state):
+ """Return a string describing an instance via its InstanceState."""
+
+ if state is None:
+ return "None"
+ else:
+ return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
+
+def state_class_str(state):
+ """Return a string describing an instance's class via its InstanceState."""
+
+ if state is None:
+ return "None"
+ else:
+ return '<%s>' % (state.class_.__name__, )
+
+
+def attribute_str(instance, attribute):
+ return instance_str(instance) + "." + attribute
+
+
+def state_attribute_str(state, attribute):
+ return state_str(state) + "." + attribute
+
+def object_mapper(instance):
+ """Given an object, return the primary Mapper associated with the object
+ instance.
+
+ Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+ if no mapping is configured.
+
+ This function is available via the inspection system as::
+
+ inspect(instance).mapper
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+ not part of a mapping.
+
+ """
+ return object_state(instance).mapper
+
+
+def object_state(instance):
+ """Given an object, return the :class:`.InstanceState`
+ associated with the object.
+
+ Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
+ if no mapping is configured.
+
+ Equivalent functionality is available via the :func:`.inspect`
+ function as::
+
+ inspect(instance)
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
+ not part of a mapping.
+
+ """
+ state = _inspect_mapped_object(instance)
+ if state is None:
+ raise exc.UnmappedInstanceError(instance)
+ else:
+ return state
+
+
+@inspection._inspects(object)
+def _inspect_mapped_object(instance):
+ try:
+ return instance_state(instance)
+    except (exc.UnmappedClassError, ) + exc.NO_STATE:
+        # exc.NO_STATE is a tuple of exception classes, so a single
+        # clause catches both failure modes on Python 2 and 3 alike
+        return None
+
+
+
+def _class_to_mapper(class_or_mapper):
+ insp = inspection.inspect(class_or_mapper, False)
+ if insp is not None:
+ return insp.mapper
+ else:
+ raise exc.UnmappedClassError(class_or_mapper)
+
+
+def _mapper_or_none(entity):
+ """Return the :class:`.Mapper` for the given class or None if the
+ class is not mapped."""
+
+ insp = inspection.inspect(entity, False)
+ if insp is not None:
+ return insp.mapper
+ else:
+ return None
+
+
+def _is_mapped_class(entity):
+ """Return True if the given object is a mapped class,
+ :class:`.Mapper`, or :class:`.AliasedClass`."""
+
+ insp = inspection.inspect(entity, False)
+ return insp is not None and \
+ hasattr(insp, "mapper") and \
+ (
+ insp.is_mapper
+ or insp.is_aliased_class
+ )
+
+def _attr_as_key(attr):
+ if hasattr(attr, 'key'):
+ return attr.key
+ else:
+ return expression._column_as_key(attr)
+
+
+
+def _orm_columns(entity):
+ insp = inspection.inspect(entity, False)
+ if hasattr(insp, 'selectable'):
+ return [c for c in insp.selectable.c]
+ else:
+ return [entity]
+
+
+
+def _is_aliased_class(entity):
+ insp = inspection.inspect(entity, False)
+ return insp is not None and \
+ getattr(insp, "is_aliased_class", False)
+
+
+def _entity_descriptor(entity, key):
+ """Return a class attribute given an entity and string name.
+
+ May return :class:`.InstrumentedAttribute` or user-defined
+ attribute.
+
+ """
+ insp = inspection.inspect(entity)
+ if insp.is_selectable:
+ description = entity
+ entity = insp.c
+ elif insp.is_aliased_class:
+ entity = insp.entity
+ description = entity
+ elif hasattr(insp, "mapper"):
+ description = entity = insp.mapper.class_
+ else:
+ description = entity
+
+ try:
+ return getattr(entity, key)
+ except AttributeError:
+ raise sa_exc.InvalidRequestError(
+ "Entity '%s' has no property '%s'" %
+ (description, key)
+ )
+
+_state_mapper = util.dottedgetter('manager.mapper')
+
+@inspection._inspects(type)
+def _inspect_mapped_class(class_, configure=False):
+ try:
+ class_manager = manager_of_class(class_)
+ if not class_manager.is_mapped:
+ return None
+ mapper = class_manager.mapper
+ if configure and mapper._new_mappers:
+ mapper._configure_all()
+ return mapper
+
+ except exc.NO_STATE:
+ return None
+
+def class_mapper(class_, configure=True):
+ """Given a class, return the primary :class:`.Mapper` associated
+ with the key.
+    with the class.
+ Raises :exc:`.UnmappedClassError` if no mapping is configured
+ on the given class, or :exc:`.ArgumentError` if a non-class
+ object is passed.
+
+ Equivalent functionality is available via the :func:`.inspect`
+ function as::
+
+ inspect(some_mapped_class)
+
+ Using the inspection system will raise
+ :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
+
+ """
+ mapper = _inspect_mapped_class(class_, configure=configure)
+ if mapper is None:
+ if not isinstance(class_, type):
+ raise sa_exc.ArgumentError(
+ "Class object expected, got '%r'." % (class_, ))
+ raise exc.UnmappedClassError(class_)
+ else:
+ return mapper
+
+
+class _InspectionAttr(object):
+ """A base class applied to all ORM objects that can be returned
+ by the :func:`.inspect` function.
+
+ The attributes defined here allow the usage of simple boolean
+ checks to test basic facts about the object returned.
+
+ While the boolean checks here are basically the same as using
+ the Python isinstance() function, the flags here can be used without
+ the need to import all of these classes, and also such that
+ the SQLAlchemy class system can change while leaving the flags
+ here intact for forwards-compatibility.
+
+ """
+
+ is_selectable = False
+ """Return True if this object is an instance of :class:`.Selectable`."""
+
+ is_aliased_class = False
+ """True if this object is an instance of :class:`.AliasedClass`."""
+
+ is_instance = False
+ """True if this object is an instance of :class:`.InstanceState`."""
+
+ is_mapper = False
+ """True if this object is an instance of :class:`.Mapper`."""
+
+ is_property = False
+ """True if this object is an instance of :class:`.MapperProperty`."""
+
+ is_attribute = False
+ """True if this object is a Python :term:`descriptor`.
+
+ This can refer to one of many types. Usually a
+    :class:`.QueryableAttribute` which handles attribute events on behalf
+ of a :class:`.MapperProperty`. But can also be an extension type
+ such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
+ The :attr:`._InspectionAttr.extension_type` will refer to a constant
+ identifying the specific subtype.
+
+ .. seealso::
+
+ :attr:`.Mapper.all_orm_descriptors`
+
+ """
+
+ is_clause_element = False
+ """True if this object is an instance of :class:`.ClauseElement`."""
+
+ extension_type = NOT_EXTENSION
+ """The extension type, if any.
+ Defaults to :data:`.interfaces.NOT_EXTENSION`
+
+ .. versionadded:: 0.8.0
+
+ .. seealso::
+
+ :data:`.HYBRID_METHOD`
+
+ :data:`.HYBRID_PROPERTY`
+
+ :data:`.ASSOCIATION_PROXY`
+
+ """
+
+class _MappedAttribute(object):
+ """Mixin for attributes which should be replaced by mapper-assigned
+ attributes.
+
+ """
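The module above centralizes the hooks behind sqlalchemy.inspect(); a brief sketch of the entrypoints, assuming a mapped User class:

    from sqlalchemy import inspect
    from sqlalchemy.orm.base import class_mapper, object_state

    mapper = inspect(User)    # dispatched to _inspect_mapped_class
    state = inspect(User())   # dispatched to _inspect_mapped_object
    assert state.mapper is mapper
    assert object_state(state.obj()) is state
    assert class_mapper(User) is mapper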
diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py
index 03917d112..87e351b6c 100644
--- a/lib/sqlalchemy/orm/collections.py
+++ b/lib/sqlalchemy/orm/collections.py
@@ -1,5 +1,5 @@
# orm/collections.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -108,8 +108,7 @@ import weakref
from ..sql import expression
from .. import util, exc as sa_exc
-orm_util = util.importlater("sqlalchemy.orm", "util")
-attributes = util.importlater("sqlalchemy.orm", "attributes")
+from . import base
__all__ = ['collection', 'collection_adapter',
@@ -139,8 +138,8 @@ class _PlainColumnGetter(object):
return self.cols
def __call__(self, value):
- state = attributes.instance_state(value)
- m = orm_util._state_mapper(state)
+ state = base.instance_state(value)
+ m = base._state_mapper(state)
key = [
m._get_state_attr_by_column(state, state.dict, col)
@@ -167,8 +166,8 @@ class _SerializableColumnGetter(object):
return _SerializableColumnGetter, (self.colkeys,)
def __call__(self, value):
- state = attributes.instance_state(value)
- m = orm_util._state_mapper(state)
+ state = base.instance_state(value)
+ m = base._state_mapper(state)
key = [m._get_state_attr_by_column(
state, state.dict,
m.mapped_table.columns[k])
@@ -352,7 +351,7 @@ class collection(object):
promulgation to collection events.
"""
- setattr(fn, '_sa_instrument_role', 'appender')
+ fn._sa_instrument_role = 'appender'
return fn
@staticmethod
@@ -379,7 +378,7 @@ class collection(object):
promulgation to collection events.
"""
- setattr(fn, '_sa_instrument_role', 'remover')
+ fn._sa_instrument_role = 'remover'
return fn
@staticmethod
@@ -393,7 +392,7 @@ class collection(object):
def __iter__(self): ...
"""
- setattr(fn, '_sa_instrument_role', 'iterator')
+ fn._sa_instrument_role = 'iterator'
return fn
@staticmethod
@@ -414,7 +413,7 @@ class collection(object):
def extend(self, items): ...
"""
- setattr(fn, '_sa_instrumented', True)
+ fn._sa_instrumented = True
return fn
@staticmethod
@@ -428,7 +427,7 @@ class collection(object):
that has been linked, or None if unlinking.
"""
- setattr(fn, '_sa_instrument_role', 'linker')
+ fn._sa_instrument_role = 'linker'
return fn
link = linker
@@ -464,7 +463,7 @@ class collection(object):
validation on the values about to be assigned.
"""
- setattr(fn, '_sa_instrument_role', 'converter')
+ fn._sa_instrument_role = 'converter'
return fn
@staticmethod
@@ -484,7 +483,7 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
+ fn._sa_instrument_before = ('fire_append_event', arg)
return fn
return decorator
@@ -504,8 +503,8 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
+ fn._sa_instrument_before = ('fire_append_event', arg)
+ fn._sa_instrument_after = 'fire_remove_event'
return fn
return decorator
@@ -526,7 +525,7 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_remove_event', arg))
+ fn._sa_instrument_before = ('fire_remove_event', arg)
return fn
return decorator
@@ -546,32 +545,13 @@ class collection(object):
"""
def decorator(fn):
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
+ fn._sa_instrument_after = 'fire_remove_event'
return fn
return decorator
-# public instrumentation interface for 'internally instrumented'
-# implementations
-def collection_adapter(collection):
- """Fetch the :class:`.CollectionAdapter` for a collection."""
-
- return getattr(collection, '_sa_adapter', None)
-
-
-def collection_iter(collection):
- """Iterate over an object supporting the @iterator or __iter__ protocols.
-
- If the collection is an ORM collection, it need not be attached to an
- object to be iterable.
-
- """
- try:
- return getattr(collection, '_sa_iterator',
- getattr(collection, '__iter__'))()
- except AttributeError:
- raise TypeError("'%s' object is not iterable" %
- type(collection).__name__)
+collection_adapter = operator.attrgetter('_sa_adapter')
+"""Fetch the :class:`.CollectionAdapter` for a collection."""
class CollectionAdapter(object):
@@ -584,8 +564,6 @@ class CollectionAdapter(object):
The ORM uses :class:`.CollectionAdapter` exclusively for interaction with
entity collections.
- The usage of getattr()/setattr() is currently to allow injection
- of custom methods, such as to unwrap Zope security proxies.
"""
invalidated = False
@@ -609,16 +587,19 @@ class CollectionAdapter(object):
return self.owner_state.manager[self._key].impl
def link_to_self(self, data):
- """Link a collection to this adapter, and fire a link event."""
- setattr(data, '_sa_adapter', self)
- if hasattr(data, '_sa_linker'):
- getattr(data, '_sa_linker')(self)
+ """Link a collection to this adapter"""
+
+ data._sa_adapter = self
+ if data._sa_linker:
+ data._sa_linker(self)
+
def unlink(self, data):
- """Unlink a collection from any adapter, and fire a link event."""
- setattr(data, '_sa_adapter', None)
- if hasattr(data, '_sa_linker'):
- getattr(data, '_sa_linker')(None)
+ """Unlink a collection from any adapter"""
+
+ del data._sa_adapter
+ if data._sa_linker:
+ data._sa_linker(None)
def adapt_like_to_iterable(self, obj):
"""Converts collection-compatible objects to an iterable of values.
@@ -634,7 +615,7 @@ class CollectionAdapter(object):
a default duck-typing-based implementation is used.
"""
- converter = getattr(self._data(), '_sa_converter', None)
+ converter = self._data()._sa_converter
if converter is not None:
return converter(obj)
@@ -655,60 +636,60 @@ class CollectionAdapter(object):
# If the object is an adapted collection, return the (iterable)
# adapter.
if getattr(obj, '_sa_adapter', None) is not None:
- return getattr(obj, '_sa_adapter')
+ return obj._sa_adapter
elif setting_type == dict:
if util.py3k:
return obj.values()
else:
- return getattr(obj, 'itervalues', getattr(obj, 'values'))()
+ return getattr(obj, 'itervalues', obj.values)()
else:
return iter(obj)
def append_with_event(self, item, initiator=None):
"""Add an entity to the collection, firing mutation events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)
+ self._data()._sa_appender(item, _sa_initiator=initiator)
def append_without_event(self, item):
"""Add or restore an entity to the collection, firing no events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)
+ self._data()._sa_appender(item, _sa_initiator=False)
def append_multiple_without_event(self, items):
"""Add or restore an entity to the collection, firing no events."""
- appender = getattr(self._data(), '_sa_appender')
+ appender = self._data()._sa_appender
for item in items:
appender(item, _sa_initiator=False)
def remove_with_event(self, item, initiator=None):
"""Remove an entity from the collection, firing mutation events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)
+ self._data()._sa_remover(item, _sa_initiator=initiator)
def remove_without_event(self, item):
"""Remove an entity from the collection, firing no events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=False)
+ self._data()._sa_remover(item, _sa_initiator=False)
def clear_with_event(self, initiator=None):
"""Empty the collection, firing a mutation event for each entity."""
- remover = getattr(self._data(), '_sa_remover')
+ remover = self._data()._sa_remover
for item in list(self):
remover(item, _sa_initiator=initiator)
def clear_without_event(self):
"""Empty the collection, firing no events."""
- remover = getattr(self._data(), '_sa_remover')
+ remover = self._data()._sa_remover
for item in list(self):
remover(item, _sa_initiator=False)
def __iter__(self):
"""Iterate over entities in the collection."""
- return iter(getattr(self._data(), '_sa_iterator')())
+ return iter(self._data()._sa_iterator())
def __len__(self):
"""Count entities in the collection."""
- return len(list(getattr(self._data(), '_sa_iterator')()))
+ return len(list(self._data()._sa_iterator()))
def __bool__(self):
return True
@@ -960,7 +941,12 @@ def _instrument_class(cls):
for role, method_name in roles.items():
setattr(cls, '_sa_%s' % role, getattr(cls, method_name))
- setattr(cls, '_sa_instrumented', id(cls))
+ cls._sa_adapter = None
+ if not hasattr(cls, '_sa_linker'):
+ cls._sa_linker = None
+ if not hasattr(cls, '_sa_converter'):
+ cls._sa_converter = None
+ cls._sa_instrumented = id(cls)
def _instrument_membership_mutator(method, before, argument, after):
@@ -999,7 +985,7 @@ def _instrument_membership_mutator(method, before, argument, after):
if initiator is False:
executor = None
else:
- executor = getattr(args[0], '_sa_adapter', None)
+ executor = args[0]._sa_adapter
if before and executor:
getattr(executor, before)(value, initiator)
@@ -1024,33 +1010,33 @@ def __set(collection, item, _sa_initiator=None):
"""Run set events, may eventually be inlined into decorators."""
if _sa_initiator is not False:
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
+ item = executor.fire_append_event(item, _sa_initiator)
return item
def __del(collection, item, _sa_initiator=None):
"""Run del events, may eventually be inlined into decorators."""
if _sa_initiator is not False:
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- getattr(executor, 'fire_remove_event')(item, _sa_initiator)
+ executor.fire_remove_event(item, _sa_initiator)
def __before_delete(collection, _sa_initiator=None):
"""Special method to run 'commit existing value' methods"""
- executor = getattr(collection, '_sa_adapter', None)
+ executor = collection._sa_adapter
if executor:
- getattr(executor, 'fire_pre_remove_event')(_sa_initiator)
+ executor.fire_pre_remove_event(_sa_initiator)
def _list_decorators():
"""Tailored instrumentation wrappers for any list-like class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(list, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(list, fn.__name__).__doc__
def append(fn):
def append(self, item, _sa_initiator=None):
@@ -1089,7 +1075,10 @@ def _list_decorators():
start = index.start or 0
if start < 0:
start += len(self)
- stop = index.stop or len(self)
+ if index.stop is not None:
+ stop = index.stop
+ else:
+ stop = len(self)
if stop < 0:
stop += len(self)
@@ -1172,6 +1161,15 @@ def _list_decorators():
_tidy(pop)
return pop
+ if not util.py2k:
+ def clear(fn):
+ def clear(self, index=-1):
+ for item in self:
+ __del(self, item)
+ fn(self)
+ _tidy(clear)
+ return clear
+
# __imul__ : not wrapping this. all members of the collection are already
# present, so no need to fire appends... wrapping it with an explicit
# decorator is still possible, so events on *= can be had if they're
@@ -1186,8 +1184,8 @@ def _dict_decorators():
"""Tailored instrumentation wrappers for any dict-like mapping class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(dict, fn.__name__).__doc__
Unspecified = util.symbol('Unspecified')
@@ -1288,8 +1286,8 @@ def _set_decorators():
"""Tailored instrumentation wrappers for any set-like class."""
def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')
+ fn._sa_instrumented = True
+ fn.__doc__ = getattr(set, fn.__name__).__doc__
Unspecified = util.symbol('Unspecified')
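For context, the @collection decorators touched above remain the public route to custom collection classes. A minimal sketch (the MyBag name and list backing are illustrative):

    from sqlalchemy.orm.collections import collection

    class MyBag(object):
        # usable as relationship(..., collection_class=MyBag)

        def __init__(self):
            self._items = []

        @collection.appender
        def _append(self, item):
            self._items.append(item)

        @collection.remover
        def _remove(self, item):
            self._items.remove(item)

        @collection.iterator
        def __iter__(self):
            return iter(self._items)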
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 9f1e497af..4709a1821 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -1,5 +1,5 @@
# orm/dependency.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py
index e50967253..020b7c718 100644
--- a/lib/sqlalchemy/orm/deprecated_interfaces.py
+++ b/lib/sqlalchemy/orm/deprecated_interfaces.py
@@ -1,5 +1,5 @@
# orm/deprecated_interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,7 +7,7 @@
from .. import event, util
from .interfaces import EXT_CONTINUE
-
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class MapperExtension(object):
"""Base implementation for :class:`.Mapper` event hooks.
@@ -374,6 +374,7 @@ class MapperExtension(object):
return EXT_CONTINUE
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class SessionExtension(object):
"""Base implementation for :class:`.Session` event hooks.
@@ -494,6 +495,7 @@ class SessionExtension(object):
"""
+@util.langhelpers.dependency_for("sqlalchemy.orm.interfaces")
class AttributeExtension(object):
"""Base implementation for :class:`.AttributeImpl` event hooks, events
that fire upon attribute mutations in user code.
diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py
index c58951339..24b0a15e6 100644
--- a/lib/sqlalchemy/orm/descriptor_props.py
+++ b/lib/sqlalchemy/orm/descriptor_props.py
@@ -1,5 +1,5 @@
# orm/descriptor_props.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -12,10 +12,11 @@ as actively in the load/persist ORM loop.
from .interfaces import MapperProperty, PropComparator
from .util import _none_set
-from . import attributes, strategies
+from . import attributes
from .. import util, sql, exc as sa_exc, event, schema
from ..sql import expression
-properties = util.importlater('sqlalchemy.orm', 'properties')
+from . import properties
+from . import query
class DescriptorProperty(MapperProperty):
@@ -75,6 +76,7 @@ class DescriptorProperty(MapperProperty):
mapper.class_manager.instrument_attribute(self.key, proxy_attr)
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class CompositeProperty(DescriptorProperty):
"""Defines a "composite" mapped attribute, representing a collection
of columns as one attribute.
@@ -82,12 +84,64 @@ class CompositeProperty(DescriptorProperty):
:class:`.CompositeProperty` is constructed using the :func:`.composite`
function.
- See also:
+ .. seealso::
- :ref:`mapper_composite`
+ :ref:`mapper_composite`
"""
def __init__(self, class_, *attrs, **kwargs):
+ """Return a composite column-based property for use with a Mapper.
+
+ See the mapping documentation section :ref:`mapper_composite` for a full
+ usage example.
+
+ The :class:`.MapperProperty` returned by :func:`.composite`
+ is the :class:`.CompositeProperty`.
+
+ :param class\_:
+ The "composite type" class.
+
+ :param \*cols:
+ List of Column objects to be mapped.
+
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. See the same flag on :func:`.column_property`.
+
+          .. versionchanged:: 0.7
+              This flag specifically becomes meaningful;
+              previously it was a placeholder.
+
+ :param group:
+ A group name for this property when marked as deferred.
+
+ :param deferred:
+ When True, the column property is "deferred", meaning that it does not
+ load immediately, and is instead loaded when the attribute is first
+ accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
+
+ :param comparator_factory: a class which extends
+ :class:`.CompositeProperty.Comparator` which provides custom SQL clause
+ generation for comparison operations.
+
+ :param doc:
+ optional string that will be applied as the doc on the
+ class-bound descriptor.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param extension:
+ an :class:`.AttributeExtension` instance,
+ or list of extensions, which will be prepended to the list of
+ attribute listeners for the resulting descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
+
+ """
+
self.attrs = attrs
self.composite_class = class_
self.active_history = kwargs.get('active_history', False)
@@ -101,6 +155,7 @@ class CompositeProperty(DescriptorProperty):
util.set_creation_order(self)
self._create_descriptor()
+
def instrument_class(self, mapper):
super(CompositeProperty, self).instrument_class(mapper)
self._setup_event_handlers()
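A sketch of the composite() mapping the docstring above describes, with an illustrative Point value class; Base, Column and Integer are the usual declarative imports, assumed here:

    from sqlalchemy.orm import composite

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __composite_values__(self):
            # returned in the order the mapped columns were given
            return self.x, self.y

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and other.y == self.y

    class Vertex(Base):
        __tablename__ = 'vertex'

        id = Column(Integer, primary_key=True)
        x1 = Column(Integer)
        y1 = Column(Integer)

        start = composite(Point, x1, y1)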
@@ -190,6 +245,11 @@ class CompositeProperty(DescriptorProperty):
prop = self.parent._columntoproperty[attr]
elif isinstance(attr, attributes.InstrumentedAttribute):
prop = attr.property
+ else:
+ raise sa_exc.ArgumentError(
+ "Composite expects Column objects or mapped "
+ "attributes/attribute names as arguments, got: %r"
+ % (attr,))
props.append(prop)
@property
@@ -205,7 +265,9 @@ class CompositeProperty(DescriptorProperty):
prop.active_history = self.active_history
if self.deferred:
prop.deferred = self.deferred
- prop.strategy_class = strategies.DeferredColumnLoader
+ prop.strategy_class = prop._strategy_lookup(
+ ("deferred", True),
+ ("instrument", True))
prop.group = self.group
def _setup_event_handlers(self):
@@ -300,6 +362,18 @@ class CompositeProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
+ class CompositeBundle(query.Bundle):
+ def __init__(self, property, expr):
+ self.property = property
+ super(CompositeProperty.CompositeBundle, self).__init__(
+ property.key, *expr)
+
+ def create_row_processor(self, query, procs, labels):
+ def proc(row, result):
+ return self.property.composite_class(*[proc(row, result) for proc in procs])
+ return proc
+
+
class Comparator(PropComparator):
"""Produce boolean, comparison, and other operators for
:class:`.CompositeProperty` attributes.
@@ -319,10 +393,18 @@ class CompositeProperty(DescriptorProperty):
"""
+
+ __hash__ = None
+
+ @property
+ def clauses(self):
+ return self.__clause_element__()
+
def __clause_element__(self):
return expression.ClauseList(group=False, *self._comparable_elements)
- __hash__ = None
+ def _query_clause_element(self):
+ return CompositeProperty.CompositeBundle(self.prop, self.__clause_element__())
@util.memoized_property
def _comparable_elements(self):
@@ -356,6 +438,7 @@ class CompositeProperty(DescriptorProperty):
return str(self.parent.class_.__name__) + "." + self.key
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ConcreteInheritedProperty(DescriptorProperty):
"""A 'do nothing' :class:`.MapperProperty` that disables
an attribute on a concrete subclass that is only present
@@ -404,11 +487,66 @@ class ConcreteInheritedProperty(DescriptorProperty):
self.descriptor = NoninheritedConcreteProp()
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class SynonymProperty(DescriptorProperty):
def __init__(self, name, map_column=None,
descriptor=None, comparator_factory=None,
doc=None):
+ """Denote an attribute name as a synonym to a mapped property,
+ in that the attribute will mirror the value and expression behavior
+ of another attribute.
+
+ :param name: the name of the existing mapped property. This
+ can refer to the string name of any :class:`.MapperProperty`
+ configured on the class, including column-bound attributes
+ and relationships.
+
+ :param descriptor: a Python :term:`descriptor` that will be used
+ as a getter (and potentially a setter) when this attribute is
+ accessed at the instance level.
+
+ :param map_column: if ``True``, the :func:`.synonym` construct will
+ locate the existing named :class:`.MapperProperty` based on the
+ attribute name of this :func:`.synonym`, and assign it to a new
+ attribute linked to the name of this :func:`.synonym`.
+ That is, given a mapping like::
+
+    class MyClass(Base):
+        __tablename__ = 'my_table'
+
+        id = Column(Integer, primary_key=True)
+        job_status = Column(String(50))
+
+        job_status = synonym("_job_status", map_column=True)
+
+ The above class ``MyClass`` will now have the ``job_status``
+ :class:`.Column` object mapped to the attribute named ``_job_status``,
+ and the attribute named ``job_status`` will refer to the synonym
+ itself. This feature is typically used in conjunction with the
+ ``descriptor`` argument in order to link a user-defined descriptor
+ as a "wrapper" for an existing column.
+
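+ As an illustrative sketch only (the ``_job_status`` column and the
+ ``"Status: "`` formatting are hypothetical), such a wrapper might
+ look like::
+
+     class MyClass(Base):
+         __tablename__ = 'my_table'
+
+         id = Column(Integer, primary_key=True)
+         _job_status = Column("job_status", String(50))
+
+         @property
+         def job_status(self):
+             return "Status: " + self._job_status
+
+         job_status = synonym("_job_status", descriptor=job_status)
+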
+ :param comparator_factory: A subclass of :class:`.PropComparator`
+ that will provide custom comparison behavior at the SQL expression
+ level.
+
+ .. note::
+
+ For the use case of providing an attribute which redefines both
+ Python-level and SQL-expression level behavior of an attribute,
+ please refer to the Hybrid attribute introduced at
+ :ref:`mapper_hybrids` for a more effective technique.
+
+ .. seealso::
+
+ :ref:`synonyms` - examples of functionality.
+
+ :ref:`mapper_hybrids` - Hybrids provide a better approach for
+ more complicated attribute-wrapping schemes than synonyms.
+
+ """
+
self.name = name
self.map_column = map_column
self.descriptor = descriptor
@@ -462,10 +600,72 @@ class SynonymProperty(DescriptorProperty):
self.parent = parent
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
class ComparableProperty(DescriptorProperty):
"""Instruments a Python property for use in query expressions."""
def __init__(self, comparator_factory, descriptor=None, doc=None):
+ """Provides a method of applying a :class:`.PropComparator`
+ to any Python descriptor attribute.
+
+ .. versionchanged:: 0.7
+ :func:`.comparable_property` is superseded by
+ the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
+ at :ref:`hybrid_custom_comparators`.
+
+ Allows any Python descriptor to behave like a SQL-enabled
+ attribute when used at the class level in queries, so that
+ expression operator behavior can be redefined.
+
+ In the example below we redefine :meth:`.PropComparator.operate`
+ to wrap both sides of an expression in ``func.lower()`` to produce
+ case-insensitive comparison::
+
+    from sqlalchemy.orm import comparable_property
+    from sqlalchemy.orm.interfaces import PropComparator
+    from sqlalchemy.sql import func
+    from sqlalchemy import Integer, String, Column
+    from sqlalchemy.ext.declarative import declarative_base
+
+    class CaseInsensitiveComparator(PropComparator):
+        def __clause_element__(self):
+            return self.prop
+
+        def operate(self, op, other):
+            return op(
+                func.lower(self.__clause_element__()),
+                func.lower(other)
+            )
+
+    Base = declarative_base()
+
+    class SearchWord(Base):
+        __tablename__ = 'search_word'
+        id = Column(Integer, primary_key=True)
+        word = Column(String)
+        word_insensitive = comparable_property(lambda prop, mapper:
+            CaseInsensitiveComparator(mapper.c.word, mapper)
+        )
+
+
+ A mapping like the above allows the ``word_insensitive`` attribute
+ to render an expression like::
+
+ >>> print SearchWord.word_insensitive == "Trucks"
+ lower(search_word.word) = lower(:lower_1)
+
+ :param comparator_factory:
+ A PropComparator subclass or factory that defines operator behavior
+ for this property.
+
+ :param descriptor:
+ The Python descriptor or property to layer comparison behavior
+ on top of. Optional when used in a ``properties={}`` declaration,
+ in which case the like-named descriptor will be automatically
+ retrieved from the mapped class.
+
+ """
self.descriptor = descriptor
self.comparator_factory = comparator_factory
self.doc = doc or (descriptor and descriptor.__doc__) or None
@@ -473,3 +673,5 @@ class ComparableProperty(DescriptorProperty):
def _comparator_factory(self, mapper):
return self.comparator_factory(self, mapper)
+
+
diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py
index 5814b47ca..bae09d32d 100644
--- a/lib/sqlalchemy/orm/dynamic.py
+++ b/lib/sqlalchemy/orm/dynamic.py
@@ -1,5 +1,5 @@
# orm/dynamic.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -15,11 +15,12 @@ from .. import log, util, exc
from ..sql import operators
from . import (
attributes, object_session, util as orm_util, strategies,
- object_mapper, exc as orm_exc
+ object_mapper, exc as orm_exc, properties
)
from .query import Query
-
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.is_class_level = True
@@ -39,9 +40,6 @@ class DynaLoader(strategies.AbstractRelationshipLoader):
backref=self.parent_property.back_populates,
)
-log.class_logger(DynaLoader)
-
-
class DynamicAttributeImpl(attributes.AttributeImpl):
uses_objects = True
accepts_scalar_loader = False
@@ -78,6 +76,14 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
history = self._get_collection_history(state, passive)
return history.added_plus_unchanged
+ @util.memoized_property
+ def _append_token(self):
+ return attributes.Event(self, attributes.OP_APPEND)
+
+ @util.memoized_property
+ def _remove_token(self):
+ return attributes.Event(self, attributes.OP_REMOVE)
+
def fire_append_event(self, state, dict_, value, initiator,
collection_history=None):
if collection_history is None:
@@ -86,7 +92,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
collection_history.add_added(value)
for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
+ value = fn(state, value, initiator or self._append_token)
if self.trackparent and value is not None:
self.sethasparent(attributes.instance_state(value), state, True)
@@ -102,7 +108,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
self.sethasparent(attributes.instance_state(value), state, False)
for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
+ fn(state, value, initiator or self._remove_token)
def _modified_event(self, state, dict_):
diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py
index 7a11cd450..e1dd96068 100644
--- a/lib/sqlalchemy/orm/evaluator.py
+++ b/lib/sqlalchemy/orm/evaluator.py
@@ -1,5 +1,5 @@
# orm/evaluator.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py
index 97019bb4e..a09154dd0 100644
--- a/lib/sqlalchemy/orm/events.py
+++ b/lib/sqlalchemy/orm/events.py
@@ -1,5 +1,5 @@
# orm/events.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,10 +8,14 @@
"""
from .. import event, exc, util
-orm = util.importlater("sqlalchemy", "orm")
+from .base import _mapper_or_none
import inspect
import weakref
-
+from . import interfaces
+from . import mapperlib, instrumentation
+from .session import Session, sessionmaker
+from .scoping import scoped_session
+from .attributes import QueryableAttribute
class InstrumentationEvents(event.Events):
"""Events related to class instrumentation events.
@@ -43,17 +47,20 @@ class InstrumentationEvents(event.Events):
"""
_target_class_doc = "SomeBaseClass"
+ _dispatch_target = instrumentation.InstrumentationFactory
+
@classmethod
def _accept_with(cls, target):
- # TODO: there's no coverage for this
if isinstance(target, type):
return _InstrumentationEventsHold(target)
else:
return None
@classmethod
- def _listen(cls, target, identifier, fn, propagate=True):
+ def _listen(cls, event_key, propagate=True):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
def listen(target_cls, *arg):
listen_cls = target()
@@ -63,22 +70,21 @@ class InstrumentationEvents(event.Events):
return fn(target_cls, *arg)
def remove(ref):
- event.Events._remove(orm.instrumentation._instrumentation_factory,
- identifier, listen)
+ key = event.registry._EventKey(None, identifier, listen,
+ instrumentation._instrumentation_factory)
+ getattr(instrumentation._instrumentation_factory.dispatch,
+ identifier).remove(key)
target = weakref.ref(target.class_, remove)
- event.Events._listen(orm.instrumentation._instrumentation_factory,
- identifier, listen)
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of instrumentation events "
- "not yet implemented")
+ event_key.\
+ with_dispatch_target(instrumentation._instrumentation_factory).\
+ with_wrapper(listen).base_listen()
@classmethod
def _clear(cls):
super(InstrumentationEvents, cls)._clear()
- orm.instrumentation._instrumentation_factory.dispatch._clear()
+ instrumentation._instrumentation_factory.dispatch._clear()
def class_instrument(self, cls):
"""Called after the given class is instrumented.
@@ -100,6 +106,7 @@ class InstrumentationEvents(event.Events):
"""Called when an attribute is instrumented."""
+
class _InstrumentationEventsHold(object):
"""temporary marker object used to transfer from _accept_with() to
_listen() on the InstrumentationEvents class.
@@ -110,7 +117,6 @@ class _InstrumentationEventsHold(object):
dispatch = event.dispatcher(InstrumentationEvents)
-
class InstanceEvents(event.Events):
"""Define events specific to object lifecycle.
@@ -121,21 +127,19 @@ class InstanceEvents(event.Events):
def my_load_listener(target, context):
print "on load!"
- event.listen(SomeMappedClass, 'load', my_load_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
+ event.listen(SomeClass, 'load', my_load_listener)
- from sqlalchemy.orm import mapper
+ Available targets include:
- def some_listener(target, context):
- log.debug("Instance %s being loaded" % target)
+ * mapped classes
+ * unmapped superclasses of mapped or to-be-mapped classes
+ (using the ``propagate=True`` flag)
+ * :class:`.Mapper` objects
+ * the :class:`.Mapper` class itself and the :func:`.mapper`
+ function indicate listening for all mappers.
- # attach to all mappers
- event.listen(mapper, 'load', some_listener)
+ .. versionchanged:: 0.8.0 instance events can be associated with
+ unmapped superclasses of mapped classes.
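+
+ For example, a sketch of listening across a whole hierarchy,
+ assuming ``Base`` is a declarative base class::
+
+     event.listen(Base, 'load', my_load_listener, propagate=True)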
Instance events are closely related to mapper events, but
are more specific to the instance and its instrumentation,
@@ -154,21 +158,28 @@ class InstanceEvents(event.Events):
"""
- _target_class_doc = "SomeMappedClass"
+ _target_class_doc = "SomeClass"
+
+ _dispatch_target = instrumentation.ClassManager
@classmethod
- def _accept_with(cls, target):
- if isinstance(target, orm.instrumentation.ClassManager):
+ def _new_classmanager_instance(cls, class_, classmanager):
+ _InstanceEventsHold.populate(class_, classmanager)
+
+ @classmethod
+ @util.dependencies("sqlalchemy.orm")
+ def _accept_with(cls, orm, target):
+ if isinstance(target, instrumentation.ClassManager):
return target
- elif isinstance(target, orm.Mapper):
+ elif isinstance(target, mapperlib.Mapper):
return target.class_manager
elif target is orm.mapper:
- return orm.instrumentation.ClassManager
+ return instrumentation.ClassManager
elif isinstance(target, type):
- if issubclass(target, orm.Mapper):
- return orm.instrumentation.ClassManager
+ if issubclass(target, mapperlib.Mapper):
+ return instrumentation.ClassManager
else:
- manager = orm.instrumentation.manager_of_class(target)
+ manager = instrumentation.manager_of_class(target)
if manager:
return manager
else:
@@ -176,23 +187,23 @@ class InstanceEvents(event.Events):
return None
@classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
+ def _listen(cls, event_key, raw=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if not raw:
orig_fn = fn
def wrap(state, *arg, **kw):
return orig_fn(state.obj(), *arg, **kw)
fn = wrap
+ event_key = event_key.with_wrapper(fn)
+
+ event_key.base_listen(propagate=propagate)
- event.Events._listen(target, identifier, fn, propagate=propagate)
if propagate:
for mgr in target.subclass_managers(True):
- event.Events._listen(mgr, identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of instance events not yet implemented"
- raise NotImplementedError(msg)
+ event_key.with_dispatch_target(mgr).base_listen(propagate=True)
@classmethod
def _clear(cls):
@@ -321,8 +332,7 @@ class InstanceEvents(event.Events):
"""
-
-class _EventsHold(object):
+class _EventsHold(event.RefCollection):
"""Hold onto listeners against unmapped, uninstrumented classes.
Establish _listen() for that class' mapper/instrumentation when
@@ -337,14 +347,20 @@ class _EventsHold(object):
cls.all_holds.clear()
class HoldEvents(object):
+ _dispatch_target = None
+
@classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
+ def _listen(cls, event_key, raw=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if target.class_ in target.all_holds:
collection = target.all_holds[target.class_]
else:
- collection = target.all_holds[target.class_] = []
+ collection = target.all_holds[target.class_] = {}
- collection.append((identifier, fn, raw, propagate))
+ event.registry._stored_in_collection(event_key, target)
+ collection[event_key._key] = (event_key, raw, propagate)
if propagate:
stack = list(target.class_.__subclasses__())
@@ -353,28 +369,37 @@ class _EventsHold(object):
stack.extend(subclass.__subclasses__())
subject = target.resolve(subclass)
if subject is not None:
- subject.dispatch._listen(subject, identifier, fn,
- raw=raw, propagate=propagate)
+ event_key.with_dispatch_target(subject).\
+ listen(raw=raw, propagate=propagate)
+
+ def remove(self, event_key):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
+ collection = target.all_holds[target.class_]
+ del collection[event_key._key]
@classmethod
def populate(cls, class_, subject):
for subclass in class_.__mro__:
if subclass in cls.all_holds:
- if subclass is class_:
- collection = cls.all_holds.pop(subclass)
- else:
- collection = cls.all_holds[subclass]
- for ident, fn, raw, propagate in collection:
+ collection = cls.all_holds[subclass]
+ for event_key, raw, propagate in collection.values():
if propagate or subclass is class_:
- subject.dispatch._listen(subject, ident,
- fn, raw, propagate)
+ # since we can't be sure in what order different classes
+ # in a hierarchy are triggered with populate(),
+ # we rely upon _EventsHold for all event
+ # assignment, instead of using the generic propagate
+ # flag.
+ event_key.with_dispatch_target(subject).\
+ listen(raw=raw, propagate=False)
class _InstanceEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
def resolve(self, class_):
- return orm.instrumentation.manager_of_class(class_)
+ return instrumentation.manager_of_class(class_)
class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents):
pass
@@ -396,24 +421,22 @@ class MapperEvents(event.Events):
"select my_special_function(%d)"
% target.special_number)
- # associate the listener function with SomeMappedClass,
+ # associate the listener function with SomeClass,
# to execute during the "before_insert" hook
event.listen(
- SomeMappedClass, 'before_insert', my_before_insert_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
+ SomeClass, 'before_insert', my_before_insert_listener)
- from sqlalchemy.orm import mapper
+ Available targets include:
- def some_listener(mapper, connection, target):
- log.debug("Instance %s being inserted" % target)
+ * mapped classes
+ * unmapped superclasses of mapped or to-be-mapped classes
+ (using the ``propagate=True`` flag)
+ * :class:`.Mapper` objects
+ * the :class:`.Mapper` class itself and the :func:`.mapper`
+ function indicate listening for all mappers.
- # attach to all mappers
- event.listen(mapper, 'before_insert', some_listener)
+ .. versionchanged:: 0.8.0 mapper events can be associated with
+ unmapped superclasses of mapped classes.
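+
+ Similarly, as a sketch, a listener may be associated with all
+ mappers in a hierarchy, assuming ``Base`` is a declarative base
+ class::
+
+     event.listen(Base, 'before_insert',
+                  my_before_insert_listener, propagate=True)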
Mapper events provide hooks into critical sections of the
mapper, including those related to object instrumentation,
@@ -455,17 +478,23 @@ class MapperEvents(event.Events):
"""
- _target_class_doc = "SomeMappedClass"
+ _target_class_doc = "SomeClass"
+ _dispatch_target = mapperlib.Mapper
@classmethod
- def _accept_with(cls, target):
+ def _new_mapper_instance(cls, class_, mapper):
+ _MapperEventsHold.populate(class_, mapper)
+
+ @classmethod
+ @util.dependencies("sqlalchemy.orm")
+ def _accept_with(cls, orm, target):
if target is orm.mapper:
- return orm.Mapper
+ return mapperlib.Mapper
elif isinstance(target, type):
- if issubclass(target, orm.Mapper):
+ if issubclass(target, mapperlib.Mapper):
return target
else:
- mapper = orm.util._mapper_or_none(target)
+ mapper = _mapper_or_none(target)
if mapper is not None:
return mapper
else:
@@ -474,8 +503,10 @@ class MapperEvents(event.Events):
return target
@classmethod
- def _listen(cls, target, identifier, fn,
+ def _listen(cls, event_key,
raw=False, retval=False, propagate=False):
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
if not raw or not retval:
if not raw:
@@ -494,16 +525,17 @@ class MapperEvents(event.Events):
arg[target_index] = arg[target_index].obj()
if not retval:
wrapped_fn(*arg, **kw)
- return orm.interfaces.EXT_CONTINUE
+ return interfaces.EXT_CONTINUE
else:
return wrapped_fn(*arg, **kw)
fn = wrap
+ event_key = event_key.with_wrapper(wrap)
if propagate:
for mapper in target.self_and_descendants:
- event.Events._listen(mapper, identifier, fn, propagate=True)
+ event_key.with_dispatch_target(mapper).base_listen(propagate=True)
else:
- event.Events._listen(target, identifier, fn)
+ event_key.base_listen()
@classmethod
def _clear(cls):
@@ -517,8 +549,15 @@ class MapperEvents(event.Events):
This event is the earliest phase of mapper construction.
Most attributes of the mapper are not yet initialized.
- This listener can generally only be applied to the :class:`.Mapper`
- class overall.
+ This listener can either be applied to the :class:`.Mapper`
+ class overall, or to any un-mapped class which serves as a base
+ for classes that will be mapped (using the ``propagate=True`` flag)::
+
+    Base = declarative_base()
+
+    @event.listens_for(Base, "instrument_class", propagate=True)
+    def on_new_class(mapper, cls_):
+        " ... "
:param mapper: the :class:`.Mapper` which is the target
of this event.
@@ -1048,17 +1087,11 @@ class MapperEvents(event.Events):
"""
- @classmethod
- def _remove(cls, identifier, target, fn):
- "Removal of mapper events not yet implemented"
- raise NotImplementedError(msg)
-
-
class _MapperEventsHold(_EventsHold):
all_holds = weakref.WeakKeyDictionary()
def resolve(self, class_):
- return orm.util._mapper_or_none(class_)
+ return _mapper_or_none(class_)
class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents):
pass
@@ -1083,7 +1116,7 @@ class SessionEvents(event.Events):
The :func:`~.event.listen` function will accept
:class:`.Session` objects as well as the return result
- of :func:`.sessionmaker` and :func:`.scoped_session`.
+ of :class:`~.sessionmaker()` and :class:`~.scoped_session()`.
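+
+ As a sketch, a listener may be attached to a hypothetical
+ ``sessionmaker()`` factory like::
+
+     Session = sessionmaker()
+
+     @event.listens_for(Session, "before_commit")
+     def my_before_commit(session):
+         print("before commit!")
+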
Additionally, it accepts the :class:`.Session` class which
will apply listeners to all :class:`.Session` instances
@@ -1093,38 +1126,35 @@ class SessionEvents(event.Events):
_target_class_doc = "SomeSessionOrFactory"
+ _dispatch_target = Session
+
@classmethod
def _accept_with(cls, target):
- if isinstance(target, orm.scoped_session):
+ if isinstance(target, scoped_session):
target = target.session_factory
- if not isinstance(target, orm.sessionmaker) and \
+ if not isinstance(target, sessionmaker) and \
(
not isinstance(target, type) or
- not issubclass(target, orm.Session)
+ not issubclass(target, Session)
):
raise exc.ArgumentError(
"Session event listen on a scoped_session "
"requires that its creation callable "
"is associated with the Session class.")
- if isinstance(target, orm.sessionmaker):
+ if isinstance(target, sessionmaker):
return target.class_
elif isinstance(target, type):
- if issubclass(target, orm.scoped_session):
- return orm.Session
- elif issubclass(target, orm.Session):
+ if issubclass(target, scoped_session):
+ return Session
+ elif issubclass(target, Session):
return target
- elif isinstance(target, orm.Session):
+ elif isinstance(target, Session):
return target
else:
return None
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of session events not yet implemented"
- raise NotImplementedError(msg)
-
def after_transaction_create(self, session, transaction):
"""Execute when a new :class:`.SessionTransaction` is created.
@@ -1173,7 +1203,7 @@ class SessionEvents(event.Events):
.. note::
- The :meth:`.before_commit` hook is *not* per-flush,
+ The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
For interception of these events, use the :meth:`~.SessionEvents.before_flush`,
@@ -1265,9 +1295,9 @@ class SessionEvents(event.Events):
:param session: The target :class:`.Session`.
:param previous_transaction: The :class:`.SessionTransaction`
- transactional marker object which was just closed. The current
- :class:`.SessionTransaction` for the given :class:`.Session` is
- available via the :attr:`.Session.transaction` attribute.
+ transactional marker object which was just closed. The current
+ :class:`.SessionTransaction` for the given :class:`.Session` is
+ available via the :attr:`.Session.transaction` attribute.
.. versionadded:: 0.7.3
@@ -1359,7 +1389,7 @@ class SessionEvents(event.Events):
This is called before an add, delete or merge causes
the object to be part of the session.
- .. versionadded:: 0.8. Note that :meth:`.after_attach` now
+ .. versionadded:: 0.8. Note that :meth:`~.SessionEvents.after_attach` now
fires off after the item is part of the session.
:meth:`.before_attach` is provided for those cases where
the item should not yet be part of the session state.
@@ -1474,7 +1504,7 @@ class AttributeEvents(event.Events):
listen(UserContact.phone, 'set', validate_phone, retval=True)
A validation function like the above can also raise an exception
- such as :class:`.ValueError` to halt the operation.
+ such as :exc:`ValueError` to halt the operation.
Several modifiers are available to the :func:`~.event.listen` function.
@@ -1503,25 +1533,32 @@ class AttributeEvents(event.Events):
"""
_target_class_doc = "SomeClass.some_attribute"
+ _dispatch_target = QueryableAttribute
+
+ @staticmethod
+ def _set_dispatch(cls, dispatch_cls):
+ event.Events._set_dispatch(cls, dispatch_cls)
+ dispatch_cls._active_history = False
@classmethod
def _accept_with(cls, target):
# TODO: coverage
- if isinstance(target, orm.interfaces.MapperProperty):
+ if isinstance(target, interfaces.MapperProperty):
return getattr(target.parent.class_, target.key)
else:
return target
@classmethod
- def _listen(cls, target, identifier, fn, active_history=False,
+ def _listen(cls, event_key, active_history=False,
raw=False, retval=False,
propagate=False):
+
+ target, identifier, fn = \
+ event_key.dispatch_target, event_key.identifier, event_key.fn
+
if active_history:
target.dispatch._active_history = True
- # TODO: for removal, need to package the identity
- # of the wrapper with the original function.
-
if not raw or not retval:
orig_fn = fn
@@ -1534,19 +1571,15 @@ class AttributeEvents(event.Events):
else:
return orig_fn(target, value, *arg)
fn = wrap
+ event_key = event_key.with_wrapper(wrap)
- event.Events._listen(target, identifier, fn, propagate)
+ event_key.base_listen(propagate=propagate)
if propagate:
- manager = orm.instrumentation.manager_of_class(target.class_)
+ manager = instrumentation.manager_of_class(target.class_)
for mgr in manager.subclass_managers(True):
- event.Events._listen(mgr[target.key], identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- msg = "Removal of attribute events not yet implemented"
- raise NotImplementedError(msg)
+ event_key.with_dispatch_target(mgr[target.key]).base_listen(propagate=True)
def append(self, target, value, initiator):
"""Receive a collection append event.
@@ -1558,8 +1591,15 @@ class AttributeEvents(event.Events):
is registered with ``retval=True``, the listener
function must return this value, or a new value which
replaces it.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+ from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
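+
+ As a sketch, a hypothetical listener that normalizes string values
+ as they are appended to a collection::
+
+     @event.listens_for(SomeClass.some_attribute, 'append', retval=True)
+     def on_append(target, value, initiator):
+         return value.strip()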
@@ -1572,8 +1612,15 @@ class AttributeEvents(event.Events):
If the listener is registered with ``raw=True``, this will
be the :class:`.InstanceState` object.
:param value: the value being removed.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+ from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: No return value is defined for this event.
"""
@@ -1593,9 +1640,17 @@ class AttributeEvents(event.Events):
the previous value of the attribute will be loaded from
the database if the existing value is currently unloaded
or expired.
- :param initiator: the attribute implementation object
- which initiated this event.
+ :param initiator: An instance of :class:`.attributes.Event`
+ representing the initiation of the event. May be modified
+ from its original value by backref handlers in order to control
+ chained event propagation.
+
+ .. versionchanged:: 0.9.0 the ``initiator`` argument is now
+ passed as a :class:`.attributes.Event` object, and may be modified
+ by backref handlers within a chain of backref-linked events.
+
:return: if the event was registered with ``retval=True``,
the given value, or a new effective value, should be returned.
"""
+
diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py
index 0faa7bd29..d1ef1ded9 100644
--- a/lib/sqlalchemy/orm/exc.py
+++ b/lib/sqlalchemy/orm/exc.py
@@ -1,13 +1,11 @@
# orm/exc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc, util
-orm_util = util.importlater('sqlalchemy.orm', 'util')
-attributes = util.importlater('sqlalchemy.orm', 'attributes')
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
@@ -65,10 +63,11 @@ class DetachedInstanceError(sa_exc.SQLAlchemyError):
class UnmappedInstanceError(UnmappedError):
"""An mapping operation was requested for an unknown instance."""
- def __init__(self, obj, msg=None):
+ @util.dependencies("sqlalchemy.orm.base")
+ def __init__(self, base, obj, msg=None):
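+        # note: the util.dependencies decorator lazily imports the named
+        # "sqlalchemy.orm.base" module and injects it as the leading
+        # "base" argument, breaking an import cycle; callers still
+        # invoke __init__(obj, msg) as before.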
if not msg:
try:
- mapper = orm_util.class_mapper(type(obj))
+ base.class_mapper(type(obj))
name = _safe_cls_name(type(obj))
msg = ("Class %r is mapped, but this instance lacks "
"instrumentation. This occurs when the instance"
@@ -117,10 +116,11 @@ class ObjectDeletedError(sa_exc.InvalidRequestError):
object.
"""
- def __init__(self, state, msg=None):
+ @util.dependencies("sqlalchemy.orm.base")
+ def __init__(self, base, state, msg=None):
if not msg:
msg = "Instance '%s' has been deleted, or its "\
- "row is otherwise not present." % orm_util.state_str(state)
+ "row is otherwise not present." % base.state_str(state)
sa_exc.InvalidRequestError.__init__(self, msg)
@@ -149,10 +149,10 @@ def _safe_cls_name(cls):
cls_name = repr(cls)
return cls_name
-
-def _default_unmapped(cls):
+@util.dependencies("sqlalchemy.orm.base")
+def _default_unmapped(base, cls):
try:
- mappers = attributes.manager_of_class(cls).mappers
+ mappers = base.manager_of_class(cls).mappers
except NO_STATE:
mappers = {}
except TypeError:
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index d0234a1d3..a91085d28 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -1,5 +1,5 @@
# orm/identity.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -172,7 +172,7 @@ class WeakInstanceDict(IdentityMap):
if util.py2k:
return dict.values(self)
else:
- return list(dict.values(self))
+ return list(dict.values(self))
def discard(self, state):
st = dict.get(self, state.key, None)
diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py
index 368a6a0b1..68b4f0611 100644
--- a/lib/sqlalchemy/orm/instrumentation.py
+++ b/lib/sqlalchemy/orm/instrumentation.py
@@ -1,5 +1,5 @@
# orm/instrumentation.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -29,17 +29,15 @@ alternate instrumentation forms.
"""
-from . import exc, collections, events, interfaces
-from operator import attrgetter
-from .. import event, util
-state = util.importlater("sqlalchemy.orm", "state")
-
+from . import exc, collections, interfaces, state
+from .. import util
+from . import base
class ClassManager(dict):
"""tracks state information at the class level."""
- MANAGER_ATTR = '_sa_class_manager'
- STATE_ATTR = '_sa_instance_state'
+ MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
+ STATE_ATTR = base.DEFAULT_STATE_ATTR
deferred_scalar_loader = None
@@ -63,7 +61,8 @@ class ClassManager(dict):
for base in self._bases:
self.update(base)
- events._InstanceEventsHold.populate(class_, self)
+ self.dispatch._events._new_classmanager_instance(class_, self)
+ #events._InstanceEventsHold.populate(class_, self)
for basecls in class_.__mro__:
mgr = manager_of_class(basecls)
@@ -79,7 +78,11 @@ class ClassManager(dict):
"reference cycles. Please remove this method." %
class_)
- dispatch = event.dispatcher(events.InstanceEvents)
+ def __hash__(self):
+ return id(self)
+
+ def __eq__(self, other):
+ return other is self
@property
def is_mapped(self):
@@ -164,9 +167,7 @@ class ClassManager(dict):
@util.hybridmethod
def manager_getter(self):
- def manager_of_class(cls):
- return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
- return manager_of_class
+ return _default_manager_getter
@util.hybridmethod
def state_getter(self):
@@ -177,11 +178,12 @@ class ClassManager(dict):
instance.
"""
- return attrgetter(self.STATE_ATTR)
+ return _default_state_getter
@util.hybridmethod
def dict_getter(self):
- return attrgetter('__dict__')
+ return _default_dict_getter
+
def instrument_attribute(self, key, inst, propagated=False):
if propagated:
@@ -296,6 +298,9 @@ class ClassManager(dict):
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
+ def _serialize(self, state, state_dict):
+ return _SerializeManager(state, state_dict)
+
def _new_state_if_none(self, instance):
"""Install a default InstanceState if none is present.
@@ -335,12 +340,41 @@ class ClassManager(dict):
return '<%s of %r at %x>' % (
self.__class__.__name__, self.class_, id(self))
+class _SerializeManager(object):
+ """Provide serialization of a :class:`.ClassManager`.
+
+ The :class:`.InstanceState` uses ``__init__()`` on serialize
+ and ``__call__()`` on deserialize.
+
+ """
+ def __init__(self, state, d):
+ self.class_ = state.class_
+ manager = state.manager
+ manager.dispatch.pickle(state, d)
+
+ def __call__(self, state, inst, state_dict):
+ state.manager = manager = manager_of_class(self.class_)
+ if manager is None:
+ raise exc.UnmappedInstanceError(
+ inst,
+ "Cannot deserialize object of type %r - "
+ "no mapper() has "
+ "been configured for this class within the current "
+ "Python process!" %
+ self.class_)
+ elif manager.is_mapped and not manager.mapper.configured:
+ manager.mapper._configure_all()
+
+ # setup _sa_instance_state ahead of time so that
+ # unpickle events can access the object normally.
+ # see [ticket:2362]
+ if inst is not None:
+ manager.setup_instance(inst, state)
+ manager.dispatch.unpickle(state, state_dict)
class InstrumentationFactory(object):
"""Factory for new ClassManager instances."""
- dispatch = event.dispatcher(events.InstrumentationEvents)
-
def create_manager_for_cls(self, class_):
assert class_ is not None
assert manager_of_class(class_) is None
@@ -380,6 +414,14 @@ class InstrumentationFactory(object):
# when imported.
_instrumentation_factory = InstrumentationFactory()
+# these attributes are replaced by sqlalchemy.ext.instrumentation
+# when a non-standard InstrumentationManager class is first
+# used to instrument a class.
+instance_state = _default_state_getter = base.instance_state
+
+instance_dict = _default_dict_getter = base.instance_dict
+
+manager_of_class = _default_manager_getter = base.manager_of_class
def register_class(class_):
"""Register class instrumentation.
@@ -411,15 +453,6 @@ def is_instrumented(instance, key):
return manager_of_class(instance.__class__).\
is_instrumented(key, search=True)
-# these attributes are replaced by sqlalchemy.ext.instrumentation
-# when a non-standard InstrumentationManager class is first
-# used to instrument a class.
-instance_state = _default_state_getter = ClassManager.state_getter()
-
-instance_dict = _default_dict_getter = ClassManager.dict_getter()
-
-manager_of_class = _default_manager_getter = ClassManager.manager_getter()
-
def _generate_init(class_, class_manager):
"""Build an __init__ decorator that triggers ClassManager events."""
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 150277be2..3d5559be9 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -1,5 +1,5 @@
# orm/interfaces.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -21,9 +21,11 @@ from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
+from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
+from .base import _InspectionAttr, _MappedAttribute
+from .path_registry import PathRegistry
+import collections
-orm_util = util.importlater('sqlalchemy.orm', 'util')
-collections = util.importlater('sqlalchemy.orm', 'collections')
__all__ = (
'AttributeExtension',
@@ -42,97 +44,6 @@ __all__ = (
'StrategizedProperty',
)
-EXT_CONTINUE = util.symbol('EXT_CONTINUE')
-EXT_STOP = util.symbol('EXT_STOP')
-
-ONETOMANY = util.symbol('ONETOMANY')
-MANYTOONE = util.symbol('MANYTOONE')
-MANYTOMANY = util.symbol('MANYTOMANY')
-
-from .deprecated_interfaces import AttributeExtension, \
- SessionExtension, \
- MapperExtension
-
-
-NOT_EXTENSION = util.symbol('NOT_EXTENSION')
-"""Symbol indicating an :class:`_InspectionAttr` that's
- not part of sqlalchemy.ext.
-
- Is assigned to the :attr:`._InspectionAttr.extension_type`
- attibute.
-
-"""
-
-class _InspectionAttr(object):
- """A base class applied to all ORM objects that can be returned
- by the :func:`.inspect` function.
-
- The attributes defined here allow the usage of simple boolean
- checks to test basic facts about the object returned.
-
- While the boolean checks here are basically the same as using
- the Python isinstance() function, the flags here can be used without
- the need to import all of these classes, and also such that
- the SQLAlchemy class system can change while leaving the flags
- here intact for forwards-compatibility.
-
- """
-
- is_selectable = False
- """Return True if this object is an instance of :class:`.Selectable`."""
-
- is_aliased_class = False
- """True if this object is an instance of :class:`.AliasedClass`."""
-
- is_instance = False
- """True if this object is an instance of :class:`.InstanceState`."""
-
- is_mapper = False
- """True if this object is an instance of :class:`.Mapper`."""
-
- is_property = False
- """True if this object is an instance of :class:`.MapperProperty`."""
-
- is_attribute = False
- """True if this object is a Python :term:`descriptor`.
-
- This can refer to one of many types. Usually a
- :class:`.QueryableAttribute` which handles attributes events on behalf
- of a :class:`.MapperProperty`. But can also be an extension type
- such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
- The :attr:`._InspectionAttr.extension_type` will refer to a constant
- identifying the specific subtype.
-
- .. seealso::
-
- :attr:`.Mapper.all_orm_descriptors`
-
- """
-
- is_clause_element = False
- """True if this object is an instance of :class:`.ClauseElement`."""
-
- extension_type = NOT_EXTENSION
- """The extension type, if any.
- Defaults to :data:`.interfaces.NOT_EXTENSION`
-
- .. versionadded:: 0.8.0
-
- .. seealso::
-
- :data:`.HYBRID_METHOD`
-
- :data:`.HYBRID_PROPERTY`
-
- :data:`.ASSOCIATION_PROXY`
-
- """
-
-class _MappedAttribute(object):
- """Mixin for attributes which should be replaced by mapper-assigned
- attributes.
-
- """
class MapperProperty(_MappedAttribute, _InspectionAttr):
@@ -235,7 +146,26 @@ class MapperProperty(_MappedAttribute, _InspectionAttr):
@property
def class_attribute(self):
"""Return the class-bound descriptor corresponding to this
- MapperProperty."""
+ :class:`.MapperProperty`.
+
+ This is basically a ``getattr()`` call::
+
+ return getattr(self.parent.class_, self.key)
+
+ I.e. if this :class:`.MapperProperty` were named ``addresses``,
+ and the class to which it is mapped is ``User``, this sequence
+ is possible::
+
+ >>> from sqlalchemy import inspect
+ >>> mapper = inspect(User)
+ >>> addresses_property = mapper.attrs.addresses
+ >>> addresses_property.class_attribute is User.addresses
+ True
+ >>> User.addresses.property is addresses_property
+ True
+
+
+ """
return getattr(self.parent.class_, self.key)
@@ -389,6 +319,9 @@ class PropComparator(operators.ColumnOperators):
def __clause_element__(self):
raise NotImplementedError("%r" % self)
+ def _query_clause_element(self):
+ return self.__clause_element__()
+
def adapt_to_entity(self, adapt_to_entity):
"""Return a copy of this PropComparator which will use the given
:class:`.AliasedInsp` to produce corresponding expressions.
@@ -490,51 +423,57 @@ class StrategizedProperty(MapperProperty):
strategy_wildcard_key = None
- @util.memoized_property
- def _wildcard_path(self):
- if self.strategy_wildcard_key:
- return ('loaderstrategy', (self.strategy_wildcard_key,))
- else:
- return None
+ def _get_context_loader(self, context, path):
+ load = None
- def _get_context_strategy(self, context, path):
- strategy_cls = path._inlined_get_for(self, context, 'loaderstrategy')
+ # use EntityRegistry.__getitem__()->PropRegistry here so
+ # that the path is stated in terms of our base
+ search_path = dict.__getitem__(path, self)
- if not strategy_cls:
- wc_key = self._wildcard_path
- if wc_key and wc_key in context.attributes:
- strategy_cls = context.attributes[wc_key]
+ # search among: exact match, "attr.*", "default" strategy
+ # if any.
+ for path_key in (
+ search_path._loader_key,
+ search_path._wildcard_path_loader_key,
+ search_path._default_path_loader_key
+ ):
+ if path_key in context.attributes:
+ load = context.attributes[path_key]
+ break
- if strategy_cls:
- try:
- return self._strategies[strategy_cls]
- except KeyError:
- return self.__init_strategy(strategy_cls)
- return self.strategy
+ return load
- def _get_strategy(self, cls):
+ def _get_strategy(self, key):
try:
- return self._strategies[cls]
+ return self._strategies[key]
except KeyError:
- return self.__init_strategy(cls)
+ cls = self._strategy_lookup(*key)
+ self._strategies[key] = self._strategies[cls] = strategy = cls(self)
+ return strategy
- def __init_strategy(self, cls):
- self._strategies[cls] = strategy = cls(self)
- return strategy
+ def _get_strategy_by_cls(self, cls):
+ return self._get_strategy(cls._strategy_keys[0])
def setup(self, context, entity, path, adapter, **kwargs):
- self._get_context_strategy(context, path).\
- setup_query(context, entity, path,
- adapter, **kwargs)
+ loader = self._get_context_loader(context, path)
+ if loader and loader.strategy:
+ strat = self._get_strategy(loader.strategy)
+ else:
+ strat = self.strategy
+ strat.setup_query(context, entity, path, loader, adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
- return self._get_context_strategy(context, path).\
- create_row_processor(context, path,
+ loader = self._get_context_loader(context, path)
+ if loader and loader.strategy:
+ strat = self._get_strategy(loader.strategy)
+ else:
+ strat = self.strategy
+ return strat.create_row_processor(context, path, loader,
mapper, row, adapter)
def do_init(self):
self._strategies = {}
- self.strategy = self.__init_strategy(self.strategy_class)
+ self.strategy = self._get_strategy_by_cls(self.strategy_class)
def post_instrument_class(self, mapper):
if self.is_primary() and \
@@ -542,6 +481,30 @@ class StrategizedProperty(MapperProperty):
self.strategy.init_class_attribute(mapper)
+ _strategies = collections.defaultdict(dict)
+
+ @classmethod
+ def strategy_for(cls, **kw):
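+        # illustrative note: loader strategy classes register themselves
+        # against a MapperProperty subclass using this decorator, e.g.
+        #
+        #     @RelationshipProperty.strategy_for(lazy="dynamic")
+        #     class DynaLoader(strategies.AbstractRelationshipLoader):
+        #         ...
+        #
+        # the keyword arguments are converted to a hashable key below.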
+ def decorate(dec_cls):
+ dec_cls._strategy_keys = []
+ key = tuple(sorted(kw.items()))
+ cls._strategies[cls][key] = dec_cls
+ dec_cls._strategy_keys.append(key)
+ return dec_cls
+ return decorate
+
+ @classmethod
+ def _strategy_lookup(cls, *key):
+ for prop_cls in cls.__mro__:
+ if prop_cls in cls._strategies:
+ strategies = cls._strategies[prop_cls]
+ try:
+ return strategies[key]
+ except KeyError:
+ pass
+ raise Exception("can't locate strategy for %s %s" % (cls, key))
+
+
class MapperOption(object):
"""Describe a modification to a Query."""
@@ -563,241 +526,6 @@ class MapperOption(object):
self.process_query(query)
-class PropertyOption(MapperOption):
- """A MapperOption that is applied to a property off the mapper or
- one of its child mappers, identified by a dot-separated key
- or list of class-bound attributes. """
-
- def __init__(self, key, mapper=None):
- self.key = key
- self.mapper = mapper
-
- def process_query(self, query):
- self._process(query, True)
-
- def process_query_conditionally(self, query):
- self._process(query, False)
-
- def _process(self, query, raiseerr):
- paths = self._process_paths(query, raiseerr)
- if paths:
- self.process_query_property(query, paths)
-
- def process_query_property(self, query, paths):
- pass
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d['key'] = ret = []
- for token in util.to_list(self.key):
- if isinstance(token, PropComparator):
- ret.append((token._parentmapper.class_, token.key))
- else:
- ret.append(token)
- return d
-
- def __setstate__(self, state):
- ret = []
- for key in state['key']:
- if isinstance(key, tuple):
- cls, propkey = key
- ret.append(getattr(cls, propkey))
- else:
- ret.append(key)
- state['key'] = tuple(ret)
- self.__dict__ = state
-
- def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
- if orm_util._is_aliased_class(mapper):
- searchfor = mapper
- else:
- searchfor = orm_util._class_to_mapper(mapper)
- for ent in query._mapper_entities:
- if ent.corresponds_to(searchfor):
- return ent
- else:
- if raiseerr:
- if not list(query._mapper_entities):
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- raise sa_exc.ArgumentError(
- "Can't find property '%s' on any entity "
- "specified in this Query. Note the full path "
- "from root (%s) to target entity must be specified."
- % (token, ",".join(str(x) for
- x in query._mapper_entities))
- )
- else:
- return None
-
- def _find_entity_basestring(self, query, token, raiseerr):
- for ent in query._mapper_entities:
- # return only the first _MapperEntity when searching
- # based on string prop name. Ideally object
- # attributes are used to specify more exactly.
- return ent
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- return None
-
- def _process_paths(self, query, raiseerr):
- """reconcile the 'key' for this PropertyOption with
- the current path and entities of the query.
-
- Return a list of affected paths.
-
- """
- path = orm_util.PathRegistry.root
- entity = None
- paths = []
- no_result = []
-
- # _current_path implies we're in a
- # secondary load with an existing path
- current_path = list(query._current_path.path)
-
- tokens = deque(self.key)
- while tokens:
- token = tokens.popleft()
- if isinstance(token, str):
- # wildcard token
- if token.endswith(':*'):
- return [path.token(token)]
- sub_tokens = token.split(".", 1)
- token = sub_tokens[0]
- tokens.extendleft(sub_tokens[1:])
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[1].key == token:
- current_path = current_path[2:]
- continue
- else:
- return no_result
-
- if not entity:
- entity = self._find_entity_basestring(
- query,
- token,
- raiseerr)
- if entity is None:
- return no_result
- path_element = entity.entity_zero
- mapper = entity.mapper
-
- if hasattr(mapper.class_, token):
- prop = getattr(mapper.class_, token).property
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Can't find property named '%s' on the "
- "mapped entity %s in this Query. " % (
- token, mapper)
- )
- else:
- return no_result
- elif isinstance(token, PropComparator):
- prop = token.property
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[0:2] == \
- [token._parententity, prop]:
- current_path = current_path[2:]
- continue
- else:
- return no_result
-
- if not entity:
- entity = self._find_entity_prop_comparator(
- query,
- prop.key,
- token._parententity,
- raiseerr)
- if not entity:
- return no_result
-
- path_element = entity.entity_zero
- mapper = entity.mapper
- else:
- raise sa_exc.ArgumentError(
- "mapper option expects "
- "string key or list of attributes")
- assert prop is not None
- if raiseerr and not prop.parent.common_parent(mapper):
- raise sa_exc.ArgumentError("Attribute '%s' does not "
- "link from element '%s'" % (token, path_element))
-
- path = path[path_element][prop]
-
- paths.append(path)
-
- if getattr(token, '_of_type', None):
- ac = token._of_type
- ext_info = inspect(ac)
- path_element = mapper = ext_info.mapper
- if not ext_info.is_aliased_class:
- ac = orm_util.with_polymorphic(
- ext_info.mapper.base_mapper,
- ext_info.mapper, aliased=True,
- _use_mapper_path=True)
- ext_info = inspect(ac)
- path.set(query._attributes, "path_with_polymorphic", ext_info)
- else:
- path_element = mapper = getattr(prop, 'mapper', None)
- if mapper is None and tokens:
- raise sa_exc.ArgumentError(
- "Attribute '%s' of entity '%s' does not "
- "refer to a mapped entity" %
- (token, entity)
- )
-
- if current_path:
- # ran out of tokens before
- # current_path was exhausted.
- assert not tokens
- return no_result
-
- return paths
-
-
-class StrategizedOption(PropertyOption):
- """A MapperOption that affects which LoaderStrategy will be used
- for an operation by a StrategizedProperty.
- """
-
- chained = False
-
- def process_query_property(self, query, paths):
- strategy = self.get_strategy_class()
- if self.chained:
- for path in paths:
- path.set(
- query._attributes,
- "loaderstrategy",
- strategy
- )
- else:
- paths[-1].set(
- query._attributes,
- "loaderstrategy",
- strategy
- )
-
- def get_strategy_class(self):
- raise NotImplementedError()
class LoaderStrategy(object):
@@ -832,10 +560,10 @@ class LoaderStrategy(object):
def init_class_attribute(self, mapper):
pass
- def setup_query(self, context, entity, path, adapter, **kwargs):
+ def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
pass
- def create_row_processor(self, context, path, mapper,
+ def create_row_processor(self, context, path, loadopt, mapper,
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 1641f509e..af77fe3e0 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -1,5 +1,5 @@
# orm/loading.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -19,7 +19,6 @@ from .interfaces import EXT_CONTINUE
from ..sql import util as sql_util
from .util import _none_set, state_str
from .. import exc as sa_exc
-sessionlib = util.importlater("sqlalchemy.orm", "session")
_new_runid = util.counter()
@@ -34,7 +33,8 @@ def instances(query, cursor, context):
for ent in query._entities]
filtered = id in filter_fns
- single_entity = filtered and len(query._entities) == 1
+ single_entity = len(query._entities) == 1 and \
+ query._entities[0].supports_single_entity
if filtered:
if single_entity:
@@ -44,7 +44,7 @@ def instances(query, cursor, context):
return tuple(fn(x) for x, fn in zip(row, filter_fns))
custom_rows = single_entity and \
- query._entities[0].mapper.dispatch.append_result
+ query._entities[0].custom_rows
(process, labels) = \
list(zip(*[
@@ -98,11 +98,10 @@ def instances(query, cursor, context):
break
-def merge_result(query, iterator, load=True):
+@util.dependencies("sqlalchemy.orm.query")
+def merge_result(querylib, query, iterator, load=True):
"""Merge a result into this :class:`.Query` object's Session."""
- from . import query as querylib
-
session = query.session
if load:
# flush current contents if we expect to load data
@@ -175,8 +174,6 @@ def load_on_ident(query, key,
only_load_props=None):
"""Load the given identity key from the database."""
- lockmode = lockmode or query._lockmode
-
if key is not None:
ident = key[1]
else:
@@ -214,10 +211,17 @@ def load_on_ident(query, key,
q._params = params
if lockmode is not None:
- q._lockmode = lockmode
+ version_check = True
+ q = q.with_lockmode(lockmode)
+ elif query._for_update_arg is not None:
+ version_check = True
+ q._for_update_arg = query._for_update_arg
+ else:
+ version_check = False
+
q._get_options(
populate_existing=bool(refresh_state),
- version_check=(lockmode is not None),
+ version_check=version_check,
only_load_props=only_load_props,
refresh_state=refresh_state)
q._order_by = None
@@ -547,7 +551,7 @@ def load_scalar_attributes(mapper, state, attribute_names):
"""initiate a column-based attribute refresh operation."""
#assert mapper is _state_mapper(state)
- session = sessionlib._state_session(state)
+ session = state.session
if not session:
raise orm_exc.DetachedInstanceError(
"Instance %s is not bound to a Session; "
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 5929aea6c..fc75a0cb5 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1,5 +1,5 @@
# orm/mapper.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -22,26 +22,18 @@ from collections import deque
from .. import sql, util, log, exc as sa_exc, event, schema, inspection
from ..sql import expression, visitors, operators, util as sql_util
-from . import instrumentation, attributes, \
- exc as orm_exc, events, loading, dependency
+from . import instrumentation, attributes, exc as orm_exc, loading, dependency
+from . import properties
from .interfaces import MapperProperty, _InspectionAttr, _MappedAttribute
-from .util import _INSTRUMENTOR, _class_to_mapper, \
- _state_mapper, class_mapper, \
- PathRegistry, state_str
+from .base import _class_to_mapper, _state_mapper, class_mapper, \
+ state_str, _INSTRUMENTOR
+from .path_registry import PathRegistry
+
import sys
-properties = util.importlater("sqlalchemy.orm", "properties")
-descriptor_props = util.importlater("sqlalchemy.orm", "descriptor_props")
-__all__ = (
- 'Mapper',
- '_mapper_registry',
- 'class_mapper',
- 'object_mapper',
- )
_mapper_registry = weakref.WeakKeyDictionary()
-_new_mappers = False
_already_compiling = False
_memoized_configured_property = util.group_expirable_memoized_property()
@@ -56,6 +48,8 @@ NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
_CONFIGURE_MUTEX = util.threading.RLock()
+@inspection._self_inspects
+@log.class_logger
class Mapper(_InspectionAttr):
"""Define the correlation of class attributes to database table
columns.
@@ -88,9 +82,12 @@ class Mapper(_InspectionAttr):
"""
+
+ _new_mappers = False
+
def __init__(self,
class_,
- local_table,
+ local_table=None,
properties=None,
primary_key=None,
non_primary=False,
@@ -118,10 +115,380 @@ class Mapper(_InspectionAttr):
legacy_is_orphan=False,
_compiled_cache_size=100,
):
- """Construct a new mapper.
+ """Return a new :class:`~.Mapper` object.
+
+ This function is typically used behind the scenes
+ via the Declarative extension. When using Declarative,
+ many of the usual :func:`.mapper` arguments are handled
+ by the Declarative extension itself, including ``class_``,
+ ``local_table``, ``properties``, and ``inherits``.
+ Other options are passed to :func:`.mapper` using
+ the ``__mapper_args__`` class variable::
+
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+ id = Column(Integer, primary_key=True)
+ type = Column(String(50))
+ alt = Column("some_alt", Integer)
+
+ __mapper_args__ = {
+ 'polymorphic_on' : type
+ }
+
+
+ Explicit use of :func:`.mapper`
+ is often referred to as *classical mapping*. The above
+ declarative example is equivalent in classical form to::
+
+ my_table = Table("my_table", metadata,
+ Column('id', Integer, primary_key=True),
+ Column('type', String(50)),
+ Column("some_alt", Integer)
+ )
+
+ class MyClass(object):
+ pass
+
+ mapper(MyClass, my_table,
+ polymorphic_on=my_table.c.type,
+ properties={
+ 'alt':my_table.c.some_alt
+ })
+
+ .. seealso::
+
+ :ref:`classical_mapping` - discussion of direct usage of
+ :func:`.mapper`
+
+ :param class\_: The class to be mapped. When using Declarative,
+ this argument is automatically passed as the declared class
+ itself.
+
+ :param local_table: The :class:`.Table` or other selectable
+ to which the class is mapped. May be ``None`` if
+ this mapper inherits from another mapper using single-table
+ inheritance. When using Declarative, this argument is
+ automatically passed by the extension, based on what
+ is configured via the ``__table__`` argument or via the
+ :class:`.Table` produced as a result of the ``__tablename__``
+ and :class:`.Column` arguments present.
+
+ :param always_refresh: If True, all query operations for this mapped
+ class will overwrite all data within object instances that already
+ exist within the session, erasing any in-memory changes with
+ whatever information was loaded from the database. Usage of this
+ flag is highly discouraged; as an alternative, see the method
+ :meth:`.Query.populate_existing`.
+
+ :param allow_partial_pks: Defaults to True. Indicates that a
+ composite primary key with some NULL values should be considered as
+ possibly existing within the database. This affects whether a
+ mapper will assign an incoming row to an existing identity, as well
+      as whether :meth:`.Session.merge` will check the database first for a
+ particular primary key value. A "partial primary key" can occur if
+ one has mapped to an OUTER JOIN, for example.
+
+ :param batch: Defaults to ``True``, indicating that save operations
+ of multiple entities can be batched together for efficiency.
+ Setting to False indicates
+ that an instance will be fully saved before saving the next
+ instance. This is used in the extremely rare case that a
+ :class:`.MapperEvents` listener requires being called
+ in between individual row persistence operations.
+
+ :param column_prefix: A string which will be prepended
+ to the mapped attribute name when :class:`.Column`
+ objects are automatically assigned as attributes to the
+ mapped class. Does not affect explicitly specified
+ column-based properties.
+
+ See the section :ref:`column_prefix` for an example.
+
+ :param concrete: If True, indicates this mapper should use concrete
+ table inheritance with its parent mapper.
+
+ See the section :ref:`concrete_inheritance` for an example.
+
+ :param eager_defaults: if True, the ORM will immediately fetch the
+ value of server-generated default values after an INSERT or UPDATE,
+ rather than leaving them as expired to be fetched on next access.
+ This can be used for event schemes where the server-generated values
+ are needed immediately before the flush completes. By default,
+ this scheme will emit an individual ``SELECT`` statement per row
+      inserted or updated, which can add significant performance
+ overhead. However, if the
+ target database supports :term:`RETURNING`, the default values will be
+ returned inline with the INSERT or UPDATE statement, which can
+ greatly enhance performance for an application that needs frequent
+ access to just-generated server defaults.
+
+ .. versionchanged:: 0.9.0 The ``eager_defaults`` option can now
+ make use of :term:`RETURNING` for backends which support it.
+
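+      For example, a mapping that wants server-generated defaults loaded
+      immediately might look like this (an illustrative sketch; assumes a
+      declarative ``Base`` and a server-side timestamp default)::
+
+          class Widget(Base):
+              __tablename__ = 'widget'
+
+              id = Column(Integer, primary_key=True)
+              timestamp = Column(DateTime, server_default=func.now())
+
+              __mapper_args__ = {'eager_defaults': True}
+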
+ :param exclude_properties: A list or set of string column names to
+ be excluded from mapping.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param extension: A :class:`.MapperExtension` instance or
+ list of :class:`.MapperExtension` instances which will be applied
+ to all operations by this :class:`.Mapper`. **Deprecated.**
+ Please see :class:`.MapperEvents`.
+
+ :param include_properties: An inclusive list or set of string column
+ names to map.
+
+ See :ref:`include_exclude_cols` for an example.
+
+ :param inherits: A mapped class or the corresponding :class:`.Mapper`
+      of one, indicating the superclass from which this :class:`.Mapper`
+      should *inherit*. The mapped class here must be a subclass
+ of the other mapper's class. When using Declarative, this argument
+ is passed automatically as a result of the natural class
+ hierarchy of the declared classes.
+
+ .. seealso::
+
+ :ref:`inheritance_toplevel`
+
+ :param inherit_condition: For joined table inheritance, a SQL
+ expression which will
+ define how the two tables are joined; defaults to a natural join
+ between the two tables.
+
+ :param inherit_foreign_keys: When ``inherit_condition`` is used and the
+ columns present are missing a :class:`.ForeignKey` configuration,
+ this parameter can be used to specify which columns are "foreign".
+      In most cases this can be left as ``None``.
+
+ :param legacy_is_orphan: Boolean, defaults to ``False``.
+ When ``True``, specifies that "legacy" orphan consideration
+ is to be applied to objects mapped by this mapper, which means
+ that a pending (that is, not persistent) object is auto-expunged
+ from an owning :class:`.Session` only when it is de-associated
+ from *all* parents that specify a ``delete-orphan`` cascade towards
+ this mapper. The new default behavior is that the object is auto-expunged
+      when it is de-associated from *any* of its parents that specify
+      ``delete-orphan`` cascade.  This behavior is more consistent with
+      that of a persistent object, and allows behavior to be consistent
+      in more scenarios independently of whether or not an orphanable
+      object has been flushed.
+
+ See the change note and example at :ref:`legacy_is_orphan_addition`
+ for more detail on this change.
+
+ .. versionadded:: 0.8 - the consideration of a pending object as
+ an "orphan" has been modified to more closely match the
+        behavior of persistent objects, which is that the object
+ is expunged from the :class:`.Session` as soon as it is
+ de-associated from any of its orphan-enabled parents. Previously,
+ the pending object would be expunged only if de-associated
+ from all of its orphan-enabled parents. The new flag ``legacy_is_orphan``
+ is added to :func:`.orm.mapper` which re-establishes the
+ legacy behavior.
+
+ :param non_primary: Specify that this :class:`.Mapper` is in addition
+ to the "primary" mapper, that is, the one used for persistence.
+ The :class:`.Mapper` created here may be used for ad-hoc
+ mapping of the class to an alternate selectable, for loading
+ only.
+
+ The ``non_primary`` feature is rarely needed with modern
+ usage.
+
+ :param order_by: A single :class:`.Column` or list of :class:`.Column`
+      objects which selection operations should use as the default
+ ordering for entities. By default mappers have no pre-defined
+ ordering.
+
+ :param passive_updates: Indicates UPDATE behavior of foreign key
+ columns when a primary key column changes on a joined-table
+ inheritance mapping. Defaults to ``True``.
+
+ When True, it is assumed that ON UPDATE CASCADE is configured on
+ the foreign key in the database, and that the database will handle
+ propagation of an UPDATE from a source column to dependent columns
+ on joined-table rows.
+
+ When False, it is assumed that the database does not enforce
+ referential integrity and will not be issuing its own CASCADE
+ operation for an update. The :class:`.Mapper` here will
+ emit an UPDATE statement for the dependent columns during a
+ primary key change.
+
+      .. seealso::
+
+ :ref:`passive_updates` - description of a similar feature as
+ used with :func:`.relationship`
+
+ :param polymorphic_on: Specifies the column, attribute, or
+ SQL expression used to determine the target class for an
+ incoming row, when inheriting classes are present.
+
+ This value is commonly a :class:`.Column` object that's
+ present in the mapped :class:`.Table`::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+
+ __mapper_args__ = {
+ "polymorphic_on":discriminator,
+ "polymorphic_identity":"employee"
+ }
+
+ It may also be specified
+ as a SQL expression, as in this example where we
+ use the :func:`.case` construct to provide a conditional
+ approach::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+
+ __mapper_args__ = {
+ "polymorphic_on":case([
+ (discriminator == "EN", "engineer"),
+ (discriminator == "MA", "manager"),
+ ], else_="employee"),
+ "polymorphic_identity":"employee"
+ }
+
+ It may also refer to any attribute
+ configured with :func:`.column_property`, or to the
+ string name of one::
+
+ class Employee(Base):
+ __tablename__ = 'employee'
+
+ id = Column(Integer, primary_key=True)
+ discriminator = Column(String(50))
+ employee_type = column_property(
+ case([
+ (discriminator == "EN", "engineer"),
+ (discriminator == "MA", "manager"),
+ ], else_="employee")
+ )
- Mappers are normally constructed via the
- :func:`~sqlalchemy.orm.mapper` function. See for details.
+ __mapper_args__ = {
+ "polymorphic_on":employee_type,
+ "polymorphic_identity":"employee"
+ }
+
+ .. versionchanged:: 0.7.4
+ ``polymorphic_on`` may be specified as a SQL expression,
+ or refer to any attribute configured with
+ :func:`.column_property`, or to the string name of one.
+
+ When setting ``polymorphic_on`` to reference an
+ attribute or expression that's not present in the
+ locally mapped :class:`.Table`, yet the value
+ of the discriminator should be persisted to the database,
+ the value of the
+ discriminator is not automatically set on new
+ instances; this must be handled by the user,
+ either through manual means or via event listeners.
+ A typical approach to establishing such a listener
+ looks like::
+
+ from sqlalchemy import event
+ from sqlalchemy.orm import object_mapper
+
+ @event.listens_for(Employee, "init", propagate=True)
+ def set_identity(instance, *arg, **kw):
+ mapper = object_mapper(instance)
+ instance.discriminator = mapper.polymorphic_identity
+
+ Where above, we assign the value of ``polymorphic_identity``
+ for the mapped class to the ``discriminator`` attribute,
+ thus persisting the value to the ``discriminator`` column
+ in the database.
+
+ .. seealso::
+
+ :ref:`inheritance_toplevel`
+
+ :param polymorphic_identity: Specifies the value which
+ identifies this particular class as returned by the
+ column expression referred to by the ``polymorphic_on``
+ setting. As rows are received, the value corresponding
+ to the ``polymorphic_on`` column expression is compared
+ to this value, indicating which subclass should
+ be used for the newly reconstructed object.
+
+ :param properties: A dictionary mapping the string names of object
+ attributes to :class:`.MapperProperty` instances, which define the
+ persistence behavior of that attribute. Note that :class:`.Column`
+ objects present in
+ the mapped :class:`.Table` are automatically placed into
+ ``ColumnProperty`` instances upon mapping, unless overridden.
+ When using Declarative, this argument is passed automatically,
+ based on all those :class:`.MapperProperty` instances declared
+ in the declared class body.
+
+ :param primary_key: A list of :class:`.Column` objects which define the
+ primary key to be used against this mapper's selectable unit.
+ This is normally simply the primary key of the ``local_table``, but
+ can be overridden here.
+
+ :param version_id_col: A :class:`.Column`
+ that will be used to keep a running version id of rows
+ in the table. This is used to detect concurrent updates or
+      the presence of stale data in a flush.  If an UPDATE statement
+      does not match the last known
+      version id, a
+      :class:`~sqlalchemy.orm.exc.StaleDataError` exception is
+      raised.
+ By default, the column must be of :class:`.Integer` type,
+ unless ``version_id_generator`` specifies an alternative version
+ generator.
+
+ .. seealso::
+
+ :ref:`mapper_version_counter` - discussion of version counting
+ and rationale.
+
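+      For example (an illustrative sketch; assumes a declarative ``Base``)::
+
+          class Document(Base):
+              __tablename__ = 'document'
+
+              id = Column(Integer, primary_key=True)
+              version_id = Column(Integer, nullable=False)
+
+              __mapper_args__ = {'version_id_col': version_id}
+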
+ :param version_id_generator: Define how new version ids should
+ be generated. Defaults to ``None``, which indicates that
+ a simple integer counting scheme be employed. To provide a custom
+ versioning scheme, provide a callable function of the form::
+
+ def generate_version(version):
+ return next_version
+
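+      For example, a UUID-based scheme (one possible approach; any callable
+      returning a new, distinct value will do) could look like::
+
+          import uuid
+
+          __mapper_args__ = {
+              # 'uuid_column' stands in for a String column on the table
+              'version_id_col': uuid_column,
+              'version_id_generator': lambda version: uuid.uuid4().hex
+          }
+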
+ Alternatively, server-side versioning functions such as triggers,
+ or programmatic versioning schemes outside of the version id generator
+ may be used, by specifying the value ``False``.
+ Please see :ref:`server_side_version_counter` for a discussion
+ of important points when using this option.
+
+ .. versionadded:: 0.9.0 ``version_id_generator`` supports server-side
+ version number generation.
+
+ .. seealso::
+
+ :ref:`custom_version_counter`
+
+ :ref:`server_side_version_counter`
+
+
+ :param with_polymorphic: A tuple in the form ``(<classes>,
+ <selectable>)`` indicating the default style of "polymorphic"
+      loading, that is, which tables are queried at once. ``<classes>`` is
+      a single mapper or class, or a list of mappers and/or classes,
+      indicating the inherited classes to be loaded at once. The special value
+ ``'*'`` may be used to indicate all descending classes should be
+ loaded immediately. The second tuple argument <selectable>
+ indicates a selectable that will be used to query for multiple
+ classes.
+
+ .. seealso::
+
+ :ref:`with_polymorphic` - discussion of polymorphic querying techniques.
"""
@@ -138,9 +505,19 @@ class Mapper(_InspectionAttr):
self.order_by = order_by
self.always_refresh = always_refresh
- self.version_id_col = version_id_col
- self.version_id_generator = version_id_generator or \
- (lambda x: (x or 0) + 1)
+
+ if isinstance(version_id_col, MapperProperty):
+ self.version_id_prop = version_id_col
+ self.version_id_col = None
+ else:
+ self.version_id_col = version_id_col
+ if version_id_generator is False:
+ self.version_id_generator = False
+ elif version_id_generator is None:
+ self.version_id_generator = lambda x: (x or 0) + 1
+ else:
+ self.version_id_generator = version_id_generator
+
self.concrete = concrete
self.single = False
self.inherits = inherits
@@ -218,7 +595,7 @@ class Mapper(_InspectionAttr):
# configure_mappers() until construction succeeds)
_CONFIGURE_MUTEX.acquire()
try:
- events._MapperEventsHold.populate(class_, self)
+ self.dispatch._events._new_mapper_instance(class_, self)
self._configure_inheritance()
self._configure_legacy_instrument_class()
self._configure_class_instrumentation()
@@ -226,8 +603,7 @@ class Mapper(_InspectionAttr):
self._configure_properties()
self._configure_polymorphic_setter()
self._configure_pks()
- global _new_mappers
- _new_mappers = True
+ Mapper._new_mappers = True
self._log("constructed")
self._expire_memoizations()
finally:
@@ -252,7 +628,7 @@ class Mapper(_InspectionAttr):
def entity(self):
"""Part of the inspection API.
- Returns self.class_.
+ Returns self.class\_.
"""
return self.class_
@@ -272,7 +648,9 @@ class Mapper(_InspectionAttr):
this :class:`.Mapper` represents. If this mapper is a
single-table inheriting mapper, local_table will be ``None``.
- See also :attr:`~.Mapper.mapped_table`.
+ .. seealso::
+
+ :attr:`~.Mapper.mapped_table`.
"""
@@ -290,7 +668,9 @@ class Mapper(_InspectionAttr):
subclass. For single-table inheritance mappers, mapped_table
references the base table.
- See also :attr:`~.Mapper.local_table`.
+ .. seealso::
+
+ :attr:`~.Mapper.local_table`.
"""
@@ -309,7 +689,9 @@ class Mapper(_InspectionAttr):
This is a *read only* attribute determined during mapper construction.
Behavior is undefined if directly modified.
- See also :func:`.configure_mappers`.
+ .. seealso::
+
+ :func:`.configure_mappers`.
"""
@@ -478,8 +860,6 @@ class Mapper(_InspectionAttr):
c = None
"""A synonym for :attr:`~.Mapper.columns`."""
- dispatch = event.dispatcher(events.MapperEvents)
-
@util.memoized_property
def _path_registry(self):
return PathRegistry.per_mapper(self)
@@ -489,7 +869,7 @@ class Mapper(_InspectionAttr):
being present."""
# a set of all mappers which inherit from this one.
- self._inheriting_mappers = util.WeakSet()
+ self._inheriting_mappers = util.WeakSequence()
if self.inherits:
if isinstance(self.inherits, type):
@@ -563,7 +943,7 @@ class Mapper(_InspectionAttr):
self.polymorphic_map = self.inherits.polymorphic_map
self.batch = self.inherits.batch
- self.inherits._inheriting_mappers.add(self)
+ self.inherits._inheriting_mappers.append(self)
self.base_mapper = self.inherits.base_mapper
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
@@ -630,7 +1010,7 @@ class Mapper(_InspectionAttr):
self.batch = self.inherits.batch
for mp in self.self_and_descendants:
mp.base_mapper = self.inherits.base_mapper
- self.inherits._inheriting_mappers.add(self)
+ self.inherits._inheriting_mappers.append(self)
self.passive_updates = self.inherits.passive_updates
self._all_tables = self.inherits._all_tables
for key, prop in mapper._props.items():
@@ -735,30 +1115,20 @@ class Mapper(_InspectionAttr):
self._reconstructor = method
event.listen(manager, 'load', _event_on_load, raw=True)
elif hasattr(method, '__sa_validators__'):
- include_removes = getattr(method,
- "__sa_include_removes__", False)
+ validation_opts = method.__sa_validation_opts__
for name in method.__sa_validators__:
self.validators = self.validators.union(
- {name: (method, include_removes)}
+ {name: (method, validation_opts)}
)
manager.info[_INSTRUMENTOR] = self
- @util.deprecated("0.7", message=":meth:`.Mapper.compile` "
- "is replaced by :func:`.configure_mappers`")
- def compile(self):
- """Initialize the inter-mapper relationships of all mappers that
- have been constructed thus far.
+ @classmethod
+ def _configure_all(cls):
+ """Class-level path to the :func:`.configure_mappers` call.
"""
configure_mappers()
- return self
-
- @property
- @util.deprecated("0.7", message=":attr:`.Mapper.compiled` "
- "is replaced by :attr:`.Mapper.configured`")
- def compiled(self):
- return self.configured
def dispose(self):
# Disable any attribute-based compilation.
@@ -956,7 +1326,7 @@ class Mapper(_InspectionAttr):
prop = self.polymorphic_on
self.polymorphic_on = prop.columns[0]
polymorphic_key = prop.key
- elif not expression.is_column(self.polymorphic_on):
+ elif not expression._is_column(self.polymorphic_on):
# polymorphic_on is not a Column and not a ColumnProperty;
# not supported right now.
raise sa_exc.ArgumentError(
@@ -1080,6 +1450,13 @@ class Mapper(_InspectionAttr):
_validate_polymorphic_identity = None
@_memoized_configured_property
+ def _version_id_prop(self):
+ if self.version_id_col is not None:
+ return self._columntoproperty[self.version_id_col]
+ else:
+ return None
+
+ @_memoized_configured_property
def _acceptable_polymorphic_identities(self):
identities = set()
@@ -1205,7 +1582,7 @@ class Mapper(_InspectionAttr):
# generate a properties.ColumnProperty
columns = util.to_list(prop)
column = columns[0]
- if not expression.is_column(column):
+ if not expression._is_column(column):
raise sa_exc.ArgumentError(
"%s=%r is not an instance of MapperProperty or Column"
% (key, prop))
@@ -1369,7 +1746,7 @@ class Mapper(_InspectionAttr):
"""return a MapperProperty associated with the given key.
"""
- if _configure_mappers and _new_mappers:
+ if _configure_mappers and Mapper._new_mappers:
configure_mappers()
try:
@@ -1387,7 +1764,7 @@ class Mapper(_InspectionAttr):
@property
def iterate_properties(self):
"""return an iterator of all MapperProperty objects."""
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return iter(self._props.values())
@@ -1461,7 +1838,7 @@ class Mapper(_InspectionAttr):
@_memoized_configured_property
def _with_polymorphic_mappers(self):
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
if not self.with_polymorphic:
return []
@@ -1493,7 +1870,7 @@ class Mapper(_InspectionAttr):
Normally, this is equivalent to :attr:`.mapped_table`, unless
the ``with_polymorphic`` feature is in use, in which case the
- full "polymoprhic" selectable is returned.
+ full "polymorphic" selectable is returned.
"""
return self._with_polymorphic_selectable
@@ -1568,7 +1945,7 @@ class Mapper(_InspectionAttr):
:attr:`.Mapper.all_orm_descriptors`
"""
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(self._props)
@@ -1611,23 +1988,23 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.SynonymProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
- return self._filter_properties(descriptor_props.SynonymProperty)
+ return self._filter_properties(properties.SynonymProperty)
@_memoized_configured_property
def column_attrs(self):
"""Return a namespace of all :class:`.ColumnProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
return self._filter_properties(properties.ColumnProperty)
@@ -1637,10 +2014,10 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.RelationshipProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
return self._filter_properties(properties.RelationshipProperty)
@@ -1650,16 +2027,16 @@ class Mapper(_InspectionAttr):
"""Return a namespace of all :class:`.CompositeProperty`
properties maintained by this :class:`.Mapper`.
- See also:
+ .. seealso::
- :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
- objects.
+ :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
+ objects.
"""
- return self._filter_properties(descriptor_props.CompositeProperty)
+ return self._filter_properties(properties.CompositeProperty)
def _filter_properties(self, type_):
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
return util.ImmutableProperties(util.OrderedDict(
(k, v) for k, v in self._props.items()
@@ -1805,7 +2182,7 @@ class Mapper(_InspectionAttr):
while stack:
item = stack.popleft()
descendants.append(item)
- stack.extend(sorted(item._inheriting_mappers, key=lambda m: m.class_.__name__))
+ stack.extend(item._inheriting_mappers)
return util.WeakSequence(descendants)
def polymorphic_iterator(self):
@@ -1835,10 +2212,11 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from the identity map.
- row
- A ``sqlalchemy.engine.RowProxy`` instance or a
- dictionary corresponding result-set ``ColumnElement``
- instances to their values within a row.
+ :param row: A :class:`.RowProxy` instance. The columns which are mapped
+ by this :class:`.Mapper` should be locatable in the row, preferably
+ via the :class:`.Column` object directly (as is the case when a
+ :func:`.select` construct is executed), or via string names of the form
+ ``<tablename>_<colname>``.
"""
pk_cols = self.primary_key
@@ -1852,8 +2230,7 @@ class Mapper(_InspectionAttr):
"""Return an identity-map key for use in storing/retrieving an
item from an identity map.
- primary_key
- A list of values indicating the identifier.
+ :param primary_key: A list of values indicating the identifier.
"""
return self._identity_class, tuple(primary_key)
@@ -1862,6 +2239,11 @@ class Mapper(_InspectionAttr):
"""Return the identity key for the given instance, based on
its primary key attributes.
+ If the instance's state is expired, calling this method
+ will result in a database check to see if the object has been deleted.
+ If the row no longer exists,
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
This value is typically also found on the instance state under the
attribute name `key`.
@@ -1882,6 +2264,11 @@ class Mapper(_InspectionAttr):
"""Return the list of primary key values for the given
instance.
+ If the instance's state is expired, calling this method
+ will result in a database check to see if the object has been deleted.
+ If the row no longer exists,
+ :class:`~sqlalchemy.orm.exc.ObjectDeletedError` is raised.
+
"""
state = attributes.instance_state(instance)
return self._primary_key_from_state(state)
@@ -2070,9 +2457,9 @@ class Mapper(_InspectionAttr):
dep is not None and \
dep is not parent and \
dep.inherit_condition is not None:
- cols = set(sql_util.find_columns(dep.inherit_condition))
+ cols = set(sql_util._find_columns(dep.inherit_condition))
if parent.inherit_condition is not None:
- cols = cols.union(sql_util.find_columns(
+ cols = cols.union(sql_util._find_columns(
parent.inherit_condition))
return fk.parent not in cols and fk.column not in cols
else:
@@ -2107,14 +2494,13 @@ class Mapper(_InspectionAttr):
for m in self.iterate_to_root():
if m._inherits_equated_pairs and \
cols.intersection(
- [l for l, r in m._inherits_equated_pairs]):
+ util.reduce(set.union,
+ [l.proxy_set for l, r in m._inherits_equated_pairs])
+ ):
result[table].append((m, m._inherits_equated_pairs))
return result
-inspection._self_inspects(Mapper)
-log.class_logger(Mapper)
-
def configure_mappers():
"""Initialize the inter-mapper relationships of all mappers that
@@ -2125,8 +2511,7 @@ def configure_mappers():
"""
- global _new_mappers
- if not _new_mappers:
+ if not Mapper._new_mappers:
return
_call_configured = None
@@ -2139,7 +2524,7 @@ def configure_mappers():
try:
# double-check inside mutex
- if not _new_mappers:
+ if not Mapper._new_mappers:
return
# initialize properties on all mappers
@@ -2168,7 +2553,7 @@ def configure_mappers():
mapper._configure_failed = exc
raise
- _new_mappers = False
+ Mapper._new_mappers = False
finally:
_already_compiling = False
finally:
@@ -2220,13 +2605,28 @@ def validates(*names, **kw):
argument "is_remove" which will be a boolean.
.. versionadded:: 0.7.7
+ :param include_backrefs: defaults to ``True``; if ``False``, the
+ validation function will not emit if the originator is an attribute
+ event related via a backref. This can be used for bi-directional
+ :func:`.validates` usage where only one validator should emit per
+ attribute operation.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`simple_validators` - usage examples for :func:`.validates`
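+
+      For example, to validate on only one side of a bi-directional
+      relationship (a sketch; assumes ``User`` and ``Address`` classes
+      linked by a backref)::
+
+          class User(Base):
+              # columns and relationship(..., backref="user") elided
+
+              @validates('addresses', include_backrefs=False)
+              def validate_address(self, key, address):
+                  # fires for User.addresses operations, but not when
+                  # Address.user is set, due to include_backrefs=False
+                  return address
+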
"""
include_removes = kw.pop('include_removes', False)
+ include_backrefs = kw.pop('include_backrefs', True)
def wrap(fn):
fn.__sa_validators__ = names
- fn.__sa_include_removes__ = include_removes
+ fn.__sa_validation_opts__ = {
+ "include_removes": include_removes,
+ "include_backrefs": include_backrefs
+ }
return fn
return wrap
@@ -2247,7 +2647,7 @@ def _event_on_first_init(manager, cls):
instrumenting_mapper = manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
@@ -2262,7 +2662,7 @@ def _event_on_init(state, args, kwargs):
instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
if instrumenting_mapper:
- if _new_mappers:
+ if Mapper._new_mappers:
configure_mappers()
if instrumenting_mapper._set_polymorphic_identity:
instrumenting_mapper._set_polymorphic_identity(state)
diff --git a/lib/sqlalchemy/orm/path_registry.py b/lib/sqlalchemy/orm/path_registry.py
new file mode 100644
index 000000000..3397626b8
--- /dev/null
+++ b/lib/sqlalchemy/orm/path_registry.py
@@ -0,0 +1,261 @@
+# orm/path_registry.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Path tracking utilities, representing mapper graph traversals.
+
+"""
+
+from .. import inspection
+from .. import util
+from .. import exc
+from itertools import chain
+from .base import class_mapper
+
+def _unreduce_path(path):
+ return PathRegistry.deserialize(path)
+
+
+_WILDCARD_TOKEN = "*"
+_DEFAULT_TOKEN = "_sa_default"
+
+class PathRegistry(object):
+ """Represent query load paths and registry functions.
+
+ Basically represents structures like:
+
+ (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
+
+ These structures are generated by things like
+ query options (joinedload(), subqueryload(), etc.) and are
+ used to compose keys stored in the query._attributes dictionary
+ for various options.
+
+ They are then re-composed at query compile/result row time as
+ the query is formed and as rows are fetched, where they again
+ serve to compose keys to look up options in the context.attributes
+ dictionary, which is copied from query._attributes.
+
+ The path structure has a limited amount of caching, where each
+ "root" ultimately pulls from a fixed registry associated with
+    the first mapper, which also contains elements for each of its
+    property keys. However, paths longer than two elements, which
+ are the exception rather than the rule, are generated on an
+ as-needed basis.
+
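+    An illustrative sketch (assumes ``User`` is a mapped class with a
+    relationship ``addresses``)::
+
+        from sqlalchemy import inspect
+
+        user_mapper = inspect(User)
+        path = PathRegistry.root[user_mapper][user_mapper.attrs['addresses']]
+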
+ """
+
+ def __eq__(self, other):
+ return other is not None and \
+ self.path == other.path
+
+ def set(self, attributes, key, value):
+ attributes[(key, self.path)] = value
+
+ def setdefault(self, attributes, key, value):
+ attributes.setdefault((key, self.path), value)
+
+ def get(self, attributes, key, value=None):
+ key = (key, self.path)
+ if key in attributes:
+ return attributes[key]
+ else:
+ return value
+
+ def __len__(self):
+ return len(self.path)
+
+ @property
+ def length(self):
+ return len(self.path)
+
+ def pairs(self):
+ path = self.path
+ for i in range(0, len(path), 2):
+ yield path[i], path[i + 1]
+
+ def contains_mapper(self, mapper):
+ for path_mapper in [
+ self.path[i] for i in range(0, len(self.path), 2)
+ ]:
+ if path_mapper.is_mapper and \
+ path_mapper.isa(mapper):
+ return True
+ else:
+ return False
+
+ def contains(self, attributes, key):
+ return (key, self.path) in attributes
+
+ def __reduce__(self):
+ return _unreduce_path, (self.serialize(), )
+
+ def serialize(self):
+ path = self.path
+ return list(zip(
+ [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
+ [path[i].key for i in range(1, len(path), 2)] + [None]
+ ))
+
+ @classmethod
+ def deserialize(cls, path):
+ if path is None:
+ return None
+
+ p = tuple(chain(*[(class_mapper(mcls),
+ class_mapper(mcls).attrs[key]
+ if key is not None else None)
+ for mcls, key in path]))
+ if p and p[-1] is None:
+ p = p[0:-1]
+ return cls.coerce(p)
+
+ @classmethod
+ def per_mapper(cls, mapper):
+ return EntityRegistry(
+ cls.root, mapper
+ )
+
+ @classmethod
+ def coerce(cls, raw):
+ return util.reduce(lambda prev, next: prev[next], raw, cls.root)
+
+ def token(self, token):
+ if token.endswith(':' + _WILDCARD_TOKEN):
+ return TokenRegistry(self, token)
+ elif token.endswith(":" + _DEFAULT_TOKEN):
+ return TokenRegistry(self.root, token)
+ else:
+ raise exc.ArgumentError("invalid token: %s" % token)
+
+ def __add__(self, other):
+ return util.reduce(
+ lambda prev, next: prev[next],
+ other.path, self)
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.path, )
+
+
+class RootRegistry(PathRegistry):
+ """Root registry, defers to mappers so that
+ paths are maintained per-root-mapper.
+
+ """
+ path = ()
+ has_entity = False
+ def __getitem__(self, entity):
+ return entity._path_registry
+
+PathRegistry.root = RootRegistry()
+
+class TokenRegistry(PathRegistry):
+ def __init__(self, parent, token):
+ self.token = token
+ self.parent = parent
+ self.path = parent.path + (token,)
+
+ has_entity = False
+
+ def __getitem__(self, entity):
+ raise NotImplementedError()
+
+class PropRegistry(PathRegistry):
+ def __init__(self, parent, prop):
+ # restate this path in terms of the
+ # given MapperProperty's parent.
+ insp = inspection.inspect(parent[-1])
+ if not insp.is_aliased_class or insp._use_mapper_path:
+ parent = parent.parent[prop.parent]
+ elif insp.is_aliased_class and insp.with_polymorphic_mappers:
+ if prop.parent is not insp.mapper and \
+ prop.parent in insp.with_polymorphic_mappers:
+ subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
+ parent = parent.parent[subclass_entity]
+
+ self.prop = prop
+ self.parent = parent
+ self.path = parent.path + (prop,)
+
+ @util.memoized_property
+ def has_entity(self):
+ return hasattr(self.prop, "mapper")
+
+ @util.memoized_property
+ def entity(self):
+ return self.prop.mapper
+
+ @util.memoized_property
+ def _wildcard_path_loader_key(self):
+ """Given a path (mapper A, prop X), replace the prop with the wildcard,
+        e.g. (mapper A, 'relationship:*') or (mapper A, 'column:*'), then
+        return it within the ("loader", path) structure.
+
+ """
+ return ("loader",
+ self.parent.token(
+ "%s:%s" % (self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
+ ).path
+ )
+
+ @util.memoized_property
+ def _default_path_loader_key(self):
+ return ("loader",
+ self.parent.token(
+ "%s:%s" % (self.prop.strategy_wildcard_key, _DEFAULT_TOKEN)
+ ).path
+ )
+
+ @util.memoized_property
+ def _loader_key(self):
+ return ("loader", self.path)
+
+ @property
+ def mapper(self):
+ return self.entity
+
+ @property
+ def entity_path(self):
+ return self[self.entity]
+
+ def __getitem__(self, entity):
+ if isinstance(entity, (int, slice)):
+ return self.path[entity]
+ else:
+ return EntityRegistry(
+ self, entity
+ )
+
+class EntityRegistry(PathRegistry, dict):
+ is_aliased_class = False
+ has_entity = True
+
+ def __init__(self, parent, entity):
+ self.key = entity
+ self.parent = parent
+ self.is_aliased_class = entity.is_aliased_class
+ self.entity = entity
+ self.path = parent.path + (entity,)
+ self.entity_path = self
+
+ @property
+ def mapper(self):
+ return inspection.inspect(self.entity).mapper
+
+ def __bool__(self):
+ return True
+ __nonzero__ = __bool__
+
+ def __getitem__(self, entity):
+ if isinstance(entity, (int, slice)):
+ return self.path[entity]
+ else:
+ return dict.__getitem__(self, entity)
+
+ def __missing__(self, key):
+ self[key] = item = PropRegistry(self, key)
+ return item
+
+
+
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 1f5507edf..b0fa620e3 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -1,5 +1,5 @@
# orm/persistence.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -17,7 +17,7 @@ import operator
from itertools import groupby
from .. import sql, util, exc as sa_exc, schema
from . import attributes, sync, exc as orm_exc, evaluator
-from .util import _state_mapper, state_str, _attr_as_key
+from .base import _state_mapper, state_str, _attr_as_key
from ..sql import expression
from . import loading
@@ -61,7 +61,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False):
if insert:
_emit_insert_statements(base_mapper, uowtransaction,
cached_connections,
- table, insert)
+ mapper, table, insert)
_finalize_insert_update_commands(base_mapper, uowtransaction,
states_to_insert, states_to_update)
@@ -246,9 +246,12 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
value_params = {}
has_all_pks = True
+ has_all_defaults = True
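+        # flips to False when a column with a server-side default has no
+        # explicit value; with eager_defaults, such rows need RETURNING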
for col in mapper._cols_by_table[table]:
- if col is mapper.version_id_col:
- params[col.key] = mapper.version_id_generator(None)
+ if col is mapper.version_id_col and \
+ mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(None)
+ params[col.key] = val
else:
# pull straight from the dict for
# pending objects
@@ -261,6 +264,9 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
elif col.default is None and \
col.server_default is None:
params[col.key] = value
+ elif col.server_default is not None and \
+ mapper.base_mapper.eager_defaults:
+ has_all_defaults = False
elif isinstance(value, sql.ClauseElement):
value_params[col] = value
@@ -268,7 +274,8 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
params[col.key] = value
insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks))
+ connection, value_params, has_all_pks,
+ has_all_defaults))
return insert
@@ -315,19 +322,20 @@ def _collect_update_commands(base_mapper, uowtransaction,
params[col.key] = history.added[0]
hasdata = True
else:
- params[col.key] = mapper.version_id_generator(
- params[col._label])
-
- # HACK: check for history, in case the
- # history is only
- # in a different table than the one
- # where the version_id_col is.
- for prop in mapper._columntoproperty.values():
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- hasdata = True
+ if mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(params[col._label])
+ params[col.key] = val
+
+ # HACK: check for history, in case the
+ # history is only
+ # in a different table than the one
+ # where the version_id_col is.
+ for prop in mapper._columntoproperty.values():
+ history = attributes.get_state_history(
+ state, prop.key,
+ attributes.PASSIVE_NO_INITIALIZE)
+ if history.added:
+ hasdata = True
else:
prop = mapper._columntoproperty[col]
history = attributes.get_state_history(
@@ -409,6 +417,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
mapper._get_state_attr_by_column(
state,
state_dict, col)
+
elif col in post_update_cols:
prop = mapper._columntoproperty[col]
history = attributes.get_state_history(
@@ -478,7 +487,13 @@ def _emit_update_statements(base_mapper, uowtransaction,
sql.bindparam(mapper.version_id_col._label,
type_=mapper.version_id_col.type))
- return table.update(clause)
+ stmt = table.update(clause)
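+        # use return_defaults() (RETURNING, where the dialect supports it)
+        # to fetch server-generated values inline: all of them when
+        # eager_defaults is set, otherwise just the version id column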
+ if mapper.base_mapper.eager_defaults:
+ stmt = stmt.return_defaults()
+ elif mapper.version_id_col is not None:
+ stmt = stmt.return_defaults(mapper.version_id_col)
+
+ return stmt
statement = base_mapper._memo(('update', table), update_stmt)
@@ -500,8 +515,7 @@ def _emit_update_statements(base_mapper, uowtransaction,
table,
state,
state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
+ c,
c.context.compiled_parameters[0],
value_params)
rows += c.rowcount
@@ -521,44 +535,55 @@ def _emit_update_statements(base_mapper, uowtransaction,
def _emit_insert_statements(base_mapper, uowtransaction,
- cached_connections, table, insert):
+ cached_connections, mapper, table, insert):
"""Emit INSERT statements corresponding to value lists collected
by _collect_insert_commands()."""
statement = base_mapper._memo(('insert', table), table.insert)
- for (connection, pkeys, hasvalue, has_all_pks), \
+ for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
records in groupby(insert,
lambda rec: (rec[4],
list(rec[2].keys()),
bool(rec[5]),
- rec[6])
+ rec[6], rec[7])
):
- if has_all_pks and not hasvalue:
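+        # executemany() is only usable when RETURNING isn't needed: all
+        # primary keys and (when eager_defaults applies) all server-side
+        # defaults must already be known, with no SQL-expression values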
+ if \
+ (
+ has_all_defaults
+ or not base_mapper.eager_defaults
+ or not connection.dialect.implicit_returning
+ ) and has_all_pks and not hasvalue:
+
records = list(records)
multiparams = [rec[2] for rec in records]
+
c = cached_connections[connection].\
execute(statement, multiparams)
- for (state, state_dict, params, mapper,
- conn, value_params, has_all_pks), \
+ for (state, state_dict, params, mapper_rec,
+ conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
_postfetch(
- mapper,
+ mapper_rec,
uowtransaction,
table,
state,
state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
+ c,
last_inserted_params,
value_params)
else:
- for state, state_dict, params, mapper, \
+ if not has_all_defaults and base_mapper.eager_defaults:
+ statement = statement.return_defaults()
+ elif mapper.version_id_col is not None:
+ statement = statement.return_defaults(mapper.version_id_col)
+
+ for state, state_dict, params, mapper_rec, \
connection, value_params, \
- has_all_pks in records:
+ has_all_pks, has_all_defaults in records:
if value_params:
result = connection.execute(
@@ -574,23 +599,22 @@ def _emit_insert_statements(base_mapper, uowtransaction,
# set primary key attributes
for pk, col in zip(primary_key,
mapper._pks_by_table[table]):
- prop = mapper._columntoproperty[col]
+ prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
# TODO: would rather say:
#state_dict[prop.key] = pk
- mapper._set_state_attr_by_column(
+ mapper_rec._set_state_attr_by_column(
state,
state_dict,
col, pk)
_postfetch(
- mapper,
+ mapper_rec,
uowtransaction,
table,
state,
state_dict,
- result.context.prefetch_cols,
- result.context.postfetch_cols,
+ result,
result.context.compiled_parameters[0],
value_params)
@@ -699,14 +723,25 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
if readonly:
state._expire_attributes(state.dict, readonly)
- # if eager_defaults option is enabled,
- # refresh whatever has been expired.
- if base_mapper.eager_defaults and state.unloaded:
+ # if eager_defaults option is enabled, load
+ # all expired cols. Else if we have a version_id_col, make sure
+ # it isn't expired.
+ toload_now = []
+
+ if base_mapper.eager_defaults:
+ toload_now.extend(state._unloaded_non_object)
+ elif mapper.version_id_col is not None and \
+ mapper.version_id_generator is False:
+ prop = mapper._columntoproperty[mapper.version_id_col]
+ if prop.key in state.unloaded:
+ toload_now.extend([prop.key])
+
+ if toload_now:
state.key = base_mapper._identity_key_from_state(state)
loading.load_on_ident(
uowtransaction.session.query(base_mapper),
state.key, refresh_state=state,
- only_load_props=state.unloaded)
+ only_load_props=toload_now)
# call after_XXX extensions
if not has_identity:
@@ -716,15 +751,26 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction,
def _postfetch(mapper, uowtransaction, table,
- state, dict_, prefetch_cols, postfetch_cols,
- params, value_params):
+ state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
+ prefetch_cols = result.context.prefetch_cols
+ postfetch_cols = result.context.postfetch_cols
+ returning_cols = result.context.returning_cols
+
if mapper.version_id_col is not None:
prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]
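+
+    # apply values delivered via RETURNING directly to the instance,
+    # skipping primary key columns, which are handled separately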
+ if returning_cols:
+ row = result.context.returned_defaults
+ if row is not None:
+ for col in returning_cols:
+ if col.primary_key:
+ continue
+ mapper._set_state_attr_by_column(state, dict_, col, row[col])
+
for c in prefetch_cols:
if c.key in params and c in mapper._columntoproperty:
mapper._set_state_attr_by_column(state, dict_, c, params[c.key])
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 5986556db..a0def7d31 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -1,5 +1,5 @@
# orm/properties.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,30 +10,20 @@ This is a private module which defines the behavior of individual ORM-
mapped attributes.
"""
+from __future__ import absolute_import
-from .. import sql, util, log, exc as sa_exc, inspect
-from ..sql import operators, expression
-from . import (
- attributes, mapper,
- strategies, configure_mappers, relationships,
- dependency
- )
-from .util import CascadeOptions, \
- _orm_annotate, _orm_deannotate, _orm_full_deannotate
+from .. import util, log
+from ..sql import expression
+from . import attributes
+from .util import _orm_full_deannotate
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY,\
- PropComparator, StrategizedProperty
-
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-NoneType = type(None)
-
-from .descriptor_props import CompositeProperty, SynonymProperty, \
- ComparableProperty, ConcreteInheritedProperty
+from .interfaces import PropComparator, StrategizedProperty
__all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
- 'ComparableProperty', 'RelationshipProperty', 'RelationProperty']
+ 'ComparableProperty', 'RelationshipProperty']
+@log.class_logger
class ColumnProperty(StrategizedProperty):
"""Describes an object attribute that corresponds to a table column.
@@ -41,31 +31,81 @@ class ColumnProperty(StrategizedProperty):
"""
+ strategy_wildcard_key = 'column'
+
def __init__(self, *columns, **kwargs):
- """Construct a ColumnProperty.
+ """Provide a column-level property for use with a Mapper.
- Note the public constructor is the :func:`.orm.column_property`
- function.
+ Column-based properties can normally be applied to the mapper's
+ ``properties`` dictionary using the :class:`.Column` element directly.
+ Use this function when the given column is not directly present within the
+ mapper's selectable; examples include SQL expressions, functions, and
+ scalar SELECT queries.
- :param \*columns: The list of `columns` describes a single
- object property. If there are multiple tables joined
- together for the mapper, this list represents the equivalent
- column as it appears across each table.
+ Columns that aren't present in the mapper's selectable won't be persisted
+ by the mapper and are effectively "read-only" attributes.
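+
+        For example (a sketch; assumes a declarative ``User`` class)::
+
+            class User(Base):
+                __tablename__ = 'user'
+
+                id = Column(Integer, primary_key=True)
+                firstname = Column(String(50))
+                lastname = Column(String(50))
+                fullname = column_property(firstname + " " + lastname)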
- :param group:
+        :param \*columns:
+ list of Column objects to be mapped.
- :param deferred:
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ scalar attribute should be loaded when replaced, if not
+ already loaded. Normally, history tracking logic for
+ simple non-primary-key scalar values only needs to be
+ aware of the "new" value in order to perform a flush. This
+ flag is available for applications that make use of
+ :func:`.attributes.get_history` or :meth:`.Session.is_modified`
+ which also need to know
+ the "previous" value of the attribute.
- :param comparator_factory:
+ .. versionadded:: 0.6.6
- :param descriptor:
+ :param comparator_factory: a class which extends
+ :class:`.ColumnProperty.Comparator` which provides custom SQL clause
+ generation for comparison operations.
- :param expire_on_flush:
+ :param group:
+ a group name for this property when marked as deferred.
- :param extension:
+ :param deferred:
+ when True, the column property is "deferred", meaning that
+ it does not load immediately, and is instead loaded when the
+ attribute is first accessed on an instance. See also
+ :func:`~sqlalchemy.orm.deferred`.
+
+ :param doc:
+ optional string that will be applied as the doc on the
+ class-bound descriptor.
+
+ :param expire_on_flush=True:
+        When set to ``False``, disables expiry on flush.
+        A column_property() which refers
+ to a SQL expression (and not a single table-bound column)
+ is considered to be a "read only" property; populating it
+ has no effect on the state of data, and it can only return
+ database state. For this reason a column_property()'s value
+ is expired whenever the parent object is involved in a
+ flush, that is, has any kind of "dirty" state within a flush.
+ Setting this parameter to ``False`` will have the effect of
+ leaving any existing value present after the flush proceeds.
+ Note however that the :class:`.Session` with default expiration
+ settings still expires
+        all attributes after a :meth:`.Session.commit` call.
+
+ .. versionadded:: 0.7.3
:param info: Optional data dictionary which will be populated into the
- :attr:`.info` attribute of this object.
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param extension:
+ an
+ :class:`.AttributeExtension`
+ instance, or list of extensions, which will be prepended
+ to the list of attribute listeners for the resulting
+ descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
"""
self._orig_columns = [expression._labeled(c) for c in columns]
@@ -102,12 +142,11 @@ class ColumnProperty(StrategizedProperty):
', '.join(sorted(kwargs.keys()))))
util.set_creation_order(self)
- if not self.instrument:
- self.strategy_class = strategies.UninstrumentedColumnLoader
- elif self.deferred:
- self.strategy_class = strategies.DeferredColumnLoader
- else:
- self.strategy_class = strategies.ColumnLoader
+
+ self.strategy_class = self._strategy_lookup(
+ ("deferred", self.deferred),
+ ("instrument", self.instrument)
+ )
@property
def expression(self):
@@ -215,1101 +254,6 @@ class ColumnProperty(StrategizedProperty):
col = self.__clause_element__()
return op(col._bind_param(op, other), col, **kwargs)
- # TODO: legacy..do we need this ? (0.5)
- ColumnComparator = Comparator
-
def __str__(self):
return str(self.parent.class_.__name__) + "." + self.key
-log.class_logger(ColumnProperty)
-
-
-class RelationshipProperty(StrategizedProperty):
- """Describes an object property that holds a single item or list
- of items that correspond to a related database table.
-
- Public constructor is the :func:`.orm.relationship` function.
-
- See also:
-
- :ref:`relationship_config_toplevel`
-
- """
-
- strategy_wildcard_key = 'relationship:*'
-
- _dependency_processor = None
-
- def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- back_populates=None,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- single_parent=False, innerjoin=False,
- doc=None,
- active_history=False,
- cascade_backrefs=True,
- load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None,
- query_class=None,
- info=None):
-
- self.uselist = uselist
- self.argument = argument
- self.secondary = secondary
- self.primaryjoin = primaryjoin
- self.secondaryjoin = secondaryjoin
- self.post_update = post_update
- self.direction = None
- self.viewonly = viewonly
- self.lazy = lazy
- self.single_parent = single_parent
- self._user_defined_foreign_keys = foreign_keys
- self.collection_class = collection_class
- self.passive_deletes = passive_deletes
- self.cascade_backrefs = cascade_backrefs
- self.passive_updates = passive_updates
- self.remote_side = remote_side
- self.enable_typechecks = enable_typechecks
- self.query_class = query_class
- self.innerjoin = innerjoin
- self.doc = doc
- self.active_history = active_history
- self.join_depth = join_depth
- self.local_remote_pairs = _local_remote_pairs
- self.extension = extension
- self.load_on_pending = load_on_pending
- self.comparator_factory = comparator_factory or \
- RelationshipProperty.Comparator
- self.comparator = self.comparator_factory(self, None)
- util.set_creation_order(self)
-
- if info is not None:
- self.info = info
-
- if strategy_class:
- self.strategy_class = strategy_class
- elif self.lazy == 'dynamic':
- from sqlalchemy.orm import dynamic
- self.strategy_class = dynamic.DynaLoader
- else:
- self.strategy_class = strategies.factory(self.lazy)
-
- self._reverse_property = set()
-
- self.cascade = cascade if cascade is not False \
- else "save-update, merge"
-
- self.order_by = order_by
-
- self.back_populates = back_populates
-
- if self.back_populates:
- if backref:
- raise sa_exc.ArgumentError(
- "backref and back_populates keyword arguments "
- "are mutually exclusive")
- self.backref = None
- else:
- self.backref = backref
-
- def instrument_class(self, mapper):
- attributes.register_descriptor(
- mapper.class_,
- self.key,
- comparator=self.comparator_factory(self, mapper),
- parententity=mapper,
- doc=self.doc,
- )
-
- class Comparator(PropComparator):
- """Produce boolean, comparison, and other operators for
- :class:`.RelationshipProperty` attributes.
-
- See the documentation for :class:`.PropComparator` for a brief overview
- of ORM level operator definition.
-
- See also:
-
- :class:`.PropComparator`
-
- :class:`.ColumnProperty.Comparator`
-
- :class:`.ColumnOperators`
-
- :ref:`types_operators`
-
- :attr:`.TypeEngine.comparator_factory`
-
- """
-
- _of_type = None
-
- def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
- """Construction of :class:`.RelationshipProperty.Comparator`
- is internal to the ORM's attribute mechanics.
-
- """
- self.prop = prop
- self._parentmapper = parentmapper
- self._adapt_to_entity = adapt_to_entity
- if of_type:
- self._of_type = of_type
-
- def adapt_to_entity(self, adapt_to_entity):
- return self.__class__(self.property, self._parentmapper,
- adapt_to_entity=adapt_to_entity,
- of_type=self._of_type)
-
- @util.memoized_property
- def mapper(self):
- """The target :class:`.Mapper` referred to by this
- :class:`.RelationshipProperty.Comparator.
-
- This is the "target" or "remote" side of the
- :func:`.relationship`.
-
- """
- return self.property.mapper
-
- @util.memoized_property
- def _parententity(self):
- return self.property.parent
-
- def _source_selectable(self):
- elem = self.property.parent._with_polymorphic_selectable
- if self.adapter:
- return self.adapter(elem)
- else:
- return elem
-
- def __clause_element__(self):
- adapt_from = self._source_selectable()
- if self._of_type:
- of_type = inspect(self._of_type).mapper
- else:
- of_type = None
-
- pj, sj, source, dest, \
- secondary, target_adapter = self.property._create_joins(
- source_selectable=adapt_from,
- source_polymorphic=True,
- of_type=of_type)
- if sj is not None:
- return pj & sj
- else:
- return pj
-
- def of_type(self, cls):
- """Produce a construct that represents a particular 'subtype' of
- attribute for the parent class.
-
- Currently this is usable in conjunction with :meth:`.Query.join`
- and :meth:`.Query.outerjoin`.
-
- """
- return RelationshipProperty.Comparator(
- self.property,
- self._parentmapper,
- adapt_to_entity=self._adapt_to_entity,
- of_type=cls)
-
- def in_(self, other):
- """Produce an IN clause - this is not implemented
- for :func:`~.orm.relationship`-based attributes at this time.
-
- """
- raise NotImplementedError('in_() not yet supported for '
- 'relationships. For a simple many-to-one, use '
- 'in_() against the set of foreign key values.')
-
- __hash__ = None
-
- def __eq__(self, other):
- """Implement the ``==`` operator.
-
- In a many-to-one context, such as::
-
- MyClass.some_prop == <some object>
-
- this will typically produce a
- clause such as::
-
- mytable.related_id == <some id>
-
- Where ``<some id>`` is the primary key of the given
- object.
-
- The ``==`` operator provides partial functionality for non-
- many-to-one comparisons:
-
- * Comparisons against collections are not supported.
- Use :meth:`~.RelationshipProperty.Comparator.contains`.
- * Compared to a scalar one-to-many, will produce a
- clause that compares the target columns in the parent to
- the given target.
- * Compared to a scalar many-to-many, an alias
- of the association table will be rendered as
- well, forming a natural join that is part of the
- main body of the query. This will not work for
- queries that go beyond simple AND conjunctions of
- comparisons, such as those which use OR. Use
- explicit joins, outerjoins, or
- :meth:`~.RelationshipProperty.Comparator.has` for
- more comprehensive non-many-to-one scalar
- membership tests.
- * Comparisons against ``None`` given in a one-to-many
- or many-to-many context produce a NOT EXISTS clause.
-
- """
- if isinstance(other, (NoneType, expression.Null)):
- if self.property.direction in [ONETOMANY, MANYTOMANY]:
- return ~self._criterion_exists()
- else:
- return _orm_annotate(self.property._optimized_compare(
- None, adapt_source=self.adapter))
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a colle"
- "ction to an object or collection; use "
- "contains() to test for membership.")
- else:
- return _orm_annotate(self.property._optimized_compare(other,
- adapt_source=self.adapter))
-
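As a concrete illustration of the ``==`` behaviors above, a minimal sketch against a hypothetical Parent/Child one-to-many mapping on the 0.9-era declarative API (the names and setup are the editor's, not part of this change):

    from sqlalchemy import Column, Integer, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        children = relationship("Child", backref="parent")

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))

    # many-to-one vs. None: renders IS NULL against the foreign key
    print(Child.parent == None)       # child.parent_id IS NULL
    # one-to-many vs. None: renders a NOT EXISTS correlated subquery
    print(Parent.children == None)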
- def _criterion_exists(self, criterion=None, **kwargs):
- if getattr(self, '_of_type', None):
- info = inspect(self._of_type)
- target_mapper, to_selectable, is_aliased_class = \
- info.mapper, info.selectable, info.is_aliased_class
- if self.property._is_self_referential and not is_aliased_class:
- to_selectable = to_selectable.alias()
-
- single_crit = target_mapper._single_table_criterion
- if single_crit is not None:
- if criterion is not None:
- criterion = single_crit & criterion
- else:
- criterion = single_crit
- else:
- is_aliased_class = False
- to_selectable = None
-
- if self.adapter:
- source_selectable = self._source_selectable()
- else:
- source_selectable = None
-
- pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
-
- for k in kwargs:
- crit = getattr(self.property.mapper.class_, k) == kwargs[k]
- if criterion is None:
- criterion = crit
- else:
- criterion = criterion & crit
-
- # annotate the *local* side of the join condition, in the case
- # of pj + sj this is the full primaryjoin, in the case of just
- # pj it's the local side of the primaryjoin.
- if sj is not None:
- j = _orm_annotate(pj) & sj
- else:
- j = _orm_annotate(pj, exclude=self.property.remote_side)
-
- if criterion is not None and target_adapter and not is_aliased_class:
- # limit this adapter to annotated only?
- criterion = target_adapter.traverse(criterion)
-
- # only have the "joined left side" of what we
- # return be subject to Query adaption. The right
- # side of it is used for an exists() subquery and
- # should not correlate or otherwise reach out
- # to anything in the enclosing query.
- if criterion is not None:
- criterion = criterion._annotate(
- {'no_replacement_traverse': True})
-
- crit = j & criterion
-
- ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest)
- if secondary is not None:
- ex = ex.correlate_except(secondary)
- return ex
-
- def any(self, criterion=None, **kwargs):
- """Produce an expression that tests a collection against
- particular criterion, using EXISTS.
-
- An expression like::
-
- session.query(MyClass).filter(
- MyClass.somereference.any(SomeRelated.x==2)
- )
-
-
- Will produce a query like::
-
- SELECT * FROM my_table WHERE
- EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
- AND related.x=2)
-
- Because :meth:`~.RelationshipProperty.Comparator.any` uses
- a correlated subquery, its performance against large target
- tables is not nearly as good as that of a join.
-
- :meth:`~.RelationshipProperty.Comparator.any` is particularly
- useful for testing for empty collections::
-
- session.query(MyClass).filter(
- ~MyClass.somereference.any()
- )
-
- will produce::
-
- SELECT * FROM my_table WHERE
- NOT EXISTS (SELECT 1 FROM related WHERE
- related.my_id=my_table.id)
-
- :meth:`~.RelationshipProperty.Comparator.any` is only
- valid for collections, i.e. a :func:`.relationship`
- that has ``uselist=True``. For scalar references,
- use :meth:`~.RelationshipProperty.Comparator.has`.
-
- """
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'any()' not implemented for scalar "
- "attributes. Use has()."
- )
-
- return self._criterion_exists(criterion, **kwargs)
-
- def has(self, criterion=None, **kwargs):
- """Produce an expression that tests a scalar reference against
- particular criterion, using EXISTS.
-
- An expression like::
-
- session.query(MyClass).filter(
- MyClass.somereference.has(SomeRelated.x==2)
- )
-
-
- Will produce a query like::
-
- SELECT * FROM my_table WHERE
- EXISTS (SELECT 1 FROM related WHERE
- related.id=my_table.related_id AND related.x=2)
-
- Because :meth:`~.RelationshipProperty.Comparator.has` uses
- a correlated subquery, its performance against large target
- tables is not nearly as good as that of a join.
-
- :meth:`~.RelationshipProperty.Comparator.has` is only
- valid for scalar references, i.e. a :func:`.relationship`
- that has ``uselist=False``. For collection references,
- use :meth:`~.RelationshipProperty.Comparator.any`.
-
- """
- if self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'has()' not implemented for collections. "
- "Use any().")
- return self._criterion_exists(criterion, **kwargs)
-
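Reusing the hypothetical Parent/Child mapping from the earlier sketch, both EXISTS-producing methods can be exercised directly; the keyword form shown last is the ``**kwargs`` shorthand handled by ``_criterion_exists()``:

    # EXISTS (SELECT 1 FROM child
    #         WHERE parent.id = child.parent_id AND child.id > 5)
    print(Parent.children.any(Child.id > 5))

    # empty-collection test: NOT EXISTS (...)
    print(~Parent.children.any())

    # scalar side: EXISTS against the parent row
    print(Child.parent.has(Parent.id == 7))

    # keyword shorthand, equivalent to Child.id == 5
    print(Parent.children.any(id=5))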
- def contains(self, other, **kwargs):
- """Return a simple expression that tests a collection for
- containment of a particular item.
-
- :meth:`~.RelationshipProperty.Comparator.contains` is
- only valid for a collection, i.e. a
- :func:`~.orm.relationship` that implements
- one-to-many or many-to-many with ``uselist=True``.
-
- When used in a simple one-to-many context, an
- expression like::
-
- MyClass.contains(other)
-
- Produces a clause like::
-
- mytable.id == <some id>
-
- Where ``<some id>`` is the value of the foreign key
- attribute on ``other`` which refers to the primary
- key of its parent object. From this it follows that
- :meth:`~.RelationshipProperty.Comparator.contains` is
- very useful when used with simple one-to-many
- operations.
-
- For many-to-many operations, the behavior of
- :meth:`~.RelationshipProperty.Comparator.contains`
- has more caveats. The association table will be
- rendered in the statement, producing an "implicit"
- join, that is, includes multiple tables in the FROM
- clause which are equated in the WHERE clause::
-
- query(MyClass).filter(MyClass.contains(other))
-
- Produces a query like::
-
- SELECT * FROM my_table, my_association_table AS
- my_association_table_1 WHERE
- my_table.id = my_association_table_1.parent_id
- AND my_association_table_1.child_id = <some id>
-
- Where ``<some id>`` would be the primary key of
- ``other``. From the above, it is clear that
- :meth:`~.RelationshipProperty.Comparator.contains`
- will **not** work with many-to-many collections when
- used in queries that move beyond simple AND
- conjunctions, such as multiple
- :meth:`~.RelationshipProperty.Comparator.contains`
- expressions joined by OR. In such cases subqueries or
- explicit "outer joins" will need to be used instead.
- See :meth:`~.RelationshipProperty.Comparator.any` for
- a less-performant alternative using EXISTS, or refer
- to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
- for more details on constructing outer joins.
-
- """
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'contains' not implemented for scalar "
- "attributes. Use ==")
- clause = self.property._optimized_compare(other,
- adapt_source=self.adapter)
-
- if self.property.secondaryjoin is not None:
- clause.negation_clause = \
- self.__negated_contains_or_equals(other)
-
- return clause
-
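A short sketch of ``contains()`` against the same hypothetical mapping; per the caveats above, OR-of-membership tests fall back to ``any()``:

    some_child = Child(id=3)

    # one-to-many: compares parent.id to the value of
    # some_child.parent_id, resolved when the statement executes
    print(Parent.children.contains(some_child))

    # for OR conjunctions of membership tests, use EXISTS via any()
    criterion = (Parent.children.any(Child.id == 3) |
                 Parent.children.any(Child.id == 4))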
- def __negated_contains_or_equals(self, other):
- if self.property.direction == MANYTOONE:
- state = attributes.instance_state(other)
-
- def state_bindparam(x, state, col):
- o = state.obj() # strong ref
- return sql.bindparam(x, unique=True, callable_=lambda: \
- self.property.mapper._get_committed_attr_by_column(o, col))
-
- def adapt(col):
- if self.adapter:
- return self.adapter(col)
- else:
- return col
-
- if self.property._use_get:
- return sql.and_(*[
- sql.or_(
- adapt(x) != state_bindparam(adapt(x), state, y),
- adapt(x) == None)
- for (x, y) in self.property.local_remote_pairs])
-
- criterion = sql.and_(*[x == y for (x, y) in
- zip(
- self.property.mapper.primary_key,
- self.property.mapper.primary_key_from_instance(other))
- ])
- return ~self._criterion_exists(criterion)
-
- def __ne__(self, other):
- """Implement the ``!=`` operator.
-
- In a many-to-one context, such as::
-
- MyClass.some_prop != <some object>
-
- This will typically produce a clause such as::
-
- mytable.related_id != <some id>
-
- Where ``<some id>`` is the primary key of the
- given object.
-
- The ``!=`` operator provides partial functionality for non-
- many-to-one comparisons:
-
- * Comparisons against collections are not supported.
- Use
- :meth:`~.RelationshipProperty.Comparator.contains`
- in conjunction with :func:`~.expression.not_`.
- * Compared to a scalar one-to-many, will produce a
- clause that compares the target columns in the parent to
- the given target.
- * Compared to a scalar many-to-many, an alias
- of the association table will be rendered as
- well, forming a natural join that is part of the
- main body of the query. This will not work for
- queries that go beyond simple AND conjunctions of
- comparisons, such as those which use OR. Use
- explicit joins, outerjoins, or
- :meth:`~.RelationshipProperty.Comparator.has` in
- conjunction with :func:`~.expression.not_` for
- more comprehensive non-many-to-one scalar
- membership tests.
- * Comparisons against ``None`` given in a one-to-many
- or many-to-many context produce an EXISTS clause.
-
- """
- if isinstance(other, (NoneType, expression.Null)):
- if self.property.direction == MANYTOONE:
- return sql.or_(*[x != None for x in
- self.property._calculated_foreign_keys])
- else:
- return self._criterion_exists()
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection"
- " to an object or collection; use "
- "contains() to test for membership.")
- else:
- return self.__negated_contains_or_equals(other)
-
- @util.memoized_property
- def property(self):
- if mapperlib.module._new_mappers:
- configure_mappers()
- return self.prop
-
- def compare(self, op, value,
- value_is_parent=False,
- alias_secondary=True):
- if op == operators.eq:
- if value is None:
- if self.uselist:
- return ~sql.exists([1], self.primaryjoin)
- else:
- return self._optimized_compare(None,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return self._optimized_compare(value,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return op(self.comparator, value)
-
- def _optimized_compare(self, value, value_is_parent=False,
- adapt_source=None,
- alias_secondary=True):
- if value is not None:
- value = attributes.instance_state(value)
- return self._get_strategy(strategies.LazyLoader).lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary,
- adapt_source=adapt_source)
-
- def __str__(self):
- return str(self.parent.class_.__name__) + "." + self.key
-
- def merge(self,
- session,
- source_state,
- source_dict,
- dest_state,
- dest_dict,
- load, _recursive):
-
- if load:
- for r in self._reverse_property:
- if (source_state, r) in _recursive:
- return
-
- if not "merge" in self._cascade:
- return
-
- if self.key not in source_dict:
- return
-
- if self.uselist:
- instances = source_state.get_impl(self.key).\
- get(source_state, source_dict)
- if hasattr(instances, '_sa_adapter'):
- # convert collections to adapters to get a true iterator
- instances = instances._sa_adapter
-
- if load:
- # for a full merge, pre-load the destination collection,
- # so that individual _merge of each item pulls from identity
- # map for those already present.
- # also assumes CollectionAttributeImpl behavior of loading
- # "old" list in any case
- dest_state.get_impl(self.key).get(dest_state, dest_dict)
-
- dest_list = []
- for current in instances:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- if obj is not None:
- dest_list.append(obj)
-
- if not load:
- coll = attributes.init_state_collection(dest_state,
- dest_dict, self.key)
- for c in dest_list:
- coll.append_without_event(c)
- else:
- dest_state.get_impl(self.key)._set_iterable(dest_state,
- dest_dict, dest_list)
- else:
- current = source_dict[self.key]
- if current is not None:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- else:
- obj = None
-
- if not load:
- dest_dict[self.key] = obj
- else:
- dest_state.get_impl(self.key).set(dest_state,
- dest_dict, obj, None)
-
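An end-to-end sketch of the collection-merge path above, again assuming the hypothetical Parent/Child mapping plus an in-memory SQLite database:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.add(Parent(id=1, children=[Child(id=1), Child(id=2)]))
    session.commit()
    session.close()

    # "merge" is in the default cascade, so each Child in the detached
    # collection is merged individually against the identity map
    detached = Parent(id=1, children=[Child(id=1), Child(id=3)])
    merged = session.merge(detached)
    print([c.id for c in merged.children])   # [1, 3]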
- def _value_as_iterable(self, state, dict_, key,
- passive=attributes.PASSIVE_OFF):
- """Return a list of tuples (state, obj) for the given
- key.
-
- returns an empty list if the value is None/empty/PASSIVE_NO_RESULT
- """
-
- impl = state.manager[key].impl
- x = impl.get(state, dict_, passive=passive)
- if x is attributes.PASSIVE_NO_RESULT or x is None:
- return []
- elif hasattr(impl, 'get_collection'):
- return [
- (attributes.instance_state(o), o) for o in
- impl.get_collection(state, dict_, x, passive=passive)
- ]
- else:
- return [(attributes.instance_state(x), x)]
-
- def cascade_iterator(self, type_, state, dict_,
- visited_states, halt_on=None):
- #assert type_ in self._cascade
-
- # only actively lazy load on the 'delete' cascade
- if type_ != 'delete' or self.passive_deletes:
- passive = attributes.PASSIVE_NO_INITIALIZE
- else:
- passive = attributes.PASSIVE_OFF
-
- if type_ == 'save-update':
- tuples = state.manager[self.key].impl.\
- get_all_pending(state, dict_)
-
- else:
- tuples = self._value_as_iterable(state, dict_, self.key,
- passive=passive)
-
- skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
- not in self._cascade
-
- for instance_state, c in tuples:
- if instance_state in visited_states:
- continue
-
- if c is None:
- # would like to emit a warning here, but
- # would not be consistent with collection.append(None)
- # current behavior of silently skipping.
- # see [ticket:2229]
- continue
-
- instance_dict = attributes.instance_dict(c)
-
- if halt_on and halt_on(instance_state):
- continue
-
- if skip_pending and not instance_state.key:
- continue
-
- instance_mapper = instance_state.manager.mapper
-
- if not instance_mapper.isa(self.mapper.class_manager.mapper):
- raise AssertionError("Attribute '%s' on class '%s' "
- "doesn't handle objects "
- "of type '%s'" % (
- self.key,
- self.parent.class_,
- c.__class__
- ))
-
- visited_states.add(instance_state)
-
- yield c, instance_mapper, instance_state, instance_dict
-
- def _add_reverse_property(self, key):
- other = self.mapper.get_property(key, _configure_mappers=False)
- self._reverse_property.add(other)
- other._reverse_property.add(self)
-
- if not other.mapper.common_parent(self.parent):
- raise sa_exc.ArgumentError('reverse_property %r on '
- 'relationship %s references relationship %s, which '
- 'does not reference mapper %s' % (key, self, other,
- self.parent))
- if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
- raise sa_exc.ArgumentError('%s and back-reference %s are '
- 'both of the same direction %r. Did you mean to '
- 'set remote_side on the many-to-one side?'
- % (other, self, self.direction))
-
- @util.memoized_property
- def mapper(self):
- """Return the targeted :class:`.Mapper` for this
- :class:`.RelationshipProperty`.
-
- This is a lazy-initializing static attribute.
-
- """
- if isinstance(self.argument, type):
- mapper_ = mapper.class_mapper(self.argument,
- configure=False)
- elif isinstance(self.argument, mapper.Mapper):
- mapper_ = self.argument
- elif util.callable(self.argument):
-
- # accept a callable to suit various deferred-
- # configurational schemes
-
- mapper_ = mapper.class_mapper(self.argument(),
- configure=False)
- else:
- raise sa_exc.ArgumentError("relationship '%s' expects "
- "a class or a mapper argument (received: %s)"
- % (self.key, type(self.argument)))
- assert isinstance(mapper_, mapper.Mapper), mapper_
- return mapper_
-
- @util.memoized_property
- @util.deprecated("0.7", "Use .target")
- def table(self):
- """Return the selectable linked to this
- :class:`.RelationshipProperty` object's target
- :class:`.Mapper`."""
- return self.target
-
- def do_init(self):
- self._check_conflicts()
- self._process_dependent_arguments()
- self._setup_join_conditions()
- self._check_cascade_settings(self._cascade)
- self._post_init()
- self._generate_backref()
- super(RelationshipProperty, self).do_init()
-
- def _process_dependent_arguments(self):
- """Convert incoming configuration arguments to their
- proper form.
-
- Callables are resolved, ORM annotations removed.
-
- """
- # accept callables for other attributes which may require
- # deferred initialization. This technique is used
- # by declarative "string configs" and some recipes.
- for attr in (
- 'order_by', 'primaryjoin', 'secondaryjoin',
- 'secondary', '_user_defined_foreign_keys', 'remote_side',
- ):
- attr_value = getattr(self, attr)
- if util.callable(attr_value):
- setattr(self, attr, attr_value())
-
- # remove "annotations" which are present if mapped class
- # descriptors are used to create the join expression.
- for attr in 'primaryjoin', 'secondaryjoin':
- val = getattr(self, attr)
- if val is not None:
- setattr(self, attr, _orm_deannotate(
- expression._only_column_elements(val, attr))
- )
-
- # ensure expressions in self.order_by, foreign_keys,
- # remote_side are all columns, not strings.
- if self.order_by is not False and self.order_by is not None:
- self.order_by = [
- expression._only_column_elements(x, "order_by")
- for x in
- util.to_list(self.order_by)]
-
- self._user_defined_foreign_keys = \
- util.column_set(
- expression._only_column_elements(x, "foreign_keys")
- for x in util.to_column_set(
- self._user_defined_foreign_keys
- ))
-
- self.remote_side = \
- util.column_set(
- expression._only_column_elements(x, "remote_side")
- for x in
- util.to_column_set(self.remote_side))
-
- self.target = self.mapper.mapped_table
-
-
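For reference, the deferred-configuration forms resolved here look like the following under Declarative, where string arguments arrive as callables (hypothetical Parent/Child names again):

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # Declarative converts these strings to callables, which this
        # method invokes and validates at mapper configuration time
        children = relationship(
            "Child",
            order_by="Child.id",
            primaryjoin="Parent.id == Child.parent_id")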
- def _setup_join_conditions(self):
- self._join_condition = jc = relationships.JoinCondition(
- parent_selectable=self.parent.mapped_table,
- child_selectable=self.mapper.mapped_table,
- parent_local_selectable=self.parent.local_table,
- child_local_selectable=self.mapper.local_table,
- primaryjoin=self.primaryjoin,
- secondary=self.secondary,
- secondaryjoin=self.secondaryjoin,
- parent_equivalents=self.parent._equivalent_columns,
- child_equivalents=self.mapper._equivalent_columns,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- local_remote_pairs=self.local_remote_pairs,
- remote_side=self.remote_side,
- self_referential=self._is_self_referential,
- prop=self,
- support_sync=not self.viewonly,
- can_be_synced_fn=self._columns_are_mapped
- )
- self.primaryjoin = jc.deannotated_primaryjoin
- self.secondaryjoin = jc.deannotated_secondaryjoin
- self.direction = jc.direction
- self.local_remote_pairs = jc.local_remote_pairs
- self.remote_side = jc.remote_columns
- self.local_columns = jc.local_columns
- self.synchronize_pairs = jc.synchronize_pairs
- self._calculated_foreign_keys = jc.foreign_key_columns
- self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
-
- def _check_conflicts(self):
- """Test that this relationship is legal, warn about
- inheritance conflicts."""
-
- if not self.is_primary() \
- and not mapper.class_mapper(
- self.parent.class_,
- configure=False).has_property(self.key):
- raise sa_exc.ArgumentError("Attempting to assign a new "
- "relationship '%s' to a non-primary mapper on "
- "class '%s'. New relationships can only be added "
- "to the primary mapper, i.e. the very first mapper "
- "created for class '%s' " % (self.key,
- self.parent.class_.__name__,
- self.parent.class_.__name__))
-
- # check for conflicting relationship() on superclass
- if not self.parent.concrete:
- for inheriting in self.parent.iterate_to_root():
- if inheriting is not self.parent \
- and inheriting.has_property(self.key):
- util.warn("Warning: relationship '%s' on mapper "
- "'%s' supersedes the same relationship "
- "on inherited mapper '%s'; this can "
- "cause dependency issues during flush"
- % (self.key, self.parent, inheriting))
-
- def _get_cascade(self):
- """Return the current cascade setting for this
- :class:`.RelationshipProperty`.
- """
- return self._cascade
-
- def _set_cascade(self, cascade):
- cascade = CascadeOptions(cascade)
- if 'mapper' in self.__dict__:
- self._check_cascade_settings(cascade)
- self._cascade = cascade
-
- if self._dependency_processor:
- self._dependency_processor.cascade = cascade
-
- cascade = property(_get_cascade, _set_cascade)
-
- def _check_cascade_settings(self, cascade):
- if cascade.delete_orphan and not self.single_parent \
- and (self.direction is MANYTOMANY or self.direction
- is MANYTOONE):
- raise sa_exc.ArgumentError(
- 'On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
- if self.direction is MANYTOONE and self.passive_deletes:
- util.warn("On %s, 'passive_deletes' is normally configured "
- "on one-to-many, one-to-one, many-to-many "
- "relationships only."
- % self)
-
- if self.passive_deletes == 'all' and \
- ("delete" in cascade or
- "delete-orphan" in cascade):
- raise sa_exc.ArgumentError(
- "On %s, can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade" % self)
-
- if cascade.delete_orphan:
- self.mapper.primary_mapper()._delete_orphans.append(
- (self.key, self.parent.class_)
- )
-
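The configurations accepted and rejected by the check above, sketched with hypothetical mappings:

    # delete-orphan on a many-to-one (or many-to-many) requires
    # single_parent=True
    parent = relationship(
        "Parent",
        cascade="all, delete-orphan",
        single_parent=True)

    # rejected with ArgumentError: passive_deletes='all' cannot be
    # combined with 'delete' or 'delete-orphan' cascade
    # children = relationship("Child", cascade="all, delete",
    #                         passive_deletes='all')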
- def _columns_are_mapped(self, *cols):
- """Return True if all columns in the given collection are
- mapped by the tables referenced by this :class:`.RelationshipProperty`.
-
- """
- for c in cols:
- if self.secondary is not None \
- and self.secondary.c.contains_column(c):
- continue
- if not self.parent.mapped_table.c.contains_column(c) and \
- not self.target.c.contains_column(c):
- return False
- return True
-
- def _generate_backref(self):
- """Interpret the 'backref' instruction to create a
- :func:`.relationship` complementary to this one."""
-
- if not self.is_primary():
- return
- if self.backref is not None and not self.back_populates:
- if isinstance(self.backref, str):
- backref_key, kwargs = self.backref, {}
- else:
- backref_key, kwargs = self.backref
- mapper = self.mapper.primary_mapper()
-
- check = set(mapper.iterate_to_root()).\
- union(mapper.self_and_descendants)
- for m in check:
- if m.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, m))
-
- # determine primaryjoin/secondaryjoin for the
- # backref. Use the one we had, so that
- # a custom join doesn't have to be specified in
- # both directions.
- if self.secondary is not None:
- # for many to many, just switch primaryjoin/
- # secondaryjoin. use the annotated
- # pj/sj on the _join_condition.
- pj = kwargs.pop('primaryjoin',
- self._join_condition.secondaryjoin_minus_local)
- sj = kwargs.pop('secondaryjoin',
- self._join_condition.primaryjoin_minus_local)
- else:
- pj = kwargs.pop('primaryjoin',
- self._join_condition.primaryjoin_reverse_remote)
- sj = kwargs.pop('secondaryjoin', None)
- if sj:
- raise sa_exc.InvalidRequestError(
- "Can't assign 'secondaryjoin' on a backref "
- "against a non-secondary relationship."
- )
-
- foreign_keys = kwargs.pop('foreign_keys',
- self._user_defined_foreign_keys)
- parent = self.parent.primary_mapper()
- kwargs.setdefault('viewonly', self.viewonly)
- kwargs.setdefault('post_update', self.post_update)
- kwargs.setdefault('passive_updates', self.passive_updates)
- self.back_populates = backref_key
- relationship = RelationshipProperty(
- parent, self.secondary,
- pj, sj,
- foreign_keys=foreign_keys,
- back_populates=self.key,
- **kwargs)
- mapper._configure_property(backref_key, relationship)
-
- if self.back_populates:
- self._add_reverse_property(self.back_populates)
-
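Both backref spellings consumed above, sketched with hypothetical names; the tuple form is what the :func:`.backref` helper produces:

    from sqlalchemy.orm import backref, relationship

    # plain string: the reverse relationship takes default options
    parent = relationship("Parent", backref="children")

    # (key, kwargs) tuple via backref(), giving the generated reverse
    # relationship its own options
    parent = relationship(
        "Parent",
        backref=backref("children", lazy="joined"))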
- def _post_init(self):
- if self.uselist is None:
- self.uselist = self.direction is not MANYTOONE
- if not self.viewonly:
- self._dependency_processor = \
- dependency.DependencyProcessor.from_relationship(self)
-
- @util.memoized_property
- def _use_get(self):
- """memoize the 'use_get' attribute of this RelationshipLoader's
- lazyloader."""
-
- strategy = self._get_strategy(strategies.LazyLoader)
- return strategy.use_get
-
- @util.memoized_property
- def _is_self_referential(self):
- return self.mapper.common_parent(self.parent)
-
- def _create_joins(self, source_polymorphic=False,
- source_selectable=None, dest_polymorphic=False,
- dest_selectable=None, of_type=None):
- if source_selectable is None:
- if source_polymorphic and self.parent.with_polymorphic:
- source_selectable = self.parent._with_polymorphic_selectable
-
- aliased = False
- if dest_selectable is None:
- if dest_polymorphic and self.mapper.with_polymorphic:
- dest_selectable = self.mapper._with_polymorphic_selectable
- aliased = True
- else:
- dest_selectable = self.mapper.mapped_table
-
- if self._is_self_referential and source_selectable is None:
- dest_selectable = dest_selectable.alias()
- aliased = True
- else:
- aliased = True
-
- dest_mapper = of_type or self.mapper
-
- single_crit = dest_mapper._single_table_criterion
- aliased = aliased or (source_selectable is not None)
-
- primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \
- self._join_condition.join_targets(
- source_selectable, dest_selectable, aliased, single_crit
- )
- if source_selectable is None:
- source_selectable = self.parent.local_table
- if dest_selectable is None:
- dest_selectable = self.mapper.local_table
- return (primaryjoin, secondaryjoin, source_selectable,
- dest_selectable, secondary, target_adapter)
-
-
-PropertyLoader = RelationProperty = RelationshipProperty
-log.class_logger(RelationshipProperty)
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index f6fd07e61..6bd465e9c 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -1,5 +1,5 @@
# orm/query.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -24,37 +24,28 @@ from . import (
attributes, interfaces, object_mapper, persistence,
exc as orm_exc, loading
)
+from .base import _entity_descriptor, _is_aliased_class, \
+ _is_mapped_class, _orm_columns, _generative
+from .path_registry import PathRegistry
from .util import (
- AliasedClass, ORMAdapter, _entity_descriptor, PathRegistry,
- _is_aliased_class, _is_mapped_class, _orm_columns,
- join as orm_join, with_parent, aliased
+ AliasedClass, ORMAdapter, join as orm_join, with_parent, aliased
)
-from .. import sql, util, log, exc as sa_exc, inspect, inspection, \
- types as sqltypes
+from .. import sql, util, log, exc as sa_exc, inspect, inspection
from ..sql.expression import _interpret_as_from
from ..sql import (
util as sql_util,
expression, visitors
)
+from ..sql.base import ColumnCollection
+from . import properties
__all__ = ['Query', 'QueryContext', 'aliased']
-def _generative(*assertions):
- """Mark a method as generative."""
-
- @util.decorator
- def generate(fn, *args, **kw):
- self = args[0]._clone()
- for assertion in assertions:
- assertion(self, fn.__name__)
- fn(self, *args[1:], **kw)
- return self
- return generate
-
_path_registry = PathRegistry.root
-
+@inspection._self_inspects
+@log.class_logger
class Query(object):
"""ORM-level SQL construction object.
@@ -77,7 +68,6 @@ class Query(object):
_with_labels = False
_criterion = None
_yield_per = None
- _lockmode = None
_order_by = False
_group_by = False
_having = None
@@ -85,6 +75,7 @@ class Query(object):
_prefixes = None
_offset = None
_limit = None
+ _for_update_arg = None
_statement = None
_correlate = frozenset()
_populate_existing = False
@@ -118,6 +109,7 @@ class Query(object):
if entity_wrapper is None:
entity_wrapper = _QueryEntity
self._entities = []
+ self._primary_entity = None
for ent in util.to_list(entities):
entity_wrapper(self, ent)
@@ -299,11 +291,8 @@ class Query(object):
@property
def _mapper_entities(self):
- # TODO: this is wrong, its hardcoded to "primary entity" when
- # for the case of __all_equivs() it should not be
- # the name of this accessor is wrong too
for ent in self._entities:
- if hasattr(ent, 'primary_entity'):
+ if isinstance(ent, _MapperEntity):
yield ent
def _joinpoint_zero(self):
@@ -313,9 +302,10 @@ class Query(object):
)
def _mapper_zero_or_none(self):
- if not getattr(self._entities[0], 'primary_entity', False):
+ if self._primary_entity:
+ return self._primary_entity.mapper
+ else:
return None
- return self._entities[0].mapper
def _only_mapper_zero(self, rationale=None):
if len(self._entities) > 1:
@@ -327,16 +317,11 @@ class Query(object):
return self._mapper_zero()
def _only_full_mapper_zero(self, methname):
- if len(self._entities) != 1:
+ if self._entities != [self._primary_entity]:
raise sa_exc.InvalidRequestError(
"%s() can only be used against "
"a single mapped class." % methname)
- entity = self._entity_zero()
- if not hasattr(entity, 'primary_entity'):
- raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
- return entity.entity_zero
+ return self._primary_entity.entity_zero
def _only_entity_zero(self, rationale=None):
if len(self._entities) > 1:
@@ -555,7 +540,7 @@ class Query(object):
:class:`.Query`, converted
to a scalar subquery with a label of the given name.
- Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.label`.
+ Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.label`.
.. versionadded:: 0.6.5
@@ -567,7 +552,7 @@ class Query(object):
"""Return the full SELECT statement represented by this
:class:`.Query`, converted to a scalar subquery.
- Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.as_scalar`.
+ Analogous to :meth:`sqlalchemy.sql.expression.SelectBase.as_scalar`.
.. versionadded:: 0.6.5
@@ -698,7 +683,7 @@ class Query(object):
"""
- if not getattr(self._entities[0], 'primary_entity', False):
+ if not self._primary_entity:
raise sa_exc.InvalidRequestError(
"No primary mapper set up for this Query.")
entity = self._entities[0]._clone()
@@ -811,7 +796,7 @@ class Query(object):
if not self._populate_existing and \
not mapper.always_refresh and \
- self._lockmode is None:
+ self._for_update_arg is None:
instance = loading.get_from_identity(
self.session, key, attributes.PASSIVE_OFF)
@@ -903,11 +888,10 @@ class Query(object):
"""
if property is None:
- from sqlalchemy.orm import properties
mapper = object_mapper(instance)
for prop in mapper.iterate_properties:
- if isinstance(prop, properties.PropertyLoader) and \
+ if isinstance(prop, properties.RelationshipProperty) and \
prop.mapper is self._mapper_zero():
property = prop
break
@@ -936,7 +920,7 @@ class Query(object):
@_generative()
def with_session(self, session):
- """Return a :class:`Query` that will use the given :class:`.Session`.
+ """Return a :class:`.Query` that will use the given :class:`.Session`.
"""
@@ -1140,32 +1124,63 @@ class Query(object):
@_generative()
def with_lockmode(self, mode):
- """Return a new Query object with the specified locking mode.
+ """Return a new :class:`.Query` object with the specified "locking mode",
+ which essentially refers to the ``FOR UPDATE`` clause.
- :param mode: a string representing the desired locking mode. A
- corresponding value is passed to the ``for_update`` parameter of
- :meth:`~sqlalchemy.sql.expression.select` when the query is
- executed. Valid values are:
+ .. deprecated:: 0.9.0 superseded by :meth:`.Query.with_for_update`.
- ``'update'`` - passes ``for_update=True``, which translates to
- ``FOR UPDATE`` (standard SQL, supported by most dialects)
+ :param mode: a string representing the desired locking mode.
+ Valid values are:
- ``'update_nowait'`` - passes ``for_update='nowait'``, which
- translates to ``FOR UPDATE NOWAIT`` (supported by Oracle,
- PostgreSQL 8.1 upwards)
+ * ``None`` - translates to no lockmode
- ``'read'`` - passes ``for_update='read'``, which translates to
- ``LOCK IN SHARE MODE`` (for MySQL), and ``FOR SHARE`` (for
- PostgreSQL)
+ * ``'update'`` - translates to ``FOR UPDATE``
+ (standard SQL, supported by most dialects)
- ``'read_nowait'`` - passes ``for_update='read_nowait'``, which
- translates to ``FOR SHARE NOWAIT`` (supported by PostgreSQL).
+ * ``'update_nowait'`` - translates to ``FOR UPDATE NOWAIT``
+ (supported by Oracle, PostgreSQL 8.1 upwards)
+
+ * ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
+ and ``FOR SHARE`` (for PostgreSQL)
+
+ .. seealso::
+
+ :meth:`.Query.with_for_update` - improved API for
+ specifying the ``FOR UPDATE`` clause.
- .. versionadded:: 0.7.7
- ``FOR SHARE`` and ``FOR SHARE NOWAIT`` (PostgreSQL).
"""
+ self._for_update_arg = LockmodeArg.parse_legacy_query(mode)
+
+ @_generative()
+ def with_for_update(self, read=False, nowait=False, of=None):
+ """return a new :class:`.Query` with the specified options for the
+ ``FOR UPDATE`` clause.
+
+ The behavior of this method is identical to that of
+ :meth:`.SelectBase.with_for_update`. When called with no arguments,
+ the resulting ``SELECT`` statement will have a ``FOR UPDATE`` clause
+ appended. When additional arguments are specified, backend-specific
+ options such as ``FOR UPDATE NOWAIT`` or ``LOCK IN SHARE MODE``
+ can take effect.
+
+ E.g.::
+
+ q = sess.query(User).with_for_update(nowait=True, of=User)
+
+ The above query on a PostgreSQL backend will render like::
- self._lockmode = mode
+ SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT
+
+ .. versionadded:: 0.9.0 :meth:`.Query.with_for_update` supersedes
+ the :meth:`.Query.with_lockmode` method.
+
+ .. seealso::
+
+ :meth:`.GenerativeSelect.with_for_update` - Core level method with
+ full argument and behavioral description.
+
+ """
+ self._for_update_arg = LockmodeArg(read=read, nowait=nowait, of=of)
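A sketch of how legacy ``with_lockmode()`` calls translate to the new method, assuming a mapped ``User`` class and a session ``sess``:

    q = sess.query(User)
    q.with_lockmode('update')         # now: q.with_for_update()
    q.with_lockmode('update_nowait')  # now: q.with_for_update(nowait=True)
    q.with_lockmode('read')           # now: q.with_for_update(read=True)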
@_generative()
def params(self, *args, **kwargs):
@@ -1300,7 +1315,7 @@ class Query(object):
"""apply a HAVING criterion to the query and return the
newly resulting :class:`.Query`.
- :meth:`having` is used in conjunction with :meth:`group_by`.
+ :meth:`~.Query.having` is used in conjunction with :meth:`~.Query.group_by`.
HAVING criterion makes it possible to use filters on aggregate
functions like COUNT, SUM, AVG, MAX, and MIN, eg.::
@@ -1478,7 +1493,7 @@ class Query(object):
q = session.query(User).join(Address)
- The above calling form of :meth:`.join` will raise an error if
+ The above calling form of :meth:`~.Query.join` will raise an error if
either there are no foreign keys between the two entities, or if
there are multiple foreign key linkages between them. In the
above calling form, :meth:`~.Query.join` is called upon to
@@ -1640,14 +1655,14 @@ class Query(object):
example :ref:`examples_xmlpersistence` which illustrates
an XPath-like query system using algorithmic joins.
- :param *props: A collection of one or more join conditions,
+ :param \*props: A collection of one or more join conditions,
each consisting of a relationship-bound attribute or string
relationship name representing an "on clause", or a single
target entity, or a tuple in the form of ``(target, onclause)``.
A special two-argument calling form of the form ``target, onclause``
is also accepted.
:param aliased=False: If True, indicate that the JOIN target should be
- anonymously aliased. Subsequent calls to :class:`~.Query.filter`
+ anonymously aliased. Subsequent calls to :meth:`~.Query.filter`
and similar will adapt the incoming criterion to the target
alias, until :meth:`~.Query.reset_joinpoint` is called.
:param from_joinpoint=False: When using ``aliased=True``, a setting
@@ -1827,14 +1842,30 @@ class Query(object):
raise sa_exc.InvalidRequestError(
"Can't construct a join from %s to %s, they "
"are the same entity" %
- (left, right))
+ (left, right))
l_info = inspect(left)
r_info = inspect(right)
- overlap = not create_aliases and \
- sql_util.selectables_overlap(l_info.selectable,
- r_info.selectable)
+
+ overlap = False
+ if not create_aliases:
+ right_mapper = getattr(r_info, "mapper", None)
+ # if the target is a joined inheritance mapping,
+ # be more liberal about auto-aliasing.
+ if right_mapper and (
+ right_mapper.with_polymorphic or
+ isinstance(right_mapper.mapped_table, expression.Join)
+ ):
+ for from_obj in self._from_obj or [l_info.selectable]:
+ if sql_util.selectables_overlap(l_info.selectable, from_obj) and \
+ sql_util.selectables_overlap(from_obj, r_info.selectable):
+ overlap = True
+ break
+ elif sql_util.selectables_overlap(l_info.selectable, r_info.selectable):
+ overlap = True
+
+
if overlap and l_info.selectable is r_info.selectable:
raise sa_exc.InvalidRequestError(
"Can't join table/selectable '%s' to itself" %
@@ -2219,7 +2250,7 @@ class Query(object):
``Query``.
:param \*prefixes: optional prefixes, typically strings,
- not using any commas. In particular is useful for MySQL keywords.
+ not using any commas. This is particularly useful for MySQL keywords.
e.g.::
@@ -2414,10 +2445,10 @@ class Query(object):
"""
return [
{
- 'name':ent._label_name,
- 'type':ent.type,
- 'aliased':getattr(ent, 'is_aliased_class', False),
- 'expr':ent.expr
+ 'name': ent._label_name,
+ 'type': ent.type,
+ 'aliased': getattr(ent, 'is_aliased_class', False),
+ 'expr': ent.expr
}
for ent in self._entities
]
@@ -2500,7 +2531,7 @@ class Query(object):
.. versionadded:: 0.8.1
"""
- return sql.exists(self.with_entities('1').statement)
+ return sql.exists(self.with_labels().statement.with_only_columns(['1']))
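Typical use of the construct returned here, assuming a mapped ``User`` class; a SELECT EXISTS is emitted around the query's statement:

    q = session.query(User).filter(User.name == 'fred')
    # SELECT EXISTS (SELECT 1 FROM users WHERE users.name = :name_1)
    session.query(q.exists()).scalar()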
def count(self):
"""Return a count of rows this Query would return.
@@ -2571,19 +2602,37 @@ class Query(object):
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
- Returns the number of rows deleted, excluding any cascades.
+ :return: the count of rows matched as returned by the database's
+ "row count" feature.
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON DELETE CASCADE is configured for any foreign key
- references which require it. The Session needs to be expired (occurs
- automatically after commit(), or call expire_all()) in order for the
- state of dependent objects subject to delete or delete-orphan cascade
- to be correctly represented.
+ This method has several key caveats:
- Note that the :meth:`.MapperEvents.before_delete` and
- :meth:`.MapperEvents.after_delete`
- events are **not** invoked from this method. It instead
- invokes :meth:`.SessionEvents.after_bulk_delete`.
+ * The method does **not** offer in-Python cascading of relationships - it
+ is assumed that ON DELETE CASCADE/SET NULL/etc. is configured for any foreign key
+ references which require it, otherwise the database may emit an
+ integrity violation if foreign key references are being enforced.
+
+ After the DELETE, dependent objects in the :class:`.Session` which
+ were impacted by an ON DELETE may not contain the current
+ state, or may have been deleted. This issue is resolved once the
+ :class:`.Session` is expired,
+ which normally occurs upon :meth:`.Session.commit` or can be forced
+ by using :meth:`.Session.expire_all`. Accessing an expired object
+ whose row has been deleted will invoke a SELECT to locate the
+ row; when the row is not found, an :class:`~sqlalchemy.orm.exc.ObjectDeletedError`
+ is raised.
+
+ * The :meth:`.MapperEvents.before_delete` and
+ :meth:`.MapperEvents.after_delete`
+ events are **not** invoked from this method. Instead, the
+ :meth:`.SessionEvents.after_bulk_delete` method is provided to act
+ upon a mass DELETE of entity rows.
+
+ .. seealso::
+
+ :meth:`.Query.update`
+
+ :ref:`inserts_and_updates` - Core SQL tutorial
"""
#TODO: cascades need handling.
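A hedged usage sketch reflecting the caveats above (``synchronize_session`` is the method's existing strategy argument; ``User`` is a hypothetical mapped class):

    # bulk DELETE: returns the database's matched-row count and does
    # not cascade in Python
    session.query(User).\
        filter(User.name == 'ed').\
        delete(synchronize_session=False)

    # dependent objects already in the Session may be stale until expired
    session.expire_all()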
@@ -2622,20 +2671,50 @@ class Query(object):
The expression evaluator currently doesn't account for differing
string collations between the database and Python.
- Returns the number of rows matched by the update.
+ :return: the count of rows matched as returned by the database's
+ "row count" feature.
+
+ This method has several key caveats:
+
+ * The method does **not** offer in-Python cascading of relationships - it
+ is assumed that ON UPDATE CASCADE is configured for any foreign key
+ references which require it, otherwise the database may emit an
+ integrity violation if foreign key references are being enforced.
+
+ After the UPDATE, dependent objects in the :class:`.Session` which
+ were impacted by an ON UPDATE CASCADE may not contain the current
+ state; this issue is resolved once the :class:`.Session` is expired,
+ which normally occurs upon :meth:`.Session.commit` or can be forced
+ by using :meth:`.Session.expire_all`.
+
+ * As of 0.8, this method will support multiple table updates, as detailed
+ in :ref:`multi_table_updates`, and this behavior does extend to support
+ updates of joined-inheritance and other multiple table mappings. However,
+ the **join condition of an inheritance mapper is currently not
+ automatically rendered**.
+ Care must be taken in any multiple-table update to explicitly include
+ the joining condition between those tables, even in mappings where
+ this is normally automatic.
+ E.g. if a class ``Engineer`` subclasses ``Employee``, an UPDATE of the
+ ``Engineer`` local table using criteria against the ``Employee``
+ local table might look like::
+
+ session.query(Engineer).\\
+ filter(Engineer.id == Employee.id).\\
+ filter(Employee.name == 'dilbert').\\
+ update({"engineer_type": "programmer"})
+
+ * The :meth:`.MapperEvents.before_update` and
+ :meth:`.MapperEvents.after_update`
+ events are **not** invoked from this method. Instead, the
+ :meth:`.SessionEvents.after_bulk_update` method is provided to act
+ upon a mass UPDATE of entity rows.
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON UPDATE CASCADE is configured for any foreign key
- references which require it.
+ .. seealso::
- The Session needs to be expired (occurs automatically after commit(),
- or call expire_all()) in order for the state of dependent objects
- subject foreign key cascade to be correctly represented.
+ :meth:`.Query.delete`
- Note that the :meth:`.MapperEvents.before_update` and
- :meth:`.MapperEvents.after_update`
- events are **not** invoked from this method. It instead
- invokes :meth:`.SessionEvents.after_bulk_update`.
+ :ref:`inserts_and_updates` - Core SQL tutorial
"""
@@ -2650,13 +2729,6 @@ class Query(object):
update_op.exec_()
return update_op.rowcount
- _lockmode_lookup = {
- 'read': 'read',
- 'read_nowait': 'read_nowait',
- 'update': True,
- 'update_nowait': 'nowait',
- None: False
- }
def _compile_context(self, labels=True):
context = QueryContext(self)
@@ -2666,12 +2738,8 @@ class Query(object):
context.labels = labels
- if self._lockmode:
- try:
- context.for_update = self._lockmode_lookup[self._lockmode]
- except KeyError:
- raise sa_exc.ArgumentError(
- "Unknown lockmode %r" % self._lockmode)
+ context._for_update_arg = self._for_update_arg
+
for entity in self._entities:
entity.setup_context(self, context)
@@ -2755,9 +2823,10 @@ class Query(object):
statement = sql.select(
[inner] + context.secondary_columns,
- for_update=context.for_update,
use_labels=context.labels)
+ statement._for_update_arg = context._for_update_arg
+
from_clause = inner
for eager_join in context.eager_joins.values():
# EagerLoader places a 'stop_on' attribute on the join,
@@ -2800,11 +2869,12 @@ class Query(object):
context.whereclause,
from_obj=context.froms,
use_labels=context.labels,
- for_update=context.for_update,
order_by=context.order_by,
**self._select_args
)
+ statement._for_update_arg = context._for_update_arg
+
for hint in self._with_hints:
statement = statement.with_hint(*hint)
@@ -2832,14 +2902,34 @@ class Query(object):
if adapter:
single_crit = adapter.traverse(single_crit)
single_crit = self._adapt_clause(single_crit, False, False)
- context.whereclause = sql.and_(context.whereclause,
- single_crit)
+ context.whereclause = sql.and_(
+ sql.True_._ifnone(context.whereclause),
+ single_crit)
def __str__(self):
return str(self._compile_context().statement)
-inspection._self_inspects(Query)
+from ..sql.selectable import ForUpdateArg
+class LockmodeArg(ForUpdateArg):
+ @classmethod
+ def parse_legacy_query(self, mode):
+ if mode in (None, False):
+ return None
+
+ if mode == "read":
+ read = True
+ nowait = False
+ elif mode == "update":
+ read = nowait = False
+ elif mode == "update_nowait":
+ nowait = True
+ read = False
+ else:
+ raise sa_exc.ArgumentError(
+ "Unknown with_lockmode argument: %r" % mode)
+
+ return LockmodeArg(read=read, nowait=nowait)
class _QueryEntity(object):
"""represent an entity column returned within a Query result."""
@@ -2850,6 +2940,8 @@ class _QueryEntity(object):
if not isinstance(entity, util.string_types) and \
_is_mapped_class(entity):
cls = _MapperEntity
+ elif isinstance(entity, Bundle):
+ cls = _BundleEntity
else:
cls = _ColumnEntity
return object.__new__(cls)
@@ -2864,12 +2956,15 @@ class _MapperEntity(_QueryEntity):
"""mapper/class/AliasedClass entity"""
def __init__(self, query, entity):
- self.primary_entity = not query._entities
+ if not query._primary_entity:
+ query._primary_entity = self
query._entities.append(self)
self.entities = [entity]
self.expr = entity
+ supports_single_entity = True
+
def setup_entity(self, ext_info, aliased_adapter):
self.mapper = ext_info.mapper
self.aliased_adapter = aliased_adapter
@@ -2884,6 +2979,7 @@ class _MapperEntity(_QueryEntity):
else:
self._label_name = self.mapper.class_.__name__
self.path = self.entity_zero._path_registry
+ self.custom_rows = bool(self.mapper.dispatch.append_result)
def set_with_polymorphic(self, query, cls_or_mappers,
selectable, polymorphic_on):
@@ -2939,10 +3035,8 @@ class _MapperEntity(_QueryEntity):
return entity.common_parent(self.entity_zero)
- #_adapted_selectable = None
def adapt_to_selectable(self, query, sel):
query._entities.append(self)
- # self._adapted_selectable = sel
def _get_entity_clauses(self, query, context):
@@ -2980,7 +3074,7 @@ class _MapperEntity(_QueryEntity):
self.selectable,
self.mapper._equivalent_columns)
- if self.primary_entity:
+ if query._primary_entity is self:
_instance = loading.instance_processor(
self.mapper,
context,
@@ -3050,6 +3144,187 @@ class _MapperEntity(_QueryEntity):
def __str__(self):
return str(self.mapper)
+@inspection._self_inspects
+class Bundle(object):
+ """A grouping of SQL expressions that are returned by a :class:`.Query`
+ under one namespace.
+
+ The :class:`.Bundle` essentially allows nesting of the tuple-based
+ results returned by a column-oriented :class:`.Query` object. It also
+ is extensible via simple subclassing, where the primary capability
+ to override is that of how the set of expressions should be returned,
+ allowing post-processing as well as custom return types, without
+ involving ORM identity-mapped classes.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`bundles`
+
+ """
+
+ single_entity = False
+ """If True, queries for a single Bundle will be returned as a single
+ entity, rather than an element within a keyed tuple."""
+
+ def __init__(self, name, *exprs, **kw):
+ """Construct a new :class:`.Bundle`.
+
+ e.g.::
+
+ bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+ for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
+ print(row.mybundle.x, row.mybundle.y)
+
+ :param name: name of the bundle.
+ :param \*exprs: columns or SQL expressions comprising the bundle.
+ :param single_entity=False: if True, rows for this :class:`.Bundle`
+ can be returned as a "single entity" outside of any enclosing tuple
+ in the same manner as a mapped entity.
+
+ """
+ self.name = self._label = name
+ self.exprs = exprs
+ self.c = self.columns = ColumnCollection()
+ self.columns.update((getattr(col, "key", col._label), col)
+ for col in exprs)
+ self.single_entity = kw.pop('single_entity', self.single_entity)
+
+ columns = None
+ """A namespace of SQL expressions referred to by this :class:`.Bundle`.
+
+ e.g.::
+
+ bn = Bundle("mybundle", MyClass.x, MyClass.y)
+
+ q = sess.query(bn).filter(bn.c.x == 5)
+
+ Nesting of bundles is also supported::
+
+ b1 = Bundle("b1",
+ Bundle('b2', MyClass.a, MyClass.b),
+ Bundle('b3', MyClass.x, MyClass.y)
+ )
+
+ q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
+
+ .. seealso::
+
+ :attr:`.Bundle.c`
+
+ """
+
+ c = None
+ """An alias for :attr:`.Bundle.columns`."""
+
+ def _clone(self):
+ cloned = self.__class__.__new__(self.__class__)
+ cloned.__dict__.update(self.__dict__)
+ return cloned
+
+ def __clause_element__(self):
+ return expression.ClauseList(group=False, *self.c)
+
+ @property
+ def clauses(self):
+ return self.__clause_element__().clauses
+
+ def label(self, name):
+ """Provide a copy of this :class:`.Bundle` passing a new label."""
+
+ cloned = self._clone()
+ cloned.name = name
+ return cloned
+
+ def create_row_processor(self, query, procs, labels):
+ """Produce the "row processing" function for this :class:`.Bundle`.
+
+ May be overridden by subclasses.
+
+ .. seealso::
+
+ :ref:`bundles` - includes an example of subclassing.
+
+ """
+ def proc(row, result):
+ return util.KeyedTuple([proc(row, None) for proc in procs], labels)
+ return proc
+
+
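A sketch of the subclassing hook just defined, returning dictionaries instead of keyed tuples (hypothetical ``User`` columns ``data1``/``data2``, active session assumed):

    class DictBundle(Bundle):
        def create_row_processor(self, query, procs, labels):
            """Return dict-shaped rows rather than keyed tuples."""
            def proc(row, result):
                return dict(
                    zip(labels, (p(row, None) for p in procs)))
            return proc

    bn = DictBundle('mybundle', User.data1, User.data2)
    for row in session.query(bn).filter(bn.c.data1 == 'd1'):
        print(row.mybundle['data1'], row.mybundle['data2'])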
+class _BundleEntity(_QueryEntity):
+ def __init__(self, query, bundle, setup_entities=True):
+ query._entities.append(self)
+ self.bundle = self.expr = bundle
+ self.type = type(bundle)
+ self._label_name = bundle.name
+ self._entities = []
+
+ if setup_entities:
+ for expr in bundle.exprs:
+ if isinstance(expr, Bundle):
+ _BundleEntity(self, expr)
+ else:
+ _ColumnEntity(self, expr, namespace=self)
+
+ self.entities = ()
+
+ self.filter_fn = lambda item: item
+
+ self.supports_single_entity = self.bundle.single_entity
+
+ custom_rows = False
+
+ @property
+ def entity_zero(self):
+ for ent in self._entities:
+ ezero = ent.entity_zero
+ if ezero is not None:
+ return ezero
+ else:
+ return None
+
+ def corresponds_to(self, entity):
+ # TODO: this seems to have no effect for
+ # _ColumnEntity either
+ return False
+
+ @property
+ def entity_zero_or_selectable(self):
+ for ent in self._entities:
+ ezero = ent.entity_zero_or_selectable
+ if ezero is not None:
+ return ezero
+ else:
+ return None
+
+ def adapt_to_selectable(self, query, sel):
+ c = _BundleEntity(query, self.bundle, setup_entities=False)
+ #c._label_name = self._label_name
+ #c.entity_zero = self.entity_zero
+ #c.entities = self.entities
+
+ for ent in self._entities:
+ ent.adapt_to_selectable(c, sel)
+
+ def setup_entity(self, ext_info, aliased_adapter):
+ for ent in self._entities:
+ ent.setup_entity(ext_info, aliased_adapter)
+
+ def setup_context(self, query, context):
+ for ent in self._entities:
+ ent.setup_context(query, context)
+
+ def row_processor(self, query, context, custom_rows):
+ procs, labels = zip(
+ *[ent.row_processor(query, context, custom_rows)
+ for ent in self._entities]
+ )
+
+ proc = self.bundle.create_row_processor(query, procs, labels)
+
+ return proc, self._label_name
class _ColumnEntity(_QueryEntity):
"""Column/expression based entity."""
@@ -3066,7 +3341,7 @@ class _ColumnEntity(_QueryEntity):
interfaces.PropComparator
)):
self._label_name = column.key
- column = column.__clause_element__()
+ column = column._query_clause_element()
else:
self._label_name = getattr(column, 'key', None)
@@ -3079,6 +3354,9 @@ class _ColumnEntity(_QueryEntity):
if c is not column:
return
+ elif isinstance(column, Bundle):
+ _BundleEntity(query, column)
+ return
if not isinstance(column, sql.ColumnElement):
raise sa_exc.InvalidRequestError(
@@ -3086,7 +3364,7 @@ class _ColumnEntity(_QueryEntity):
"expected - got '%r'" % (column, )
)
- type_ = column.type
+ self.type = type_ = column.type
if type_.hashable:
self.filter_fn = lambda item: item
else:
@@ -3129,6 +3407,9 @@ class _ColumnEntity(_QueryEntity):
else:
self.entity_zero = None
+ supports_single_entity = False
+ custom_rows = False
+
@property
def entity_zero_or_selectable(self):
if self.entity_zero is not None:
@@ -3138,10 +3419,6 @@ class _ColumnEntity(_QueryEntity):
else:
return None
- @property
- def type(self):
- return self.column.type
-
def adapt_to_selectable(self, query, sel):
c = _ColumnEntity(query, sel.corresponding_column(self.column))
c._label_name = self._label_name
@@ -3154,6 +3431,8 @@ class _ColumnEntity(_QueryEntity):
self.froms.add(ext_info.selectable)
def corresponds_to(self, entity):
+ # TODO: just returning False here,
+ # no tests fail
if self.entity_zero is None:
return False
elif _is_aliased_class(entity):
@@ -3188,14 +3467,11 @@ class _ColumnEntity(_QueryEntity):
return str(self.column)
-log.class_logger(Query)
-
-
class QueryContext(object):
multi_row_eager_loaders = False
adapter = None
froms = ()
- for_update = False
+ for_update = None
def __init__(self, query):
@@ -3230,6 +3506,38 @@ class QueryContext(object):
class AliasOption(interfaces.MapperOption):
def __init__(self, alias):
+ """Return a :class:`.MapperOption` that will indicate to the :class:`.Query`
+ that the main table has been aliased.
+
+ This is a seldom-used option to suit the
+ very rare case that :func:`.contains_eager`
+ is being used in conjunction with a user-defined SELECT
+ statement that aliases the parent table. E.g.::
+
+ # define an aliased UNION called 'ulist'
+ ulist = users.select(users.c.user_id==7).\\
+ union(users.select(users.c.user_id>7)).\\
+ alias('ulist')
+
+ # add on an eager load of "addresses"
+ statement = ulist.outerjoin(addresses).\\
+ select().apply_labels()
+
+ # create query, indicating "ulist" will be an
+ # alias for the main table, "addresses"
+ # property should be eager loaded
+ query = session.query(User).options(
+ contains_alias(ulist),
+ contains_eager(User.addresses))
+
+ # then get results via the statement
+ results = query.from_statement(statement).all()
+
+ :param alias: is the string name of an alias, or a
+ :class:`~.sql.expression.Alias` object representing
+ the alias.
+
+ """
self.alias = alias
def process_query(self, query):
@@ -3238,3 +3546,5 @@ class AliasOption(interfaces.MapperOption):
else:
alias = self.alias
query._from_obj_alias = sql_util.ColumnAdapter(alias)
+
+
diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py
index 33377d3ec..6fdedd382 100644
--- a/lib/sqlalchemy/orm/relationships.py
+++ b/lib/sqlalchemy/orm/relationships.py
@@ -1,5 +1,5 @@
# orm/relationships.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,15 +13,20 @@ and `secondaryjoin` aspects of :func:`.relationship`.
"""
-from .. import sql, util, exc as sa_exc, schema
+from .. import sql, util, exc as sa_exc, schema, log
+
+from .util import CascadeOptions, _orm_annotate, _orm_deannotate
+from . import dependency
+from . import attributes
from ..sql.util import (
ClauseAdapter,
join_condition, _shallow_annotate, visit_binary_product,
- _deep_deannotate, find_tables, selectables_overlap
+ _deep_deannotate, selectables_overlap
)
from ..sql import operators, expression, visitors
-from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY
-
+from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY, StrategizedProperty, PropComparator
+from ..inspection import inspect
+from . import mapper as mapperlib
def remote(expr):
"""Annotate a portion of a primaryjoin expression
@@ -64,6 +69,1607 @@ def foreign(expr):
{"foreign": True})
+@log.class_logger
+@util.langhelpers.dependency_for("sqlalchemy.orm.properties")
+class RelationshipProperty(StrategizedProperty):
+ """Describes an object property that holds a single item or list
+ of items that correspond to a related database table.
+
+ Public constructor is the :func:`.orm.relationship` function.
+
+ See also:
+
+ :ref:`relationship_config_toplevel`
+
+ """
+
+ strategy_wildcard_key = 'relationship'
+
+ _dependency_processor = None
+
+ def __init__(self, argument,
+ secondary=None, primaryjoin=None,
+ secondaryjoin=None,
+ foreign_keys=None,
+ uselist=None,
+ order_by=False,
+ backref=None,
+ back_populates=None,
+ post_update=False,
+ cascade=False, extension=None,
+ viewonly=False, lazy=True,
+ collection_class=None, passive_deletes=False,
+ passive_updates=True, remote_side=None,
+ enable_typechecks=True, join_depth=None,
+ comparator_factory=None,
+ single_parent=False, innerjoin=False,
+ distinct_target_key=None,
+ doc=None,
+ active_history=False,
+ cascade_backrefs=True,
+ load_on_pending=False,
+ strategy_class=None, _local_remote_pairs=None,
+ query_class=None,
+ info=None):
+ """Provide a relationship of a primary Mapper to a secondary Mapper.
+
+ This corresponds to a parent-child or associative table relationship. The
+ constructed class is an instance of :class:`.RelationshipProperty`.
+
+ A typical :func:`.relationship`, used in a classical mapping::
+
+ mapper(Parent, properties={
+ 'children': relationship(Child)
+ })
+
+ Some arguments accepted by :func:`.relationship` optionally accept a
+ callable function, which when called produces the desired value.
+ The callable is invoked by the parent :class:`.Mapper` at "mapper
+ initialization" time, which happens only when mappers are first used, and
+ is assumed to be after all mappings have been constructed. This can be
+ used to resolve order-of-declaration and other dependency issues, such as
+ if ``Child`` is declared below ``Parent`` in the same file::
+
+ mapper(Parent, properties={
+ "children":relationship(lambda: Child,
+ order_by=lambda: Child.id)
+ })
+
+ When using the :ref:`declarative_toplevel` extension, the Declarative
+ initializer allows string arguments to be passed to :func:`.relationship`.
+ These string arguments are converted into callables that evaluate
+ the string as Python code, using the Declarative
+ class-registry as a namespace. This allows the lookup of related
+ classes to be automatic via their string name, and removes the need to
+ import related classes at all into the local module space::
+
+ from sqlalchemy.ext.declarative import declarative_base
+
+ Base = declarative_base()
+
+ class Parent(Base):
+ __tablename__ = 'parent'
+ id = Column(Integer, primary_key=True)
+ children = relationship("Child", order_by="Child.id")
+
+ A full array of examples and reference documentation regarding
+ :func:`.relationship` is at :ref:`relationship_config_toplevel`.
+
+ :param argument:
+ a mapped class, or actual :class:`.Mapper` instance, representing the
+ target of the relationship.
+
+ ``argument`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param secondary:
+ for a many-to-many relationship, specifies the intermediary
+ table, and is an instance of :class:`.Table`. The ``secondary`` keyword
+ argument should generally only
+ be used for a table that is not otherwise expressed in any class
+ mapping, unless this relationship is declared as view only, otherwise
+ conflicting persistence operations can occur.
+
+ ``secondary`` may
+ also be passed as a callable function which is evaluated at
+ mapper initialization time.
+
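+ For example, a minimal sketch of a many-to-many configuration;
+ the ``association`` table and the ``Parent`` / ``Child`` classes
+ are illustrative names::
+
+     association = Table('association', Base.metadata,
+         Column('parent_id', Integer, ForeignKey('parent.id')),
+         Column('child_id', Integer, ForeignKey('child.id'))
+     )
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         # Child is assumed to be mapped to the 'child' table
+         children = relationship("Child", secondary=association)
+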
+ :param active_history=False:
+ When ``True``, indicates that the "previous" value for a
+ many-to-one reference should be loaded when replaced, if
+ not already loaded. Normally, history tracking logic for
+ simple many-to-ones only needs to be aware of the "new"
+ value in order to perform a flush. This flag is available
+ for applications that make use of
+ :func:`.attributes.get_history` which also need to know
+ the "previous" value of the attribute.
+
+ :param backref:
+ indicates the string name of a property to be placed on the related
+ mapper's class that will handle this relationship in the other
+ direction. The other property will be created automatically
+ when the mappers are configured. Can also be passed as a
+ :func:`backref` object to control the configuration of the
+ new relationship.
+
+ :param back_populates:
+ Takes a string name and has the same meaning as ``backref``,
+ except the complementing property is **not** created automatically,
+ and instead must be configured explicitly on the other mapper. The
+ complementing property should also indicate ``back_populates``
+ to this relationship to ensure proper functioning.
+
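+ For example, a minimal sketch of a bi-directional configuration
+ using ``back_populates`` on both sides (class names here are
+ illustrative)::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child", back_populates="parent")
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('parent.id'))
+         parent = relationship("Parent", back_populates="children")
+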
+ :param cascade:
+ a comma-separated list of cascade rules which determines how
+ Session operations should be "cascaded" from parent to child.
+ This defaults to ``False``, which means the default cascade
+ should be used. The default value is ``"save-update, merge"``.
+
+ Available cascades are:
+
+ * ``save-update`` - cascade the :meth:`.Session.add`
+ operation. This cascade applies both to future and
+ past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
+ meaning new items added to a collection or scalar relationship
+ get placed into the same session as that of the parent, and
+ also applies to items which have been removed from this
+ relationship but are still part of unflushed history.
+
+ * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
+ operation
+
+ * ``expunge`` - cascade the :meth:`.Session.expunge`
+ operation
+
+ * ``delete`` - cascade the :meth:`.Session.delete`
+ operation
+
+ * ``delete-orphan`` - if an item of the child's type is
+ detached from its parent, mark it for deletion.
+
+ .. versionchanged:: 0.7
+ This option does not prevent
+ a new instance of the child object from being persisted
+ without a parent to start with; to constrain against
+ that case, ensure the child's foreign key column(s)
+ is configured as NOT NULL
+
+ * ``refresh-expire`` - cascade the :meth:`.Session.expire`
+ and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
+
+ * ``all`` - shorthand for "save-update, merge, refresh-expire,
+   expunge, delete"
+
+ See the section :ref:`unitofwork_cascades` for more background
+ on configuring cascades.
+
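+ For example, a sketch of a typical "delete children along with
+ their parent" configuration (names are illustrative)::
+
+     mapper(Parent, properties={
+         'children': relationship(Child,
+                         cascade="all, delete-orphan")
+     })
+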
+ :param cascade_backrefs=True:
+ a boolean value indicating if the ``save-update`` cascade should
+ operate along an assignment event intercepted by a backref.
+ When set to ``False``,
+ the attribute managed by this relationship will not cascade
+ an incoming transient object into the session of a
+ persistent parent, if the event is received via backref.
+
+ That is::
+
+ mapper(A, a_table, properties={
+ 'bs':relationship(B, backref="a", cascade_backrefs=False)
+ })
+
+ If an ``A()`` is present in the session, assigning it to
+ the "a" attribute on a transient ``B()`` will not place
+ the ``B()`` into the session. To set the flag in the other
+ direction, i.e. so that ``A().bs.append(B())`` won't add
+ a transient ``A()`` into the session for a persistent ``B()``::
+
+ mapper(A, a_table, properties={
+ 'bs':relationship(B,
+ backref=backref("a", cascade_backrefs=False)
+ )
+ })
+
+ See the section :ref:`unitofwork_cascades` for more background
+ on configuring cascades.
+
+ :param collection_class:
+ a class or callable that returns a new list-holding object. will
+ be used in place of a plain list for storing elements.
+ Behavior of this attribute is described in detail at
+ :ref:`custom_collections`.
+
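+ For example, a sketch that stores the collection as a ``set``
+ rather than a plain list (names are illustrative)::
+
+     mapper(Parent, properties={
+         'children': relationship(Child, collection_class=set)
+     })
+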
+ :param comparator_factory:
+ a class which extends :class:`.RelationshipProperty.Comparator` which
+ provides custom SQL clause generation for comparison operations.
+
+ :param distinct_target_key=None:
+ Indicate if a "subquery" eager load should apply the DISTINCT
+ keyword to the innermost SELECT statement. When left as ``None``,
+ the DISTINCT keyword will be applied in those cases when the target
+ columns do not comprise the full primary key of the target table.
+ When set to ``True``, the DISTINCT keyword is applied to the innermost
+ SELECT unconditionally.
+
+ It may be desirable to set this flag to False when the DISTINCT
+ keyword degrades the performance of the innermost subquery more
+ than the presence of duplicate innermost rows does.
+
+ .. versionadded:: 0.8.3 - distinct_target_key allows the
+ subquery eager loader to apply a DISTINCT modifier to the
+ innermost SELECT.
+
+ .. versionchanged:: 0.9.0 - distinct_target_key now defaults to
+ ``None``, so that the feature enables itself automatically for
+ those cases where the innermost query targets a non-unique
+ key.
+
+ :param doc:
+ docstring which will be applied to the resulting descriptor.
+
+ :param extension:
+ an :class:`.AttributeExtension` instance, or list of extensions,
+ which will be prepended to the list of attribute listeners for
+ the resulting descriptor placed on the class.
+ **Deprecated.** Please see :class:`.AttributeEvents`.
+
+ :param foreign_keys:
+ a list of columns which are to be used as "foreign key" columns,
+ or columns which refer to the value in a remote column, within the
+ context of this :func:`.relationship` object's ``primaryjoin``
+ condition. That is, if the ``primaryjoin`` condition of this
+ :func:`.relationship` is ``a.id == b.a_id``, and the values in ``b.a_id``
+ are required to be present in ``a.id``, then the "foreign key" column
+ of this :func:`.relationship` is ``b.a_id``.
+
+ In normal cases, the ``foreign_keys`` parameter is **not required.**
+ :func:`.relationship` will **automatically** determine which columns
+ in the ``primaryjoin`` condition are to be considered "foreign key"
+ columns based on those :class:`.Column` objects that specify
+ :class:`.ForeignKey`, or are otherwise listed as referencing columns
+ in a :class:`.ForeignKeyConstraint` construct. ``foreign_keys`` is only
+ needed when:
+
+ 1. There is more than one way to construct a join from the local
+ table to the remote table, as there are multiple foreign key
+ references present. Setting ``foreign_keys`` will limit the
+ :func:`.relationship` to consider just those columns specified
+ here as "foreign".
+
+ .. versionchanged:: 0.8
+ A multiple-foreign key join ambiguity can be resolved by
+ setting the ``foreign_keys`` parameter alone, without the
+ need to explicitly set ``primaryjoin`` as well.
+
+ 2. The :class:`.Table` being mapped does not actually have
+ :class:`.ForeignKey` or :class:`.ForeignKeyConstraint`
+ constructs present, often because the table
+ was reflected from a database that does not support foreign key
+ reflection (MySQL MyISAM).
+
+ 3. The ``primaryjoin`` argument is used to construct a non-standard
+ join condition, which makes use of columns or expressions that do
+ not normally refer to their "parent" column, such as a join condition
+ expressed by a complex comparison using a SQL function.
+
+ The :func:`.relationship` construct will raise informative error messages
+ that suggest the use of the ``foreign_keys`` parameter when presented
+ with an ambiguous condition. In typical cases, if :func:`.relationship`
+ doesn't raise any exceptions, the ``foreign_keys`` parameter is usually
+ not needed.
+
+ ``foreign_keys`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ .. seealso::
+
+ :ref:`relationship_foreign_keys`
+
+ :ref:`relationship_custom_foreign`
+
+ :func:`.foreign` - allows direct annotation of the "foreign" columns
+ within a ``primaryjoin`` condition.
+
+ .. versionadded:: 0.8
+ The :func:`.foreign` annotation can also be applied
+ directly to the ``primaryjoin`` expression, which is an alternate,
+ more specific system of describing which columns in a particular
+ ``primaryjoin`` should be considered "foreign".
+
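+ For example, a sketch of case 1 above, where two foreign keys
+ reference the same target table and ``foreign_keys`` disambiguates
+ each relationship (names are illustrative)::
+
+     class Invoice(Base):
+         __tablename__ = 'invoice'
+         id = Column(Integer, primary_key=True)
+         billing_address_id = Column(Integer, ForeignKey('address.id'))
+         shipping_address_id = Column(Integer, ForeignKey('address.id'))
+
+         billing_address = relationship("Address",
+                             foreign_keys=[billing_address_id])
+         shipping_address = relationship("Address",
+                             foreign_keys=[shipping_address_id])
+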
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.MapperProperty.info` attribute of this object.
+
+ .. versionadded:: 0.8
+
+ :param innerjoin=False:
+ when ``True``, joined eager loads will use an inner join to join
+ against related tables instead of an outer join. The purpose
+ of this option is generally one of performance, as inner joins
+ generally perform better than outer joins. Another reason can be
+ the use of ``with_lockmode``, which does not support outer joins.
+
+ This flag can be set to ``True`` when the relationship references an
+ object via many-to-one using local foreign keys that are not nullable,
+ or when the reference is one-to-one or a collection that is
+ guaranteed to have at least one entry.
+
+ :param join_depth:
+ when non-``None``, an integer value indicating how many levels
+ deep "eager" loaders should join on a self-referring or cyclical
+ relationship. The number counts how many times the same Mapper
+ shall be present in the loading condition along a particular join
+ branch. When left at its default of ``None``, eager loaders
+ will stop chaining when they encounter the same target mapper
+ which is already higher up in the chain. This option applies
+ both to joined- and subquery- eager loaders.
+
+ :param lazy='select': specifies
+ how the related items should be loaded. Default value is
+ ``select``. Values include:
+
+ * ``select`` - items should be loaded lazily when the property is first
+ accessed, using a separate SELECT statement, or identity map
+ fetch for simple many-to-one references.
+
+ * ``immediate`` - items should be loaded as the parents are loaded,
+ using a separate SELECT statement, or identity map fetch for
+ simple many-to-one references.
+
+ .. versionadded:: 0.6.5
+
+ * ``joined`` - items should be loaded "eagerly" in the same query as
+ that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
+ the join is "outer" or not is determined by the ``innerjoin``
+ parameter.
+
+ * ``subquery`` - items should be loaded "eagerly" as the parents are
+ loaded, using one additional SQL statement, which issues a JOIN to a
+ subquery of the original statement, for each collection requested.
+
+ * ``noload`` - no loading should occur at any time. This is to
+ support "write-only" attributes, or attributes which are
+ populated in some manner specific to the application.
+
+ * ``dynamic`` - the attribute will return a pre-configured
+ :class:`~sqlalchemy.orm.query.Query` object for all read
+ operations, onto which further filtering operations can be
+ applied before iterating the results. See
+ the section :ref:`dynamic_relationship` for more details.
+
+ * True - a synonym for 'select'
+
+ * False - a synonym for 'joined'
+
+ * None - a synonym for 'noload'
+
+ Detailed discussion of loader strategies is at :doc:`/orm/loading`.
+
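+ For example, a sketch configuring joined eager loading as the
+ default loader strategy for a relationship (names are
+ illustrative)::
+
+     mapper(Parent, properties={
+         'children': relationship(Child, lazy='joined')
+     })
+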
+ :param load_on_pending=False:
+ Indicates loading behavior for transient or pending parent objects.
+
+ When set to ``True``, causes the lazy-loader to
+ issue a query for a parent object that is not persistent, meaning it has
+ never been flushed. This may take effect for a pending object when
+ autoflush is disabled, or for a transient object that has been
+ "attached" to a :class:`.Session` but is not part of its pending
+ collection.
+
+ The load_on_pending flag does not improve behavior
+ when the ORM is used normally - object references should be constructed
+ at the object level, not at the foreign key level, so that they
+ are present in an ordinary way before flush() proceeds. This flag
+ is not intended for general use.
+
+ .. versionadded:: 0.6.5
+
+ .. seealso::
+
+ :meth:`.Session.enable_relationship_loading` - this method establishes
+ "load on pending" behavior for the whole object, and also allows
+ loading on objects that remain transient or detached.
+
+ :param order_by:
+ indicates the ordering that should be applied when loading these
+ items. ``order_by`` is expected to refer to one of the :class:`.Column`
+ objects to which the target class is mapped, or
+ the attribute itself bound to the target class which refers
+ to the column.
+
+ ``order_by`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param passive_deletes=False:
+ Indicates loading behavior during delete operations.
+
+ A value of True indicates that unloaded child items should not
+ be loaded during a delete operation on the parent. Normally,
+ when a parent item is deleted, all child items are loaded so
+ that they can either be marked as deleted, or have their
+ foreign key to the parent set to NULL. Marking this flag as
+ True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
+ place which will handle updating/deleting child rows on the
+ database side.
+
+ Additionally, setting the flag to the string value 'all' will
+ disable the "nulling out" of the child foreign keys, when there
+ is no delete or delete-orphan cascade enabled. This is
+ typically used when a trigger or an error-raising rule is in
+ place on the database side. Note that the foreign key
+ attributes on in-session child objects will not be changed
+ after a flush occurs, so this is a very special use-case
+ setting.
+
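+ For example, a sketch pairing ``passive_deletes`` with an
+ ``ON DELETE CASCADE`` rule on the foreign key (names are
+ illustrative)::
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer,
+                         ForeignKey('parent.id', ondelete='CASCADE'))
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         children = relationship("Child", passive_deletes=True)
+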
+ :param passive_updates=True:
+ Indicates loading and INSERT/UPDATE/DELETE behavior when the
+ source of a foreign key value changes (i.e. an "on update"
+ cascade), which are typically the primary key columns of the
+ source row.
+
+ When True, it is assumed that ON UPDATE CASCADE is configured on
+ the foreign key in the database, and that the database will
+ handle propagation of an UPDATE from a source column to
+ dependent rows. Note that with databases which enforce
+ referential integrity (e.g. PostgreSQL, MySQL with InnoDB tables),
+ ON UPDATE CASCADE is required for this operation. The
+ relationship() will update the value of the attribute on related
+ items which are locally present in the session during a flush.
+
+ When False, it is assumed that the database does not enforce
+ referential integrity and will not be issuing its own CASCADE
+ operation for an update. The relationship() will issue the
+ appropriate UPDATE statements to the database in response to the
+ change of a referenced key, and items locally present in the
+ session during a flush will also be refreshed.
+
+ This flag should probably be set to False if primary key changes
+ are expected and the database in use doesn't support CASCADE
+ (e.g. SQLite, MySQL MyISAM tables).
+
+ Also see the passive_updates flag on ``mapper()``.
+
+ A future SQLAlchemy release will provide a "detect" feature for
+ this flag.
+
+ :param post_update:
+ this indicates that the relationship should be handled by a
+ second UPDATE statement after an INSERT or before a
+ DELETE. Currently, it will also issue an UPDATE after the
+ instance itself has been UPDATEd, although technically this should
+ be improved. This flag is used to handle saving bi-directional
+ dependencies between two individual rows (i.e. each row
+ references the other), where it would otherwise be impossible to
+ INSERT or DELETE both rows fully since one row exists before the
+ other. Use this flag when a particular mapping arrangement will
+ incur two rows that are dependent on each other, such as a table
+ that has a one-to-many relationship to a set of child rows, and
+ also has a column that references a single child row within that
+ list (i.e. both tables contain a foreign key to each other). If
+ a ``flush()`` operation returns an error that a "cyclical
+ dependency" was detected, this is a cue that you might want to
+ use ``post_update`` to "break" the cycle.
+
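+ For example, a sketch of the mutually-dependent pattern described
+ above, where the parent also references one "favorite" child row
+ (names are illustrative; in practice one of the two foreign keys
+ would also need ``use_alter`` so the tables can be created)::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         favorite_child_id = Column(Integer, ForeignKey('child.id'))
+         children = relationship("Child",
+                         primaryjoin="Parent.id==Child.parent_id")
+         favorite_child = relationship("Child",
+                         primaryjoin="Parent.favorite_child_id==Child.id",
+                         post_update=True)
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('parent.id'))
+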
+ :param primaryjoin:
+ a SQL expression that will be used as the primary
+ join of this child object against the parent object, or in a
+ many-to-many relationship the join of the primary object to the
+ association table. By default, this value is computed based on the
+ foreign key relationships of the parent and child tables (or association
+ table).
+
+ ``primaryjoin`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
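+ For example, a sketch of an explicit ``primaryjoin`` given as a
+ Python-evaluable string under Declarative (names are
+ illustrative)::
+
+     class User(Base):
+         __tablename__ = 'user'
+         id = Column(Integer, primary_key=True)
+         addresses = relationship("Address",
+                         primaryjoin="User.id==Address.user_id")
+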
+ :param remote_side:
+ used for self-referential relationships, indicates the column or
+ list of columns that form the "remote side" of the relationship.
+
+ ``remote_side`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ .. versionchanged:: 0.8
+ The :func:`.remote` annotation can also be applied
+ directly to the ``primaryjoin`` expression, which is an alternate,
+ more specific system of describing which columns in a particular
+ ``primaryjoin`` should be considered "remote".
+
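+ For example, a sketch of an adjacency-list pattern, where the
+ "remote side" of the many-to-one ``parent`` reference is the
+ related row's primary key (names are illustrative)::
+
+     class Node(Base):
+         __tablename__ = 'node'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('node.id'))
+         # many-to-one reference to the parent row; "remote side"
+         # is the node.id column on that parent row
+         parent = relationship("Node", remote_side=[id])
+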
+ :param query_class:
+ a :class:`.Query` subclass that will be used as the base of the
+ "appender query" returned by a "dynamic" relationship, that
+ is, a relationship that specifies ``lazy="dynamic"`` or was
+ otherwise constructed using the :func:`.orm.dynamic_loader`
+ function.
+
+ :param secondaryjoin:
+ a SQL expression that will be used as the join of
+ an association table to the child object. By default, this value is
+ computed based on the foreign key relationships of the association and
+ child tables.
+
+ ``secondaryjoin`` may also be passed as a callable function
+ which is evaluated at mapper initialization time, and may be passed as a
+ Python-evaluable string when using Declarative.
+
+ :param single_parent=(True|False):
+ when True, installs a validator which will prevent objects
+ from being associated with more than one parent at a time.
+ This is used for many-to-one or many-to-many relationships that
+ should be treated either as one-to-one or one-to-many. Its
+ usage is optional unless delete-orphan cascade is also
+ set on this relationship(), in which case it's required.
+
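+ For example, a sketch enabling delete-orphan cascade on a
+ many-to-one, which requires ``single_parent=True`` (names are
+ illustrative)::
+
+     mapper(Child, properties={
+         'parent': relationship(Parent,
+                       cascade="all, delete-orphan",
+                       single_parent=True)
+     })
+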
+ :param uselist=(True|False):
+ a boolean that indicates if this property should be loaded as a
+ list or a scalar. In most cases, this value is determined
+ automatically by ``relationship()``, based on the type and direction
+ of the relationship - one to many forms a list, many to one
+ forms a scalar, many to many is a list. If a scalar is desired
+ where normally a list would be present, such as a bi-directional
+ one-to-one relationship, set uselist to False.
+
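+ For example, a sketch of a one-to-one configuration using
+ ``uselist=False`` on the scalar side (names are illustrative)::
+
+     class Parent(Base):
+         __tablename__ = 'parent'
+         id = Column(Integer, primary_key=True)
+         child = relationship("Child", uselist=False)
+
+     class Child(Base):
+         __tablename__ = 'child'
+         id = Column(Integer, primary_key=True)
+         parent_id = Column(Integer, ForeignKey('parent.id'))
+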
+ :param viewonly=False:
+ when set to True, the relationship is used only for loading objects
+ within the relationship, and has no effect on the unit-of-work
+ flush process. Relationships with viewonly can specify any kind of
+ join conditions to provide additional views of related objects
+ onto a parent object. Note that the functionality of a viewonly
+ relationship has its limits - complicated join conditions may
+ not compile into eager or lazy loaders properly. If this is the
+ case, use an alternative method.
+
+ .. versionchanged:: 0.6
+ :func:`relationship` was renamed from its previous name
+ :func:`relation`.
+
+ """
+
+ self.uselist = uselist
+ self.argument = argument
+ self.secondary = secondary
+ self.primaryjoin = primaryjoin
+ self.secondaryjoin = secondaryjoin
+ self.post_update = post_update
+ self.direction = None
+ self.viewonly = viewonly
+ self.lazy = lazy
+ self.single_parent = single_parent
+ self._user_defined_foreign_keys = foreign_keys
+ self.collection_class = collection_class
+ self.passive_deletes = passive_deletes
+ self.cascade_backrefs = cascade_backrefs
+ self.passive_updates = passive_updates
+ self.remote_side = remote_side
+ self.enable_typechecks = enable_typechecks
+ self.query_class = query_class
+ self.innerjoin = innerjoin
+ self.distinct_target_key = distinct_target_key
+ self.doc = doc
+ self.active_history = active_history
+ self.join_depth = join_depth
+ self.local_remote_pairs = _local_remote_pairs
+ self.extension = extension
+ self.load_on_pending = load_on_pending
+ self.comparator_factory = comparator_factory or \
+ RelationshipProperty.Comparator
+ self.comparator = self.comparator_factory(self, None)
+ util.set_creation_order(self)
+
+ if info is not None:
+ self.info = info
+
+ if strategy_class:
+ self.strategy_class = strategy_class
+ else:
+ self.strategy_class = self._strategy_lookup(("lazy", self.lazy))
+
+ self._reverse_property = set()
+
+ self.cascade = cascade if cascade is not False \
+ else "save-update, merge"
+
+ self.order_by = order_by
+
+ self.back_populates = back_populates
+
+ if self.back_populates:
+ if backref:
+ raise sa_exc.ArgumentError(
+ "backref and back_populates keyword arguments "
+ "are mutually exclusive")
+ self.backref = None
+ else:
+ self.backref = backref
+
+ def instrument_class(self, mapper):
+ attributes.register_descriptor(
+ mapper.class_,
+ self.key,
+ comparator=self.comparator_factory(self, mapper),
+ parententity=mapper,
+ doc=self.doc,
+ )
+
+ class Comparator(PropComparator):
+ """Produce boolean, comparison, and other operators for
+ :class:`.RelationshipProperty` attributes.
+
+ See the documentation for :class:`.PropComparator` for a brief overview
+ of ORM level operator definition.
+
+ See also:
+
+ :class:`.PropComparator`
+
+ :class:`.ColumnProperty.Comparator`
+
+ :class:`.ColumnOperators`
+
+ :ref:`types_operators`
+
+ :attr:`.TypeEngine.comparator_factory`
+
+ """
+
+ _of_type = None
+
+ def __init__(self, prop, parentmapper, adapt_to_entity=None, of_type=None):
+ """Construction of :class:`.RelationshipProperty.Comparator`
+ is internal to the ORM's attribute mechanics.
+
+ """
+ self.prop = prop
+ self._parentmapper = parentmapper
+ self._adapt_to_entity = adapt_to_entity
+ if of_type:
+ self._of_type = of_type
+
+ def adapt_to_entity(self, adapt_to_entity):
+ return self.__class__(self.property, self._parentmapper,
+ adapt_to_entity=adapt_to_entity,
+ of_type=self._of_type)
+
+ @util.memoized_property
+ def mapper(self):
+ """The target :class:`.Mapper` referred to by this
+ :class:`.RelationshipProperty.Comparator`.
+
+ This is the "target" or "remote" side of the
+ :func:`.relationship`.
+
+ """
+ return self.property.mapper
+
+ @util.memoized_property
+ def _parententity(self):
+ return self.property.parent
+
+ def _source_selectable(self):
+ if self._adapt_to_entity:
+ return self._adapt_to_entity.selectable
+ else:
+ return self.property.parent._with_polymorphic_selectable
+
+ def __clause_element__(self):
+ adapt_from = self._source_selectable()
+ if self._of_type:
+ of_type = inspect(self._of_type).mapper
+ else:
+ of_type = None
+
+ pj, sj, source, dest, \
+ secondary, target_adapter = self.property._create_joins(
+ source_selectable=adapt_from,
+ source_polymorphic=True,
+ of_type=of_type)
+ if sj is not None:
+ return pj & sj
+ else:
+ return pj
+
+ def of_type(self, cls):
+ """Produce a construct that represents a particular 'subtype' of
+ attribute for the parent class.
+
+ Currently this is usable in conjunction with :meth:`.Query.join`
+ and :meth:`.Query.outerjoin`.
+
+ """
+ return RelationshipProperty.Comparator(
+ self.property,
+ self._parentmapper,
+ adapt_to_entity=self._adapt_to_entity,
+ of_type=cls)
+
+ def in_(self, other):
+ """Produce an IN clause - this is not implemented
+ for :func:`~.orm.relationship`-based attributes at this time.
+
+ """
+ raise NotImplementedError('in_() not yet supported for '
+ 'relationships. For a simple many-to-one, use '
+ 'in_() against the set of foreign key values.')
+
+ __hash__ = None
+
+ def __eq__(self, other):
+ """Implement the ``==`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop == <some object>
+
+ this will typically produce a
+ clause such as::
+
+ mytable.related_id == <some id>
+
+ Where ``<some id>`` is the primary key of the given
+ object.
+
+ The ``==`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use :meth:`~.RelationshipProperty.Comparator.contains`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce a NOT EXISTS clause.
+
+ """
+ if isinstance(other, (util.NoneType, expression.Null)):
+ if self.property.direction in [ONETOMANY, MANYTOMANY]:
+ return ~self._criterion_exists()
+ else:
+ return _orm_annotate(self.property._optimized_compare(
+ None, adapt_source=self.adapter))
+ elif self.property.uselist:
+ raise sa_exc.InvalidRequestError("Can't compare a colle"
+ "ction to an object or collection; use "
+ "contains() to test for membership.")
+ else:
+ return _orm_annotate(self.property._optimized_compare(other,
+ adapt_source=self.adapter))
+
+ def _criterion_exists(self, criterion=None, **kwargs):
+ if getattr(self, '_of_type', None):
+ info = inspect(self._of_type)
+ target_mapper, to_selectable, is_aliased_class = \
+ info.mapper, info.selectable, info.is_aliased_class
+ if self.property._is_self_referential and not is_aliased_class:
+ to_selectable = to_selectable.alias()
+
+ single_crit = target_mapper._single_table_criterion
+ if single_crit is not None:
+ if criterion is not None:
+ criterion = single_crit & criterion
+ else:
+ criterion = single_crit
+ else:
+ is_aliased_class = False
+ to_selectable = None
+
+ if self.adapter:
+ source_selectable = self._source_selectable()
+ else:
+ source_selectable = None
+
+ pj, sj, source, dest, secondary, target_adapter = \
+ self.property._create_joins(dest_polymorphic=True,
+ dest_selectable=to_selectable,
+ source_selectable=source_selectable)
+
+ for k in kwargs:
+ crit = getattr(self.property.mapper.class_, k) == kwargs[k]
+ if criterion is None:
+ criterion = crit
+ else:
+ criterion = criterion & crit
+
+ # annotate the *local* side of the join condition, in the case
+ # of pj + sj this is the full primaryjoin; in the case of just
+ # pj it's the local side of the primaryjoin.
+ if sj is not None:
+ j = _orm_annotate(pj) & sj
+ else:
+ j = _orm_annotate(pj, exclude=self.property.remote_side)
+
+ if criterion is not None and target_adapter and not is_aliased_class:
+ # limit this adapter to annotated only?
+ criterion = target_adapter.traverse(criterion)
+
+ # only have the "joined left side" of what we
+ # return be subject to Query adaptation. The right
+ # side of it is used for an exists() subquery and
+ # should not correlate or otherwise reach out
+ # to anything in the enclosing query.
+ if criterion is not None:
+ criterion = criterion._annotate(
+ {'no_replacement_traverse': True})
+
+ crit = j & sql.True_._ifnone(criterion)
+
+ ex = sql.exists([1], crit, from_obj=dest).correlate_except(dest)
+ if secondary is not None:
+ ex = ex.correlate_except(secondary)
+ return ex
+
+ def any(self, criterion=None, **kwargs):
+ """Produce an expression that tests a collection against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.any(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
+ AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.any` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.any` is particularly
+ useful for testing for empty collections::
+
+ session.query(MyClass).filter(
+ ~MyClass.somereference.any()
+ )
+
+ will produce::
+
+ SELECT * FROM my_table WHERE
+ NOT EXISTS (SELECT 1 FROM related WHERE
+ related.my_id=my_table.id)
+
+ :meth:`~.RelationshipProperty.Comparator.any` is only
+ valid for collections, i.e. a :func:`.relationship`
+ that has ``uselist=True``. For scalar references,
+ use :meth:`~.RelationshipProperty.Comparator.has`.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'any()' not implemented for scalar "
+ "attributes. Use has()."
+ )
+
+ return self._criterion_exists(criterion, **kwargs)
+
+ def has(self, criterion=None, **kwargs):
+ """Produce an expression that tests a scalar reference against
+ particular criterion, using EXISTS.
+
+ An expression like::
+
+ session.query(MyClass).filter(
+ MyClass.somereference.has(SomeRelated.x==2)
+ )
+
+
+ Will produce a query like::
+
+ SELECT * FROM my_table WHERE
+ EXISTS (SELECT 1 FROM related WHERE
+ related.id==my_table.related_id AND related.x=2)
+
+ Because :meth:`~.RelationshipProperty.Comparator.has` uses
+ a correlated subquery, its performance is not nearly as
+ good when compared against large target tables as that of
+ using a join.
+
+ :meth:`~.RelationshipProperty.Comparator.has` is only
+ valid for scalar references, i.e. a :func:`.relationship`
+ that has ``uselist=False``. For collection references,
+ use :meth:`~.RelationshipProperty.Comparator.any`.
+
+ """
+ if self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'has()' not implemented for collections. "
+ "Use any().")
+ return self._criterion_exists(criterion, **kwargs)
+
+ def contains(self, other, **kwargs):
+ """Return a simple expression that tests a collection for
+ containment of a particular item.
+
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ only valid for a collection, i.e. a
+ :func:`~.orm.relationship` that implements
+ one-to-many or many-to-many with ``uselist=True``.
+
+ When used in a simple one-to-many context, an
+ expression like::
+
+ MyClass.contains(other)
+
+ Produces a clause like::
+
+ mytable.id == <some id>
+
+ Where ``<some id>`` is the value of the foreign key
+ attribute on ``other`` which refers to the primary
+ key of its parent object. From this it follows that
+ :meth:`~.RelationshipProperty.Comparator.contains` is
+ very useful when used with simple one-to-many
+ operations.
+
+ For many-to-many operations, the behavior of
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ has more caveats. The association table will be
+ rendered in the statement, producing an "implicit"
+ join, that is, includes multiple tables in the FROM
+ clause which are equated in the WHERE clause::
+
+ query(MyClass).filter(MyClass.contains(other))
+
+ Produces a query like::
+
+ SELECT * FROM my_table, my_association_table AS
+ my_association_table_1 WHERE
+ my_table.id = my_association_table_1.parent_id
+ AND my_association_table_1.child_id = <some id>
+
+ Where ``<some id>`` would be the primary key of
+ ``other``. From the above, it is clear that
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ will **not** work with many-to-many collections when
+ used in queries that move beyond simple AND
+ conjunctions, such as multiple
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ expressions joined by OR. In such cases subqueries or
+ explicit "outer joins" will need to be used instead.
+ See :meth:`~.RelationshipProperty.Comparator.any` for
+ a less-performant alternative using EXISTS, or refer
+ to :meth:`.Query.outerjoin` as well as :ref:`ormtutorial_joins`
+ for more details on constructing outer joins.
+
+ """
+ if not self.property.uselist:
+ raise sa_exc.InvalidRequestError(
+ "'contains' not implemented for scalar "
+ "attributes. Use ==")
+ clause = self.property._optimized_compare(other,
+ adapt_source=self.adapter)
+
+ if self.property.secondaryjoin is not None:
+ clause.negation_clause = \
+ self.__negated_contains_or_equals(other)
+
+ return clause
+
+ def __negated_contains_or_equals(self, other):
+ if self.property.direction == MANYTOONE:
+ state = attributes.instance_state(other)
+
+ def state_bindparam(x, state, col):
+ o = state.obj() # strong ref
+ return sql.bindparam(x, unique=True, callable_=lambda: \
+ self.property.mapper._get_committed_attr_by_column(o, col))
+
+ def adapt(col):
+ if self.adapter:
+ return self.adapter(col)
+ else:
+ return col
+
+ if self.property._use_get:
+ return sql.and_(*[
+ sql.or_(
+ adapt(x) != state_bindparam(adapt(x), state, y),
+ adapt(x) == None)
+ for (x, y) in self.property.local_remote_pairs])
+
+ criterion = sql.and_(*[
+     x == y for (x, y) in zip(
+         self.property.mapper.primary_key,
+         self.property.mapper.primary_key_from_instance(other))
+ ])
+ return ~self._criterion_exists(criterion)
+
+ def __ne__(self, other):
+ """Implement the ``!=`` operator.
+
+ In a many-to-one context, such as::
+
+ MyClass.some_prop != <some object>
+
+ This will typically produce a clause such as::
+
+ mytable.related_id != <some id>
+
+ Where ``<some id>`` is the primary key of the
+ given object.
+
+ The ``!=`` operator provides partial functionality for non-
+ many-to-one comparisons:
+
+ * Comparisons against collections are not supported.
+ Use
+ :meth:`~.RelationshipProperty.Comparator.contains`
+ in conjunction with :func:`~.expression.not_`.
+ * Compared to a scalar one-to-many, will produce a
+ clause that compares the target columns in the parent to
+ the given target.
+ * Compared to a scalar many-to-many, an alias
+ of the association table will be rendered as
+ well, forming a natural join that is part of the
+ main body of the query. This will not work for
+ queries that go beyond simple AND conjunctions of
+ comparisons, such as those which use OR. Use
+ explicit joins, outerjoins, or
+ :meth:`~.RelationshipProperty.Comparator.has` in
+ conjunction with :func:`~.expression.not_` for
+ more comprehensive non-many-to-one scalar
+ membership tests.
+ * Comparisons against ``None`` given in a one-to-many
+ or many-to-many context produce an EXISTS clause.
+
+ """
+ if isinstance(other, (util.NoneType, expression.Null)):
+ if self.property.direction == MANYTOONE:
+ return sql.or_(*[x != None for x in
+ self.property._calculated_foreign_keys])
+ else:
+ return self._criterion_exists()
+ elif self.property.uselist:
+ raise sa_exc.InvalidRequestError("Can't compare a collection"
+ " to an object or collection; use "
+ "contains() to test for membership.")
+ else:
+ return self.__negated_contains_or_equals(other)
+
+ @util.memoized_property
+ def property(self):
+ if mapperlib.Mapper._new_mappers:
+ mapperlib.Mapper._configure_all()
+ return self.prop
+
+ def compare(self, op, value,
+ value_is_parent=False,
+ alias_secondary=True):
+ if op == operators.eq:
+ if value is None:
+ if self.uselist:
+ return ~sql.exists([1], self.primaryjoin)
+ else:
+ return self._optimized_compare(None,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
+ else:
+ return self._optimized_compare(value,
+ value_is_parent=value_is_parent,
+ alias_secondary=alias_secondary)
+ else:
+ return op(self.comparator, value)
+
+ def _optimized_compare(self, value, value_is_parent=False,
+ adapt_source=None,
+ alias_secondary=True):
+ if value is not None:
+ value = attributes.instance_state(value)
+ return self._lazy_strategy.lazy_clause(value,
+ reverse_direction=not value_is_parent,
+ alias_secondary=alias_secondary,
+ adapt_source=adapt_source)
+
+ def __str__(self):
+ return str(self.parent.class_.__name__) + "." + self.key
+
+ def merge(self,
+ session,
+ source_state,
+ source_dict,
+ dest_state,
+ dest_dict,
+ load, _recursive):
+
+ if load:
+ for r in self._reverse_property:
+ if (source_state, r) in _recursive:
+ return
+
+ if not "merge" in self._cascade:
+ return
+
+ if self.key not in source_dict:
+ return
+
+ if self.uselist:
+ instances = source_state.get_impl(self.key).\
+ get(source_state, source_dict)
+ if hasattr(instances, '_sa_adapter'):
+ # convert collections to adapters to get a true iterator
+ instances = instances._sa_adapter
+
+ if load:
+ # for a full merge, pre-load the destination collection,
+ # so that individual _merge of each item pulls from identity
+ # map for those already present.
+ # also assumes CollectionAttributeImpl behavior of loading
+ # "old" list in any case
+ dest_state.get_impl(self.key).get(dest_state, dest_dict)
+
+ dest_list = []
+ for current in instances:
+ current_state = attributes.instance_state(current)
+ current_dict = attributes.instance_dict(current)
+ _recursive[(current_state, self)] = True
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
+ if obj is not None:
+ dest_list.append(obj)
+
+ if not load:
+ coll = attributes.init_state_collection(dest_state,
+ dest_dict, self.key)
+ for c in dest_list:
+ coll.append_without_event(c)
+ else:
+ dest_state.get_impl(self.key)._set_iterable(dest_state,
+ dest_dict, dest_list)
+ else:
+ current = source_dict[self.key]
+ if current is not None:
+ current_state = attributes.instance_state(current)
+ current_dict = attributes.instance_dict(current)
+ _recursive[(current_state, self)] = True
+ obj = session._merge(current_state, current_dict,
+ load=load, _recursive=_recursive)
+ else:
+ obj = None
+
+ if not load:
+ dest_dict[self.key] = obj
+ else:
+ dest_state.get_impl(self.key).set(dest_state,
+ dest_dict, obj, None)
+
+ def _value_as_iterable(self, state, dict_, key,
+ passive=attributes.PASSIVE_OFF):
+ """Return a list of tuples (state, obj) for the given
+ key.
+
+ Returns an empty list if the value is None, empty, or PASSIVE_NO_RESULT.
+ """
+
+ impl = state.manager[key].impl
+ x = impl.get(state, dict_, passive=passive)
+ if x is attributes.PASSIVE_NO_RESULT or x is None:
+ return []
+ elif hasattr(impl, 'get_collection'):
+ return [
+ (attributes.instance_state(o), o) for o in
+ impl.get_collection(state, dict_, x, passive=passive)
+ ]
+ else:
+ return [(attributes.instance_state(x), x)]
+
+ def cascade_iterator(self, type_, state, dict_,
+ visited_states, halt_on=None):
+ #assert type_ in self._cascade
+
+ # only actively lazy load on the 'delete' cascade
+ if type_ != 'delete' or self.passive_deletes:
+ passive = attributes.PASSIVE_NO_INITIALIZE
+ else:
+ passive = attributes.PASSIVE_OFF
+
+ if type_ == 'save-update':
+ tuples = state.manager[self.key].impl.\
+ get_all_pending(state, dict_)
+
+ else:
+ tuples = self._value_as_iterable(state, dict_, self.key,
+ passive=passive)
+
+ skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
+ not in self._cascade
+
+ for instance_state, c in tuples:
+ if instance_state in visited_states:
+ continue
+
+ if c is None:
+ # would like to emit a warning here, but
+ # would not be consistent with collection.append(None)
+ # current behavior of silently skipping.
+ # see [ticket:2229]
+ continue
+
+ instance_dict = attributes.instance_dict(c)
+
+ if halt_on and halt_on(instance_state):
+ continue
+
+ if skip_pending and not instance_state.key:
+ continue
+
+ instance_mapper = instance_state.manager.mapper
+
+ if not instance_mapper.isa(self.mapper.class_manager.mapper):
+ raise AssertionError("Attribute '%s' on class '%s' "
+ "doesn't handle objects "
+ "of type '%s'" % (
+ self.key,
+ self.parent.class_,
+ c.__class__
+ ))
+
+ visited_states.add(instance_state)
+
+ yield c, instance_mapper, instance_state, instance_dict
+
+ def _add_reverse_property(self, key):
+ other = self.mapper.get_property(key, _configure_mappers=False)
+ self._reverse_property.add(other)
+ other._reverse_property.add(self)
+
+ if not other.mapper.common_parent(self.parent):
+ raise sa_exc.ArgumentError('reverse_property %r on '
+ 'relationship %s references relationship %s, which '
+ 'does not reference mapper %s' % (key, self, other,
+ self.parent))
+ if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
+ == other.direction:
+ raise sa_exc.ArgumentError('%s and back-reference %s are '
+ 'both of the same direction %r. Did you mean to '
+ 'set remote_side on the many-to-one side?'
+ % (other, self, self.direction))
+
+ @util.memoized_property
+ def mapper(self):
+ """Return the targeted :class:`.Mapper` for this
+ :class:`.RelationshipProperty`.
+
+ This is a lazy-initializing static attribute.
+
+ """
+ if util.callable(self.argument) and \
+ not isinstance(self.argument, (type, mapperlib.Mapper)):
+ argument = self.argument()
+ else:
+ argument = self.argument
+
+ if isinstance(argument, type):
+ mapper_ = mapperlib.class_mapper(argument,
+ configure=False)
+ elif isinstance(self.argument, mapperlib.Mapper):
+ mapper_ = argument
+ else:
+ raise sa_exc.ArgumentError("relationship '%s' expects "
+ "a class or a mapper argument (received: %s)"
+ % (self.key, type(argument)))
+ return mapper_
+
+ @util.memoized_property
+ @util.deprecated("0.7", "Use .target")
+ def table(self):
+ """Return the selectable linked to this
+ :class:`.RelationshipProperty` object's target
+ :class:`.Mapper`.
+ """
+ return self.target
+
+ def do_init(self):
+ self._check_conflicts()
+ self._process_dependent_arguments()
+ self._setup_join_conditions()
+ self._check_cascade_settings(self._cascade)
+ self._post_init()
+ self._generate_backref()
+ super(RelationshipProperty, self).do_init()
+ self._lazy_strategy = self._get_strategy((("lazy", "select"),))
+
+
+ def _process_dependent_arguments(self):
+ """Convert incoming configuration arguments to their
+ proper form.
+
+ Callables are resolved, ORM annotations removed.
+
+ """
+ # accept callables for other attributes which may require
+ # deferred initialization. This technique is used
+ # by declarative "string configs" and some recipes.
+ for attr in (
+ 'order_by', 'primaryjoin', 'secondaryjoin',
+ 'secondary', '_user_defined_foreign_keys', 'remote_side',
+ ):
+ attr_value = getattr(self, attr)
+ if util.callable(attr_value):
+ setattr(self, attr, attr_value())
+
+ # remove "annotations" which are present if mapped class
+ # descriptors are used to create the join expression.
+ for attr in 'primaryjoin', 'secondaryjoin':
+ val = getattr(self, attr)
+ if val is not None:
+ setattr(self, attr, _orm_deannotate(
+ expression._only_column_elements(val, attr))
+ )
+
+ # ensure expressions in self.order_by, foreign_keys,
+ # remote_side are all columns, not strings.
+ if self.order_by is not False and self.order_by is not None:
+ self.order_by = [
+ expression._only_column_elements(x, "order_by")
+ for x in
+ util.to_list(self.order_by)]
+
+ self._user_defined_foreign_keys = \
+ util.column_set(
+ expression._only_column_elements(x, "foreign_keys")
+ for x in util.to_column_set(
+ self._user_defined_foreign_keys
+ ))
+
+ self.remote_side = \
+ util.column_set(
+ expression._only_column_elements(x, "remote_side")
+ for x in
+ util.to_column_set(self.remote_side))
+
+ self.target = self.mapper.mapped_table
+
+
+ def _setup_join_conditions(self):
+ self._join_condition = jc = JoinCondition(
+ parent_selectable=self.parent.mapped_table,
+ child_selectable=self.mapper.mapped_table,
+ parent_local_selectable=self.parent.local_table,
+ child_local_selectable=self.mapper.local_table,
+ primaryjoin=self.primaryjoin,
+ secondary=self.secondary,
+ secondaryjoin=self.secondaryjoin,
+ parent_equivalents=self.parent._equivalent_columns,
+ child_equivalents=self.mapper._equivalent_columns,
+ consider_as_foreign_keys=self._user_defined_foreign_keys,
+ local_remote_pairs=self.local_remote_pairs,
+ remote_side=self.remote_side,
+ self_referential=self._is_self_referential,
+ prop=self,
+ support_sync=not self.viewonly,
+ can_be_synced_fn=self._columns_are_mapped
+ )
+ self.primaryjoin = jc.deannotated_primaryjoin
+ self.secondaryjoin = jc.deannotated_secondaryjoin
+ self.direction = jc.direction
+ self.local_remote_pairs = jc.local_remote_pairs
+ self.remote_side = jc.remote_columns
+ self.local_columns = jc.local_columns
+ self.synchronize_pairs = jc.synchronize_pairs
+ self._calculated_foreign_keys = jc.foreign_key_columns
+ self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
+
+ def _check_conflicts(self):
+ """Test that this relationship is legal, warn about
+ inheritance conflicts."""
+
+ if not self.is_primary() \
+ and not mapperlib.class_mapper(
+ self.parent.class_,
+ configure=False).has_property(self.key):
+ raise sa_exc.ArgumentError("Attempting to assign a new "
+ "relationship '%s' to a non-primary mapper on "
+ "class '%s'. New relationships can only be added "
+ "to the primary mapper, i.e. the very first mapper "
+ "created for class '%s' " % (self.key,
+ self.parent.class_.__name__,
+ self.parent.class_.__name__))
+
+ # check for conflicting relationship() on superclass
+ if not self.parent.concrete:
+ for inheriting in self.parent.iterate_to_root():
+ if inheriting is not self.parent \
+ and inheriting.has_property(self.key):
+ util.warn("Warning: relationship '%s' on mapper "
+ "'%s' supersedes the same relationship "
+ "on inherited mapper '%s'; this can "
+ "cause dependency issues during flush"
+ % (self.key, self.parent, inheriting))
+
+ def _get_cascade(self):
+ """Return the current cascade setting for this
+ :class:`.RelationshipProperty`.
+ """
+ return self._cascade
+
+ def _set_cascade(self, cascade):
+ cascade = CascadeOptions(cascade)
+ if 'mapper' in self.__dict__:
+ self._check_cascade_settings(cascade)
+ self._cascade = cascade
+
+ if self._dependency_processor:
+ self._dependency_processor.cascade = cascade
+
+ cascade = property(_get_cascade, _set_cascade)
+
+ def _check_cascade_settings(self, cascade):
+ if cascade.delete_orphan and not self.single_parent \
+ and (self.direction is MANYTOMANY or self.direction
+ is MANYTOONE):
+ raise sa_exc.ArgumentError(
+ 'On %s, delete-orphan cascade is not supported '
+ 'on a many-to-many or many-to-one relationship '
+ 'when single_parent is not set. Set '
+ 'single_parent=True on the relationship().'
+ % self)
+ if self.direction is MANYTOONE and self.passive_deletes:
+ util.warn("On %s, 'passive_deletes' is normally configured "
+ "on one-to-many, one-to-one, many-to-many "
+ "relationships only."
+ % self)
+
+ if self.passive_deletes == 'all' and \
+ ("delete" in cascade or
+ "delete-orphan" in cascade):
+ raise sa_exc.ArgumentError(
+ "On %s, can't set passive_deletes='all' in conjunction "
+ "with 'delete' or 'delete-orphan' cascade" % self)
+
+ if cascade.delete_orphan:
+ self.mapper.primary_mapper()._delete_orphans.append(
+ (self.key, self.parent.class_)
+ )
+
+ def _columns_are_mapped(self, *cols):
+ """Return True if all columns in the given collection are
+ mapped by the tables referenced by this :class:`.Relationship`.
+
+ """
+ for c in cols:
+ if self.secondary is not None \
+ and self.secondary.c.contains_column(c):
+ continue
+ if not self.parent.mapped_table.c.contains_column(c) and \
+ not self.target.c.contains_column(c):
+ return False
+ return True
+
+ def _generate_backref(self):
+ """Interpret the 'backref' instruction to create a
+ :func:`.relationship` complementary to this one."""
+
+ if not self.is_primary():
+ return
+ if self.backref is not None and not self.back_populates:
+ if isinstance(self.backref, util.string_types):
+ backref_key, kwargs = self.backref, {}
+ else:
+ backref_key, kwargs = self.backref
+ mapper = self.mapper.primary_mapper()
+
+ check = set(mapper.iterate_to_root()).\
+ union(mapper.self_and_descendants)
+ for m in check:
+ if m.has_property(backref_key):
+ raise sa_exc.ArgumentError("Error creating backref "
+ "'%s' on relationship '%s': property of that "
+ "name exists on mapper '%s'" % (backref_key,
+ self, m))
+
+ # determine primaryjoin/secondaryjoin for the
+ # backref. Use the one we had, so that
+ # a custom join doesn't have to be specified in
+ # both directions.
+ if self.secondary is not None:
+ # for many to many, just switch primaryjoin/
+ # secondaryjoin. use the annotated
+ # pj/sj on the _join_condition.
+ pj = kwargs.pop('primaryjoin',
+ self._join_condition.secondaryjoin_minus_local)
+ sj = kwargs.pop('secondaryjoin',
+ self._join_condition.primaryjoin_minus_local)
+ else:
+ pj = kwargs.pop('primaryjoin',
+ self._join_condition.primaryjoin_reverse_remote)
+ sj = kwargs.pop('secondaryjoin', None)
+ if sj:
+ raise sa_exc.InvalidRequestError(
+ "Can't assign 'secondaryjoin' on a backref "
+ "against a non-secondary relationship."
+ )
+
+ foreign_keys = kwargs.pop('foreign_keys',
+ self._user_defined_foreign_keys)
+ parent = self.parent.primary_mapper()
+ kwargs.setdefault('viewonly', self.viewonly)
+ kwargs.setdefault('post_update', self.post_update)
+ kwargs.setdefault('passive_updates', self.passive_updates)
+ self.back_populates = backref_key
+ relationship = RelationshipProperty(
+ parent, self.secondary,
+ pj, sj,
+ foreign_keys=foreign_keys,
+ back_populates=self.key,
+ **kwargs)
+ mapper._configure_property(backref_key, relationship)
+
+ if self.back_populates:
+ self._add_reverse_property(self.back_populates)
+
+ def _post_init(self):
+ if self.uselist is None:
+ self.uselist = self.direction is not MANYTOONE
+ if not self.viewonly:
+ self._dependency_processor = \
+ dependency.DependencyProcessor.from_relationship(self)
+
+ @util.memoized_property
+ def _use_get(self):
+ """memoize the 'use_get' attribute of this RelationshipLoader's
+ lazyloader."""
+
+ strategy = self._lazy_strategy
+ return strategy.use_get
+
+ @util.memoized_property
+ def _is_self_referential(self):
+ return self.mapper.common_parent(self.parent)
+
+ def _create_joins(self, source_polymorphic=False,
+ source_selectable=None, dest_polymorphic=False,
+ dest_selectable=None, of_type=None):
+ if source_selectable is None:
+ if source_polymorphic and self.parent.with_polymorphic:
+ source_selectable = self.parent._with_polymorphic_selectable
+
+ aliased = False
+ if dest_selectable is None:
+ if dest_polymorphic and self.mapper.with_polymorphic:
+ dest_selectable = self.mapper._with_polymorphic_selectable
+ aliased = True
+ else:
+ dest_selectable = self.mapper.mapped_table
+
+ if self._is_self_referential and source_selectable is None:
+ dest_selectable = dest_selectable.alias()
+ aliased = True
+ else:
+ aliased = True
+
+ dest_mapper = of_type or self.mapper
+
+ single_crit = dest_mapper._single_table_criterion
+ aliased = aliased or (source_selectable is not None)
+
+ primaryjoin, secondaryjoin, secondary, target_adapter, dest_selectable = \
+ self._join_condition.join_targets(
+ source_selectable, dest_selectable, aliased, single_crit
+ )
+ if source_selectable is None:
+ source_selectable = self.parent.local_table
+ if dest_selectable is None:
+ dest_selectable = self.mapper.local_table
+ return (primaryjoin, secondaryjoin, source_selectable,
+ dest_selectable, secondary, target_adapter)
+
def _annotate_columns(element, annotations):
def clone(elem):
if isinstance(elem, expression.ColumnClause):
@@ -901,4 +2507,4 @@ class _ColInAnnotations(object):
self.name = name
def __call__(self, c):
- return self.name in c._annotations \ No newline at end of file
+ return self.name in c._annotations
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index 6393b74a7..c1f8f319f 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -1,5 +1,5 @@
# orm/scoping.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -163,7 +163,7 @@ def makeprop(name):
return property(get, set)
for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
- 'is_active', 'autoflush', 'no_autoflush'):
+ 'is_active', 'autoflush', 'no_autoflush', 'info'):
setattr(scoped_session, prop, makeprop(prop))
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 5a4486eef..c10a0efc9 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -1,5 +1,5 @@
# orm/session.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,25 +8,40 @@
import weakref
-from .. import util, sql, engine, exc as sa_exc, event
+from .. import util, sql, engine, exc as sa_exc
from ..sql import util as sql_util, expression
from . import (
- SessionExtension, attributes, exc, query, util as orm_util,
+ SessionExtension, attributes, exc, query,
loading, identity
)
-from .util import (
+from ..inspection import inspect
+from .base import (
object_mapper, class_mapper,
_class_to_mapper, _state_mapper, object_state,
- _none_set
+ _none_set, state_str, instance_str
)
from .unitofwork import UOWTransaction
-from .mapper import Mapper
-from .events import SessionEvents
-statelib = util.importlater("sqlalchemy.orm", "state")
+from . import state as statelib
import sys
__all__ = ['Session', 'SessionTransaction', 'SessionExtension', 'sessionmaker']
+_sessions = weakref.WeakValueDictionary()
+"""Weak-referencing dictionary of :class:`.Session` objects.
+"""
+
+def _state_session(state):
+ """Given an :class:`.InstanceState`, return the :class:`.Session`
+ associated, if any.
+ """
+ if state.session_id:
+ try:
+ return _sessions[state.session_id]
+ except KeyError:
+ pass
+ return None
+
+
class _SessionClassMethods(object):
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@@ -39,7 +54,8 @@ class _SessionClassMethods(object):
sess.close()
@classmethod
- def identity_key(cls, *args, **kwargs):
+ @util.dependencies("sqlalchemy.orm.util")
+ def identity_key(cls, orm_util, *args, **kwargs):
"""Return an identity key.
This is an alias of :func:`.util.identity_key`.
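The ``@util.dependencies`` decorator seen here replaces the old ``importlater`` pattern: the named module is imported lazily and injected as a leading argument when the method is invoked, avoiding import cycles at module load time. A rough standalone sketch of the idea, not SQLAlchemy's actual implementation:

    import importlib
    from functools import wraps

    def dependencies(*module_names):
        def decorate(fn):
            @wraps(fn)
            def wrapper(self, *args, **kw):
                # import lazily, then pass the modules ahead of the user's args
                mods = [importlib.import_module(name) for name in module_names]
                return fn(self, *(mods + list(args)), **kw)
            return wrapper
        return decorate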
@@ -469,6 +485,7 @@ class Session(_SessionClassMethods):
_enable_transaction_accounting=True,
autocommit=False, twophase=False,
weak_identity_map=True, binds=None, extension=None,
+ info=None,
query_cls=query.Query):
"""Construct a new Session.
@@ -557,6 +574,14 @@ class Session(_SessionClassMethods):
flush events, as well as a post-rollback event. **Deprecated.**
Please see :class:`.SessionEvents`.
+ :param info: optional dictionary of arbitrary data to be associated
+ with this :class:`.Session`. It is available via the :attr:`.Session.info`
+ attribute. Note that the dictionary is copied at construction time, so
+ modifications to the per-:class:`.Session` dictionary will be local
+ to that :class:`.Session`.
+
+ .. versionadded:: 0.9.0
+
:param query_cls: Class which should be used to create new Query
objects, as returned by the ``query()`` method. Defaults to
:class:`~sqlalchemy.orm.query.Query`.
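A short sketch of the new ``info`` parameter in use; because the contents are copied into the per-:class:`.Session` dictionary, later mutations stay local:

    from sqlalchemy.orm import Session

    source = {"request": "abc123"}
    sess = Session(info=source)        # contents copied into sess.info
    sess.info["user"] = "alice"        # local to this Session only
    assert "user" not in source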
@@ -599,6 +624,8 @@ class Session(_SessionClassMethods):
self._enable_transaction_accounting = _enable_transaction_accounting
self.twophase = twophase
self._query_cls = query_cls
+ if info:
+ self.info.update(info)
if extension:
for ext in util.to_list(extension):
@@ -606,22 +633,39 @@ class Session(_SessionClassMethods):
if binds is not None:
for mapperortable, bind in binds.items():
- if isinstance(mapperortable, (type, Mapper)):
+ insp = inspect(mapperortable)
+ if insp.is_selectable:
+ self.bind_table(mapperortable, bind)
+ elif insp.is_mapper:
self.bind_mapper(mapperortable, bind)
else:
- self.bind_table(mapperortable, bind)
+ assert False
+
if not self.autocommit:
self.begin()
_sessions[self.hash_key] = self
- dispatch = event.dispatcher(SessionEvents)
-
connection_callable = None
transaction = None
"""The current active or inactive :class:`.SessionTransaction`."""
+ @util.memoized_property
+ def info(self):
+ """A user-modifiable dictionary.
+
+ The initial value of this dictionary can be populated using the
+ ``info`` argument to the :class:`.Session` constructor or
+ :class:`.sessionmaker` constructor or factory methods. The dictionary
+ here is always local to this :class:`.Session` and can be modified
+ independently of all other :class:`.Session` objects.
+
+ .. versionadded:: 0.9.0
+
+ """
+ return {}
+
def begin(self, subtransactions=False, nested=False):
"""Begin a transaction on this Session.
@@ -779,7 +823,7 @@ class Session(_SessionClassMethods):
etc.) which will be used to locate a bind, if a bind
cannot otherwise be identified.
- :param close_with_result: Passed to :meth:`Engine.connect`, indicating
+ :param close_with_result: Passed to :meth:`.Engine.connect`, indicating
the :class:`.Connection` should be considered "single use",
automatically closing when the first result set is closed. This
flag only has an effect if this :class:`.Session` is configured with
@@ -1136,7 +1180,18 @@ class Session(_SessionClassMethods):
def _autoflush(self):
if self.autoflush and not self._flushing:
- self.flush()
+ try:
+ self.flush()
+ except sa_exc.StatementError as e:
+ # note we are reraising StatementError as opposed to
+ # raising FlushError with "chaining" to remain compatible
+ # with code that catches StatementError, IntegrityError,
+ # etc.
+ e.add_detail(
+ "raised as a result of Query-invoked autoflush; "
+ "consider using a session.no_autoflush block if this "
+ "flush is occuring prematurely")
+ util.raise_from_cause(e)
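For reference, the remedy the amended message suggests looks like the following; ``session`` and the entity name are illustrative:

    with session.no_autoflush:
        # queries inside this block won't trigger the premature flush
        existing = session.query(SomeClass).filter_by(name="x").first()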
def refresh(self, instance, attribute_names=None, lockmode=None):
"""Expire and refresh the attributes on the given instance.
@@ -1180,7 +1235,7 @@ class Session(_SessionClassMethods):
only_load_props=attribute_names) is None:
raise sa_exc.InvalidRequestError(
"Could not refresh instance '%s'" %
- orm_util.instance_str(instance))
+ instance_str(instance))
def expire_all(self):
"""Expires all persistent instances within this Session.
@@ -1291,7 +1346,7 @@ class Session(_SessionClassMethods):
if state.session_id is not self.hash_key:
raise sa_exc.InvalidRequestError(
"Instance %s is not present in this Session" %
- orm_util.state_str(state))
+ state_str(state))
cascaded = list(state.manager.mapper.cascade_iterator(
'expunge', state))
@@ -1331,7 +1386,7 @@ class Session(_SessionClassMethods):
"expect these generated values. Ensure also that "
"this flush() is not occurring at an inappropriate "
"time, such aswithin a load() event."
- % orm_util.state_str(state)
+ % state_str(state)
)
if state.key is None:
@@ -1434,7 +1489,7 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
- orm_util.state_str(state))
+ state_str(state))
if state in self._deleted:
return
@@ -1598,7 +1653,7 @@ class Session(_SessionClassMethods):
"merging to update the most recent version."
% (
existing_version,
- orm_util.state_str(merged_state),
+ state_str(merged_state),
merged_version
))
@@ -1622,13 +1677,13 @@ class Session(_SessionClassMethods):
if not self.identity_map.contains_state(state):
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persistent within this Session" %
- orm_util.state_str(state))
+ state_str(state))
def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
"Object '%s' already has an identity - it can't be registered "
- "as pending" % orm_util.state_str(state))
+ "as pending" % state_str(state))
self._before_attach(state)
if state not in self._new:
@@ -1644,13 +1699,13 @@ class Session(_SessionClassMethods):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" %
- orm_util.state_str(state))
+ state_str(state))
if state.deleted:
raise sa_exc.InvalidRequestError(
"Instance '%s' has been deleted. Use the make_transient() "
"function to send this object back to the transient state." %
- orm_util.state_str(state)
+ state_str(state)
)
self._before_attach(state)
self._deleted.pop(state, None)
@@ -1703,8 +1758,8 @@ class Session(_SessionClassMethods):
may not fire off a backref event, if the effective value
is what was already loaded from a foreign-key-holding value.
- The :meth:`.Session.enable_relationship_loading` method supersedes
- the ``load_on_pending`` flag on :func:`.relationship`. Unlike
+ The :meth:`.Session.enable_relationship_loading` method is
+ similar to the ``load_on_pending`` flag on :func:`.relationship`. Unlike
that flag, :meth:`.Session.enable_relationship_loading` allows
an object to remain transient while still being able to load
related items.
@@ -1721,6 +1776,12 @@ class Session(_SessionClassMethods):
.. versionadded:: 0.8
+ .. seealso::
+
+ ``load_on_pending`` at :func:`.relationship` - this flag
+ allows per-relationship loading of many-to-ones on items that
+ are pending.
+
"""
state = attributes.instance_state(obj)
self._attach(state, include_before=True)
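A sketch of the call pattern this docstring describes, with illustrative entity names:

    order = Order(user_id=5)                   # transient, in no session
    session.enable_relationship_loading(order)
    print(order.user)                          # many-to-one now lazy-loads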
@@ -1738,14 +1799,14 @@ class Session(_SessionClassMethods):
raise sa_exc.InvalidRequestError("Can't attach instance "
"%s; another instance with key %s is already "
"present in this session."
- % (orm_util.state_str(state), state.key))
+ % (state_str(state), state.key))
if state.session_id and \
state.session_id is not self.hash_key and \
state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
- "(this is '%s')" % (orm_util.state_str(state),
+ "(this is '%s')" % (state_str(state),
state.session_id, self.hash_key))
if state.session_id != self.hash_key:
@@ -2090,9 +2151,10 @@ class Session(_SessionClassMethods):
access to the full set of persistent objects (i.e., those
that have row identity) currently in the session.
- See also:
+ .. seealso::
- :func:`.identity_key` - operations involving identity keys.
+ :func:`.identity_key` - helper function to produce the keys used
+ in this dictionary.
"""
@@ -2196,7 +2258,8 @@ class sessionmaker(_SessionClassMethods):
def __init__(self, bind=None, class_=Session, autoflush=True,
autocommit=False,
- expire_on_commit=True, **kw):
+ expire_on_commit=True,
+ info=None, **kw):
"""Construct a new :class:`.sessionmaker`.
All arguments here except for ``class_`` correspond to arguments
@@ -2213,6 +2276,13 @@ class sessionmaker(_SessionClassMethods):
:class:`.Session` objects.
:param expire_on_commit=True: the expire_on_commit setting to use
with newly created :class:`.Session` objects.
+ :param info: optional dictionary of information that will be available
+ via :attr:`.Session.info`. Note that this dictionary is *updated*, not
+ replaced, when the ``info`` parameter is also passed to a specific
+ :class:`.Session` construction call.
+
+ .. versionadded:: 0.9.0
+
:param \**kw: all other keyword arguments are passed to the constructor
of newly created :class:`.Session` objects.
@@ -2221,6 +2291,8 @@ class sessionmaker(_SessionClassMethods):
kw['autoflush'] = autoflush
kw['autocommit'] = autocommit
kw['expire_on_commit'] = expire_on_commit
+ if info is not None:
+ kw['info'] = info
self.kw = kw
# make our own subclass of the given class, so that
# events can be associated with it specifically.
@@ -2238,7 +2310,12 @@ class sessionmaker(_SessionClassMethods):
"""
for k, v in self.kw.items():
- local_kw.setdefault(k, v)
+ if k == 'info' and 'info' in local_kw:
+ d = v.copy()
+ d.update(local_kw['info'])
+ local_kw['info'] = d
+ else:
+ local_kw.setdefault(k, v)
return self.class_(**local_kw)
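The effect is that factory-level ``info`` acts as a set of defaults which a per-call ``info`` overlays rather than replaces:

    from sqlalchemy.orm import sessionmaker

    maker = sessionmaker(info={"app": "billing", "debug": False})
    s1 = maker()                        # info: {"app": "billing", "debug": False}
    s2 = maker(info={"debug": True})    # info: {"app": "billing", "debug": True}
    assert s2.info["app"] == "billing" and s2.info["debug"] is True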
def configure(self, **new_kw):
@@ -2253,13 +2330,12 @@ class sessionmaker(_SessionClassMethods):
self.kw.update(new_kw)
def __repr__(self):
- return "%s(class_=%r%s)" % (
+ return "%s(class_=%r,%s)" % (
self.__class__.__name__,
self.class_.__name__,
", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
)
-_sessions = weakref.WeakValueDictionary()
def make_transient(instance):
@@ -2304,12 +2380,4 @@ def object_session(instance):
raise exc.UnmappedInstanceError(instance)
-def _state_session(state):
- if state.session_id:
- try:
- return _sessions[state.session_id]
- except KeyError:
- pass
- return None
-
_new_sessionid = util.counter()
diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py
index c479d880d..9712dd055 100644
--- a/lib/sqlalchemy/orm/state.py
+++ b/lib/sqlalchemy/orm/state.py
@@ -1,5 +1,5 @@
# orm/state.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -13,16 +13,11 @@ defines a large part of the ORM's interactivity.
import weakref
from .. import util
-from . import exc as orm_exc, attributes, util as orm_util, interfaces
-from .attributes import (
- PASSIVE_NO_RESULT,
- SQL_OK, NEVER_SET, ATTR_WAS_SET, NO_VALUE,\
- PASSIVE_NO_INITIALIZE
- )
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-instrumentation = util.importlater("sqlalchemy.orm", "instrumentation")
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-
+from . import exc as orm_exc, interfaces
+from .path_registry import PathRegistry
+from .base import PASSIVE_NO_RESULT, SQL_OK, NEVER_SET, ATTR_WAS_SET, \
+ NO_VALUE, PASSIVE_NO_INITIALIZE, INIT_OK, PASSIVE_OFF
+from . import base
class InstanceState(interfaces._InspectionAttr):
"""tracks state information at the instance level."""
@@ -89,15 +84,16 @@ class InstanceState(interfaces._InspectionAttr):
not self._attached
@property
- def _attached(self):
+ @util.dependencies("sqlalchemy.orm.session")
+ def _attached(self, sessionlib):
return self.session_id is not None and \
self.session_id in sessionlib._sessions
@property
- def session(self):
+ @util.dependencies("sqlalchemy.orm.session")
+ def session(self, sessionlib):
"""Return the owning :class:`.Session` for this instance,
or ``None`` if none available."""
-
return sessionlib._state_session(self)
@property
@@ -186,7 +182,7 @@ class InstanceState(interfaces._InspectionAttr):
def dict(self):
o = self.obj()
if o is not None:
- return attributes.instance_dict(o)
+ return base.instance_dict(o)
else:
return {}
@@ -214,8 +210,8 @@ class InstanceState(interfaces._InspectionAttr):
return self._pending_mutations[key]
def __getstate__(self):
- d = {'instance': self.obj()}
- d.update(
+ state_dict = {'instance': self.obj()}
+ state_dict.update(
(k, self.__dict__[k]) for k in (
'committed_state', '_pending_mutations', 'modified', 'expired',
'callables', 'key', 'parents', 'load_options',
@@ -223,14 +219,14 @@ class InstanceState(interfaces._InspectionAttr):
) if k in self.__dict__
)
if self.load_path:
- d['load_path'] = self.load_path.serialize()
+ state_dict['load_path'] = self.load_path.serialize()
- self.manager.dispatch.pickle(self, d)
+ state_dict['manager'] = self.manager._serialize(self, state_dict)
- return d
+ return state_dict
- def __setstate__(self, state):
- inst = state['instance']
+ def __setstate__(self, state_dict):
+ inst = state_dict['instance']
if inst is not None:
self.obj = weakref.ref(inst, self._cleanup)
self.class_ = inst.__class__
@@ -239,42 +235,26 @@ class InstanceState(interfaces._InspectionAttr):
# due to storage of state in "parents". "class_"
# also new.
self.obj = None
- self.class_ = state['class_']
- self.manager = manager = instrumentation.manager_of_class(self.class_)
- if manager is None:
- raise orm_exc.UnmappedInstanceError(
- inst,
- "Cannot deserialize object of type %r - "
- "no mapper() has "
- "been configured for this class within the current "
- "Python process!" %
- self.class_)
- elif manager.is_mapped and not manager.mapper.configured:
- mapperlib.configure_mappers()
-
- self.committed_state = state.get('committed_state', {})
- self._pending_mutations = state.get('_pending_mutations', {})
- self.parents = state.get('parents', {})
- self.modified = state.get('modified', False)
- self.expired = state.get('expired', False)
- self.callables = state.get('callables', {})
+ self.class_ = state_dict['class_']
+
+ self.committed_state = state_dict.get('committed_state', {})
+ self._pending_mutations = state_dict.get('_pending_mutations', {})
+ self.parents = state_dict.get('parents', {})
+ self.modified = state_dict.get('modified', False)
+ self.expired = state_dict.get('expired', False)
+ self.callables = state_dict.get('callables', {})
self.__dict__.update([
- (k, state[k]) for k in (
+ (k, state_dict[k]) for k in (
'key', 'load_options',
- ) if k in state
+ ) if k in state_dict
])
- if 'load_path' in state:
- self.load_path = orm_util.PathRegistry.\
- deserialize(state['load_path'])
+ if 'load_path' in state_dict:
+ self.load_path = PathRegistry.\
+ deserialize(state_dict['load_path'])
- # setup _sa_instance_state ahead of time so that
- # unpickle events can access the object normally.
- # see [ticket:2362]
- if inst is not None:
- manager.setup_instance(inst, self)
- manager.dispatch.unpickle(self, state)
+ state_dict['manager'](self, inst, state_dict)
def _initialize(self, key):
"""Set this attribute to an empty value or collection,
@@ -413,6 +393,13 @@ class InstanceState(interfaces._InspectionAttr):
difference(self.dict)
@property
+ def _unloaded_non_object(self):
+ return self.unloaded.intersection(
+ attr for attr in self.manager
+ if self.manager[attr].impl.accepts_scalar_loader
+ )
+
+ @property
def expired_attributes(self):
"""Return the set of keys which are 'expired' to be loaded by
the manager's deferred scalar loader, assuming no pending
@@ -428,6 +415,8 @@ class InstanceState(interfaces._InspectionAttr):
return None
def _modified_event(self, dict_, attr, previous, collection=False):
+ if not attr.send_modified_events:
+ return
if attr.key not in self.committed_state:
if collection:
if previous is NEVER_SET:
@@ -461,7 +450,7 @@ class InstanceState(interfaces._InspectionAttr):
"collected."
% (
self.manager[attr.key],
- orm_util.state_class_str(self)
+ base.state_class_str(self)
))
self.modified = True
@@ -527,13 +516,13 @@ class AttributeState(object):
to a particular attribute on a particular mapped object.
The :class:`.AttributeState` object is accessed
- via the :attr:`.InstanceState.attr` collection
+ via the :attr:`.InstanceState.attrs` collection
of a particular :class:`.InstanceState`::
from sqlalchemy import inspect
insp = inspect(some_mapped_object)
- attr_state = insp.attr.some_attribute
+ attr_state = insp.attrs.some_attribute
"""
@@ -568,10 +557,40 @@ class AttributeState(object):
"""Return the current pre-flush change history for
this attribute, via the :class:`.History` interface.
+ This method will **not** emit loader callables if the value of the
+ attribute is unloaded.
+
+ .. seealso::
+
+ :meth:`.AttributeState.load_history` - retrieve history
+ using loader callables if the value is not locally present.
+
+ :func:`.attributes.get_history` - underlying function
+
"""
return self.state.get_history(self.key,
PASSIVE_NO_INITIALIZE)
+ def load_history(self):
+ """Return the current pre-flush change history for
+ this attribute, via the :class:`.History` interface.
+
+ This method **will** emit loader callables if the value of the
+ attribute is unloaded.
+
+ .. seealso::
+
+ :attr:`.AttributeState.history`
+
+ :func:`.attributes.get_history` - underlying function
+
+ .. versionadded:: 0.9.0
+
+ """
+ return self.state.get_history(self.key,
+ PASSIVE_OFF ^ INIT_OK)
+
+
class PendingCollection(object):
"""A writable placeholder for an unloaded collection.
diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py
index aa46d06a8..033e3d064 100644
--- a/lib/sqlalchemy/orm/strategies.py
+++ b/lib/sqlalchemy/orm/strategies.py
@@ -1,5 +1,5 @@
# orm/strategies.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,14 +16,13 @@ from . import (
)
from .state import InstanceState
from .util import _none_set
+from . import properties
from .interfaces import (
- LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
- StrategizedProperty
+ LoaderStrategy, StrategizedProperty
)
from .session import _state_session
import itertools
-
def _register_attribute(strategy, mapper, useobject,
compare_function=None,
typecallable=None,
@@ -45,10 +44,10 @@ def _register_attribute(strategy, mapper, useobject,
listen_hooks.append(single_parent_validator)
if prop.key in prop.parent.validators:
- fn, include_removes = prop.parent.validators[prop.key]
+ fn, opts = prop.parent.validators[prop.key]
listen_hooks.append(
lambda desc, prop: orm_util._validator_events(desc,
- prop.key, fn, include_removes)
+ prop.key, fn, **opts)
)
if useobject:
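The validator registry now carries a dict of options instead of the single ``include_removes`` flag, unpacked as ``**opts`` above. On the user-facing side this corresponds to :func:`.validates` keyword arguments, e.g.:

    from sqlalchemy import Column, ForeignKey, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, validates

    Base = declarative_base()

    class Address(Base):
        __tablename__ = "address"
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey("user.id"))
        email = Column(String)

    class User(Base):
        __tablename__ = "user"
        id = Column(Integer, primary_key=True)
        addresses = relationship(Address)

        # include_removes travels through the 'opts' dict unpacked above
        @validates("addresses", include_removes=True)
        def validate_address(self, key, address, is_remove):
            if not is_remove:
                assert "@" in address.email
            return address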
@@ -81,6 +80,7 @@ def _register_attribute(strategy, mapper, useobject,
callable_=callable_,
active_history=active_history,
impl_class=impl_class,
+ send_modified_events=not useobject or not prop.viewonly,
doc=prop.doc,
**kw
)
@@ -88,7 +88,7 @@ def _register_attribute(strategy, mapper, useobject,
for hook in listen_hooks:
hook(desc, prop)
-
+@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
class UninstrumentedColumnLoader(LoaderStrategy):
"""Represent the a non-instrumented MapperProperty.
@@ -100,17 +100,19 @@ class UninstrumentedColumnLoader(LoaderStrategy):
super(UninstrumentedColumnLoader, self).__init__(parent)
self.columns = self.parent_property.columns
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, loadopt, adapter,
column_collection=None, **kwargs):
for c in self.columns:
if adapter:
c = adapter.columns[c]
column_collection.append(c)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
return None, None, None
+@log.class_logger
+@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
class ColumnLoader(LoaderStrategy):
"""Provide loading behavior for a :class:`.ColumnProperty`."""
@@ -119,7 +121,7 @@ class ColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.is_composite = hasattr(self.parent_property, 'composite_class')
- def setup_query(self, context, entity, path,
+ def setup_query(self, context, entity, path, loadopt,
adapter, column_collection, **kwargs):
for c in self.columns:
if adapter:
@@ -131,7 +133,8 @@ class ColumnLoader(LoaderStrategy):
coltype = self.columns[0].type
# TODO: check all columns ? check for foreign key as well?
active_history = self.parent_property.active_history or \
- self.columns[0].primary_key
+ self.columns[0].primary_key or \
+ mapper.version_id_col in set(self.columns)
_register_attribute(self, mapper, useobject=False,
compare_function=coltype.compare_values,
@@ -139,7 +142,7 @@ class ColumnLoader(LoaderStrategy):
)
def create_row_processor(self, context, path,
- mapper, row, adapter):
+ loadopt, mapper, row, adapter):
key = self.key
# look through list of columns represented here
# to see which, if any, is present in the row.
@@ -156,9 +159,9 @@ class ColumnLoader(LoaderStrategy):
return expire_for_non_present_col, None, None
-log.class_logger(ColumnLoader)
-
+@log.class_logger
+@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
class DeferredColumnLoader(LoaderStrategy):
"""Provide loading behavior for a deferred :class:`.ColumnProperty`."""
@@ -170,16 +173,16 @@ class DeferredColumnLoader(LoaderStrategy):
self.columns = self.parent_property.columns
self.group = self.parent_property.group
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
col = self.columns[0]
if adapter:
col = adapter.columns[col]
key = self.key
if col in row:
- return self.parent_property._get_strategy(ColumnLoader).\
+ return self.parent_property._get_strategy_by_cls(ColumnLoader).\
create_row_processor(
- context, path, mapper, row, adapter)
+ context, path, loadopt, mapper, row, adapter)
elif not self.is_class_level:
set_deferred_for_local_state = InstanceState._row_processor(
@@ -202,15 +205,15 @@ class DeferredColumnLoader(LoaderStrategy):
expire_missing=False
)
- def setup_query(self, context, entity, path, adapter,
+ def setup_query(self, context, entity, path, loadopt, adapter,
only_load_props=None, **kwargs):
if (
- self.group is not None and
- context.attributes.get(('undefer', self.group), False)
+ loadopt and self.group and
+ loadopt.local_opts.get('undefer_group', False) == self.group
) or (only_load_props and self.key in only_load_props):
- self.parent_property._get_strategy(ColumnLoader).\
+ self.parent_property._get_strategy_by_cls(ColumnLoader).\
setup_query(context, entity,
- path, adapter, **kwargs)
+ path, loadopt, adapter, **kwargs)
def _load_for_state(self, state, passive):
if not state.key:
@@ -251,8 +254,6 @@ class DeferredColumnLoader(LoaderStrategy):
return attributes.ATTR_WAS_SET
-log.class_logger(DeferredColumnLoader)
-
class LoadDeferredColumns(object):
"""serializable loader object used by DeferredColumnLoader"""
@@ -269,29 +270,6 @@ class LoadDeferredColumns(object):
return strategy._load_for_state(state, passive)
-class DeferredOption(StrategizedOption):
- propagate_to_loaders = True
-
- def __init__(self, key, defer=False):
- super(DeferredOption, self).__init__(key)
- self.defer = defer
-
- def get_strategy_class(self):
- if self.defer:
- return DeferredColumnLoader
- else:
- return ColumnLoader
-
-
-class UndeferGroupOption(MapperOption):
- propagate_to_loaders = True
-
- def __init__(self, group):
- self.group = group
-
- def process_query(self, query):
- query._attributes[("undefer", self.group)] = True
-
class AbstractRelationshipLoader(LoaderStrategy):
"""LoaderStratgies which deal with related objects."""
@@ -304,6 +282,9 @@ class AbstractRelationshipLoader(LoaderStrategy):
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="noload")
+@properties.RelationshipProperty.strategy_for(lazy=None)
class NoLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=None".
@@ -319,15 +300,16 @@ class NoLoader(AbstractRelationshipLoader):
typecallable=self.parent_property.collection_class,
)
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
def invoke_no_load(state, dict_, row):
state._initialize(self.key)
return invoke_no_load, None, None
-log.class_logger(NoLoader)
-
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy=True)
+@properties.RelationshipProperty.strategy_for(lazy="select")
class LazyLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
with "lazy=True", that is loads when first accessed.
@@ -350,7 +332,6 @@ class LazyLoader(AbstractRelationshipLoader):
# determine if our "lazywhere" clause is the same as the mapper's
# get() clause. then we can just use mapper.get()
- #from sqlalchemy.orm import query
self.use_get = not self.uselist and \
self.mapper._get_clause[0].compare(
self._lazywhere,
@@ -542,7 +523,8 @@ class LazyLoader(AbstractRelationshipLoader):
for pk in self.mapper.primary_key
]
- def _emit_lazyload(self, session, state, ident_key, passive):
+ @util.dependencies("sqlalchemy.orm.strategy_options")
+ def _emit_lazyload(self, strategy_options, session, state, ident_key, passive):
q = session.query(self.mapper)._adapt_all_clauses()
q = q._with_invoke_all_eagers(False)
@@ -571,7 +553,7 @@ class LazyLoader(AbstractRelationshipLoader):
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
not isinstance(rev.strategy, LazyLoader):
- q = q.options(EagerLazyOption((rev.key,), lazy='select'))
+ q = q.options(strategy_options.Load(rev.parent).lazyload(rev.key))
lazy_clause = self.lazy_clause(state, passive=passive)
@@ -598,7 +580,7 @@ class LazyLoader(AbstractRelationshipLoader):
else:
return None
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
key = self.key
if not self.is_class_level:
@@ -630,8 +612,6 @@ class LazyLoader(AbstractRelationshipLoader):
return reset_for_lazy_callable, None, None
-log.class_logger(LazyLoader)
-
class LoadLazyAttribute(object):
"""serializable loader object used by LazyLoader"""
@@ -648,18 +628,19 @@ class LoadLazyAttribute(object):
return strategy._load_for_state(state, passive)
+@properties.RelationshipProperty.strategy_for(lazy="immediate")
class ImmediateLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
init_class_attribute(mapper)
def setup_query(self, context, entity,
- path, adapter, column_collection=None,
+ path, loadopt, adapter, column_collection=None,
parentmapper=None, **kwargs):
pass
- def create_row_processor(self, context, path,
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
def load_immediate(state, dict_, row):
state.get_impl(self.key).get(state, dict_)
@@ -667,6 +648,8 @@ class ImmediateLoader(AbstractRelationshipLoader):
return None, None, load_immediate
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="subquery")
class SubqueryLoader(AbstractRelationshipLoader):
def __init__(self, parent):
super(SubqueryLoader, self).__init__(parent)
@@ -674,11 +657,11 @@ class SubqueryLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
init_class_attribute(mapper)
def setup_query(self, context, entity,
- path, adapter,
+ path, loadopt, adapter,
column_collection=None,
parentmapper=None, **kwargs):
@@ -703,14 +686,14 @@ class SubqueryLoader(AbstractRelationshipLoader):
# if not via query option, check for
# a cycle
- if not path.contains(context.attributes, "loaderstrategy"):
+ if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
elif subq_path.contains_mapper(self.mapper):
return
- subq_mapper, leftmost_mapper, leftmost_attr = \
+ subq_mapper, leftmost_mapper, leftmost_attr, leftmost_relationship = \
self._get_leftmost(subq_path)
orig_query = context.attributes.get(
@@ -721,7 +704,8 @@ class SubqueryLoader(AbstractRelationshipLoader):
# produce a subquery from it.
left_alias = self._generate_from_original_query(
orig_query, leftmost_mapper,
- leftmost_attr, entity.mapper
+ leftmost_attr, leftmost_relationship,
+ entity.mapper
)
# generate another Query that will join the
@@ -770,11 +754,12 @@ class SubqueryLoader(AbstractRelationshipLoader):
leftmost_mapper._columntoproperty[c].class_attribute
for c in leftmost_cols
]
- return subq_mapper, leftmost_mapper, leftmost_attr
+ return subq_mapper, leftmost_mapper, leftmost_attr, leftmost_prop
def _generate_from_original_query(self,
orig_query, leftmost_mapper,
- leftmost_attr, entity_mapper
+ leftmost_attr, leftmost_relationship,
+ entity_mapper
):
# reformat the original query
# to look only for significant columns
@@ -785,8 +770,22 @@ class SubqueryLoader(AbstractRelationshipLoader):
if not q._from_obj and entity_mapper.isa(leftmost_mapper):
q._set_select_from([entity_mapper], False)
+ target_cols = q._adapt_col_list(leftmost_attr)
+
# select from the identity columns of the outer
- q._set_entities(q._adapt_col_list(leftmost_attr))
+ q._set_entities(target_cols)
+
+ distinct_target_key = leftmost_relationship.distinct_target_key
+
+ if distinct_target_key is True:
+ q._distinct = True
+ elif distinct_target_key is None:
+ # if target_cols refer to a non-primary key or only
+ # part of a composite primary key, set the q as distinct
+ for t in set(c.table for c in target_cols):
+ if not set(target_cols).issuperset(t.primary_key):
+ q._distinct = True
+ break
if q._order_by is False:
q._order_by = leftmost_mapper.order_by
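The three-valued ``distinct_target_key`` consulted here comes from :func:`.relationship`: ``True`` forces DISTINCT on the inner query, ``False`` suppresses it, and ``None`` defers to the primary-key check in the loop above. A sketch of the mapping side:

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = "parent"
        id = Column(Integer, primary_key=True)
        # None (the default) would let the heuristic above decide
        children = relationship("Child", lazy="subquery",
                                distinct_target_key=True)

    class Child(Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey("parent.id"))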
@@ -916,7 +915,36 @@ class SubqueryLoader(AbstractRelationshipLoader):
q = q.order_by(*eager_order_by)
return q
- def create_row_processor(self, context, path,
+ class _SubqCollections(object):
+ """Given a :class:`.Query` used to emit the "subquery load",
+ provide a load interface that executes the query at the
+ first moment a value is needed.
+
+ """
+ _data = None
+
+ def __init__(self, subq):
+ self.subq = subq
+
+ def get(self, key, default):
+ if self._data is None:
+ self._load()
+ return self._data.get(key, default)
+
+ def _load(self):
+ self._data = dict(
+ (k, [vv[0] for vv in v])
+ for k, v in itertools.groupby(
+ self.subq,
+ lambda x: x[1:]
+ )
+ )
+
+ def loader(self, state, dict_, row):
+ if self._data is None:
+ self._load()
+
+ def create_row_processor(self, context, path, loadopt,
mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
@@ -938,12 +966,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
# call upon create_row_processor again
collections = path.get(context.attributes, "collections")
if collections is None:
- collections = dict(
- (k, [v[0] for v in v])
- for k, v in itertools.groupby(
- subq,
- lambda x: x[1:]
- ))
+ collections = self._SubqCollections(subq)
path.set(context.attributes, 'collections', collections)
if adapter:
@@ -963,7 +986,7 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, collection)
- return load_collection_from_subq, None, None
+ return load_collection_from_subq, None, None, collections.loader
def _create_scalar_loader(self, collections, local_cols):
def load_scalar_from_subq(state, dict_, row):
@@ -981,12 +1004,13 @@ class SubqueryLoader(AbstractRelationshipLoader):
state.get_impl(self.key).\
set_committed_value(state, dict_, scalar)
- return load_scalar_from_subq, None, None
-
+ return load_scalar_from_subq, None, None, collections.loader
-log.class_logger(SubqueryLoader)
+@log.class_logger
+@properties.RelationshipProperty.strategy_for(lazy="joined")
+@properties.RelationshipProperty.strategy_for(lazy=False)
class JoinedLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
using joined eager loading.
@@ -998,9 +1022,9 @@ class JoinedLoader(AbstractRelationshipLoader):
def init_class_attribute(self, mapper):
self.parent_property.\
- _get_strategy(LazyLoader).init_class_attribute(mapper)
+ _get_strategy_by_cls(LazyLoader).init_class_attribute(mapper)
- def setup_query(self, context, entity, path, adapter, \
+ def setup_query(self, context, entity, path, loadopt, adapter, \
column_collection=None, parentmapper=None,
allow_innerjoin=True,
**kwargs):
@@ -1013,19 +1037,19 @@ class JoinedLoader(AbstractRelationshipLoader):
with_polymorphic = None
- user_defined_adapter = path.get(context.attributes,
- "user_defined_eager_row_processor",
- False)
+ user_defined_adapter = self._init_user_defined_eager_proc(
+ loadopt, context) if loadopt else False
+
if user_defined_adapter is not False:
clauses, adapter, add_to_collection = \
- self._get_user_defined_adapter(
+ self._setup_query_on_user_defined_adapter(
context, entity, path, adapter,
user_defined_adapter
)
else:
# if not via query option, check for
# a cycle
- if not path.contains(context.attributes, "loaderstrategy"):
+ if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
@@ -1034,7 +1058,7 @@ class JoinedLoader(AbstractRelationshipLoader):
clauses, adapter, add_to_collection, \
allow_innerjoin = self._generate_row_adapter(
- context, entity, path, adapter,
+ context, entity, path, loadopt, adapter,
column_collection, parentmapper, allow_innerjoin
)
@@ -1069,24 +1093,74 @@ class JoinedLoader(AbstractRelationshipLoader):
"when using joined loading with with_polymorphic()."
)
- def _get_user_defined_adapter(self, context, entity,
+ def _init_user_defined_eager_proc(self, loadopt, context):
+
+ # check if the opt applies at all
+ if "eager_from_alias" not in loadopt.local_opts:
+ # nope
+ return False
+
+ path = loadopt.path.parent
+
+ # the option applies. check if the "user_defined_eager_row_processor"
+ # has been built up.
+ adapter = path.get(context.attributes,
+ "user_defined_eager_row_processor", False)
+ if adapter is not False:
+ # just return it
+ return adapter
+
+ # otherwise figure it out.
+ alias = loadopt.local_opts["eager_from_alias"]
+
+ root_mapper, prop = path[-2:]
+
+ #from .mapper import Mapper
+ #from .interfaces import MapperProperty
+ #assert isinstance(root_mapper, Mapper)
+ #assert isinstance(prop, MapperProperty)
+
+ if alias is not None:
+ if isinstance(alias, str):
+ alias = prop.target.alias(alias)
+ adapter = sql_util.ColumnAdapter(alias,
+ equivalents=prop.mapper._equivalent_columns)
+ else:
+ if path.contains(context.attributes, "path_with_polymorphic"):
+ with_poly_info = path.get(context.attributes,
+ "path_with_polymorphic")
+ adapter = orm_util.ORMAdapter(
+ with_poly_info.entity,
+ equivalents=prop.mapper._equivalent_columns)
+ else:
+ adapter = context.query._polymorphic_adapters.get(prop.mapper, None)
+ path.set(context.attributes,
+ "user_defined_eager_row_processor",
+ adapter)
+
+ return adapter
+
+ def _setup_query_on_user_defined_adapter(self, context, entity,
path, adapter, user_defined_adapter):
- adapter = entity._get_entity_clauses(context.query, context)
- if adapter and user_defined_adapter:
- user_defined_adapter = user_defined_adapter.wrap(adapter)
- path.set(context.attributes, "user_defined_eager_row_processor",
- user_defined_adapter)
- elif adapter:
- user_defined_adapter = adapter
- path.set(context.attributes, "user_defined_eager_row_processor",
- user_defined_adapter)
+ # apply some more wrapping to the "user defined adapter"
+ # if we are setting up the query for SQL render.
+ adapter = entity._get_entity_clauses(context.query, context)
+
+ if adapter and user_defined_adapter:
+ user_defined_adapter = user_defined_adapter.wrap(adapter)
+ path.set(context.attributes, "user_defined_eager_row_processor",
+ user_defined_adapter)
+ elif adapter:
+ user_defined_adapter = adapter
+ path.set(context.attributes, "user_defined_eager_row_processor",
+ user_defined_adapter)
- add_to_collection = context.primary_columns
- return user_defined_adapter, adapter, add_to_collection
+ add_to_collection = context.primary_columns
+ return user_defined_adapter, adapter, add_to_collection
def _generate_row_adapter(self,
- context, entity, path, adapter,
+ context, entity, path, loadopt, adapter,
column_collection, parentmapper, allow_innerjoin
):
with_poly_info = path.get(
@@ -1109,9 +1183,12 @@ class JoinedLoader(AbstractRelationshipLoader):
if self.parent_property.direction != interfaces.MANYTOONE:
context.multi_row_eager_loaders = True
- innerjoin = allow_innerjoin and path.get(context.attributes,
- "eager_join_type",
- self.parent_property.innerjoin)
+ innerjoin = allow_innerjoin and (
+ loadopt.local_opts.get(
+ 'innerjoin', self.parent_property.innerjoin)
+ if loadopt is not None
+ else self.parent_property.innerjoin
+ )
if not innerjoin:
# if this is an outer join, all eager joins from
# here must also be outer joins
@@ -1202,7 +1279,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# by the Query propagates those columns outward.
# This has the effect
# of "undefering" those columns.
- for col in sql_util.find_columns(
+ for col in sql_util._find_columns(
self.parent_property.primaryjoin):
if localparent.mapped_table.c.contains_column(col):
if adapter:
@@ -1218,10 +1295,10 @@ class JoinedLoader(AbstractRelationshipLoader):
)
)
- def _create_eager_adapter(self, context, row, adapter, path):
- user_defined_adapter = path.get(context.attributes,
- "user_defined_eager_row_processor",
- False)
+ def _create_eager_adapter(self, context, row, adapter, path, loadopt):
+ user_defined_adapter = self._init_user_defined_eager_proc(
+ loadopt, context) if loadopt else False
+
if user_defined_adapter is not False:
decorator = user_defined_adapter
# user defined eagerloads are part of the "primary"
@@ -1244,7 +1321,7 @@ class JoinedLoader(AbstractRelationshipLoader):
# processor, will cause a degrade to lazy
return False
- def create_row_processor(self, context, path, mapper, row, adapter):
+ def create_row_processor(self, context, path, loadopt, mapper, row, adapter):
if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@@ -1256,7 +1333,7 @@ class JoinedLoader(AbstractRelationshipLoader):
eager_adapter = self._create_eager_adapter(
context,
row,
- adapter, our_path)
+ adapter, our_path, loadopt)
if eager_adapter is not False:
key = self.key
@@ -1273,9 +1350,9 @@ class JoinedLoader(AbstractRelationshipLoader):
return self._create_collection_loader(context, key, _instance)
else:
return self.parent_property.\
- _get_strategy(LazyLoader).\
+ _get_strategy_by_cls(LazyLoader).\
create_row_processor(
- context, path,
+ context, path, loadopt,
mapper, row, adapter)
def _create_collection_loader(self, context, key, _instance):
@@ -1336,102 +1413,6 @@ class JoinedLoader(AbstractRelationshipLoader):
None, load_scalar_from_joined_exec
-log.class_logger(JoinedLoader)
-
-
-class EagerLazyOption(StrategizedOption):
- def __init__(self, key, lazy=True, chained=False,
- propagate_to_loaders=True
- ):
- if isinstance(key[0], str) and key[0] == '*':
- if len(key) != 1:
- raise sa_exc.ArgumentError(
- "Wildcard identifier '*' must "
- "be specified alone.")
- key = ("relationship:*",)
- propagate_to_loaders = False
- super(EagerLazyOption, self).__init__(key)
- self.lazy = lazy
- self.chained = chained
- self.propagate_to_loaders = propagate_to_loaders
- self.strategy_cls = factory(lazy)
-
- def get_strategy_class(self):
- return self.strategy_cls
-
-_factory = {
- False: JoinedLoader,
- "joined": JoinedLoader,
- None: NoLoader,
- "noload": NoLoader,
- "select": LazyLoader,
- True: LazyLoader,
- "subquery": SubqueryLoader,
- "immediate": ImmediateLoader
-}
-
-
-def factory(identifier):
- return _factory.get(identifier, LazyLoader)
-
-
-class EagerJoinOption(PropertyOption):
-
- def __init__(self, key, innerjoin, chained=False):
- super(EagerJoinOption, self).__init__(key)
- self.innerjoin = innerjoin
- self.chained = chained
-
- def process_query_property(self, query, paths):
- if self.chained:
- for path in paths:
- path.set(query._attributes, "eager_join_type", self.innerjoin)
- else:
- paths[-1].set(query._attributes, "eager_join_type", self.innerjoin)
-
-
-class LoadEagerFromAliasOption(PropertyOption):
-
- def __init__(self, key, alias=None, chained=False):
- super(LoadEagerFromAliasOption, self).__init__(key)
- if alias is not None:
- if not isinstance(alias, str):
- info = inspect(alias)
- alias = info.selectable
- self.alias = alias
- self.chained = chained
-
- def process_query_property(self, query, paths):
- if self.chained:
- for path in paths[0:-1]:
- (root_mapper, prop) = path.path[-2:]
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- path.setdefault(query._attributes,
- "user_defined_eager_row_processor",
- adapter)
-
- root_mapper, prop = paths[-1].path[-2:]
- if self.alias is not None:
- if isinstance(self.alias, str):
- self.alias = prop.target.alias(self.alias)
- paths[-1].set(query._attributes,
- "user_defined_eager_row_processor",
- sql_util.ColumnAdapter(self.alias,
- equivalents=prop.mapper._equivalent_columns)
- )
- else:
- if paths[-1].contains(query._attributes, "path_with_polymorphic"):
- with_poly_info = paths[-1].get(query._attributes,
- "path_with_polymorphic")
- adapter = orm_util.ORMAdapter(
- with_poly_info.entity,
- equivalents=prop.mapper._equivalent_columns)
- else:
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- paths[-1].set(query._attributes,
- "user_defined_eager_row_processor",
- adapter)
-
def single_parent_validator(desc, prop):
def _do_check(state, value, oldvalue, initiator):
diff --git a/lib/sqlalchemy/orm/strategy_options.py b/lib/sqlalchemy/orm/strategy_options.py
new file mode 100644
index 000000000..6e838ccb7
--- /dev/null
+++ b/lib/sqlalchemy/orm/strategy_options.py
@@ -0,0 +1,924 @@
+# orm/strategy_options.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+
+"""
+
+from .interfaces import MapperOption, PropComparator
+from .. import util
+from ..sql.base import _generative, Generative
+from .. import exc as sa_exc, inspect
+from .base import _is_aliased_class, _class_to_mapper
+from . import util as orm_util
+from .path_registry import PathRegistry, TokenRegistry, \
+ _WILDCARD_TOKEN, _DEFAULT_TOKEN
+
+class Load(Generative, MapperOption):
+ """Represents loader options which modify the state of a
+ :class:`.Query` in order to affect how various mapped attributes are loaded.
+
+ .. versionadded:: 0.9.0 The :meth:`.Load` system is a new foundation for
+ the existing system of loader options, including options such as
+ :func:`.orm.joinedload`, :func:`.orm.defer`, and others. In particular,
+ it introduces a new method-chained system that replaces the need for
+ dot-separated paths as well as "_all()" options such as :func:`.orm.joinedload_all`.
+
+ A :class:`.Load` object can be used directly or indirectly. To use one
+ directly, instantiate it with the parent class. This style of usage is
+ useful when dealing with a :class:`.Query` that has multiple entities,
+ or when producing a loader option that can be applied generically to
+ any style of query::
+
+ myopt = Load(MyClass).joinedload("widgets")
+
+ The above ``myopt`` can now be used with :meth:`.Query.options`::
+
+ session.query(MyClass).options(myopt)
+
+ The :class:`.Load` construct is invoked indirectly whenever one makes use
+ of the various loader options that are present in ``sqlalchemy.orm``, including
+ options such as :func:`.orm.joinedload`, :func:`.orm.defer`, :func:`.orm.subqueryload`,
+ and all the rest. These constructs produce an "anonymous" form of the
+ :class:`.Load` object which tracks attributes and options, but is not linked
+ to a parent class until it is associated with a parent :class:`.Query`::
+
+ # produce "unbound" Load object
+ myopt = joinedload("widgets")
+
+ # when applied using options(), the option is "bound" to the
+ # class observed in the given query, e.g. MyClass
+ session.query(MyClass).options(myopt)
+
+ Whether the direct or indirect style is used, the :class:`.Load` object
+ returned now represents a specific "path" along the entities of a :class:`.Query`.
+ This path can be traversed using a standard method-chaining approach.
+ Supposing a class hierarchy such as ``User``, ``User.addresses -> Address``,
+ ``User.orders -> Order`` and ``Order.items -> Item``, we can specify a variety
+ of loader options along each element in the "path"::
+
+ session.query(User).options(
+ joinedload("addresses"),
+ subqueryload("orders").joinedload("items")
+ )
+
+ Where above, the ``addresses`` collection will be joined-loaded, the
+ ``orders`` collection will be subquery-loaded, and within that subquery load
+ the ``items`` collection will be joined-loaded.
+
+
+ """
+ def __init__(self, entity):
+ insp = inspect(entity)
+ self.path = insp._path_registry
+ self.context = {}
+ self.local_opts = {}
+
+ def _generate(self):
+ cloned = super(Load, self)._generate()
+ cloned.local_opts = {}
+ return cloned
+
+ strategy = None
+ propagate_to_loaders = False
+
+ def process_query(self, query):
+ self._process(query, True)
+
+ def process_query_conditionally(self, query):
+ self._process(query, False)
+
+ def _process(self, query, raiseerr):
+ current_path = query._current_path
+ if current_path:
+ for (token, start_path), loader in self.context.items():
+ chopped_start_path = self._chop_path(start_path, current_path)
+ if chopped_start_path is not None:
+ query._attributes[(token, chopped_start_path)] = loader
+ else:
+ query._attributes.update(self.context)
+
+ def _generate_path(self, path, attr, wildcard_key, raiseerr=True):
+ if raiseerr and not path.has_entity:
+ if isinstance(path, TokenRegistry):
+ raise sa_exc.ArgumentError(
+ "Wildcard token cannot be followed by another entity")
+ else:
+ raise sa_exc.ArgumentError(
+ "Attribute '%s' of entity '%s' does not "
+ "refer to a mapped entity" %
+ (path.prop.key, path.parent.entity)
+ )
+
+ if isinstance(attr, util.string_types):
+ default_token = attr.endswith(_DEFAULT_TOKEN)
+ if attr.endswith(_WILDCARD_TOKEN) or default_token:
+ if default_token:
+ self.propagate_to_loaders = False
+ if wildcard_key:
+ attr = "%s:%s" % (wildcard_key, attr)
+ return path.token(attr)
+
+ try:
+ # use getattr on the class to work around
+ # synonyms, hybrids, etc.
+ attr = getattr(path.entity.class_, attr)
+ except AttributeError:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Can't find property named '%s' on the "
+ "mapped entity %s in this Query. " % (
+ attr, path.entity)
+ )
+ else:
+ return None
+ else:
+ attr = attr.property
+
+ path = path[attr]
+ else:
+ prop = attr.property
+
+ if not prop.parent.common_parent(path.mapper):
+ if raiseerr:
+ raise sa_exc.ArgumentError("Attribute '%s' does not "
+ "link from element '%s'" % (attr, path.entity))
+ else:
+ return None
+
+ if getattr(attr, '_of_type', None):
+ ac = attr._of_type
+ ext_info = inspect(ac)
+
+ path_element = ext_info.mapper
+ if not ext_info.is_aliased_class:
+ ac = orm_util.with_polymorphic(
+ ext_info.mapper.base_mapper,
+ ext_info.mapper, aliased=True,
+ _use_mapper_path=True)
+ path.entity_path[prop].set(self.context,
+ "path_with_polymorphic", inspect(ac))
+ path = path[prop][path_element]
+ else:
+ path = path[prop]
+
+ if path.has_entity:
+ path = path.entity_path
+ return path
+
+ def _coerce_strat(self, strategy):
+ if strategy is not None:
+ strategy = tuple(sorted(strategy.items()))
+ return strategy
+
+ @_generative
+ def set_relationship_strategy(self, attr, strategy, propagate_to_loaders=True):
+ strategy = self._coerce_strat(strategy)
+
+ self.propagate_to_loaders = propagate_to_loaders
+ # if the path is a wildcard, this will set propagate_to_loaders=False
+ self.path = self._generate_path(self.path, attr, "relationship")
+ self.strategy = strategy
+ if strategy is not None:
+ self._set_path_strategy()
+
+ @_generative
+ def set_column_strategy(self, attrs, strategy, opts=None):
+ strategy = self._coerce_strat(strategy)
+
+ for attr in attrs:
+ path = self._generate_path(self.path, attr, "column")
+ cloned = self._generate()
+ cloned.strategy = strategy
+ cloned.path = path
+ cloned.propagate_to_loaders = True
+ if opts:
+ cloned.local_opts.update(opts)
+ cloned._set_path_strategy()
+
+ def _set_path_strategy(self):
+ if self.path.has_entity:
+ self.path.parent.set(self.context, "loader", self)
+ else:
+ self.path.set(self.context, "loader", self)
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d["path"] = self.path.serialize()
+ return d
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self.path = PathRegistry.deserialize(self.path)
+
+ def _chop_path(self, to_chop, path):
+ i = -1
+
+ for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)):
+ if isinstance(c_token, util.string_types):
+ # TODO: this is approximated from the _UnboundLoad
+ # version and probably has issues, not fully covered.
+
+ if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
+ return to_chop
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_token.key:
+ return None
+
+ if c_token is p_token:
+ continue
+ else:
+ return None
+ return to_chop[i+1:]
+
+
+class _UnboundLoad(Load):
+ """Represent a loader option that isn't tied to a root entity.
+
+ The loader option will produce an entity-linked :class:`.Load`
+ object when it is passed :meth:`.Query.options`.
+
+ This provides compatibility with the traditional system
+ of freestanding options, e.g. ``joinedload('x.y.z')``.
+
+ """
+ def __init__(self):
+ self.path = ()
+ self._to_bind = set()
+ self.local_opts = {}
+
+ _is_chain_link = False
+
+ def _set_path_strategy(self):
+ self._to_bind.add(self)
+
+ def _generate_path(self, path, attr, wildcard_key):
+ if wildcard_key and isinstance(attr, util.string_types) and \
+ attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN):
+ if attr == _DEFAULT_TOKEN:
+ self.propagate_to_loaders = False
+ attr = "%s:%s" % (wildcard_key, attr)
+
+ return path + (attr, )
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d['path'] = ret = []
+ for token in util.to_list(self.path):
+ if isinstance(token, PropComparator):
+ ret.append((token._parentmapper.class_, token.key))
+ else:
+ ret.append(token)
+ return d
+
+ def __setstate__(self, state):
+ ret = []
+ for key in state['path']:
+ if isinstance(key, tuple):
+ cls, propkey = key
+ ret.append(getattr(cls, propkey))
+ else:
+ ret.append(key)
+ state['path'] = tuple(ret)
+ self.__dict__ = state
+
+ def _process(self, query, raiseerr):
+ for val in self._to_bind:
+ val._bind_loader(query, query._attributes, raiseerr)
+
+ @classmethod
+ def _from_keys(self, meth, keys, chained, kw):
+ opt = _UnboundLoad()
+
+ def _split_key(key):
+ if isinstance(key, util.string_types):
+ # coerce fooload('*') into "default loader strategy"
+ if key == _WILDCARD_TOKEN:
+ return (_DEFAULT_TOKEN, )
+ # coerce fooload(".*") into "wildcard on default entity"
+ elif key.startswith("." + _WILDCARD_TOKEN):
+ key = key[1:]
+ return key.split(".")
+ else:
+ return (key,)
+ all_tokens = [token for key in keys for token in _split_key(key)]
+
+ for token in all_tokens[0:-1]:
+ if chained:
+ opt = meth(opt, token, **kw)
+ else:
+ opt = opt.defaultload(token)
+ opt._is_chain_link = True
+
+ opt = meth(opt, all_tokens[-1], **kw)
+ opt._is_chain_link = False
+
+ return opt
+
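+ # Note: given a dotted string, _split_key yields one token per step and
+ # the loop in _from_keys above applies the real option only to the last
+ # token, so joinedload("orders.items") ends up equivalent to
+ # defaultload("orders").joinedload("items").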
+
+ def _chop_path(self, to_chop, path):
+ i = -1
+ for i, (c_token, (p_mapper, p_prop)) in enumerate(zip(to_chop, path.pairs())):
+ if isinstance(c_token, util.string_types):
+ if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
+ return to_chop
+ elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and c_token != p_prop.key:
+ return None
+ elif isinstance(c_token, PropComparator):
+ if c_token.property is not p_prop:
+ return None
+ else:
+ i += 1
+
+ return to_chop[i:]
+
+
+ def _bind_loader(self, query, context, raiseerr):
+ start_path = self.path
+ # _current_path implies we're in a
+ # secondary load with an existing path
+
+ current_path = query._current_path
+ if current_path:
+ start_path = self._chop_path(start_path, current_path)
+
+ if not start_path:
+ return None
+
+ token = start_path[0]
+ if isinstance(token, util.string_types):
+ entity = self._find_entity_basestring(query, token, raiseerr)
+ elif isinstance(token, PropComparator):
+ prop = token.property
+ entity = self._find_entity_prop_comparator(
+ query,
+ prop.key,
+ token._parententity,
+ raiseerr)
+
+ else:
+ raise sa_exc.ArgumentError(
+ "mapper option expects "
+ "string key or list of attributes")
+
+ if not entity:
+ return
+
+ path_element = entity.entity_zero
+
+ # transfer our entity-less state into a Load() object
+ # with a real entity path.
+ loader = Load(path_element)
+ loader.context = context
+ loader.strategy = self.strategy
+
+ path = loader.path
+ for token in start_path:
+ loader.path = path = loader._generate_path(
+ loader.path, token, None, raiseerr)
+ if path is None:
+ return
+
+ loader.local_opts.update(self.local_opts)
+
+ if loader.path.has_entity:
+ effective_path = loader.path.parent
+ else:
+ effective_path = loader.path
+
+ # prioritize "first class" options over those
+ # that were "links in the chain", e.g. "x" and "y" in someload("x.y.z")
+ # versus someload("x") / someload("x.y")
+ if self._is_chain_link:
+ effective_path.setdefault(context, "loader", loader)
+ else:
+ effective_path.set(context, "loader", loader)
+
+ def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
+ if _is_aliased_class(mapper):
+ searchfor = mapper
+ else:
+ searchfor = _class_to_mapper(mapper)
+ for ent in query._mapper_entities:
+ if ent.corresponds_to(searchfor):
+ return ent
+ else:
+ if raiseerr:
+ if not list(query._mapper_entities):
+ raise sa_exc.ArgumentError(
+ "Query has only expression-based entities - "
+ "can't find property named '%s'."
+ % (token, )
+ )
+ else:
+ raise sa_exc.ArgumentError(
+ "Can't find property '%s' on any entity "
+ "specified in this Query. Note the full path "
+ "from root (%s) to target entity must be specified."
+ % (token, ",".join(str(x) for
+ x in query._mapper_entities))
+ )
+ else:
+ return None
+
+ def _find_entity_basestring(self, query, token, raiseerr):
+ if token.endswith(':' + _WILDCARD_TOKEN):
+ if len(list(query._mapper_entities)) != 1:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Wildcard loader can only be used with exactly "
+ "one entity. Use Load(ent) to specify "
+ "specific entities.")
+
+ for ent in query._mapper_entities:
+ # return only the first _MapperEntity when searching
+ # based on string prop name. Ideally object
+ # attributes are used to specify more exactly.
+ return ent
+ else:
+ if raiseerr:
+ raise sa_exc.ArgumentError(
+ "Query has only expression-based entities - "
+ "can't find property named '%s'."
+ % (token, )
+ )
+ else:
+ return None
+
+
+
+class loader_option(object):
+ def __init__(self):
+ pass
+
+ def __call__(self, fn):
+ self.name = name = fn.__name__
+ self.fn = fn
+ if hasattr(Load, name):
+ raise TypeError("Load class already has a %s method." % (name))
+ setattr(Load, name, fn)
+
+ return self
+
+ def _add_unbound_fn(self, fn):
+ self._unbound_fn = fn
+ fn_doc = self.fn.__doc__
+ self.fn.__doc__ = """Produce a new :class:`.Load` object with the
+:func:`.orm.%(name)s` option applied.
+
+See :func:`.orm.%(name)s` for usage examples.
+
+""" % {"name": self.name}
+
+ fn.__doc__ = fn_doc
+ return self
+
+ def _add_unbound_all_fn(self, fn):
+ self._unbound_all_fn = fn
+ fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`.
+
+.. deprecated:: 0.9.0
+
+ The "_all()" style is replaced by method chaining, e.g.::
+
+ session.query(MyClass).options(
+ %(name)s("someattribute").%(name)s("anotherattribute")
+ )
+
+""" % {"name": self.name}
+ return self
+
+@loader_option()
+def contains_eager(loadopt, attr, alias=None):
+ """Indicate that the given attribute should be eagerly loaded from
+ columns stated manually in the query.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ The option is used in conjunction with an explicit join that loads
+ the desired rows, i.e.::
+
+ sess.query(Order).\\
+ join(Order.user).\\
+ options(contains_eager(Order.user))
+
+ The above query would join from the ``Order`` entity to its related
+ ``User`` entity, and the returned ``Order`` objects would have the
+ ``Order.user`` attribute pre-populated.
+
+    :func:`contains_eager` also accepts an ``alias`` argument, which is the
+ string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
+ construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
+ the eagerly-loaded rows are to come from an aliased table::
+
+ user_alias = aliased(User)
+ sess.query(Order).\\
+ join((user_alias, Order.user)).\\
+ options(contains_eager(Order.user, alias=user_alias))
+
+ .. seealso::
+
+ :ref:`contains_eager`
+
+ """
+ if alias is not None:
+ if not isinstance(alias, str):
+ info = inspect(alias)
+ alias = info.selectable
+
+ cloned = loadopt.set_relationship_strategy(
+ attr,
+ {"lazy": "joined"},
+ propagate_to_loaders=False
+ )
+ cloned.local_opts['eager_from_alias'] = alias
+ return cloned
+
+@contains_eager._add_unbound_fn
+def contains_eager(*keys, **kw):
+    return _UnboundLoad()._from_keys(
+        _UnboundLoad.contains_eager, keys, True, kw)
+
+@loader_option()
+def load_only(loadopt, *attrs):
+ """Indicate that for a particular entity, only the given list
+ of column-based attribute names should be loaded; all others will be
+ deferred.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ Example - given a class ``User``, load only the ``name`` and ``fullname``
+ attributes::
+
+ session.query(User).options(load_only("name", "fullname"))
+
+ Example - given a relationship ``User.addresses -> Address``, specify
+ subquery loading for the ``User.addresses`` collection, but on each ``Address``
+ object load only the ``email_address`` attribute::
+
+ session.query(User).options(
+ subqueryload("addreses").load_only("email_address")
+ )
+
+ For a :class:`.Query` that has multiple entities, the lead entity can be
+ specifically referred to using the :class:`.Load` constructor::
+
+ session.query(User, Address).join(User.addresses).options(
+ Load(User).load_only("name", "fullname"),
+            Load(Address).load_only("email_address")
+ )
+
+
+ .. versionadded:: 0.9.0
+
+ """
+ cloned = loadopt.set_column_strategy(
+ attrs,
+ {"deferred": False, "instrument": True}
+ )
+ cloned.set_column_strategy("*",
+ {"deferred": True, "instrument": True})
+ return cloned
+
+@load_only._add_unbound_fn
+def load_only(*attrs):
+ return _UnboundLoad().load_only(*attrs)
+
+@loader_option()
+def joinedload(loadopt, attr, innerjoin=None):
+ """Indicate that the given attribute should be loaded using joined
+ eager loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+    Examples::
+
+ # joined-load the "orders" collection on "User"
+ query(User).options(joinedload(User.orders))
+
+ # joined-load Order.items and then Item.keywords
+ query(Order).options(joinedload(Order.items).joinedload(Item.keywords))
+
+ # lazily load Order.items, but when Items are loaded,
+ # joined-load the keywords collection
+ query(Order).options(lazyload(Order.items).joinedload(Item.keywords))
+
+    :func:`.orm.joinedload` also accepts a keyword argument ``innerjoin=True``
+    which indicates the use of an inner join instead of an outer join::
+
+ query(Order).options(joinedload(Order.user, innerjoin=True))
+
+ .. note::
+
+ The joins produced by :func:`.orm.joinedload` are **anonymously aliased**.
+ The criteria by which the join proceeds cannot be modified, nor can the
+ :class:`.Query` refer to these joins in any way, including ordering.
+
+ To produce a specific SQL JOIN which is explicitly available, use
+ :meth:`.Query.join`. To combine explicit JOINs with eager loading
+ of collections, use :func:`.orm.contains_eager`; see :ref:`contains_eager`.
+
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :ref:`contains_eager`
+
+ :func:`.orm.subqueryload`
+
+ :func:`.orm.lazyload`
+
+ """
+ loader = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
+ if innerjoin is not None:
+ loader.local_opts['innerjoin'] = innerjoin
+ return loader
+
+@joinedload._add_unbound_fn
+def joinedload(*keys, **kw):
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.joinedload, keys, False, kw)
+
+@joinedload._add_unbound_all_fn
+def joinedload_all(*keys, **kw):
+ return _UnboundLoad._from_keys(
+ _UnboundLoad.joinedload, keys, True, kw)
+
+
+@loader_option()
+def subqueryload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using
+ subquery eager loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+    Examples::
+
+ # subquery-load the "orders" collection on "User"
+ query(User).options(subqueryload(User.orders))
+
+ # subquery-load Order.items and then Item.keywords
+ query(Order).options(subqueryload(Order.items).subqueryload(Item.keywords))
+
+ # lazily load Order.items, but when Items are loaded,
+ # subquery-load the keywords collection
+ query(Order).options(lazyload(Order.items).subqueryload(Item.keywords))
+
+
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :func:`.orm.joinedload`
+
+ :func:`.orm.lazyload`
+
+ """
+ return loadopt.set_relationship_strategy(attr, {"lazy": "subquery"})
+
+@subqueryload._add_unbound_fn
+def subqueryload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, False, {})
+
+@subqueryload._add_unbound_all_fn
+def subqueryload_all(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, True, {})
+
+@loader_option()
+def lazyload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using "lazy"
+ loading.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
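+    e.g., paralleling the :func:`.orm.joinedload` examples above
+    (``User.orders`` is assumed to be a mapped relationship)::
+
+        # load the "orders" collection on "User" lazily, upon
+        # first access
+        query(User).options(lazyload(User.orders))
+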
+ """
+ return loadopt.set_relationship_strategy(attr, {"lazy": "select"})
+
+@lazyload._add_unbound_fn
+def lazyload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, False, {})
+
+@lazyload._add_unbound_all_fn
+def lazyload_all(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, True, {})
+
+@loader_option()
+def immediateload(loadopt, attr):
+ """Indicate that the given attribute should be loaded using
+ an immediate load with a per-attribute SELECT statement.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
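+    e.g., following the pattern of the :func:`.orm.subqueryload`
+    examples above (``User.orders`` is an assumed relationship)::
+
+        # load the "orders" collection on "User" using an additional
+        # per-attribute SELECT statement
+        query(User).options(immediateload(User.orders))
+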
+ .. seealso::
+
+ :ref:`loading_toplevel`
+
+ :func:`.orm.joinedload`
+
+ :func:`.orm.lazyload`
+
+ """
+ loader = loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
+ return loader
+
+@immediateload._add_unbound_fn
+def immediateload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, False, {})
+
+
+@loader_option()
+def noload(loadopt, attr):
+ """Indicate that the given relationship attribute should remain unloaded.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
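+    e.g., assuming a ``User.orders`` relationship as in the examples
+    above::
+
+        # the User.orders collection will not be loaded, and no SQL
+        # will be emitted for it
+        query(User).options(noload(User.orders))
+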
+ :func:`.orm.noload` applies to :func:`.relationship` attributes; for
+ column-based attributes, see :func:`.orm.defer`.
+
+ """
+
+ return loadopt.set_relationship_strategy(attr, {"lazy": "noload"})
+
+@noload._add_unbound_fn
+def noload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, False, {})
+
+@loader_option()
+def defaultload(loadopt, attr):
+ """Indicate an attribute should load using its default loader style.
+
+    This method is used to link to other loader options, such as
+    to set the :func:`.orm.defer` option on a class that is linked to
+    a relationship of the parent class being loaded.
+    :func:`.orm.defaultload` can be used to navigate this path without
+    changing the loading style of the relationship::
+
+ session.query(MyClass).options(defaultload("someattr").defer("some_column"))
+
+ .. seealso::
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_relationship_strategy(
+ attr,
+ None
+ )
+
+@defaultload._add_unbound_fn
+def defaultload(*keys):
+ return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, False, {})
+
+@loader_option()
+def defer(loadopt, key):
+ """Indicate that the given column-oriented attribute should be deferred, e.g.
+ not loaded until accessed.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ e.g.::
+
+ from sqlalchemy.orm import defer
+
+ session.query(MyClass).options(
+ defer("attribute_one"),
+ defer("attribute_two"))
+
+ session.query(MyClass).options(
+ defer(MyClass.attribute_one),
+ defer(MyClass.attribute_two))
+
+ To specify a deferred load of an attribute on a related class,
+ the path can be specified one token at a time, specifying the loading
+ style for each link along the chain. To leave the loading style
+ for a link unchanged, use :func:`.orm.defaultload`::
+
+ session.query(MyClass).options(defaultload("someattr").defer("some_column"))
+
+ A :class:`.Load` object that is present on a certain path can have
+ :meth:`.Load.defer` called multiple times, each will operate on the same
+ parent entity::
+
+
+ session.query(MyClass).options(
+ defaultload("someattr").
+ defer("some_column").
+ defer("some_other_column").
+ defer("another_column")
+ )
+
+ :param key: Attribute to be deferred.
+
+ :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+ of specifying a path as a series of attributes, which is now superseded
+ by the method-chained style.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": True, "instrument": True}
+ )
+
+
+@defer._add_unbound_fn
+def defer(key, *addl_attrs):
+    return _UnboundLoad._from_keys(
+        _UnboundLoad.defer, (key, ) + addl_attrs, False, {})
+
+@loader_option()
+def undefer(loadopt, key):
+ """Indicate that the given column-oriented attribute should be undeferred, e.g.
+ specified within the SELECT statement of the entity as a whole.
+
+ The column being undeferred is typically set up on the mapping as a
+ :func:`.deferred` attribute.
+
+ This function is part of the :class:`.Load` interface and supports
+ both method-chained and standalone operation.
+
+ Examples::
+
+ # undefer two columns
+ session.query(MyClass).options(undefer("col1"), undefer("col2"))
+
+ # undefer all columns specific to a single class using Load + *
+ session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))
+
+ :param key: Attribute to be undeferred.
+
+ :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
+ of specifying a path as a series of attributes, which is now superseded
+ by the method-chained style.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer_group`
+
+ """
+ return loadopt.set_column_strategy(
+ (key, ),
+ {"deferred": False, "instrument": True}
+ )
+
+@undefer._add_unbound_fn
+def undefer(key, *addl_attrs):
+    return _UnboundLoad._from_keys(
+        _UnboundLoad.undefer, (key, ) + addl_attrs, False, {})
+
+@loader_option()
+def undefer_group(loadopt, name):
+ """Indicate that columns within the given deferred group name should be undeferred.
+
+ The columns being undeferred are set up on the mapping as
+ :func:`.deferred` attributes and include a "group" name.
+
+    E.g.::
+
+ session.query(MyClass).options(undefer_group("large_attrs"))
+
+ To undefer a group of attributes on a related entity, the path can be
+ spelled out using relationship loader options, such as :func:`.orm.defaultload`::
+
+ session.query(MyClass).options(defaultload("someattr").undefer_group("large_attrs"))
+
+ .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to a
+       particular entity load path.
+
+ .. seealso::
+
+ :ref:`deferred`
+
+ :func:`.orm.defer`
+
+ :func:`.orm.undefer`
+
+ """
+ return loadopt.set_column_strategy(
+ "*",
+ None,
+ {"undefer_group": name}
+ )
+
+@undefer_group._add_unbound_fn
+def undefer_group(name):
+ return _UnboundLoad().undefer_group(name)
+
diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py
index 6524ab27a..cf735fc53 100644
--- a/lib/sqlalchemy/orm/sync.py
+++ b/lib/sqlalchemy/orm/sync.py
@@ -1,5 +1,5 @@
# orm/sync.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 1e8d3e4dc..8c0c0d40e 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -1,5 +1,5 @@
# orm/unitofwork.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,8 +16,6 @@ from .. import util, event, exc
from ..util import topological
from . import attributes, persistence, util as orm_util, exc as orm_exc
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-
def track_cascade_events(descriptor, prop):
"""Establish event listeners on object attributes which handle
@@ -33,7 +31,7 @@ def track_cascade_events(descriptor, prop):
if item is None:
return
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
if sess._warn_on_events:
sess._flush_warning("collection append")
@@ -50,7 +48,7 @@ def track_cascade_events(descriptor, prop):
if item is None:
return
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
prop = state.manager.mapper._props[key]
@@ -74,7 +72,7 @@ def track_cascade_events(descriptor, prop):
if oldvalue is newvalue:
return newvalue
- sess = sessionlib._state_session(state)
+ sess = state.session
if sess:
if sess._warn_on_events:
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index ae1ca2013..dd85f2ef1 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -1,5 +1,5 @@
# orm/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,21 +7,20 @@
from .. import sql, util, event, exc as sa_exc, inspection
from ..sql import expression, util as sql_util, operators
-from .interfaces import PropComparator, MapperProperty, _InspectionAttr
-from itertools import chain
-from . import attributes, exc
+from .interfaces import PropComparator, MapperProperty
+from . import attributes
import re
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
+from .base import instance_str, state_str, state_class_str, attribute_str, \
+ state_attribute_str, object_mapper, object_state, _none_set
+from .base import class_mapper, _class_to_mapper
+from .base import _InspectionAttr
+from .path_registry import PathRegistry
all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
"expunge", "save-update", "refresh-expire",
"none"))
-_INSTRUMENTOR = ('mapper', 'instrumentor')
-
-_none_set = frozenset([None])
-
class CascadeOptions(frozenset):
"""Keeps track of the options sent to relationship().cascade"""
@@ -71,24 +70,43 @@ class CascadeOptions(frozenset):
)
-def _validator_events(desc, key, validator, include_removes):
+def _validator_events(desc, key, validator, include_removes, include_backrefs):
"""Runs a validation method on an attribute value to be set or appended."""
+ if not include_backrefs:
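+        # an event fired by a backref carries the attribute impl of the
+        # *other* side of the relationship as its initiator; comparing
+        # the initiator's impl to this attribute's own impl detects
+        # that case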
+ def detect_is_backref(state, initiator):
+ impl = state.manager[key].impl
+ return initiator.impl is not impl
+
if include_removes:
def append(state, value, initiator):
- return validator(state.obj(), key, value, False)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value, False)
+ else:
+ return value
def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value, False)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value, False)
+ else:
+ return value
def remove(state, value, initiator):
- validator(state.obj(), key, value, True)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ validator(state.obj(), key, value, True)
+
else:
def append(state, value, initiator):
- return validator(state.obj(), key, value)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value)
+ else:
+ return value
def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value)
+ if include_backrefs or not detect_is_backref(state, initiator):
+ return validator(state.obj(), key, value)
+ else:
+ return value
event.listen(desc, 'append', append, raw=True, retval=True)
event.listen(desc, 'set', set_, raw=True, retval=True)
@@ -160,31 +178,59 @@ def polymorphic_union(table_map, typecolname,
def identity_key(*args, **kwargs):
- """Get an identity key.
+ """Generate "identity key" tuples, as are used as keys in the
+ :attr:`.Session.identity_map` dictionary.
- Valid call signatures:
+ This function has several call styles:
* ``identity_key(class, ident)``
- class
- mapped class (must be a positional argument)
+ This form receives a mapped class and a primary key scalar or
+ tuple as an argument.
+
+ E.g.::
+
+ >>> identity_key(MyClass, (1, 2))
+ (<class '__main__.MyClass'>, (1, 2))
- ident
- primary key, if the key is composite this is a tuple
+ :param class: mapped class (must be a positional argument)
+ :param ident: primary key, may be a scalar or tuple argument.
* ``identity_key(instance=instance)``
- instance
- object instance (must be given as a keyword arg)
+      This form will produce the identity key for a given instance. The
+      instance need not be persistent; it need only have its primary key
+      attributes populated (otherwise the key will contain ``None`` for
+      those missing values).
+
+ E.g.::
+
+ >>> instance = MyClass(1, 2)
+ >>> identity_key(instance=instance)
+ (<class '__main__.MyClass'>, (1, 2))
+
+      In this form, the given instance is ultimately run through
+ :meth:`.Mapper.identity_key_from_instance`, which will have the
+ effect of performing a database check for the corresponding row
+ if the object is expired.
+
+ :param instance: object instance (must be given as a keyword arg)
* ``identity_key(class, row=row)``
- class
- mapped class (must be a positional argument)
+      This form is similar to the class/tuple form, except that it is
+      passed a database result row as a :class:`.RowProxy` object.
+
+ E.g.::
- row
- result proxy row (must be given as a keyword arg)
+ >>> row = engine.execute("select * from table where a=1 and b=2").first()
+ >>> identity_key(MyClass, row=row)
+ (<class '__main__.MyClass'>, (1, 2))
+
+ :param class: mapped class (must be a positional argument)
+ :param row: :class:`.RowProxy` row returned by a :class:`.ResultProxy`
+ (must be given as a keyword arg)
"""
if args:
@@ -245,212 +291,6 @@ class ORMAdapter(sql_util.ColumnAdapter):
else:
return None
-def _unreduce_path(path):
- return PathRegistry.deserialize(path)
-
-class PathRegistry(object):
- """Represent query load paths and registry functions.
-
- Basically represents structures like:
-
- (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
-
- These structures are generated by things like
- query options (joinedload(), subqueryload(), etc.) and are
- used to compose keys stored in the query._attributes dictionary
- for various options.
-
- They are then re-composed at query compile/result row time as
- the query is formed and as rows are fetched, where they again
- serve to compose keys to look up options in the context.attributes
- dictionary, which is copied from query._attributes.
-
- The path structure has a limited amount of caching, where each
- "root" ultimately pulls from a fixed registry associated with
- the first mapper, that also contains elements for each of its
- property keys. However paths longer than two elements, which
- are the exception rather than the rule, are generated on an
- as-needed basis.
-
- """
-
- def __eq__(self, other):
- return other is not None and \
- self.path == other.path
-
- def set(self, attributes, key, value):
- attributes[(key, self.path)] = value
-
- def setdefault(self, attributes, key, value):
- attributes.setdefault((key, self.path), value)
-
- def get(self, attributes, key, value=None):
- key = (key, self.path)
- if key in attributes:
- return attributes[key]
- else:
- return value
-
- def __len__(self):
- return len(self.path)
-
- @property
- def length(self):
- return len(self.path)
-
- def pairs(self):
- path = self.path
- for i in range(0, len(path), 2):
- yield path[i], path[i + 1]
-
- def contains_mapper(self, mapper):
- for path_mapper in [
- self.path[i] for i in range(0, len(self.path), 2)
- ]:
- if isinstance(path_mapper, mapperlib.Mapper) and \
- path_mapper.isa(mapper):
- return True
- else:
- return False
-
- def contains(self, attributes, key):
- return (key, self.path) in attributes
-
- def __reduce__(self):
- return _unreduce_path, (self.serialize(), )
-
- def serialize(self):
- path = self.path
- return list(zip(
- [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
- [path[i].key for i in range(1, len(path), 2)] + [None]
- ))
-
- @classmethod
- def deserialize(cls, path):
- if path is None:
- return None
-
- p = tuple(chain(*[(class_mapper(mcls),
- class_mapper(mcls).attrs[key]
- if key is not None else None)
- for mcls, key in path]))
- if p and p[-1] is None:
- p = p[0:-1]
- return cls.coerce(p)
-
- @classmethod
- def per_mapper(cls, mapper):
- return EntityRegistry(
- cls.root, mapper
- )
-
- @classmethod
- def coerce(cls, raw):
- return util.reduce(lambda prev, next: prev[next], raw, cls.root)
-
- @classmethod
- def token(cls, token):
- return TokenRegistry(cls.root, token)
-
- def __add__(self, other):
- return util.reduce(
- lambda prev, next: prev[next],
- other.path, self)
-
- def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.path, )
-
-
-class RootRegistry(PathRegistry):
- """Root registry, defers to mappers so that
- paths are maintained per-root-mapper.
-
- """
- path = ()
-
- def __getitem__(self, entity):
- return entity._path_registry
-PathRegistry.root = RootRegistry()
-
-class TokenRegistry(PathRegistry):
- def __init__(self, parent, token):
- self.token = token
- self.parent = parent
- self.path = parent.path + (token,)
-
- def __getitem__(self, entity):
- raise NotImplementedError()
-
-class PropRegistry(PathRegistry):
- def __init__(self, parent, prop):
- # restate this path in terms of the
- # given MapperProperty's parent.
- insp = inspection.inspect(parent[-1])
- if not insp.is_aliased_class or insp._use_mapper_path:
- parent = parent.parent[prop.parent]
- elif insp.is_aliased_class and insp.with_polymorphic_mappers:
- if prop.parent is not insp.mapper and \
- prop.parent in insp.with_polymorphic_mappers:
- subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
- parent = parent.parent[subclass_entity]
-
- self.prop = prop
- self.parent = parent
- self.path = parent.path + (prop,)
-
- def __getitem__(self, entity):
- if isinstance(entity, (int, slice)):
- return self.path[entity]
- else:
- return EntityRegistry(
- self, entity
- )
-
-
-class EntityRegistry(PathRegistry, dict):
- is_aliased_class = False
-
- def __init__(self, parent, entity):
- self.key = entity
- self.parent = parent
- self.is_aliased_class = entity.is_aliased_class
-
- self.path = parent.path + (entity,)
-
- def __bool__(self):
- return True
- __nonzero__ = __bool__
-
- def __getitem__(self, entity):
- if isinstance(entity, (int, slice)):
- return self.path[entity]
- else:
- return dict.__getitem__(self, entity)
-
- def _inlined_get_for(self, prop, context, key):
- """an inlined version of:
-
- cls = path[mapperproperty].get(context, key)
-
- Skips the isinstance() check in __getitem__
- and the extra method call for get().
- Used by StrategizedProperty for its
- very frequent lookup.
-
- """
- path = dict.__getitem__(self, prop)
- path_key = (key, path.path)
- if path_key in context.attributes:
- return context.attributes[path_key]
- else:
- return None
-
- def __missing__(self, key):
- self[key] = item = PropRegistry(self, key)
- return item
-
-
class AliasedClass(object):
"""Represents an "aliased" form of a mapped class for usage with Query.
@@ -538,8 +378,10 @@ class AliasedClass(object):
else:
raise AttributeError(key)
- if isinstance(attr, attributes.QueryableAttribute):
- return _aliased_insp._adapt_prop(attr, key)
+ if isinstance(attr, PropComparator):
+ ret = attr.adapt_to_entity(_aliased_insp)
+ setattr(self, key, ret)
+ return ret
elif hasattr(attr, 'func_code'):
is_method = getattr(_aliased_insp._target, key, None)
if is_method and is_method.__self__ is not None:
@@ -550,7 +392,8 @@ class AliasedClass(object):
ret = attr.__get__(None, self)
if isinstance(ret, PropComparator):
return ret.adapt_to_entity(_aliased_insp)
- return ret
+ else:
+ return ret
else:
return attr
@@ -672,17 +515,6 @@ class AliasedInsp(_InspectionAttr):
'parentmapper': self.mapper}
)
- def _adapt_prop(self, existing, key):
- comparator = existing.comparator.adapt_to_entity(self)
- queryattr = attributes.QueryableAttribute(
- self.entity, key,
- impl=existing.impl,
- parententity=self,
- comparator=comparator)
- setattr(self.entity, key, queryattr)
- return queryattr
-
-
def _entity_for_mapper(self, mapper):
self_poly = self.with_polymorphic_mappers
if mapper in self_poly:
@@ -1053,186 +885,6 @@ def with_parent(instance, prop):
value_is_parent=True)
-def _attr_as_key(attr):
- if hasattr(attr, 'key'):
- return attr.key
- else:
- return expression._column_as_key(attr)
-
-
-_state_mapper = util.dottedgetter('manager.mapper')
-
-
-@inspection._inspects(object)
-def _inspect_mapped_object(instance):
- try:
- return attributes.instance_state(instance)
- # TODO: whats the py-2/3 syntax to catch two
- # different kinds of exceptions at once ?
- except exc.UnmappedClassError:
- return None
- except exc.NO_STATE:
- return None
-
-
-@inspection._inspects(type)
-def _inspect_mapped_class(class_, configure=False):
- try:
- class_manager = attributes.manager_of_class(class_)
- if not class_manager.is_mapped:
- return None
- mapper = class_manager.mapper
- if configure and mapperlib.module._new_mappers:
- mapperlib.configure_mappers()
- return mapper
-
- except exc.NO_STATE:
- return None
-
-
-def object_mapper(instance):
- """Given an object, return the primary Mapper associated with the object
- instance.
-
- Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
- if no mapping is configured.
-
- This function is available via the inspection system as::
-
- inspect(instance).mapper
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
- not part of a mapping.
-
- """
- return object_state(instance).mapper
-
-
-def object_state(instance):
- """Given an object, return the :class:`.InstanceState`
- associated with the object.
-
- Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
- if no mapping is configured.
-
- Equivalent functionality is available via the :func:`.inspect`
- function as::
-
- inspect(instance)
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
- not part of a mapping.
-
- """
- state = _inspect_mapped_object(instance)
- if state is None:
- raise exc.UnmappedInstanceError(instance)
- else:
- return state
-
-
-def class_mapper(class_, configure=True):
- """Given a class, return the primary :class:`.Mapper` associated
- with the key.
-
- Raises :class:`.UnmappedClassError` if no mapping is configured
- on the given class, or :class:`.ArgumentError` if a non-class
- object is passed.
-
- Equivalent functionality is available via the :func:`.inspect`
- function as::
-
- inspect(some_mapped_class)
-
- Using the inspection system will raise
- :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
-
- """
- mapper = _inspect_mapped_class(class_, configure=configure)
- if mapper is None:
- if not isinstance(class_, type):
- raise sa_exc.ArgumentError(
- "Class object expected, got '%r'." % class_)
- raise exc.UnmappedClassError(class_)
- else:
- return mapper
-
-
-def _class_to_mapper(class_or_mapper):
- insp = inspection.inspect(class_or_mapper, False)
- if insp is not None:
- return insp.mapper
- else:
- raise exc.UnmappedClassError(class_or_mapper)
-
-
-def _mapper_or_none(entity):
- """Return the :class:`.Mapper` for the given class or None if the
- class is not mapped."""
-
- insp = inspection.inspect(entity, False)
- if insp is not None:
- return insp.mapper
- else:
- return None
-
-
-def _is_mapped_class(entity):
- """Return True if the given object is a mapped class,
- :class:`.Mapper`, or :class:`.AliasedClass`."""
-
- insp = inspection.inspect(entity, False)
- return insp is not None and \
- hasattr(insp, "mapper") and \
- (
- insp.is_mapper
- or insp.is_aliased_class
- )
-
-
-def _is_aliased_class(entity):
- insp = inspection.inspect(entity, False)
- return insp is not None and \
- getattr(insp, "is_aliased_class", False)
-
-
-def _entity_descriptor(entity, key):
- """Return a class attribute given an entity and string name.
-
- May return :class:`.InstrumentedAttribute` or user-defined
- attribute.
-
- """
- insp = inspection.inspect(entity)
- if insp.is_selectable:
- description = entity
- entity = insp.c
- elif insp.is_aliased_class:
- entity = insp.entity
- description = entity
- elif hasattr(insp, "mapper"):
- description = entity = insp.mapper.class_
- else:
- description = entity
-
- try:
- return getattr(entity, key)
- except AttributeError:
- raise sa_exc.InvalidRequestError(
- "Entity '%s' has no property '%s'" %
- (description, key)
- )
-
-
-def _orm_columns(entity):
- insp = inspection.inspect(entity, False)
- if hasattr(insp, 'selectable'):
- return [c for c in insp.selectable.c]
- else:
- return [entity]
-
def has_identity(object):
"""Return True if the given object has a database
@@ -1260,37 +912,8 @@ def was_deleted(object):
state = attributes.instance_state(object)
return state.deleted
-def instance_str(instance):
- """Return a string describing an instance."""
-
- return state_str(attributes.instance_state(instance))
-
-
-def state_str(state):
- """Return a string describing an instance via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
-
-
-def state_class_str(state):
- """Return a string describing an instance's class via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s>' % (state.class_.__name__, )
-def attribute_str(instance, attribute):
- return instance_str(instance) + "." + attribute
-
-
-def state_attribute_str(state, attribute):
- return state_str(state) + "." + attribute
-
def randomize_unitofwork():
"""Use random-ordering sets within the unit of work in order
@@ -1327,3 +950,4 @@ def randomize_unitofwork():
from sqlalchemy.testing.util import RandomSet
topological.set = unitofwork.set = session.set = mapper.set = \
dependency.set = RandomSet
+
diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py
index 498b001c1..f84f331d5 100644
--- a/lib/sqlalchemy/pool.py
+++ b/lib/sqlalchemy/pool.py
@@ -1,5 +1,5 @@
# sqlalchemy/pool.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -20,7 +20,7 @@ import time
import traceback
import weakref
-from . import exc, log, event, events, interfaces, util
+from . import exc, log, event, interfaces, util
from .util import queue as sqla_queue
from .util import threading, memoized_property, \
chop_traceback
@@ -130,10 +130,10 @@ class Pool(log.Identified):
:meth:`unique_connection` method is provided to bypass the
threadlocal behavior installed into :meth:`connect`.
- :param reset_on_return: If true, reset the database state of
- connections returned to the pool. This is typically a
- ROLLBACK to release locks and transaction resources.
- Disable at your own peril. Defaults to True.
+ :param reset_on_return: Configures the action to take
+ on connections as they are returned to the pool.
+ See the argument description in :class:`.QueuePool` for
+ more detail.
:param events: a list of 2-tuples, each of the form
``(callable, target)`` which will be passed to event.listen()
@@ -185,8 +185,6 @@ class Pool(log.Identified):
for l in listeners:
self.add_listener(l)
- dispatch = event.dispatcher(events.PoolEvents)
-
def _close_connection(self, connection):
self.logger.debug("Closing connection %r", connection)
try:
@@ -218,7 +216,7 @@ class Pool(log.Identified):
"""
- return _ConnectionFairy.checkout(self)
+ return _ConnectionFairy._checkout(self)
def _create_connection(self):
"""Called by subclasses to create a new ConnectionRecord."""
@@ -270,16 +268,17 @@ class Pool(log.Identified):
"""
if not self._use_threadlocal:
- return _ConnectionFairy.checkout(self)
+ return _ConnectionFairy._checkout(self)
try:
rec = self._threadconns.current()
except AttributeError:
pass
else:
- return rec.checkout_existing()
+ if rec is not None:
+ return rec._checkout_existing()
- return _ConnectionFairy.checkout(self, self._threadconns)
+ return _ConnectionFairy._checkout(self, self._threadconns)
def _return_conn(self, record):
"""Given a _ConnectionRecord, return it to the :class:`.Pool`.
@@ -310,6 +309,34 @@ class Pool(log.Identified):
class _ConnectionRecord(object):
+ """Internal object which maintains an individual DBAPI connection
+ referenced by a :class:`.Pool`.
+
+ The :class:`._ConnectionRecord` object always exists for any particular
+ DBAPI connection whether or not that DBAPI connection has been
+ "checked out". This is in contrast to the :class:`._ConnectionFairy`
+ which is only a public facade to the DBAPI connection while it is checked
+ out.
+
+ A :class:`._ConnectionRecord` may exist for a span longer than that
+ of a single DBAPI connection. For example, if the
+ :meth:`._ConnectionRecord.invalidate`
+ method is called, the DBAPI connection associated with this
+ :class:`._ConnectionRecord`
+ will be discarded, but the :class:`._ConnectionRecord` may be used again,
+ in which case a new DBAPI connection is produced when the :class:`.Pool`
+ next uses this record.
+
+ The :class:`._ConnectionRecord` is delivered along with connection
+ pool events, including :meth:`.PoolEvents.connect` and
+    :meth:`.PoolEvents.checkout`; however, :class:`._ConnectionRecord` still
+ remains an internal object whose API and internals may change.
+
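+    For example, a pool event listener receives the record as an
+    argument (a sketch; ``my_engine`` is an assumed :class:`.Engine`)::
+
+        import os
+
+        from sqlalchemy import event
+
+        @event.listens_for(my_engine, "connect")
+        def on_connect(dbapi_connection, connection_record):
+            # connection_record is the _ConnectionRecord; its .info
+            # dictionary travels along with the DBAPI connection
+            connection_record.info['pid'] = os.getpid()
+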
+ .. seealso::
+
+ :class:`._ConnectionFairy`
+
+ """
def __init__(self, pool):
self.__pool = pool
@@ -321,8 +348,23 @@ class _ConnectionRecord(object):
exec_once(self.connection, self)
pool.dispatch.connect(self.connection, self)
+ connection = None
+ """A reference to the actual DBAPI connection being tracked.
+
+ May be ``None`` if this :class:`._ConnectionRecord` has been marked
+ as invalidated; a new DBAPI connection may replace it if the owning
+ pool calls upon this :class:`._ConnectionRecord` to reconnect.
+
+ """
+
@util.memoized_property
def info(self):
+ """The ``.info`` dictionary associated with the DBAPI connection.
+
+ This dictionary is shared among the :attr:`._ConnectionFairy.info`
+ and :attr:`.Connection.info` accessors.
+
+ """
return {}
@classmethod
@@ -361,9 +403,22 @@ class _ConnectionRecord(object):
def close(self):
if self.connection is not None:
- self.__pool._close_connection(self.connection)
+ self.__close()
def invalidate(self, e=None):
+ """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`.
+
+ This method is called for all connection invalidations, including
+ when the :meth:`._ConnectionFairy.invalidate` or :meth:`.Connection.invalidate`
+ methods are called, as well as when any so-called "automatic invalidation"
+ condition occurs.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
+ """
+ self.__pool.dispatch.invalidate(self.connection, self, e)
if e is not None:
self.__pool.logger.info(
"Invalidate connection %r (reason: %s:%s)",
@@ -424,18 +479,8 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo, fairy=None):
try:
fairy = fairy or _ConnectionFairy(connection, connection_record)
- if pool.dispatch.reset:
- pool.dispatch.reset(fairy, connection_record)
- if pool._reset_on_return is reset_rollback:
- if echo:
- pool.logger.debug("Connection %s rollback-on-return",
- connection)
- pool._dialect.do_rollback(fairy)
- elif pool._reset_on_return is reset_commit:
- if echo:
- pool.logger.debug("Connection %s commit-on-return",
- connection)
- pool._dialect.do_commit(fairy)
+ assert fairy.connection is connection
+ fairy._reset(pool, echo)
# Immediately close detached instances
if not connection_record:
@@ -454,15 +499,58 @@ _refs = set()
class _ConnectionFairy(object):
- """Proxies a DB-API connection and provides return-on-dereference
- support."""
+ """Proxies a DBAPI connection and provides return-on-dereference
+ support.
+
+ This is an internal object used by the :class:`.Pool` implementation
+ to provide context management to a DBAPI connection delivered by
+ that :class:`.Pool`.
+
+ The name "fairy" is inspired by the fact that the :class:`._ConnectionFairy`
+ object's lifespan is transitory, as it lasts only for the length of a
+ specific DBAPI connection being checked out from the pool, and additionally
+ that as a transparent proxy, it is mostly invisible.
+
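+    A minimal sketch of the checkout/checkin cycle as seen from the
+    pool's public API (``some_pool`` is an assumed :class:`.Pool`
+    instance)::
+
+        fairy = some_pool.connect()  # a _ConnectionFairy is returned
+        cursor = fairy.cursor()      # proxies the DBAPI cursor() method
+        fairy.close()                # connection is returned to the pool
+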
+ .. seealso::
+
+ :class:`._ConnectionRecord`
+
+ """
def __init__(self, dbapi_connection, connection_record):
self.connection = dbapi_connection
self._connection_record = connection_record
+ connection = None
+ """A reference to the actual DBAPI connection being tracked."""
+
+ _connection_record = None
+ """A reference to the :class:`._ConnectionRecord` object associated
+ with the DBAPI connection.
+
+ This is currently an internal accessor which is subject to change.
+
+ """
+
+ _reset_agent = None
+ """Refer to an object with a ``.commit()`` and ``.rollback()`` method;
+ if non-None, the "reset-on-return" feature will call upon this object
+ rather than directly against the dialect-level do_rollback() and do_commit()
+ methods.
+
+ In practice, a :class:`.Connection` assigns a :class:`.Transaction` object
+ to this variable when one is in scope so that the :class:`.Transaction`
+ takes the job of committing or rolling back on return if
+ :meth:`.Connection.close` is called while the :class:`.Transaction`
+ still exists.
+
+    This is essentially an "event handler" of sorts but is simplified as an
+    instance variable, both for performance/simplicity and because there
+    can only be one "reset agent" at a time.
+ """
+
@classmethod
- def checkout(cls, pool, threadconns=None, fairy=None):
+ def _checkout(cls, pool, threadconns=None, fairy=None):
if not fairy:
fairy = _ConnectionRecord.checkout(pool)
@@ -499,16 +587,40 @@ class _ConnectionFairy(object):
fairy.invalidate()
raise exc.InvalidRequestError("This connection is closed")
- def checkout_existing(self):
- return _ConnectionFairy.checkout(self._pool, fairy=self)
+ def _checkout_existing(self):
+ return _ConnectionFairy._checkout(self._pool, fairy=self)
- def checkin(self):
+ def _checkin(self):
_finalize_fairy(self.connection, self._connection_record,
self._pool, None, self._echo, fairy=self)
self.connection = None
self._connection_record = None
- _close = checkin
+ _close = _checkin
+
+ def _reset(self, pool, echo):
+ if pool.dispatch.reset:
+ pool.dispatch.reset(self, self._connection_record)
+ if pool._reset_on_return is reset_rollback:
+ if echo:
+ pool.logger.debug("Connection %s rollback-on-return%s",
+ self.connection,
+ ", via agent"
+ if self._reset_agent else "")
+ if self._reset_agent:
+ self._reset_agent.rollback()
+ else:
+ pool._dialect.do_rollback(self)
+ elif pool._reset_on_return is reset_commit:
+ if echo:
+ pool.logger.debug("Connection %s commit-on-return%s",
+ self.connection,
+ ", via agent"
+ if self._reset_agent else "")
+ if self._reset_agent:
+ self._reset_agent.commit()
+ else:
+ pool._dialect.do_commit(self)
@property
def _logger(self):
@@ -516,6 +628,9 @@ class _ConnectionFairy(object):
@property
def is_valid(self):
+ """Return True if this :class:`._ConnectionFairy` still refers
+ to an active DBAPI connection."""
+
return self.connection is not None
@util.memoized_property
@@ -526,7 +641,9 @@ class _ConnectionFairy(object):
The data here will follow along with the DBAPI connection including
after it is returned to the connection pool and used again
- in subsequent instances of :class:`.ConnectionFairy`.
+ in subsequent instances of :class:`._ConnectionFairy`. It is shared
+ with the :attr:`._ConnectionRecord.info` and :attr:`.Connection.info`
+ accessors.
"""
return self._connection_record.info
@@ -534,8 +651,16 @@ class _ConnectionFairy(object):
def invalidate(self, e=None):
"""Mark this connection as invalidated.
- The connection will be immediately closed. The containing
- ConnectionRecord will create a new connection when next used.
+ This method can be called directly, and is also called as a result
+ of the :meth:`.Connection.invalidate` method. When invoked,
+ the DBAPI connection is immediately closed and discarded from
+ further use by the pool. The invalidation mechanism proceeds
+ via the :meth:`._ConnectionRecord.invalidate` internal method.
+
+ .. seealso::
+
+ :ref:`pool_connection_invalidation`
+
"""
if self.connection is None:
@@ -543,9 +668,15 @@ class _ConnectionFairy(object):
if self._connection_record:
self._connection_record.invalidate(e=e)
self.connection = None
- self.checkin()
+ self._checkin()
def cursor(self, *args, **kwargs):
+ """Return a new DBAPI cursor for the underlying connection.
+
+ This method is a proxy for the ``connection.cursor()`` DBAPI
+ method.
+
+ """
return self.connection.cursor(*args, **kwargs)
def __getattr__(self, key):
@@ -577,7 +708,7 @@ class _ConnectionFairy(object):
def close(self):
self._counter -= 1
if self._counter == 0:
- self.checkin()
+ self._checkin()
@@ -659,15 +790,6 @@ class SingletonThreadPool(Pool):
return c
-class DummyLock(object):
-
- def acquire(self, wait=True):
- return True
-
- def release(self):
- pass
-
-
class QueuePool(Pool):
"""A :class:`.Pool` that imposes a limit on the number of open connections.
@@ -775,30 +897,27 @@ class QueuePool(Pool):
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
- self._overflow_lock = threading.Lock() if self._max_overflow > -1 \
- else DummyLock()
+ self._overflow_lock = threading.Lock()
def _do_return_conn(self, conn):
try:
self._pool.put(conn, False)
except sqla_queue.Full:
- conn.close()
- self._overflow_lock.acquire()
try:
- self._overflow -= 1
+ conn.close()
finally:
- self._overflow_lock.release()
+ self._dec_overflow()
def _do_get(self):
+ use_overflow = self._max_overflow > -1
+
try:
- wait = self._max_overflow > -1 and \
- self._overflow >= self._max_overflow
+ wait = use_overflow and self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
except sqla_queue.SAAbort as aborted:
return aborted.context._do_get()
except sqla_queue.Empty:
- if self._max_overflow > -1 and \
- self._overflow >= self._max_overflow:
+ if use_overflow and self._overflow >= self._max_overflow:
if not wait:
return self._do_get()
else:
@@ -807,17 +926,33 @@ class QueuePool(Pool):
"connection timed out, timeout %d" %
(self.size(), self.overflow(), self._timeout))
- self._overflow_lock.acquire()
- try:
- if self._max_overflow > -1 and \
- self._overflow >= self._max_overflow:
- return self._do_get()
- else:
- con = self._create_connection()
- self._overflow += 1
- return con
- finally:
- self._overflow_lock.release()
+ if self._inc_overflow():
+ try:
+ return self._create_connection()
+ except:
+ self._dec_overflow()
+ raise
+ else:
+ return self._do_get()
+
+ def _inc_overflow(self):
+ if self._max_overflow == -1:
+ self._overflow += 1
+ return True
+ with self._overflow_lock:
+ if self._overflow < self._max_overflow:
+ self._overflow += 1
+ return True
+ else:
+ return False
+
+ def _dec_overflow(self):
+ if self._max_overflow == -1:
+ self._overflow -= 1
+ return True
+ with self._overflow_lock:
+ self._overflow -= 1
+ return True
def recreate(self):
self.logger.info("Pool recreating")
diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py
index bf95d146b..d0f52e42b 100644
--- a/lib/sqlalchemy/processors.py
+++ b/lib/sqlalchemy/processors.py
@@ -1,5 +1,5 @@
# sqlalchemy/processors.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
#
# This module is part of SQLAlchemy and is released under
@@ -15,6 +15,7 @@ They all share one common characteristic: None is passed through unchanged.
import codecs
import re
import datetime
+from . import util
def str_to_datetime_processor_factory(regexp, type_):
@@ -66,7 +67,22 @@ def py_fallback():
return decoder(value, errors)[0]
return process
- def to_decimal_processor_factory(target_class, scale=10):
+ def to_conditional_unicode_processor_factory(encoding, errors=None):
+ decoder = codecs.getdecoder(encoding)
+
+ def process(value):
+ if value is None:
+ return None
+ elif isinstance(value, util.text_type):
+ return value
+ else:
+ # decoder returns a tuple: (value, len). Simply dropping the
+ # len part is safe: it is done that way in the normal
+ # 'xx'.decode(encoding) code path.
+ return decoder(value, errors)[0]
+ return process
+
+ def to_decimal_processor_factory(target_class, scale):
fstring = "%%.%df" % scale
def process(value):
@@ -113,13 +129,18 @@ try:
str_to_date
def to_unicode_processor_factory(encoding, errors=None):
- # this is cumbersome but it would be even more so on the C side
if errors is not None:
return UnicodeResultProcessor(encoding, errors).process
else:
return UnicodeResultProcessor(encoding).process
- def to_decimal_processor_factory(target_class, scale=10):
+ def to_conditional_unicode_processor_factory(encoding, errors=None):
+ if errors is not None:
+ return UnicodeResultProcessor(encoding, errors).conditional_process
+ else:
+ return UnicodeResultProcessor(encoding).conditional_process
+
+ def to_decimal_processor_factory(target_class, scale):
# Note that the scale argument is not taken into account for integer
# values in the C implementation while it is in the Python one.
# For example, the Python implementation might return
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index ebcc9a7ed..9e647e595 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -1,3638 +1,58 @@
-# sqlalchemy/schema.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# schema.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""The schema module provides the building blocks for database metadata.
-
-Each element within this module describes a database entity which can be
-created and dropped, or is otherwise part of such an entity. Examples include
-tables, columns, sequences, and indexes.
-
-All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
-defined in this module they are intended to be agnostic of any vendor-specific
-constructs.
-
-A collection of entities are grouped into a unit called
-:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
-schema elements, and can also be associated with an actual database connection
-such that operations involving the contained elements can contact the database
-as needed.
-
-Two of the elements here also build upon their "syntactic" counterparts, which
-are defined in :class:`~sqlalchemy.sql.expression.`, specifically
-:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
-Since these objects are part of the SQL expression language, they are usable
-as components in SQL expressions.
+"""Compatiblity namespace for sqlalchemy.sql.schema and related.
"""
-import re
-import inspect
-from . import exc, util, dialects, event, events, inspection
-from .sql import expression, visitors
-import collections
-
-ddl = util.importlater("sqlalchemy.engine", "ddl")
-sqlutil = util.importlater("sqlalchemy.sql", "util")
-url = util.importlater("sqlalchemy.engine", "url")
-sqltypes = util.importlater("sqlalchemy", "types")
-
-__all__ = ['SchemaItem', 'Table', 'Column', 'ForeignKey', 'Sequence', 'Index',
- 'ForeignKeyConstraint', 'PrimaryKeyConstraint', 'CheckConstraint',
- 'UniqueConstraint', 'DefaultGenerator', 'Constraint', 'MetaData',
- 'ThreadLocalMetaData', 'SchemaVisitor', 'PassiveDefault',
- 'DefaultClause', 'FetchedValue', 'ColumnDefault', 'DDL',
- 'CreateTable', 'DropTable', 'CreateSequence', 'DropSequence',
- 'AddConstraint', 'DropConstraint',
- ]
-__all__.sort()
-
-RETAIN_SCHEMA = util.symbol('retain_schema')
-
-
-class SchemaItem(events.SchemaEventTarget, visitors.Visitable):
- """Base class for items that define a database schema."""
-
- __visit_name__ = 'schema_item'
- quote = None
-
- def _init_items(self, *args):
- """Initialize the list of child items for this SchemaItem."""
-
- for item in args:
- if item is not None:
- item._set_parent_with_dispatch(self)
-
- def get_children(self, **kwargs):
- """used to allow SchemaVisitor access"""
- return []
-
- def __repr__(self):
- return util.generic_repr(self)
-
- @util.memoized_property
- def info(self):
- """Info dictionary associated with the object, allowing user-defined
- data to be associated with this :class:`.SchemaItem`.
-
- The dictionary is automatically generated when first accessed.
- It can also be specified in the constructor of some objects,
- such as :class:`.Table` and :class:`.Column`.
-
- """
- return {}
-
-
-def _get_table_key(name, schema):
- if schema is None:
- return name
- else:
- return schema + "." + name
-
-
-def _validate_dialect_kwargs(kwargs, name):
- # validate remaining kwargs that they all specify DB prefixes
-
- for k in kwargs:
- m = re.match('^(.+?)_.*', k)
- if m is None:
- raise TypeError("Additional arguments should be "
- "named <dialectname>_<argument>, got '%s'" % k)
-
-
-inspection._self_inspects(SchemaItem)
-
-
-class Table(SchemaItem, expression.TableClause):
- """Represent a table in a database.
-
- e.g.::
-
- mytable = Table("mytable", metadata,
- Column('mytable_id', Integer, primary_key=True),
- Column('value', String(50))
- )
-
- The :class:`.Table` object constructs a unique instance of itself based
- on its name and optional schema name within the given
- :class:`.MetaData` object. Calling the :class:`.Table`
- constructor with the same name and same :class:`.MetaData` argument
- a second time will return the *same* :class:`.Table` object - in this way
- the :class:`.Table` constructor acts as a registry function.
-
- See also:
-
- :ref:`metadata_describing` - Introduction to database metadata
-
- Constructor arguments are as follows:
-
- :param name: The name of this table as represented in the database.
-
- This property, along with the *schema*, indicates the *singleton
- identity* of this table in relation to its parent :class:`.MetaData`.
- Additional calls to :class:`.Table` with the same name, metadata,
- and schema name will return the same :class:`.Table` object.
-
- Names which contain no upper case characters
- will be treated as case insensitive names, and will not be quoted
- unless they are a reserved word. Names with any number of upper
- case characters will be quoted and sent exactly. Note that this
- behavior applies even for databases which standardize upper
- case names as case insensitive such as Oracle.
-
- :param metadata: a :class:`.MetaData` object which will contain this
- table. The metadata is used as a point of association of this table
- with other tables which are referenced via foreign key. It also
- may be used to associate this table with a particular
- :class:`.Connectable`.
-
- :param \*args: Additional positional arguments are used primarily
- to add the list of :class:`.Column` objects contained within this
- table. Similar to the style of a CREATE TABLE statement, other
- :class:`.SchemaItem` constructs may be added here, including
- :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`.
-
- :param autoload: Defaults to False: the Columns for this table should
- be reflected from the database. Usually there will be no Column
- objects in the constructor if this property is set.
-
- :param autoload_replace: If ``True``, when using ``autoload=True``
- and ``extend_existing=True``,
- replace ``Column`` objects already present in the ``Table`` that's
- in the ``MetaData`` registry with
- what's reflected. Otherwise, all existing columns will be
- excluded from the reflection process. Note that this does
- not impact ``Column`` objects specified in the same call to ``Table``
- which includes ``autoload``, those always take precedence.
- Defaults to ``True``.
-
- .. versionadded:: 0.7.5
-
- :param autoload_with: If autoload==True, this is an optional Engine
- or Connection instance to be used for the table reflection. If
- ``None``, the underlying MetaData's bound connectable will be used.
-
- :param extend_existing: When ``True``, indicates that if this
- :class:`.Table` is already present in the given :class:`.MetaData`,
- apply further arguments within the constructor to the existing
- :class:`.Table`.
-
- If ``extend_existing`` or ``keep_existing`` are not set, an error is
- raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
-
- .. versionchanged:: 0.7.4
- ``extend_existing`` will work in conjunction
- with ``autoload=True`` to run a new reflection operation against
- the database; new :class:`.Column` objects will be produced
- from database metadata to replace those existing with the same
- name, and additional :class:`.Column` objects not present
- in the :class:`.Table` will be added.
-
- As is always the case with ``autoload=True``, :class:`.Column`
- objects can be specified in the same :class:`.Table` constructor,
- which will take precedence. I.e.::
-
- Table("mytable", metadata,
- Column('y', Integer),
- extend_existing=True,
- autoload=True,
- autoload_with=engine
- )
-
- The above will overwrite all columns within ``mytable`` which
- are present in the database, except for ``y`` which will be used as is
- from the above definition. If the ``autoload_replace`` flag
- is set to False, no existing columns will be replaced.
-
- :param implicit_returning: True by default - indicates that
- RETURNING can be used by default to fetch newly inserted primary key
- values, for backends which support this. Note that
- create_engine() also provides an implicit_returning flag.
-
- :param include_columns: A list of strings indicating a subset of
- columns to be loaded via the ``autoload`` operation; table columns who
- aren't present in this list will not be represented on the resulting
- ``Table`` object. Defaults to ``None`` which indicates all columns
- should be reflected.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.SchemaItem.info` attribute of this object.
-
- :param keep_existing: When ``True``, indicates that if this Table
- is already present in the given :class:`.MetaData`, ignore
- further arguments within the constructor to the existing
- :class:`.Table`, and return the :class:`.Table` object as
- originally created. This is to allow a function that wishes
- to define a new :class:`.Table` on first call, but on
- subsequent calls will return the same :class:`.Table`,
- without any of the declarations (particularly constraints)
- being applied a second time. Also see extend_existing.
-
- If extend_existing or keep_existing are not set, an error is
- raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
-
- :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
- which will be passed to :func:`.event.listen` upon construction.
- This alternate hook to :func:`.event.listen` allows the establishment
- of a listener function specific to this :class:`.Table` before
- the "autoload" process begins. Particularly useful for
- the :meth:`.DDLEvents.column_reflect` event::
-
- def listen_for_reflect(table, column_info):
- "handle the column reflection event"
- # ...
-
- t = Table(
- 'sometable',
- metadata,
- autoload=True,
- listeners=[
- ('column_reflect', listen_for_reflect)
- ])
-
- :param mustexist: When ``True``, indicates that this Table must already
- be present in the given :class:`.MetaData` collection, else
- an exception is raised.
-
- :param prefixes:
- A list of strings to insert after CREATE in the CREATE TABLE
- statement. They will be separated by spaces.
-
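- For example, a hypothetical sketch which would render
- ``CREATE GLOBAL TEMPORARY TABLE tmp_data ...`` (assuming
- ``metadata`` is already defined)::
-
- tmp = Table('tmp_data', metadata,
- Column('id', Integer),
- prefixes=['GLOBAL', 'TEMPORARY'])
-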
- :param quote: Force quoting of this table's name on or off, corresponding
- to ``True`` or ``False``. When left at its default of ``None``,
- the table identifier will be quoted according to whether the name is
- case sensitive (identifiers with at least one upper case character are
- treated as case sensitive), or if it's a reserved word. This flag
- is only needed to force quoting of a reserved word which is not known
- by the SQLAlchemy dialect.
-
- :param quote_schema: same as 'quote' but applies to the schema identifier.
-
- :param schema: The *schema name* for this table, which is required if
- the table resides in a schema other than the default selected schema
- for the engine's database connection. Defaults to ``None``.
-
- :param useexisting: Deprecated. Use extend_existing.
-
- """
-
- __visit_name__ = 'table'
-
- def __new__(cls, *args, **kw):
- if not args:
- # python3k pickle seems to call this
- return object.__new__(cls)
-
- try:
- name, metadata, args = args[0], args[1], args[2:]
- except IndexError:
- raise TypeError("Table() takes at least two arguments")
-
- schema = kw.get('schema', None)
- if schema is None:
- schema = metadata.schema
- keep_existing = kw.pop('keep_existing', False)
- extend_existing = kw.pop('extend_existing', False)
- if 'useexisting' in kw:
- msg = "useexisting is deprecated. Use extend_existing."
- util.warn_deprecated(msg)
- if extend_existing:
- msg = "useexisting is synonymous with extend_existing."
- raise exc.ArgumentError(msg)
- extend_existing = kw.pop('useexisting', False)
-
- if keep_existing and extend_existing:
- msg = "keep_existing and extend_existing are mutually exclusive."
- raise exc.ArgumentError(msg)
-
- mustexist = kw.pop('mustexist', False)
- key = _get_table_key(name, schema)
- if key in metadata.tables:
- if not keep_existing and not extend_existing and bool(args):
- raise exc.InvalidRequestError(
- "Table '%s' is already defined for this MetaData "
- "instance. Specify 'extend_existing=True' "
- "to redefine "
- "options and columns on an "
- "existing Table object." % key)
- table = metadata.tables[key]
- if extend_existing:
- table._init_existing(*args, **kw)
- return table
- else:
- if mustexist:
- raise exc.InvalidRequestError(
- "Table '%s' not defined" % (key))
- table = object.__new__(cls)
- table.dispatch.before_parent_attach(table, metadata)
- metadata._add_table(name, schema, table)
- try:
- table._init(name, metadata, *args, **kw)
- table.dispatch.after_parent_attach(table, metadata)
- return table
- except:
- metadata._remove_table(name, schema)
- raise
-
- def __init__(self, *args, **kw):
- """Constructor for :class:`~.schema.Table`.
-
- This method is a no-op. See the top-level
- documentation for :class:`~.schema.Table`
- for constructor arguments.
-
- """
- # __init__ is overridden to prevent __new__ from
- # calling the superclass constructor.
-
- def _init(self, name, metadata, *args, **kwargs):
- super(Table, self).__init__(name)
- self.metadata = metadata
- self.schema = kwargs.pop('schema', None)
- if self.schema is None:
- self.schema = metadata.schema
- self.quote_schema = kwargs.pop(
- 'quote_schema', metadata.quote_schema)
- else:
- self.quote_schema = kwargs.pop('quote_schema', None)
-
- self.indexes = set()
- self.constraints = set()
- self._columns = expression.ColumnCollection()
- PrimaryKeyConstraint()._set_parent_with_dispatch(self)
- self.foreign_keys = set()
- self._extra_dependencies = set()
- self.kwargs = {}
- if self.schema is not None:
- self.fullname = "%s.%s" % (self.schema, self.name)
- else:
- self.fullname = self.name
-
- autoload = kwargs.pop('autoload', False)
- autoload_with = kwargs.pop('autoload_with', None)
- # this argument is only used with _init_existing()
- kwargs.pop('autoload_replace', True)
- include_columns = kwargs.pop('include_columns', None)
-
- self.implicit_returning = kwargs.pop('implicit_returning', True)
- self.quote = kwargs.pop('quote', None)
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
- if 'listeners' in kwargs:
- listeners = kwargs.pop('listeners')
- for evt, fn in listeners:
- event.listen(self, evt, fn)
-
- self._prefixes = kwargs.pop('prefixes', [])
-
- self._extra_kwargs(**kwargs)
-
- # load column definitions from the database if 'autoload' is defined
- # we do it after the table is in the singleton dictionary to support
- # circular foreign keys
- if autoload:
- self._autoload(metadata, autoload_with, include_columns)
-
- # initialize all the column, etc. objects. done after reflection to
- # allow user-overrides
- self._init_items(*args)
-
- def _autoload(self, metadata, autoload_with, include_columns,
- exclude_columns=()):
- if self.primary_key.columns:
- PrimaryKeyConstraint(*[
- c for c in self.primary_key.columns
- if c.key in exclude_columns
- ])._set_parent_with_dispatch(self)
-
- if autoload_with:
- autoload_with.run_callable(
- autoload_with.dialect.reflecttable,
- self, include_columns, exclude_columns
- )
- else:
- bind = _bind_or_error(metadata,
- msg="No engine is bound to this Table's MetaData. "
- "Pass an engine to the Table via "
- "autoload_with=<someengine>, "
- "or associate the MetaData with an engine via "
- "metadata.bind=<someengine>")
- bind.run_callable(
- bind.dialect.reflecttable,
- self, include_columns, exclude_columns
- )
-
- @property
- def _sorted_constraints(self):
- """Return the set of constraints as a list, sorted by creation
- order.
-
- """
- return sorted(self.constraints, key=lambda c: c._creation_order)
-
- def _init_existing(self, *args, **kwargs):
- autoload = kwargs.pop('autoload', False)
- autoload_with = kwargs.pop('autoload_with', None)
- autoload_replace = kwargs.pop('autoload_replace', True)
- schema = kwargs.pop('schema', None)
- if schema and schema != self.schema:
- raise exc.ArgumentError(
- "Can't change schema of existing table from '%s' to '%s'" %
- (self.schema, schema))
-
- include_columns = kwargs.pop('include_columns', None)
-
- if include_columns is not None:
- for c in self.c:
- if c.name not in include_columns:
- self._columns.remove(c)
-
- for key in ('quote', 'quote_schema'):
- if key in kwargs:
- setattr(self, key, kwargs.pop(key))
-
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
-
- if autoload:
- if not autoload_replace:
- exclude_columns = [c.name for c in self.c]
- else:
- exclude_columns = ()
- self._autoload(
- self.metadata, autoload_with, include_columns, exclude_columns)
-
- self._extra_kwargs(**kwargs)
- self._init_items(*args)
-
- def _extra_kwargs(self, **kwargs):
- # validate remaining kwargs that they all specify DB prefixes
- _validate_dialect_kwargs(kwargs, "Table")
- self.kwargs.update(kwargs)
-
- def _init_collections(self):
- pass
-
- @util.memoized_property
- def _autoincrement_column(self):
- for col in self.primary_key:
- if col.autoincrement and \
- col.type._type_affinity is not None and \
- issubclass(col.type._type_affinity, sqltypes.Integer) and \
- (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \
- isinstance(col.default, (type(None), Sequence)) and \
- (col.server_default is None or col.server_default.reflected):
- return col
-
- @property
- def key(self):
- return _get_table_key(self.name, self.schema)
-
- def __repr__(self):
- return "Table(%s)" % ', '.join(
- [repr(self.name)] + [repr(self.metadata)] +
- [repr(x) for x in self.columns] +
- ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']])
-
- def __str__(self):
- return _get_table_key(self.description, self.schema)
-
- @property
- def bind(self):
- """Return the connectable associated with this Table."""
-
- return self.metadata and self.metadata.bind or None
-
- def add_is_dependent_on(self, table):
- """Add a 'dependency' for this Table.
-
- This is another Table object which must be created
- first before this one can, or dropped after this one.
-
- Usually, dependencies between tables are determined via
- ForeignKey objects. However, for other situations that
- create dependencies outside of foreign keys (rules, inheriting),
- this method can manually establish such a link.
-
- """
- self._extra_dependencies.add(table)
-
- def append_column(self, column):
- """Append a :class:`~.schema.Column` to this :class:`~.schema.Table`.
-
- The "key" of the newly added :class:`~.schema.Column`, i.e. the
- value of its ``.key`` attribute, will then be available
- in the ``.c`` collection of this :class:`~.schema.Table`, and the
- column definition will be included in any CREATE TABLE, SELECT,
- UPDATE, etc. statements generated from this :class:`~.schema.Table`
- construct.
-
- Note that this does **not** change the definition of the table
- as it exists within any underlying database, assuming that
- table has already been created in the database. Relational
- databases support the addition of columns to existing tables
- using the SQL ALTER command, which would need to be
- emitted for an already-existing table that doesn't contain
- the newly added column.
-
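- E.g., a minimal sketch, assuming ``mytable`` is an existing
- :class:`~.schema.Table`::
-
- mytable.append_column(Column('y', Integer))
-
- The new column is then accessible as ``mytable.c.y``.
-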
- """
-
- column._set_parent_with_dispatch(self)
-
- def append_constraint(self, constraint):
- """Append a :class:`~.schema.Constraint` to this
- :class:`~.schema.Table`.
-
- This has the effect of the constraint being included in any
- future CREATE TABLE statement, assuming specific DDL creation
- events have not been associated with the given
- :class:`~.schema.Constraint` object.
-
- Note that this does **not** produce the constraint within the
- relational database automatically, for a table that already exists
- in the database. To add a constraint to an
- existing relational database table, the SQL ALTER command must
- be used. SQLAlchemy also provides the
- :class:`.AddConstraint` construct which can produce this SQL when
- invoked as an executable clause.
-
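- E.g., a minimal sketch, assuming ``mytable`` is an existing
- :class:`~.schema.Table` with a ``name`` column::
-
- mytable.append_constraint(UniqueConstraint('name'))
-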
- """
-
- constraint._set_parent_with_dispatch(self)
-
- def append_ddl_listener(self, event_name, listener):
- """Append a DDL event listener to this ``Table``.
-
- .. deprecated:: 0.7
- See :class:`.DDLEvents`.
-
- """
-
- def adapt_listener(target, connection, **kw):
- listener(event_name, target, connection)
-
- event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
-
- def _set_parent(self, metadata):
- metadata._add_table(self.name, self.schema, self)
- self.metadata = metadata
-
- def get_children(self, column_collections=True,
- schema_visitor=False, **kw):
- if not schema_visitor:
- return expression.TableClause.get_children(
- self, column_collections=column_collections, **kw)
- else:
- if column_collections:
- return list(self.columns)
- else:
- return []
-
- def exists(self, bind=None):
- """Return True if this table exists."""
-
- if bind is None:
- bind = _bind_or_error(self)
-
- return bind.run_callable(bind.dialect.has_table,
- self.name, schema=self.schema)
-
- def create(self, bind=None, checkfirst=False):
- """Issue a ``CREATE`` statement for this
- :class:`.Table`, using the given :class:`.Connectable`
- for connectivity.
-
- See also :meth:`.MetaData.create_all`.
-
- """
-
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=False):
- """Issue a ``DROP`` statement for this
- :class:`.Table`, using the given :class:`.Connectable`
- for connectivity.
-
- See also :meth:`.MetaData.drop_all`.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst)
-
- def tometadata(self, metadata, schema=RETAIN_SCHEMA):
- """Return a copy of this :class:`.Table` associated with a different
- :class:`.MetaData`.
-
- E.g.::
-
- some_engine = create_engine("sqlite:///some.db")
-
- # create two metadata
- meta1 = MetaData()
- meta2 = MetaData()
-
- # load 'users' from the sqlite engine
- users_table = Table('users', meta1, autoload=True,
- autoload_with=some_engine)
-
- # create the same Table object for the plain metadata
- users_table_2 = users_table.tometadata(meta2)
-
- :param metadata: Target :class:`.MetaData` object.
- :param schema: Optional string name of a target schema, or
- ``None`` for no schema. The :class:`.Table` object will be
- given this schema name upon copy. Defaults to the special
- symbol :attr:`.RETAIN_SCHEMA` which indicates no change should be
- made to the schema name of the resulting :class:`.Table`.
-
- """
-
- if schema is RETAIN_SCHEMA:
- schema = self.schema
- elif schema is None:
- schema = metadata.schema
- key = _get_table_key(self.name, schema)
- if key in metadata.tables:
- util.warn("Table '%s' already exists within the given "
- "MetaData - not copying." % self.description)
- return metadata.tables[key]
-
- args = []
- for c in self.columns:
- args.append(c.copy(schema=schema))
- table = Table(
- self.name, metadata, schema=schema,
- *args, **self.kwargs
- )
- for c in self.constraints:
- table.append_constraint(c.copy(schema=schema, target_table=table))
-
- for index in self.indexes:
- # skip indexes that would be generated
- # by the 'index' flag on Column
- if len(index.columns) == 1 and \
- list(index.columns)[0].index:
- continue
- Index(index.name,
- unique=index.unique,
- *[table.c[col] for col in index.columns.keys()],
- **index.kwargs)
- table.dispatch._update(self.dispatch)
- return table
-
-
-class Column(SchemaItem, expression.ColumnClause):
- """Represents a column in a database table."""
-
- __visit_name__ = 'column'
-
- def __init__(self, *args, **kwargs):
- """
- Construct a new ``Column`` object.
-
- :param name: The name of this column as represented in the database.
- This argument may be the first positional argument, or specified
- via keyword.
-
- Names which contain no upper case characters
- will be treated as case insensitive names, and will not be quoted
- unless they are a reserved word. Names with any number of upper
- case characters will be quoted and sent exactly. Note that this
- behavior applies even for databases which standardize upper
- case names as case insensitive such as Oracle.
-
- The name field may be omitted at construction time and applied
- later, at any time before the Column is associated with a
- :class:`.Table`. This is to support convenient
- usage within the :mod:`~sqlalchemy.ext.declarative` extension.
-
- :param type\_: The column's type, indicated using an instance which
- subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments
- are required for the type, the class of the type can be sent
- as well, e.g.::
-
- # use a type with arguments
- Column('data', String(50))
-
- # use no arguments
- Column('level', Integer)
-
- The ``type`` argument may be the second positional argument
- or specified by keyword.
-
- If the ``type`` is ``None`` or is omitted, it will first default to the special
- type :class:`.NullType`. If and when this :class:`.Column` is
- made to refer to another column using :class:`.ForeignKey`
- and/or :class:`.ForeignKeyConstraint`, the type of the remote-referenced
- column will be copied to this column as well, at the moment that
- the foreign key is resolved against that remote :class:`.Column`
- object.
-
- .. versionchanged:: 0.9.0
- Support for propagation of type to a :class:`.Column` from its
- :class:`.ForeignKey` object has been improved and should be
- more reliable and timely.
-
- :param \*args: Additional positional arguments include various
- :class:`.SchemaItem` derived constructs which will be applied
- as options to the column. These include instances of
- :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`,
- and :class:`.Sequence`. In some cases an equivalent keyword
- argument is available such as ``server_default``, ``default``
- and ``unique``.
-
- :param autoincrement: This flag may be set to ``False`` to
- indicate an integer primary key column that should not be
- considered to be the "autoincrement" column, that is
- the integer primary key column which generates values
- implicitly upon INSERT and whose value is usually returned
- via the DBAPI cursor.lastrowid attribute. It defaults
- to ``True`` to satisfy the common use case of a table
- with a single integer primary key column. If the table
- has a composite primary key consisting of more than one
- integer column, set this flag to ``False`` on all columns
- except the one that should be considered "autoincrement".
-
- The setting *only* has an effect for columns which are:
-
- * Integer derived (e.g. INT, SMALLINT, BIGINT).
-
- * Part of the primary key.
-
- * Not referenced by any foreign keys, unless
- the value is specified as ``'ignore_fk'``.
-
- .. versionadded:: 0.7.4
-
- * Have no server-side or client-side defaults (with the exception
- of Postgresql SERIAL).
-
- The setting has these two effects on columns that meet the
- above criteria:
-
- * DDL issued for the column will include database-specific
- keywords intended to signify this column as an
- "autoincrement" column, such as AUTO INCREMENT on MySQL,
- SERIAL on Postgresql, and IDENTITY on MS-SQL. It does
- *not* issue AUTOINCREMENT for SQLite since this is a
- special SQLite flag that is not required for autoincrementing
- behavior. See the SQLite dialect documentation for
- information on SQLite's AUTOINCREMENT.
-
- * The column will be considered to be available as
- cursor.lastrowid or equivalent, for those dialects which
- "post fetch" newly inserted identifiers after a row has
- been inserted (SQLite, MySQL, MS-SQL). It does not have
- any effect in this regard for databases that use sequences
- to generate primary key identifiers (i.e. Firebird, Postgresql,
- Oracle).
-
- .. versionchanged:: 0.7.4
- ``autoincrement`` accepts a special value ``'ignore_fk'``
- to indicate that autoincrementing status applies regardless of foreign
- key references. This applies to certain composite foreign key
- setups, such as the one demonstrated in the ORM documentation
- at :ref:`post_update`.
-
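- As a sketch of the composite primary key case described above,
- only ``id`` below remains a candidate for "autoincrement"
- (names are illustrative)::
-
- t = Table('t', metadata,
- Column('id', Integer, primary_key=True),
- Column('version', Integer, primary_key=True,
- autoincrement=False))
-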
- :param default: A scalar, Python callable, or
- :class:`.ColumnElement` expression representing the
- *default value* for this column, which will be invoked upon insert
- if this column is otherwise not specified in the VALUES clause of
- the insert. This is a shortcut to using :class:`.ColumnDefault` as
- a positional argument; see that class for full detail on the
- structure of the argument.
-
- Contrast this argument to ``server_default`` which creates a
- default generator on the database side.
-
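- For example, a scalar and a callable default, as an illustrative
- sketch (assuming the ``datetime`` module is imported)::
-
- Column('status', String(20), default='new')
- Column('created_at', DateTime, default=datetime.datetime.utcnow)
-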
- :param doc: optional String that can be used by the ORM or similar
- to document attributes. This attribute does not render SQL
- comments (a future attribute 'comment' will achieve that).
-
- :param key: An optional string identifier which will identify this
- ``Column`` object on the :class:`.Table`. When a key is provided,
- this is the only identifier referencing the ``Column`` within the
- application, including ORM attribute mapping; the ``name`` field
- is used only when rendering SQL.
-
- :param index: When ``True``, indicates that the column is indexed.
- This is a shortcut for using a :class:`.Index` construct on the
- table. To specify indexes with explicit names or indexes that
- contain multiple columns, use the :class:`.Index` construct
- instead.
-
- :param info: Optional data dictionary which will be populated into the
- :attr:`.SchemaItem.info` attribute of this object.
-
- :param nullable: If set to the default of ``True``, indicates the
- column will be rendered as allowing NULL, else it's rendered as
- NOT NULL. This parameter is only used when issuing CREATE TABLE
- statements.
-
- :param onupdate: A scalar, Python callable, or
- :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
- default value to be applied to the column within UPDATE
- statements, which will be invoked upon update if this column is not
- present in the SET clause of the update. This is a shortcut to
- using :class:`.ColumnDefault` as a positional argument with
- ``for_update=True``.
-
- :param primary_key: If ``True``, marks this column as a primary key
- column. Multiple columns can have this flag set to specify
- composite primary keys. As an alternative, the primary key of a
- :class:`.Table` can be specified via an explicit
- :class:`.PrimaryKeyConstraint` object.
-
- :param server_default: A :class:`.FetchedValue` instance, str, Unicode
- or :func:`~sqlalchemy.sql.expression.text` construct representing
- the DDL DEFAULT value for the column.
-
- String types will be emitted as-is, surrounded by single quotes::
-
- Column('x', Text, server_default="val")
-
- x TEXT DEFAULT 'val'
-
- A :func:`~sqlalchemy.sql.expression.text` expression will be
- rendered as-is, without quotes::
-
- Column('y', DateTime, server_default=text('NOW()'))
-
- y DATETIME DEFAULT NOW()
-
- Strings and text() will be converted into a :class:`.DefaultClause`
- object upon initialization.
-
- Use :class:`.FetchedValue` to indicate that an already-existing
- column will generate a default value on the database side which
- will be available to SQLAlchemy for post-fetch after inserts. This
- construct does not specify any DDL and the implementation is left
- to the database, such as via a trigger.
-
- :param server_onupdate: A :class:`.FetchedValue` instance
- representing a database-side default generation function. This
- indicates to SQLAlchemy that a newly generated value will be
- available after updates. This construct does not specify any DDL
- and the implementation is left to the database, such as via a
- trigger.
-
- :param quote: Force quoting of this column's name on or off,
- corresponding to ``True`` or ``False``. When left at its default
- of ``None``, the column identifier will be quoted according to
- whether the name is case sensitive (identifiers with at least one
- upper case character are treated as case sensitive), or if it's a
- reserved word. This flag is only needed to force quoting of a
- reserved word which is not known by the SQLAlchemy dialect.
-
- :param unique: When ``True``, indicates that this column contains a
- unique constraint, or if ``index`` is ``True`` as well, indicates
- that the :class:`.Index` should be created with the unique flag.
- To specify multiple columns in the constraint/index or to specify
- an explicit name, use the :class:`.UniqueConstraint` or
- :class:`.Index` constructs explicitly.
-
- """
-
- name = kwargs.pop('name', None)
- type_ = kwargs.pop('type_', None)
- args = list(args)
- if args:
- if isinstance(args[0], util.string_types):
- if name is not None:
- raise exc.ArgumentError(
- "May not pass name positionally and as a keyword.")
- name = args.pop(0)
- if args:
- coltype = args[0]
-
- if (isinstance(coltype, sqltypes.TypeEngine) or
- (isinstance(coltype, type) and
- issubclass(coltype, sqltypes.TypeEngine))):
- if type_ is not None:
- raise exc.ArgumentError(
- "May not pass type_ positionally and as a keyword.")
- type_ = args.pop(0)
-
- super(Column, self).__init__(name, None, type_)
- self.key = kwargs.pop('key', name)
- self.primary_key = kwargs.pop('primary_key', False)
- self.nullable = kwargs.pop('nullable', not self.primary_key)
- self.default = kwargs.pop('default', None)
- self.server_default = kwargs.pop('server_default', None)
- self.server_onupdate = kwargs.pop('server_onupdate', None)
- self.index = kwargs.pop('index', None)
- self.unique = kwargs.pop('unique', None)
- self.quote = kwargs.pop('quote', None)
- self.doc = kwargs.pop('doc', None)
- self.onupdate = kwargs.pop('onupdate', None)
- self.autoincrement = kwargs.pop('autoincrement', True)
- self.constraints = set()
- self.foreign_keys = set()
-
- # check if this Column is proxying another column
- if '_proxies' in kwargs:
- self._proxies = kwargs.pop('_proxies')
- # otherwise, add DDL-related events
- elif isinstance(self.type, sqltypes.SchemaType):
- self.type._set_parent_with_dispatch(self)
-
- if self.default is not None:
- if isinstance(self.default, (ColumnDefault, Sequence)):
- args.append(self.default)
- else:
- if getattr(self.type, '_warn_on_bytestring', False):
- if isinstance(self.default, util.binary_type):
- util.warn("Unicode column received non-unicode "
- "default value.")
- args.append(ColumnDefault(self.default))
-
- if self.server_default is not None:
- if isinstance(self.server_default, FetchedValue):
- args.append(self.server_default._as_for_update(False))
- else:
- args.append(DefaultClause(self.server_default))
-
- if self.onupdate is not None:
- if isinstance(self.onupdate, (ColumnDefault, Sequence)):
- args.append(self.onupdate)
- else:
- args.append(ColumnDefault(self.onupdate, for_update=True))
-
- if self.server_onupdate is not None:
- if isinstance(self.server_onupdate, FetchedValue):
- args.append(self.server_onupdate._as_for_update(True))
- else:
- args.append(DefaultClause(self.server_onupdate,
- for_update=True))
- self._init_items(*args)
-
- util.set_creation_order(self)
-
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
-
- if kwargs:
- raise exc.ArgumentError(
- "Unknown arguments passed to Column: " + repr(list(kwargs)))
-
- def __str__(self):
- if self.name is None:
- return "(no name)"
- elif self.table is not None:
- if self.table.named_with_column:
- return (self.table.description + "." + self.description)
- else:
- return self.description
- else:
- return self.description
-
- def references(self, column):
- """Return True if this Column references the given column via foreign
- key."""
-
- for fk in self.foreign_keys:
- if fk.column.proxy_set.intersection(column.proxy_set):
- return True
- else:
- return False
-
- def append_foreign_key(self, fk):
- fk._set_parent_with_dispatch(self)
-
- def __repr__(self):
- kwarg = []
- if self.key != self.name:
- kwarg.append('key')
- if self.primary_key:
- kwarg.append('primary_key')
- if not self.nullable:
- kwarg.append('nullable')
- if self.onupdate:
- kwarg.append('onupdate')
- if self.default:
- kwarg.append('default')
- if self.server_default:
- kwarg.append('server_default')
- return "Column(%s)" % ', '.join(
- [repr(self.name)] + [repr(self.type)] +
- [repr(x) for x in self.foreign_keys if x is not None] +
- [repr(x) for x in self.constraints] +
- [(self.table is not None and "table=<%s>" %
- self.table.description or "table=None")] +
- ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
-
- def _set_parent(self, table):
- if not self.name:
- raise exc.ArgumentError(
- "Column must be constructed with a non-blank name or "
- "assign a non-blank .name before adding to a Table.")
- if self.key is None:
- self.key = self.name
-
- existing = getattr(self, 'table', None)
- if existing is not None and existing is not table:
- raise exc.ArgumentError(
- "Column object already assigned to Table '%s'" %
- existing.description)
-
- if self.key in table._columns:
- col = table._columns.get(self.key)
- if col is not self:
- for fk in col.foreign_keys:
- table.foreign_keys.remove(fk)
- if fk.constraint in table.constraints:
- # this might have been removed
- # already, if it's a composite constraint
- # and more than one col being replaced
- table.constraints.remove(fk.constraint)
-
- table._columns.replace(self)
-
- if self.primary_key:
- table.primary_key._replace(self)
- Table._autoincrement_column._reset(table)
- elif self.key in table.primary_key:
- raise exc.ArgumentError(
- "Trying to redefine primary-key column '%s' as a "
- "non-primary-key column on table '%s'" % (
- self.key, table.fullname))
- self.table = table
-
- if self.index:
- if isinstance(self.index, util.string_types):
- raise exc.ArgumentError(
- "The 'index' keyword argument on Column is boolean only. "
- "To create indexes with a specific name, create an "
- "explicit Index object external to the Table.")
- Index(expression._truncated_label('ix_%s' % self._label),
- self, unique=self.unique)
- elif self.unique:
- if isinstance(self.unique, util.string_types):
- raise exc.ArgumentError(
- "The 'unique' keyword argument on Column is boolean "
- "only. To create unique constraints or indexes with a "
- "specific name, append an explicit UniqueConstraint to "
- "the Table's list of elements, or create an explicit "
- "Index object external to the Table.")
- table.append_constraint(UniqueConstraint(self.key))
-
- fk_key = (table.key, self.key)
- if fk_key in self.table.metadata._fk_memos:
- for fk in self.table.metadata._fk_memos[fk_key]:
- fk._set_remote_table(table)
-
- def _on_table_attach(self, fn):
- if self.table is not None:
- fn(self, self.table)
- event.listen(self, 'after_parent_attach', fn)
-
- def copy(self, **kw):
- """Create a copy of this ``Column``, unitialized.
-
- This is used in ``Table.tometadata``.
-
- """
-
- # Constraint objects plus non-constraint-bound ForeignKey objects
- args = \
- [c.copy(**kw) for c in self.constraints] + \
- [c.copy(**kw) for c in self.foreign_keys if not c.constraint]
-
- type_ = self.type
- if isinstance(type_, sqltypes.SchemaType):
- type_ = type_.copy(**kw)
-
- c = self._constructor(
- name=self.name,
- type_=type_,
- key=self.key,
- primary_key=self.primary_key,
- nullable=self.nullable,
- unique=self.unique,
- quote=self.quote,
- index=self.index,
- autoincrement=self.autoincrement,
- default=self.default,
- server_default=self.server_default,
- onupdate=self.onupdate,
- server_onupdate=self.server_onupdate,
- info=self.info,
- doc=self.doc,
- *args
- )
- c.dispatch._update(self.dispatch)
- return c
-
- def _make_proxy(self, selectable, name=None, key=None,
- name_is_truncatable=False, **kw):
- """Create a *proxy* for this column.
-
- This is a copy of this ``Column`` referenced by a different parent
- (such as an alias or select statement). The column should
- be used only in select scenarios, as its full DDL/default
- information is not transferred.
-
- """
- fk = [ForeignKey(f.column, _constraint=f.constraint)
- for f in self.foreign_keys]
- if name is None and self.name is None:
- raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
- " with this Column object until it's 'name' has "
- "been assigned.")
- try:
- c = self._constructor(
- expression._as_truncated(name or self.name) if \
- name_is_truncatable else (name or self.name),
- self.type,
- key=key if key else name if name else self.key,
- primary_key=self.primary_key,
- nullable=self.nullable,
- quote=self.quote,
- _proxies=[self], *fk)
- except TypeError:
- util.raise_from_cause(
- TypeError(
- "Could not create a copy of this %r object. "
- "Ensure the class includes a _constructor() "
- "attribute or method which accepts the "
- "standard Column constructor arguments, or "
- "references the Column class itself." % self.__class__)
- )
-
- c.table = selectable
- selectable._columns.add(c)
- if selectable._is_clone_of is not None:
- c._is_clone_of = selectable._is_clone_of.columns[c.key]
- if self.primary_key:
- selectable.primary_key.add(c)
- c.dispatch.after_parent_attach(c, selectable)
- return c
-
- def get_children(self, schema_visitor=False, **kwargs):
- if schema_visitor:
- return [x for x in (self.default, self.onupdate)
- if x is not None] + \
- list(self.foreign_keys) + list(self.constraints)
- else:
- return expression.ColumnClause.get_children(self, **kwargs)
-
-
-class ForeignKey(SchemaItem):
- """Defines a dependency between two columns.
-
- ``ForeignKey`` is specified as an argument to a :class:`.Column` object,
- e.g.::
-
- t = Table("remote_table", metadata,
- Column("remote_id", ForeignKey("main_table.id"))
- )
-
- Note that ``ForeignKey`` is only a marker object that defines
- a dependency between two columns. The actual constraint
- is in all cases represented by the :class:`.ForeignKeyConstraint`
- object. This object will be generated automatically when
- a ``ForeignKey`` is associated with a :class:`.Column` which
- in turn is associated with a :class:`.Table`. Conversely,
- when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`,
- ``ForeignKey`` markers are automatically generated to be
- present on each associated :class:`.Column`, which are also
- associated with the constraint object.
-
- Note that you cannot define a "composite" foreign key constraint,
- that is a constraint between a grouping of multiple parent/child
- columns, using ``ForeignKey`` objects. To define this grouping,
- the :class:`.ForeignKeyConstraint` object must be used, and applied
- to the :class:`.Table`. The associated ``ForeignKey`` objects
- are created automatically.
-
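- A minimal sketch of such a composite constraint, using illustrative
- table and column names (assuming ``metadata`` is already defined)::
-
- child = Table("child", metadata,
- Column("a", Integer),
- Column("b", Integer),
- ForeignKeyConstraint(["a", "b"], ["parent.a", "parent.b"])
- )
-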
- The ``ForeignKey`` objects associated with an individual
- :class:`.Column` object are available in the `foreign_keys` collection
- of that column.
-
- Further examples of foreign key configuration are in
- :ref:`metadata_foreignkeys`.
-
- """
-
- __visit_name__ = 'foreign_key'
-
- def __init__(self, column, _constraint=None, use_alter=False, name=None,
- onupdate=None, ondelete=None, deferrable=None,
- schema=None,
- initially=None, link_to_name=False, match=None):
- """
- Construct a column-level FOREIGN KEY.
-
- The :class:`.ForeignKey` object when constructed generates a
- :class:`.ForeignKeyConstraint` which is associated with the parent
- :class:`.Table` object's collection of constraints.
-
- :param column: A single target column for the key relationship. A
- :class:`.Column` object or a column name as a string:
- ``tablename.columnkey`` or ``schema.tablename.columnkey``.
- ``columnkey`` is the ``key`` which has been assigned to the column
- (defaults to the column name itself), unless ``link_to_name`` is
- ``True`` in which case the rendered name of the column is used.
-
- .. versionadded:: 0.7.4
- Note that if the schema name is not included, and the
- underlying :class:`.MetaData` has a "schema", that value will
- be used.
-
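- For example, the following specifications reference the same
- hypothetical column ``user.id``; the second form assumes the
- ``user`` :class:`.Table` object is available as ``user_table``::
-
- ForeignKey("user.id")
- ForeignKey(user_table.c.id)
-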
- :param name: Optional string. An in-database name for the key if
- `constraint` is not provided.
-
- :param onupdate: Optional string. If set, emit ON UPDATE <value> when
- issuing DDL for this constraint. Typical values include CASCADE,
- SET NULL and RESTRICT.
-
- :param ondelete: Optional string. If set, emit ON DELETE <value> when
- issuing DDL for this constraint. Typical values include CASCADE,
- SET NULL and RESTRICT.
-
- :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
- DEFERRABLE when issuing DDL for this constraint.
-
- :param initially: Optional string. If set, emit INITIALLY <value> when
- issuing DDL for this constraint.
-
- :param link_to_name: if True, the string name given in ``column`` is
- the rendered name of the referenced column, not its locally
- assigned ``key``.
-
- :param use_alter: passed to the underlying
- :class:`.ForeignKeyConstraint` to indicate the constraint should be
- generated/dropped externally from the CREATE TABLE/ DROP TABLE
- statement. See that class's constructor for details.
-
- :param match: Optional string. If set, emit MATCH <value> when issuing
- DDL for this constraint. Typical values include SIMPLE, PARTIAL
- and FULL.
-
- """
-
- self._colspec = column
-
- # the linked ForeignKeyConstraint.
- # ForeignKey will create this when parent Column
- # is attached to a Table, *or* ForeignKeyConstraint
- # object passes itself in when creating ForeignKey
- # markers.
- self.constraint = _constraint
- self.parent = None
- self.use_alter = use_alter
- self.name = name
- self.onupdate = onupdate
- self.ondelete = ondelete
- self.deferrable = deferrable
- self.initially = initially
- self.link_to_name = link_to_name
- self.match = match
-
- def __repr__(self):
- return "ForeignKey(%r)" % self._get_colspec()
-
- def copy(self, schema=None):
- """Produce a copy of this :class:`.ForeignKey` object.
-
- The new :class:`.ForeignKey` will not be bound
- to any :class:`.Column`.
-
- This method is usually used by the internal
- copy procedures of :class:`.Column`, :class:`.Table`,
- and :class:`.MetaData`.
-
- :param schema: The returned :class:`.ForeignKey` will
- reference the original table and column name, qualified
- by the given string schema name.
-
- """
-
- fk = ForeignKey(
- self._get_colspec(schema=schema),
- use_alter=self.use_alter,
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- deferrable=self.deferrable,
- initially=self.initially,
- link_to_name=self.link_to_name,
- match=self.match
- )
- fk.dispatch._update(self.dispatch)
- return fk
-
- def _get_colspec(self, schema=None):
- """Return a string based 'column specification' for this
- :class:`.ForeignKey`.
-
- This is usually the equivalent of the string-based "tablename.colname"
- argument first passed to the object's constructor.
-
- """
- if schema:
- return schema + "." + self.column.table.name + \
- "." + self.column.key
- elif isinstance(self._colspec, util.string_types):
- return self._colspec
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
- else:
- _column = self._colspec
-
- return "%s.%s" % (_column.table.fullname, _column.key)
-
- target_fullname = property(_get_colspec)
-
- def references(self, table):
- """Return True if the given :class:`.Table` is referenced by this
- :class:`.ForeignKey`."""
-
- return table.corresponding_column(self.column) is not None
-
- def get_referent(self, table):
- """Return the :class:`.Column` in the given :class:`.Table`
- referenced by this :class:`.ForeignKey`.
-
- Returns None if this :class:`.ForeignKey` does not reference the given
- :class:`.Table`.
-
- """
-
- return table.corresponding_column(self.column)
-
- @util.memoized_property
- def _column_tokens(self):
- """parse a string-based _colspec into its component parts."""
-
- m = self._colspec.split('.')
- if m is None:
- raise exc.ArgumentError(
- "Invalid foreign key column specification: %s" %
- self._colspec)
- if len(m) == 1:
- tname = m.pop()
- colname = None
- else:
- colname = m.pop()
- tname = m.pop()
-
- # A FK between column 'bar' and table 'foo' can be
- # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
- # 'otherdb.dbo.foo.bar'. Once we have the column name and
- # the table name, treat everything else as the schema
- # name. Some databases (e.g. Sybase) support
- # inter-database foreign keys. See tickets #1341 and --
- # indirectly related -- Ticket #594. This assumes that '.'
- # will never appear *within* any component of the FK.
-
- if len(m) > 0:
- schema = '.'.join(m)
- else:
- schema = None
- return schema, tname, colname
-
- def _table_key(self):
- if isinstance(self._colspec, util.string_types):
- schema, tname, colname = self._column_tokens
- return _get_table_key(tname, schema)
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
- else:
- _column = self._colspec
-
- if _column.table is None:
- return None
- else:
- return _column.table.key
-
- def _resolve_col_tokens(self):
- if self.parent is None:
- raise exc.InvalidRequestError(
- "this ForeignKey object does not yet have a "
- "parent Column associated with it.")
-
- elif self.parent.table is None:
- raise exc.InvalidRequestError(
- "this ForeignKey's parent column is not yet associated "
- "with a Table.")
-
- parenttable = self.parent.table
-
- # assertion, can be commented out.
- # basically Column._make_proxy() sends the actual
- # target Column to the ForeignKey object, so the
- # string resolution here is never called.
- for c in self.parent.base_columns:
- if isinstance(c, Column):
- assert c.table is parenttable
- break
- else:
- assert False
- ######################
-
- schema, tname, colname = self._column_tokens
-
- if schema is None and parenttable.metadata.schema is not None:
- schema = parenttable.metadata.schema
-
- tablekey = _get_table_key(tname, schema)
- return parenttable, tablekey, colname
-
- def _link_to_col_by_colstring(self, parenttable, table, colname):
- if not hasattr(self.constraint, '_referred_table'):
- self.constraint._referred_table = table
- else:
- assert self.constraint._referred_table is table
-
- _column = None
- if colname is None:
- # colname is None in the case that ForeignKey argument
- # was specified as table name only, in which case we
- # match the column name to the same column on the
- # parent.
- key = self.parent
- _column = table.c.get(self.parent.key, None)
- elif self.link_to_name:
- key = colname
- for c in table.c:
- if c.name == colname:
- _column = c
- else:
- key = colname
- _column = table.c.get(colname, None)
-
- if _column is None:
- raise exc.NoReferencedColumnError(
- "Could not initialize target column for ForeignKey '%s' on table '%s': "
- "table '%s' has no column named '%s'" % (
- self._colspec, parenttable.name, table.name, key),
- table.name, key)
-
- self._set_target_column(_column)
-
- def _set_target_column(self, column):
- # propagate TypeEngine to parent if it didn't have one
- if isinstance(self.parent.type, sqltypes.NullType):
- self.parent.type = column.type
-
- # super-edgy case, if other FKs point to our column,
- # they'd get the type propagated out also.
- if isinstance(self.parent.table, Table):
- fk_key = (self.parent.table.key, self.parent.key)
- if fk_key in self.parent.table.metadata._fk_memos:
- for fk in self.parent.table.metadata._fk_memos[fk_key]:
- if isinstance(fk.parent.type, sqltypes.NullType):
- fk.parent.type = column.type
-
- self.column = column
-
- @util.memoized_property
- def column(self):
- """Return the target :class:`.Column` referenced by this
- :class:`.ForeignKey`.
-
- If no target column has been established, an exception
- is raised.
-
- .. versionchanged:: 0.9.0
- Foreign key target column resolution now occurs as soon as both
- the ForeignKey object and the remote Column to which it refers
- are both associated with the same MetaData object.
-
- """
-
- if isinstance(self._colspec, util.string_types):
-
- parenttable, tablekey, colname = self._resolve_col_tokens()
-
- if tablekey not in parenttable.metadata:
- raise exc.NoReferencedTableError(
- "Foreign key associated with column '%s' could not find "
- "table '%s' with which to generate a "
- "foreign key to target column '%s'" %
- (self.parent, tablekey, colname),
- tablekey)
- elif parenttable.key not in parenttable.metadata:
- raise exc.InvalidRequestError(
- "Table %s is no longer associated with its "
- "parent MetaData" % parenttable)
- else:
- raise exc.NoReferencedColumnError(
- "Could not initialize target column for "
- "ForeignKey '%s' on table '%s': "
- "table '%s' has no column named '%s'" % (
- self._colspec, parenttable.name, tablekey, colname),
- tablekey, colname)
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
- return _column
- else:
- _column = self._colspec
- return _column
-
- def _set_parent(self, column):
- if self.parent is not None and self.parent is not column:
- raise exc.InvalidRequestError(
- "This ForeignKey already has a parent !")
- self.parent = column
- self.parent.foreign_keys.add(self)
- self.parent._on_table_attach(self._set_table)
-
- def _set_remote_table(self, table):
- parenttable, tablekey, colname = self._resolve_col_tokens()
- self._link_to_col_by_colstring(parenttable, table, colname)
- self.constraint._validate_dest_table(table)
-
- def _remove_from_metadata(self, metadata):
- parenttable, table_key, colname = self._resolve_col_tokens()
- fk_key = (table_key, colname)
-
- if self in metadata._fk_memos[fk_key]:
- # TODO: no test coverage for self not in memos
- metadata._fk_memos[fk_key].remove(self)
-
- def _set_table(self, column, table):
- # standalone ForeignKey - create ForeignKeyConstraint
- # on the hosting Table when attached to the Table.
- if self.constraint is None and isinstance(table, Table):
- self.constraint = ForeignKeyConstraint(
- [], [], use_alter=self.use_alter, name=self.name,
- onupdate=self.onupdate, ondelete=self.ondelete,
- deferrable=self.deferrable, initially=self.initially,
- match=self.match,
- )
- self.constraint._elements[self.parent] = self
- self.constraint._set_parent_with_dispatch(table)
- table.foreign_keys.add(self)
-
- # set up remote ".column" attribute, or a note to pick it
- # up when the other Table/Column shows up
- if isinstance(self._colspec, util.string_types):
- parenttable, table_key, colname = self._resolve_col_tokens()
- fk_key = (table_key, colname)
- if table_key in parenttable.metadata.tables:
- table = parenttable.metadata.tables[table_key]
- try:
- self._link_to_col_by_colstring(parenttable, table, colname)
- except exc.NoReferencedColumnError:
- # this is OK, we'll try later
- pass
- parenttable.metadata._fk_memos[fk_key].append(self)
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
- self._set_target_column(_column)
- else:
- _column = self._colspec
- self._set_target_column(_column)
-
-
-class _NotAColumnExpr(object):
- def _not_a_column_expr(self):
- raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression." % self.__class__.__name__)
-
- __clause_element__ = self_group = lambda self: self._not_a_column_expr()
- _from_objects = property(lambda self: self._not_a_column_expr())
-
-
-class DefaultGenerator(_NotAColumnExpr, SchemaItem):
- """Base class for column *default* values."""
-
- __visit_name__ = 'default_generator'
-
- is_sequence = False
- is_server_default = False
- column = None
-
- def __init__(self, for_update=False):
- self.for_update = for_update
-
- def _set_parent(self, column):
- self.column = column
- if self.for_update:
- self.column.onupdate = self
- else:
- self.column.default = self
-
- def execute(self, bind=None, **kwargs):
- if bind is None:
- bind = _bind_or_error(self)
- return bind._execute_default(self, **kwargs)
-
- @property
- def bind(self):
- """Return the connectable associated with this default."""
- if getattr(self, 'column', None) is not None:
- return self.column.table.bind
- else:
- return None
-
-
-class ColumnDefault(DefaultGenerator):
- """A plain default value on a column.
-
- This could correspond to a constant, a callable function,
- or a SQL clause.
-
- :class:`.ColumnDefault` is generated automatically
- whenever the ``default``, ``onupdate`` arguments of
- :class:`.Column` are used. A :class:`.ColumnDefault`
- can be passed positionally as well.
-
- For example, the following::
-
- Column('foo', Integer, default=50)
-
- Is equivalent to::
-
- Column('foo', Integer, ColumnDefault(50))
-
-
- """
-
- def __init__(self, arg, **kwargs):
- """"Construct a new :class:`.ColumnDefault`.
-
-
- :param arg: argument representing the default value.
- May be one of the following:
-
- * a plain non-callable Python value, such as a
- string, integer, boolean, or other simple type.
- The default value will be used as is each time.
- * a SQL expression, that is one which derives from
- :class:`.ColumnElement`. The SQL expression will
- be rendered into the INSERT or UPDATE statement,
- or, in the case of a primary key column when
- RETURNING is not used, may be
- pre-executed within a SELECT before the INSERT.
- * A Python callable. The function will be invoked for each
- new row subject to an INSERT or UPDATE.
- The callable must accept exactly
- zero or one positional arguments. The one-argument form
- will receive an instance of the :class:`.ExecutionContext`,
- which provides contextual information as to the current
- :class:`.Connection` in use as well as the current
- statement and parameters.
-
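- As a sketch of the one-argument callable form, the function below
- draws upon the statement's parameters (the function and column
- names here are illustrative)::
-
- def mydefault(context):
- return context.current_parameters['counter'] + 12
-
- Column('counter_plus_twelve', Integer, default=mydefault)
-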
- """
- super(ColumnDefault, self).__init__(**kwargs)
- if isinstance(arg, FetchedValue):
- raise exc.ArgumentError(
- "ColumnDefault may not be a server-side default type.")
- if util.callable(arg):
- arg = self._maybe_wrap_callable(arg)
- self.arg = arg
-
- @util.memoized_property
- def is_callable(self):
- return util.callable(self.arg)
-
- @util.memoized_property
- def is_clause_element(self):
- return isinstance(self.arg, expression.ClauseElement)
-
- @util.memoized_property
- def is_scalar(self):
- return not self.is_callable and \
- not self.is_clause_element and \
- not self.is_sequence
-
- def _maybe_wrap_callable(self, fn):
- """Wrap callables that don't accept a context.
-
- The alternative here is to require that
- a simple callable passed to "default" would need
- to be of the form "default=lambda ctx: datetime.now".
- That is the more "correct" way to go, but the case
- of using a zero-arg callable for "default" is so
- much more prominent than the context-specific one
- I'm having trouble justifying putting that inconvenience
- on everyone.
-
- """
- if inspect.isfunction(fn):
- inspectable = fn
- elif inspect.isclass(fn):
- inspectable = fn.__init__
- elif hasattr(fn, '__call__'):
- inspectable = fn.__call__
- else:
- # probably not inspectable; try anyway.
- inspectable = fn
- try:
- argspec = inspect.getargspec(inspectable)
- except TypeError:
- return lambda ctx: fn()
-
- defaulted = argspec[3] is not None and len(argspec[3]) or 0
- positionals = len(argspec[0]) - defaulted
-
- # Py3K compat - no unbound methods
- if inspect.ismethod(inspectable) or inspect.isclass(fn):
- positionals -= 1
-
- if positionals == 0:
- return lambda ctx: fn()
- elif positionals == 1:
- return fn
- else:
- raise exc.ArgumentError(
- "ColumnDefault Python function takes zero or one "
- "positional arguments")
-
- def _visit_name(self):
- if self.for_update:
- return "column_onupdate"
- else:
- return "column_default"
- __visit_name__ = property(_visit_name)
-
- def __repr__(self):
- return "ColumnDefault(%r)" % self.arg
-
-
-class Sequence(DefaultGenerator):
- """Represents a named database sequence.
-
- The :class:`.Sequence` object represents the name and configurational
- parameters of a database sequence. It also represents
- a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
- or :class:`.Connection`, rendering the appropriate "next value" function
- for the target database and returning a result.
-
- The :class:`.Sequence` is typically associated with a primary key column::
-
- some_table = Table('some_table', metadata,
- Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
- )
-
- When CREATE TABLE is emitted for the above :class:`.Table`, if the
- target platform supports sequences, a CREATE SEQUENCE statement will
- be emitted as well. For platforms that don't support sequences,
- the :class:`.Sequence` construct is ignored.
-
- See also: :class:`.CreateSequence` :class:`.DropSequence`
-
- """
-
- __visit_name__ = 'sequence'
-
- is_sequence = True
-
- def __init__(self, name, start=None, increment=None, schema=None,
- optional=False, quote=None, metadata=None,
- quote_schema=None,
- for_update=False):
- """Construct a :class:`.Sequence` object.
-
- :param name: The name of the sequence.
- :param start: the starting index of the sequence. This value is
- used when the CREATE SEQUENCE command is emitted to the database
- as the value of the "START WITH" clause. If ``None``, the
- clause is omitted, which on most platforms indicates a starting
- value of 1.
- :param increment: the increment value of the sequence. This
- value is used when the CREATE SEQUENCE command is emitted to
- the database as the value of the "INCREMENT BY" clause. If ``None``,
- the clause is omitted, which on most platforms indicates an
- increment of 1.
- :param schema: Optional schema name for the sequence, if located
- in a schema other than the default.
- :param optional: boolean value, when ``True``, indicates that this
- :class:`.Sequence` object only needs to be explicitly generated
- on backends that don't provide another way to generate primary
- key identifiers. Currently, it essentially means, "don't create
- this sequence on the Postgresql backend, where the SERIAL keyword
- creates a sequence for us automatically".
- :param quote: boolean value, when ``True`` or ``False``, explicitly
- forces quoting of the sequence name on or off. When left at its
- default of ``None``, normal quoting rules based on casing and reserved
- words take place.
- :param metadata: optional :class:`.MetaData` object which will be
- associated with this :class:`.Sequence`. A :class:`.Sequence`
- that is associated with a :class:`.MetaData` gains access to the
- ``bind`` of that :class:`.MetaData`, meaning the
- :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will
- make usage of that engine automatically.
-
- .. versionchanged:: 0.7
- Additionally, the appropriate CREATE SEQUENCE/
- DROP SEQUENCE DDL commands will be emitted corresponding to this
- :class:`.Sequence` when :meth:`.MetaData.create_all` and
- :meth:`.MetaData.drop_all` are invoked.
-
- Note that when a :class:`.Sequence` is applied to a :class:`.Column`,
- the :class:`.Sequence` is automatically associated with the
- :class:`.MetaData` object of that column's parent :class:`.Table`,
- when that association is made. The :class:`.Sequence` will then
- be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding
- to when the :class:`.Table` object itself is created or dropped,
- rather than that of the :class:`.MetaData` object overall.
-
- :param for_update: Indicates this :class:`.Sequence`, when associated
- with a :class:`.Column`, should be invoked for UPDATE statements
- on that column's table, rather than for INSERT statements, when
- no value is otherwise present for that column in the statement.
-
- """
- super(Sequence, self).__init__(for_update=for_update)
- self.name = name
- self.start = start
- self.increment = increment
- self.optional = optional
- self.quote = quote
- if metadata is not None and schema is None and metadata.schema:
- self.schema = schema = metadata.schema
- self.quote_schema = metadata.quote_schema
- else:
- self.schema = schema
- self.quote_schema = quote_schema
- self.metadata = metadata
- self._key = _get_table_key(name, schema)
- if metadata:
- self._set_metadata(metadata)
-
- @util.memoized_property
- def is_callable(self):
- return False
-
- @util.memoized_property
- def is_clause_element(self):
- return False
-
- def next_value(self):
- """Return a :class:`.next_value` function element
- which will render the appropriate increment function
- for this :class:`.Sequence` within any SQL expression.
-
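- E.g., a minimal sketch embedding the next value within an
- INSERT statement (assuming ``some_table`` is already defined)::
-
- seq = Sequence('some_seq')
- stmt = some_table.insert().values(id=seq.next_value())
-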
- """
- return expression.func.next_value(self, bind=self.bind)
-
- def _set_parent(self, column):
- super(Sequence, self)._set_parent(column)
- column._on_table_attach(self._set_table)
-
- def _set_table(self, column, table):
- self._set_metadata(table.metadata)
-
- def _set_metadata(self, metadata):
- self.metadata = metadata
- self.metadata._sequences[self._key] = self
-
- @property
- def bind(self):
- if self.metadata:
- return self.metadata.bind
- else:
- return None
-
- def create(self, bind=None, checkfirst=True):
- """Creates this sequence in the database."""
-
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=True):
- """Drops this sequence from the database."""
-
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst)
-
- def _not_a_column_expr(self):
- raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression. Use func.next_value(sequence) "
- "to produce a 'next value' function that's usable "
- "as a column element."
- % self.__class__.__name__)
-
-
-class FetchedValue(_NotAColumnExpr, events.SchemaEventTarget):
- """A marker for a transparent database-side default.
-
- Use :class:`.FetchedValue` when the database is configured
- to provide some automatic default for a column.
-
- E.g.::
-
- Column('foo', Integer, FetchedValue())
-
- Would indicate that some trigger or default generator
- will create a new value for the ``foo`` column during an
- INSERT.
-
- .. seealso::
-
- :ref:`triggered_columns`
-
- """
- is_server_default = True
- reflected = False
- has_argument = False
-
- def __init__(self, for_update=False):
- self.for_update = for_update
-
- def _as_for_update(self, for_update):
- if for_update == self.for_update:
- return self
- else:
- return self._clone(for_update)
-
- def _clone(self, for_update):
- n = self.__class__.__new__(self.__class__)
- n.__dict__.update(self.__dict__)
- n.__dict__.pop('column', None)
- n.for_update = for_update
- return n
-
- def _set_parent(self, column):
- self.column = column
- if self.for_update:
- self.column.server_onupdate = self
- else:
- self.column.server_default = self
-
- def __repr__(self):
- return util.generic_repr(self)
-
-inspection._self_inspects(FetchedValue)
-
-
-class DefaultClause(FetchedValue):
- """A DDL-specified DEFAULT column value.
-
- :class:`.DefaultClause` is a :class:`.FetchedValue`
- that also generates a "DEFAULT" clause when
- "CREATE TABLE" is emitted.
-
- :class:`.DefaultClause` is generated automatically
- whenever the ``server_default``, ``server_onupdate`` arguments of
- :class:`.Column` are used. A :class:`.DefaultClause`
- can be passed positionally as well.
-
- For example, the following::
-
- Column('foo', Integer, server_default="50")
-
- Is equivalent to::
-
- Column('foo', Integer, DefaultClause("50"))
-
- """
-
- has_argument = True
-
- def __init__(self, arg, for_update=False, _reflected=False):
- util.assert_arg_type(arg, (util.string_types[0],
- expression.ClauseElement,
- expression.TextClause), 'arg')
- super(DefaultClause, self).__init__(for_update)
- self.arg = arg
- self.reflected = _reflected
-
- def __repr__(self):
- return "DefaultClause(%r, for_update=%r)" % \
- (self.arg, self.for_update)
-
-
-class PassiveDefault(DefaultClause):
- """A DDL-specified DEFAULT column value.
-
- .. deprecated:: 0.6
- :class:`.PassiveDefault` is deprecated.
- Use :class:`.DefaultClause`.
- """
- @util.deprecated("0.6",
- ":class:`.PassiveDefault` is deprecated. "
- "Use :class:`.DefaultClause`.",
- False)
- def __init__(self, *arg, **kw):
- DefaultClause.__init__(self, *arg, **kw)
-
-
-class Constraint(SchemaItem):
- """A table-level SQL constraint."""
-
- __visit_name__ = 'constraint'
-
- def __init__(self, name=None, deferrable=None, initially=None,
- _create_rule=None,
- **kw):
- """Create a SQL constraint.
-
- :param name:
- Optional, the in-database name of this ``Constraint``.
-
- :param deferrable:
- Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
-
- :param initially:
- Optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- :param _create_rule:
- a callable which is passed the DDLCompiler object during
- compilation. Returns True or False to signal inline generation of
- this Constraint.
-
-            The AddConstraint and DropConstraint DDL constructs provide the
-            more comprehensive "conditional DDL" approach of DDLElement,
-            which is passed a database connection when DDL is being
-            issued. _create_rule
- is instead called during any CREATE TABLE compilation, where there
- may not be any transaction/connection in progress. However, it
- allows conditional compilation of the constraint even for backends
- which do not support addition of constraints through ALTER TABLE,
- which currently includes SQLite.
-
- _create_rule is used by some types to create constraints.
- Currently, its call signature is subject to change at any time.
-
- :param \**kwargs:
- Dialect-specific keyword parameters, see the documentation
- for various dialects and constraints regarding options here.
-
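-        E.g., a sketch using a subclass, since :class:`.Constraint` itself
-        is a base class (constraint and column names are hypothetical)::
-
-            UniqueConstraint('email', name='uq_email',
-                             deferrable=True, initially='DEFERRED')
-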
- """
-
- self.name = name
- self.deferrable = deferrable
- self.initially = initially
- self._create_rule = _create_rule
- util.set_creation_order(self)
- _validate_dialect_kwargs(kw, self.__class__.__name__)
- self.kwargs = kw
-
- @property
- def table(self):
- try:
- if isinstance(self.parent, Table):
- return self.parent
- except AttributeError:
- pass
- raise exc.InvalidRequestError(
- "This constraint is not bound to a table. Did you "
- "mean to call table.append_constraint(constraint) ?")
-
- def _set_parent(self, parent):
- self.parent = parent
- parent.constraints.add(self)
-
- def copy(self, **kw):
- raise NotImplementedError()
-
-
-class ColumnCollectionMixin(object):
- def __init__(self, *columns):
- self.columns = expression.ColumnCollection()
- self._pending_colargs = [_to_schema_column_or_string(c)
- for c in columns]
- if self._pending_colargs and \
- isinstance(self._pending_colargs[0], Column) and \
- isinstance(self._pending_colargs[0].table, Table):
- self._set_parent_with_dispatch(self._pending_colargs[0].table)
-
- def _set_parent(self, table):
- for col in self._pending_colargs:
- if isinstance(col, util.string_types):
- col = table.c[col]
- self.columns.add(col)
-
-
-class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
- """A constraint that proxies a ColumnCollection."""
-
- def __init__(self, *columns, **kw):
- """
- :param \*columns:
- A sequence of column names or Column objects.
-
- :param name:
- Optional, the in-database name of this constraint.
-
- :param deferrable:
- Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
-
- :param initially:
- Optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- """
- ColumnCollectionMixin.__init__(self, *columns)
- Constraint.__init__(self, **kw)
-
- def _set_parent(self, table):
- ColumnCollectionMixin._set_parent(self, table)
- Constraint._set_parent(self, table)
-
- def __contains__(self, x):
- return x in self.columns
-
- def copy(self, **kw):
- c = self.__class__(name=self.name, deferrable=self.deferrable,
- initially=self.initially, *self.columns.keys())
- c.dispatch._update(self.dispatch)
- return c
-
- def contains_column(self, col):
- return self.columns.contains_column(col)
-
- def __iter__(self):
- # inlining of
- # return iter(self.columns)
- # ColumnCollection->OrderedProperties->OrderedDict
- ordered_dict = self.columns._data
- return (ordered_dict[key] for key in ordered_dict._list)
-
- def __len__(self):
- return len(self.columns._data)
-
-
-class CheckConstraint(Constraint):
- """A table- or column-level CHECK constraint.
-
- Can be included in the definition of a Table or Column.
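-
-    E.g., a sketch of both forms (table and column names hypothetical)::
-
-        Table('t', metadata,
-            Column('num', Integer, CheckConstraint('num > 0')),
-            CheckConstraint('num < 100', name='num_upper_bound')
-        )
-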
- """
-
- def __init__(self, sqltext, name=None, deferrable=None,
- initially=None, table=None, _create_rule=None,
- _autoattach=True):
- """Construct a CHECK constraint.
-
- :param sqltext:
- A string containing the constraint definition, which will be used
- verbatim, or a SQL expression construct.
-
- :param name:
- Optional, the in-database name of the constraint.
-
- :param deferrable:
- Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
-
- :param initially:
- Optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- """
-
- super(CheckConstraint, self).\
- __init__(name, deferrable, initially, _create_rule)
- self.sqltext = expression._literal_as_text(sqltext)
- if table is not None:
- self._set_parent_with_dispatch(table)
- elif _autoattach:
- cols = sqlutil.find_columns(self.sqltext)
- tables = set([c.table for c in cols
- if isinstance(c.table, Table)])
- if len(tables) == 1:
- self._set_parent_with_dispatch(
- tables.pop())
-
- def __visit_name__(self):
- if isinstance(self.parent, Table):
- return "check_constraint"
- else:
- return "column_check_constraint"
- __visit_name__ = property(__visit_name__)
-
- def copy(self, target_table=None, **kw):
- if target_table is not None:
- def replace(col):
- if self.table.c.contains_column(col):
- return target_table.c[col.key]
- else:
- return None
- sqltext = visitors.replacement_traverse(self.sqltext, {}, replace)
- else:
- sqltext = self.sqltext
- c = CheckConstraint(sqltext,
- name=self.name,
- initially=self.initially,
- deferrable=self.deferrable,
- _create_rule=self._create_rule,
- table=target_table,
- _autoattach=False)
- c.dispatch._update(self.dispatch)
- return c
-
-
-class ForeignKeyConstraint(Constraint):
- """A table-level FOREIGN KEY constraint.
-
- Defines a single column or composite FOREIGN KEY ... REFERENCES
- constraint. For a no-frills, single column foreign key, adding a
- :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand
- equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`.
-
- Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.
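-
-    E.g., a composite constraint sketch (table and column names
-    hypothetical)::
-
-        ForeignKeyConstraint(
-            ['invoice_id', 'ref_num'],
-            ['invoice.id', 'invoice.ref_num']
-        )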
-
- """
- __visit_name__ = 'foreign_key_constraint'
-
- def __init__(self, columns, refcolumns, name=None, onupdate=None,
- ondelete=None, deferrable=None, initially=None, use_alter=False,
- link_to_name=False, match=None, table=None):
- """Construct a composite-capable FOREIGN KEY.
-
- :param columns: A sequence of local column names. The named columns
- must be defined and present in the parent Table. The names should
- match the ``key`` given to each column (defaults to the name) unless
- ``link_to_name`` is True.
-
- :param refcolumns: A sequence of foreign column names or Column
- objects. The columns must all be located within the same Table.
-
- :param name: Optional, the in-database name of the key.
-
- :param onupdate: Optional string. If set, emit ON UPDATE <value> when
-            issuing DDL for this constraint. Typical values include CASCADE,
-            SET NULL and RESTRICT.
-
- :param ondelete: Optional string. If set, emit ON DELETE <value> when
-            issuing DDL for this constraint. Typical values include CASCADE,
-            SET NULL and RESTRICT.
-
- :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
- DEFERRABLE when issuing DDL for this constraint.
-
- :param initially: Optional string. If set, emit INITIALLY <value> when
- issuing DDL for this constraint.
-
- :param link_to_name: if True, the string name given in ``column`` is
- the rendered name of the referenced column, not its locally assigned
- ``key``.
-
- :param use_alter: If True, do not emit the DDL for this constraint as
- part of the CREATE TABLE definition. Instead, generate it via an
- ALTER TABLE statement issued after the full collection of tables
- have been created, and drop it via an ALTER TABLE statement before
- the full collection of tables are dropped. This is shorthand for the
- usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied
- as "after-create" and "before-drop" events on the MetaData object.
- This is normally used to generate/drop constraints on objects that
- are mutually dependent on each other.
-
- :param match: Optional string. If set, emit MATCH <value> when issuing
- DDL for this constraint. Typical values include SIMPLE, PARTIAL
- and FULL.
-
- """
- super(ForeignKeyConstraint, self).\
- __init__(name, deferrable, initially)
-
- self.onupdate = onupdate
- self.ondelete = ondelete
- self.link_to_name = link_to_name
- if self.name is None and use_alter:
- raise exc.ArgumentError("Alterable Constraint requires a name")
- self.use_alter = use_alter
- self.match = match
-
- self._elements = util.OrderedDict()
-
- # standalone ForeignKeyConstraint - create
- # associated ForeignKey objects which will be applied to hosted
- # Column objects (in col.foreign_keys), either now or when attached
- # to the Table for string-specified names
- for col, refcol in zip(columns, refcolumns):
- self._elements[col] = ForeignKey(
- refcol,
- _constraint=self,
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- use_alter=self.use_alter,
- link_to_name=self.link_to_name,
- match=self.match
- )
-
- if table is not None:
- self._set_parent_with_dispatch(table)
- elif columns and \
- isinstance(columns[0], Column) and \
- columns[0].table is not None:
- self._set_parent_with_dispatch(columns[0].table)
-
- def _validate_dest_table(self, table):
- table_keys = set([elem._table_key() for elem in self._elements.values()])
- if None not in table_keys and len(table_keys) > 1:
- elem0, elem1 = sorted(table_keys)[0:2]
- raise exc.ArgumentError(
- 'ForeignKeyConstraint on %s(%s) refers to '
- 'multiple remote tables: %s and %s' % (
- table.fullname,
- self._col_description,
- elem0,
- elem1
- ))
-
- @property
- def _col_description(self):
- return ", ".join(self._elements)
-
- @property
- def columns(self):
- return list(self._elements)
-
- @property
- def elements(self):
- return list(self._elements.values())
-
- def _set_parent(self, table):
- super(ForeignKeyConstraint, self)._set_parent(table)
-
- self._validate_dest_table(table)
-
- for col, fk in self._elements.items():
- # string-specified column names now get
- # resolved to Column objects
- if isinstance(col, util.string_types):
- try:
- col = table.c[col]
- except KeyError:
- raise exc.ArgumentError(
- "Can't create ForeignKeyConstraint "
- "on table '%s': no column "
- "named '%s' is present." % (table.description, col))
-
- if not hasattr(fk, 'parent') or \
- fk.parent is not col:
- fk._set_parent_with_dispatch(col)
-
- if self.use_alter:
- def supports_alter(ddl, event, schema_item, bind, **kw):
- return table in set(kw['tables']) and \
- bind.dialect.supports_alter
-
- event.listen(table.metadata, "after_create",
- AddConstraint(self, on=supports_alter))
- event.listen(table.metadata, "before_drop",
- DropConstraint(self, on=supports_alter))
-
- def copy(self, schema=None, **kw):
- fkc = ForeignKeyConstraint(
- [x.parent.key for x in self._elements.values()],
- [x._get_colspec(schema=schema) for x in self._elements.values()],
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- use_alter=self.use_alter,
- deferrable=self.deferrable,
- initially=self.initially,
- link_to_name=self.link_to_name,
- match=self.match
- )
- fkc.dispatch._update(self.dispatch)
- return fkc
-
-
-class PrimaryKeyConstraint(ColumnCollectionConstraint):
- """A table-level PRIMARY KEY constraint.
-
- Defines a single column or composite PRIMARY KEY constraint. For a
- no-frills primary key, adding ``primary_key=True`` to one or more
- ``Column`` definitions is a shorthand equivalent for an unnamed single- or
- multiple-column PrimaryKeyConstraint.
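-
-    E.g., a composite key sketch (table and column names hypothetical)::
-
-        Table('t', metadata,
-            Column('id', Integer),
-            Column('version', Integer),
-            PrimaryKeyConstraint('id', 'version')
-        )
-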
- """
-
- __visit_name__ = 'primary_key_constraint'
-
- def _set_parent(self, table):
- super(PrimaryKeyConstraint, self)._set_parent(table)
-
- if table.primary_key in table.constraints:
- table.constraints.remove(table.primary_key)
- table.primary_key = self
- table.constraints.add(self)
-
- for c in self.columns:
- c.primary_key = True
-
- def _replace(self, col):
- self.columns.replace(col)
-
-
-class UniqueConstraint(ColumnCollectionConstraint):
- """A table-level UNIQUE constraint.
-
- Defines a single column or composite UNIQUE constraint. For a no-frills,
- single column constraint, adding ``unique=True`` to the ``Column``
- definition is a shorthand equivalent for an unnamed, single column
- UniqueConstraint.
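-
-    E.g., a composite constraint sketch (column names hypothetical)::
-
-        UniqueConstraint('first_name', 'last_name', name='uq_fullname')
-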
- """
-
- __visit_name__ = 'unique_constraint'
-
-
-class Index(ColumnCollectionMixin, SchemaItem):
- """A table-level INDEX.
-
- Defines a composite (one or more column) INDEX. For a no-frills, single
- column index, adding ``index=True`` to the ``Column`` definition is
- a shorthand equivalent for an unnamed, single column :class:`.Index`.
-
- .. seealso::
-
- :ref:`schema_indexes` - General information on :class:`.Index`.
-
- :ref:`postgresql_indexes` - PostgreSQL-specific options available for the
- :class:`.Index` construct.
-
- :ref:`mysql_indexes` - MySQL-specific options available for the
- :class:`.Index` construct.
-
- :ref:`mssql_indexes` - MSSQL-specific options available for the
- :class:`.Index` construct.
-
- """
-
- __visit_name__ = 'index'
-
- def __init__(self, name, *expressions, **kw):
- """Construct an index object.
-
- :param name:
- The name of the index
-
- :param \*expressions:
- Column expressions to include in the index. The expressions
- are normally instances of :class:`.Column`, but may also
-            be arbitrary SQL expressions which ultimately refer to a
- :class:`.Column`.
-
- .. versionadded:: 0.8 :class:`.Index` supports SQL expressions as
- well as plain columns.
-
- :param unique:
- Defaults to False: create a unique index.
-
- :param \**kw:
- Other keyword arguments may be interpreted by specific dialects.
-
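-        E.g., a functional index sketch (assumes ``func`` is imported from
-        ``sqlalchemy`` and ``sometable`` is a :class:`.Table` with a string
-        column ``data``)::
-
-            Index('ix_data_lower', func.lower(sometable.c.data))
-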
- """
- self.table = None
-
- columns = []
- for expr in expressions:
- if not isinstance(expr, expression.ClauseElement):
- columns.append(expr)
- else:
- cols = []
- visitors.traverse(expr, {}, {'column': cols.append})
- if cols:
- columns.append(cols[0])
- else:
- columns.append(expr)
-
- self.expressions = expressions
-
- # will call _set_parent() if table-bound column
- # objects are present
- ColumnCollectionMixin.__init__(self, *columns)
-
- self.name = name
- self.unique = kw.pop('unique', False)
- self.kwargs = kw
-
- def _set_parent(self, table):
- ColumnCollectionMixin._set_parent(self, table)
-
- if self.table is not None and table is not self.table:
- raise exc.ArgumentError(
- "Index '%s' is against table '%s', and "
- "cannot be associated with table '%s'." % (
- self.name,
- self.table.description,
- table.description
- )
- )
- self.table = table
- for c in self.columns:
- if c.table != self.table:
- raise exc.ArgumentError(
- "Column '%s' is not part of table '%s'." %
- (c, self.table.description)
- )
- table.indexes.add(self)
-
- self.expressions = [
- expr if isinstance(expr, expression.ClauseElement)
- else colexpr
- for expr, colexpr in zip(self.expressions, self.columns)
- ]
-
- @property
- def bind(self):
- """Return the connectable associated with this Index."""
-
- return self.table.bind
-
- def create(self, bind=None):
- """Issue a ``CREATE`` statement for this
- :class:`.Index`, using the given :class:`.Connectable`
- for connectivity.
-
- See also :meth:`.MetaData.create_all`.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaGenerator, self)
- return self
-
- def drop(self, bind=None):
- """Issue a ``DROP`` statement for this
- :class:`.Index`, using the given :class:`.Connectable`
- for connectivity.
-
- See also :meth:`.MetaData.drop_all`.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaDropper, self)
-
- def __repr__(self):
- return 'Index(%s)' % (
- ", ".join(
- [repr(self.name)] +
- [repr(c) for c in self.columns] +
- (self.unique and ["unique=True"] or [])
- ))
-
-
-class MetaData(SchemaItem):
- """A collection of :class:`.Table` objects and their associated schema
- constructs.
-
- Holds a collection of :class:`.Table` objects as well as
- an optional binding to an :class:`.Engine` or
- :class:`.Connection`. If bound, the :class:`.Table` objects
- in the collection and their columns may participate in implicit SQL
- execution.
-
- The :class:`.Table` objects themselves are stored in the
- ``metadata.tables`` dictionary.
-
- The ``bind`` property may be assigned to dynamically. A common pattern is
- to start unbound and then bind later when an engine is available::
-
- metadata = MetaData()
- # define tables
- Table('mytable', metadata, ...)
- # connect to an engine later, perhaps after loading a URL from a
- # configuration file
- metadata.bind = an_engine
-
- MetaData is a thread-safe object after tables have been explicitly defined
- or loaded via reflection.
-
- See also:
-
- :ref:`metadata_describing` - Introduction to database metadata
-
- .. index::
- single: thread safety; MetaData
-
- """
-
- __visit_name__ = 'metadata'
-
- def __init__(self, bind=None, reflect=False, schema=None,
- quote_schema=None):
- """Create a new MetaData object.
-
- :param bind:
- An Engine or Connection to bind to. May also be a string or URL
-          instance; these are passed to create_engine() and this MetaData will
- be bound to the resulting engine.
-
- :param reflect:
- Optional, automatically load all tables from the bound database.
- Defaults to False. ``bind`` is required when this option is set.
-
- .. deprecated:: 0.8
- Please use the :meth:`.MetaData.reflect` method.
-
- :param schema:
- The default schema to use for the :class:`.Table`,
- :class:`.Sequence`, and other objects associated with this
- :class:`.MetaData`. Defaults to ``None``.
-
- :param quote_schema:
- Sets the ``quote_schema`` flag for those :class:`.Table`,
- :class:`.Sequence`, and other objects which make usage of the
- local ``schema`` name.
-
- .. versionadded:: 0.7.4
- ``schema`` and ``quote_schema`` parameters.
-
- """
- self.tables = util.immutabledict()
- self.schema = schema
- self.quote_schema = quote_schema
- self._schemas = set()
- self._sequences = {}
- self._fk_memos = collections.defaultdict(list)
-
- self.bind = bind
- if reflect:
-            util.warn("reflect=True is deprecated; please "
- "use the reflect() method.")
- if not bind:
- raise exc.ArgumentError(
- "A bind must be supplied in conjunction "
- "with reflect=True")
- self.reflect()
-
- def __repr__(self):
- return 'MetaData(bind=%r)' % self.bind
-
- def __contains__(self, table_or_key):
- if not isinstance(table_or_key, util.string_types):
- table_or_key = table_or_key.key
- return table_or_key in self.tables
-
- def _add_table(self, name, schema, table):
- key = _get_table_key(name, schema)
- dict.__setitem__(self.tables, key, table)
- if schema:
- self._schemas.add(schema)
-
- def _remove_table(self, name, schema):
- key = _get_table_key(name, schema)
- removed = dict.pop(self.tables, key, None)
- if removed is not None:
- for fk in removed.foreign_keys:
- fk._remove_from_metadata(self)
- if self._schemas:
- self._schemas = set([t.schema
- for t in self.tables.values()
- if t.schema is not None])
-
-
- def __getstate__(self):
- return {'tables': self.tables,
- 'schema': self.schema,
- 'quote_schema': self.quote_schema,
- 'schemas': self._schemas,
- 'sequences': self._sequences,
- 'fk_memos': self._fk_memos}
-
- def __setstate__(self, state):
- self.tables = state['tables']
- self.schema = state['schema']
- self.quote_schema = state['quote_schema']
- self._bind = None
- self._sequences = state['sequences']
- self._schemas = state['schemas']
- self._fk_memos = state['fk_memos']
-
- def is_bound(self):
- """True if this MetaData is bound to an Engine or Connection."""
-
- return self._bind is not None
-
- def bind(self):
- """An :class:`.Engine` or :class:`.Connection` to which this
- :class:`.MetaData` is bound.
-
- Typically, a :class:`.Engine` is assigned to this attribute
- so that "implicit execution" may be used, or alternatively
- as a means of providing engine binding information to an
- ORM :class:`.Session` object::
-
- engine = create_engine("someurl://")
- metadata.bind = engine
-
- .. seealso::
-
- :ref:`dbengine_implicit` - background on "bound metadata"
-
- """
- return self._bind
-
- def _bind_to(self, bind):
- """Bind this MetaData to an Engine, Connection, string or URL."""
-
- if isinstance(bind, util.string_types + (url.URL, )):
- from sqlalchemy import create_engine
- self._bind = create_engine(bind)
- else:
- self._bind = bind
- bind = property(bind, _bind_to)
-
- def clear(self):
- """Clear all Table objects from this MetaData."""
-
- dict.clear(self.tables)
- self._schemas.clear()
- self._fk_memos.clear()
-
- def remove(self, table):
- """Remove the given Table object from this MetaData."""
-
- self._remove_table(table.name, table.schema)
-
- @property
- def sorted_tables(self):
- """Returns a list of :class:`.Table` objects sorted in order of
- foreign key dependency.
-
-        The sorting will place :class:`.Table` objects that are depended
-        upon first, before the tables that depend on them, representing the
- order in which they can be created. To get the order in which
- the tables would be dropped, use the ``reversed()`` Python built-in.
-
- .. seealso::
-
- :meth:`.Inspector.sorted_tables`
-
- """
- return sqlutil.sort_tables(self.tables.values())
-
- def reflect(self, bind=None, schema=None, views=False, only=None):
- """Load all available table definitions from the database.
-
- Automatically creates ``Table`` entries in this ``MetaData`` for any
- table available in the database but not yet present in the
- ``MetaData``. May be called multiple times to pick up tables recently
- added to the database, however no special action is taken if a table
- in this ``MetaData`` no longer exists in the database.
-
- :param bind:
- A :class:`.Connectable` used to access the database; if None, uses
- the existing bind on this ``MetaData``, if any.
-
- :param schema:
-          Optional, query and reflect tables from an alternate schema.
- If None, the schema associated with this :class:`.MetaData`
- is used, if any.
-
- :param views:
- If True, also reflect views.
-
- :param only:
- Optional. Load only a sub-set of available named tables. May be
- specified as a sequence of names or a callable.
-
- If a sequence of names is provided, only those tables will be
- reflected. An error is raised if a table is requested but not
- available. Named tables already present in this ``MetaData`` are
- ignored.
-
- If a callable is provided, it will be used as a boolean predicate to
- filter the list of potential table names. The callable is called
- with a table name and this ``MetaData`` instance as positional
- arguments and should return a true value for any table to reflect.
-
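-        E.g., a sketch of both forms of ``only`` (``engine`` and the table
-        names are hypothetical)::
-
-            meta.reflect(bind=engine, only=['user', 'address'])
-            meta.reflect(bind=engine,
-                         only=lambda name, md: name.startswith('tmp_'))
-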
- """
- if bind is None:
- bind = _bind_or_error(self)
-
- with bind.connect() as conn:
-
- reflect_opts = {
- 'autoload': True,
- 'autoload_with': conn
- }
-
- if schema is None:
- schema = self.schema
-
- if schema is not None:
- reflect_opts['schema'] = schema
-
- available = util.OrderedSet(bind.engine.table_names(schema,
- connection=conn))
- if views:
- available.update(
- bind.dialect.get_view_names(conn, schema)
- )
-
- if schema is not None:
- available_w_schema = util.OrderedSet(["%s.%s" % (schema, name)
- for name in available])
- else:
- available_w_schema = available
-
- current = set(self.tables)
-
- if only is None:
- load = [name for name, schname in
- zip(available, available_w_schema)
- if schname not in current]
- elif util.callable(only):
- load = [name for name, schname in
- zip(available, available_w_schema)
- if schname not in current and only(name, self)]
- else:
- missing = [name for name in only if name not in available]
- if missing:
- s = schema and (" schema '%s'" % schema) or ''
- raise exc.InvalidRequestError(
- 'Could not reflect: requested table(s) not available '
- 'in %s%s: (%s)' %
- (bind.engine.url, s, ', '.join(missing)))
- load = [name for name in only if name not in current]
-
- for name in load:
- Table(name, self, **reflect_opts)
-
- def append_ddl_listener(self, event_name, listener):
- """Append a DDL event listener to this ``MetaData``.
-
- .. deprecated:: 0.7
- See :class:`.DDLEvents`.
-
- """
- def adapt_listener(target, connection, **kw):
- tables = kw['tables']
- listener(event, target, connection, tables=tables)
-
- event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
-
- def create_all(self, bind=None, tables=None, checkfirst=True):
- """Create all tables stored in this metadata.
-
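-        E.g., a sketch assuming ``select`` is imported and ``seq`` is a
-        hypothetical :class:`.Sequence`::
-
-            stmt = select([seq.next_value()])
-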
- Conditional by default, will not attempt to recreate tables already
- present in the target database.
-
- :param bind:
- A :class:`.Connectable` used to access the
- database; if None, uses the existing bind on this ``MetaData``, if
- any.
-
- :param tables:
- Optional list of ``Table`` objects, which is a subset of the total
- tables in the ``MetaData`` (others are ignored).
-
- :param checkfirst:
- Defaults to True, don't issue CREATEs for tables already present
- in the target database.
-
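-        E.g., emitting CREATE for a subset of tables (``engine`` and
-        ``user_table`` are hypothetical)::
-
-            metadata.create_all(bind=engine, tables=[user_table])
-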
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaGenerator,
- self,
- checkfirst=checkfirst,
- tables=tables)
-
- def drop_all(self, bind=None, tables=None, checkfirst=True):
- """Drop all tables stored in this metadata.
-
- Conditional by default, will not attempt to drop tables not present in
- the target database.
-
- :param bind:
- A :class:`.Connectable` used to access the
- database; if None, uses the existing bind on this ``MetaData``, if
- any.
-
- :param tables:
- Optional list of ``Table`` objects, which is a subset of the
- total tables in the ``MetaData`` (others are ignored).
-
- :param checkfirst:
- Defaults to True, only issue DROPs for tables confirmed to be
- present in the target database.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind._run_visitor(ddl.SchemaDropper,
- self,
- checkfirst=checkfirst,
- tables=tables)
-
-
-class ThreadLocalMetaData(MetaData):
- """A MetaData variant that presents a different ``bind`` in every thread.
-
- Makes the ``bind`` property of the MetaData a thread-local value, allowing
- this collection of tables to be bound to different ``Engine``
- implementations or connections in each thread.
-
- The ThreadLocalMetaData starts off bound to None in each thread. Binds
- must be made explicitly by assigning to the ``bind`` property or using
- ``connect()``. You can also re-bind dynamically multiple times per
- thread, just like a regular ``MetaData``.
-
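-    E.g., a sketch assuming two worker threads (URLs hypothetical)::
-
-        meta = ThreadLocalMetaData()
-
-        # in thread one:
-        meta.bind = 'sqlite:///one.db'
-
-        # in thread two:
-        meta.bind = 'sqlite:///two.db'
-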
- """
-
- __visit_name__ = 'metadata'
-
- def __init__(self):
- """Construct a ThreadLocalMetaData."""
-
- self.context = util.threading.local()
- self.__engines = {}
- super(ThreadLocalMetaData, self).__init__()
-
- def bind(self):
- """The bound Engine or Connection for this thread.
-
- This property may be assigned an Engine or Connection, or assigned a
- string or URL to automatically create a basic Engine for this bind
- with ``create_engine()``."""
-
- return getattr(self.context, '_engine', None)
-
- def _bind_to(self, bind):
- """Bind to a Connectable in the caller's thread."""
-
- if isinstance(bind, util.string_types + (url.URL, )):
- try:
- self.context._engine = self.__engines[bind]
- except KeyError:
- from sqlalchemy import create_engine
- e = create_engine(bind)
- self.__engines[bind] = e
- self.context._engine = e
- else:
-            # TODO: this is squirrelly; we shouldn't have to hold onto engines
- # in a case like this
- if bind not in self.__engines:
- self.__engines[bind] = bind
- self.context._engine = bind
-
- bind = property(bind, _bind_to)
-
- def is_bound(self):
- """True if there is a bind for this thread."""
- return (hasattr(self.context, '_engine') and
- self.context._engine is not None)
-
- def dispose(self):
- """Dispose all bound engines, in all thread contexts."""
-
- for e in self.__engines.values():
- if hasattr(e, 'dispose'):
- e.dispose()
-
-
-class SchemaVisitor(visitors.ClauseVisitor):
- """Define the visiting for ``SchemaItem`` objects."""
-
- __traverse_options__ = {'schema_visitor': True}
-
-
-class _DDLCompiles(expression.ClauseElement):
- def _compiler(self, dialect, **kw):
- """Return a compiler appropriate for this ClauseElement, given a
- Dialect."""
-
- return dialect.ddl_compiler(dialect, self, **kw)
-
-
-class DDLElement(expression.Executable, _DDLCompiles):
- """Base class for DDL expression constructs.
-
- This class is the base for the general purpose :class:`.DDL` class,
- as well as the various create/drop clause constructs such as
- :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
- etc.
-
- :class:`.DDLElement` integrates closely with SQLAlchemy events,
- introduced in :ref:`event_toplevel`. An instance of one is
- itself an event receiving callable::
-
- event.listen(
- users,
- 'after_create',
- AddConstraint(constraint).execute_if(dialect='postgresql')
- )
-
- See also:
-
- :class:`.DDL`
-
- :class:`.DDLEvents`
-
- :ref:`event_toplevel`
-
- :ref:`schema_ddl_sequences`
-
- """
-
- _execution_options = expression.Executable.\
- _execution_options.union({'autocommit': True})
-
- target = None
- on = None
- dialect = None
- callable_ = None
-
- def execute(self, bind=None, target=None):
- """Execute this DDL immediately.
-
-        Executes the DDL statement in isolation using the supplied
-        :class:`.Connectable`, or the :class:`.Connectable` assigned to
-        the ``.bind`` property if none is supplied. If the DDL has a
-        conditional ``on`` criterion, it will be invoked with None as
-        the event.
-
- :param bind:
- Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
- :class:`.Connectable` must be present in the
- ``.bind`` property.
-
- :param target:
- Optional, defaults to None. The target SchemaItem for the
- execute call. Will be passed to the ``on`` callable if any,
- and may also provide string expansion data for the
- statement. See ``execute_at`` for more information.
-
- """
-
- if bind is None:
- bind = _bind_or_error(self)
-
- if self._should_execute(target, bind):
- return bind.execute(self.against(target))
- else:
- bind.engine.logger.info(
- "DDL execution skipped, criteria not met.")
-
- @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
- ":meth:`.DDLElement.execute_if`.")
- def execute_at(self, event_name, target):
- """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
-
- Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
- executing it when that schema item is created or dropped. The DDL
- statement will be executed using the same Connection and transactional
- context as the Table create/drop itself. The ``.bind`` property of
- this statement is ignored.
-
-        :param event_name:
- One of the events defined in the schema item's ``.ddl_events``;
- e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
-
- :param target:
-          The Table or MetaData instance with which this DDLElement will
-          be associated.
-
- A DDLElement instance can be linked to any number of schema items.
-
- ``execute_at`` builds on the ``append_ddl_listener`` interface of
- :class:`.MetaData` and :class:`.Table` objects.
-
-        Caveat: Creating or dropping a Table in isolation will also trigger
-        any DDL set via ``execute_at`` against that Table's MetaData. This
-        may change
- in a future release.
-
- """
-
- def call_event(target, connection, **kw):
- if self._should_execute_deprecated(event_name,
- target, connection, **kw):
- return connection.execute(self.against(target))
-
- event.listen(target, "" + event_name.replace('-', '_'), call_event)
-
- @expression._generative
- def against(self, target):
- """Return a copy of this DDL against a specific schema item."""
-
- self.target = target
-
- @expression._generative
- def execute_if(self, dialect=None, callable_=None, state=None):
- """Return a callable that will execute this
- DDLElement conditionally.
-
- Used to provide a wrapper for event listening::
-
- event.listen(
- metadata,
- 'before_create',
- DDL("my_ddl").execute_if(dialect='postgresql')
- )
-
- :param dialect: May be a string, tuple or a callable
- predicate. If a string, it will be compared to the name of the
- executing database dialect::
-
- DDL('something').execute_if(dialect='postgresql')
-
- If a tuple, specifies multiple dialect names::
-
- DDL('something').execute_if(dialect=('postgresql', 'mysql'))
-
- :param callable_: A callable, which will be invoked with
- four positional arguments as well as optional keyword
- arguments:
-
- :ddl:
- This DDL element.
-
- :target:
- The :class:`.Table` or :class:`.MetaData` object which is the
- target of this event. May be None if the DDL is executed
- explicitly.
-
- :bind:
- The :class:`.Connection` being used for DDL execution
-
- :tables:
- Optional keyword argument - a list of Table objects which are to
- be created/ dropped within a MetaData.create_all() or drop_all()
- method call.
-
- :state:
- Optional keyword argument - will be the ``state`` argument
- passed to this function.
-
- :checkfirst:
- Keyword argument, will be True if the 'checkfirst' flag was
- set during the call to ``create()``, ``create_all()``,
- ``drop()``, ``drop_all()``.
-
- If the callable returns a true value, the DDL statement will be
- executed.
-
- :param state: any value which will be passed to the callable_
- as the ``state`` keyword argument.
-
- See also:
-
- :class:`.DDLEvents`
-
- :ref:`event_toplevel`
-
- """
- self.dialect = dialect
- self.callable_ = callable_
- self.state = state
-
- def _should_execute(self, target, bind, **kw):
- if self.on is not None and \
- not self._should_execute_deprecated(None, target, bind, **kw):
- return False
-
- if isinstance(self.dialect, util.string_types):
- if self.dialect != bind.engine.name:
- return False
- elif isinstance(self.dialect, (tuple, list, set)):
- if bind.engine.name not in self.dialect:
- return False
- if self.callable_ is not None and \
- not self.callable_(self, target, bind, state=self.state, **kw):
- return False
-
- return True
-
- def _should_execute_deprecated(self, event, target, bind, **kw):
- if self.on is None:
- return True
- elif isinstance(self.on, util.string_types):
- return self.on == bind.engine.name
- elif isinstance(self.on, (tuple, list, set)):
- return bind.engine.name in self.on
- else:
- return self.on(self, event, target, bind, **kw)
-
- def __call__(self, target, bind, **kw):
- """Execute the DDL as a ddl_listener."""
-
- if self._should_execute(target, bind, **kw):
- return bind.execute(self.against(target))
-
- def _check_ddl_on(self, on):
- if (on is not None and
- (not isinstance(on, util.string_types + (tuple, list, set)) and
- not util.callable(on))):
- raise exc.ArgumentError(
- "Expected the name of a database dialect, a tuple "
- "of names, or a callable for "
- "'on' criteria, got type '%s'." % type(on).__name__)
-
- def bind(self):
- if self._bind:
- return self._bind
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
- def _generate(self):
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- return s
-
-
-class DDL(DDLElement):
- """A literal DDL statement.
-
- Specifies literal SQL DDL to be executed by the database. DDL objects
- function as DDL event listeners, and can be subscribed to those events
- listed in :class:`.DDLEvents`, using either :class:`.Table` or
- :class:`.MetaData` objects as targets. Basic templating support allows
- a single DDL instance to handle repetitive tasks for multiple tables.
-
- Examples::
-
- from sqlalchemy import event, DDL
-
- tbl = Table('users', metadata, Column('uid', Integer))
- event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
-
- spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
- event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
-
- drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
- connection.execute(drop_spow)
-
- When operating on Table events, the following ``statement``
-    string substitutions are available::
-
- %(table)s - the Table name, with any required quoting applied
- %(schema)s - the schema name, with any required quoting applied
- %(fullname)s - the Table name including schema, quoted if needed
-
- The DDL's "context", if any, will be combined with the standard
-    substitutions noted above. Keys present in the context will override
- the standard substitutions.
-
- """
-
- __visit_name__ = "ddl"
-
- def __init__(self, statement, on=None, context=None, bind=None):
- """Create a DDL statement.
-
- :param statement:
- A string or unicode string to be executed. Statements will be
- processed with Python's string formatting operator. See the
- ``context`` argument and the ``execute_at`` method.
-
- A literal '%' in a statement must be escaped as '%%'.
-
- SQL bind parameters are not available in DDL statements.
-
- :param on:
- .. deprecated:: 0.7
- See :meth:`.DDLElement.execute_if`.
-
- Optional filtering criteria. May be a string, tuple or a callable
- predicate. If a string, it will be compared to the name of the
- executing database dialect::
-
- DDL('something', on='postgresql')
-
- If a tuple, specifies multiple dialect names::
-
- DDL('something', on=('postgresql', 'mysql'))
-
- If a callable, it will be invoked with four positional arguments
- as well as optional keyword arguments:
-
- :ddl:
- This DDL element.
-
- :event:
- The name of the event that has triggered this DDL, such as
-            'after-create'. Will be None if the DDL is executed explicitly.
-
- :target:
- The ``Table`` or ``MetaData`` object which is the target of
- this event. May be None if the DDL is executed explicitly.
-
- :connection:
- The ``Connection`` being used for DDL execution
-
- :tables:
- Optional keyword argument - a list of Table objects which are to
- be created/ dropped within a MetaData.create_all() or drop_all()
- method call.
-
-
- If the callable returns a true value, the DDL statement will be
- executed.
-
- :param context:
- Optional dictionary, defaults to None. These values will be
- available for use in string substitutions on the DDL statement.
-
- :param bind:
- Optional. A :class:`.Connectable`, used by
- default when ``execute()`` is invoked without a bind argument.
-
-
- See also:
-
- :class:`.DDLEvents`
- :mod:`sqlalchemy.event`
-
- """
-
- if not isinstance(statement, util.string_types):
- raise exc.ArgumentError(
- "Expected a string or unicode SQL statement, got '%r'" %
- statement)
-
- self.statement = statement
- self.context = context or {}
-
- self._check_ddl_on(on)
- self.on = on
- self._bind = bind
-
- def __repr__(self):
- return '<%s@%s; %s>' % (
- type(self).__name__, id(self),
- ', '.join([repr(self.statement)] +
- ['%s=%r' % (key, getattr(self, key))
- for key in ('on', 'context')
- if getattr(self, key)]))
-
-
-def _to_schema_column(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, Column):
- raise exc.ArgumentError("schema.Column object expected")
- return element
-
-
-def _to_schema_column_or_string(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, util.string_types + (expression.ColumnElement, )):
- msg = "Element %r is not a string name or column element"
- raise exc.ArgumentError(msg % element)
- return element
-
-
-class _CreateDropBase(DDLElement):
-    """Base class for DDL constructs that represent CREATE and DROP or
- equivalents.
-
- The common theme of _CreateDropBase is a single
- ``element`` attribute which refers to the element
- to be created or dropped.
-
- """
-
- def __init__(self, element, on=None, bind=None):
- self.element = element
- self._check_ddl_on(on)
- self.on = on
- self.bind = bind
-
- def _create_rule_disable(self, compiler):
- """Allow disable of _create_rule using a callable.
-
- Pass to _create_rule using
- util.portable_instancemethod(self._create_rule_disable)
- to retain serializability.
-
- """
- return False
-
-
-class CreateSchema(_CreateDropBase):
- """Represent a CREATE SCHEMA statement.
-
- .. versionadded:: 0.7.4
-
- The argument here is the string name of the schema.
-
- """
-
- __visit_name__ = "create_schema"
-
- def __init__(self, name, quote=None, **kw):
- """Create a new :class:`.CreateSchema` construct."""
-
- self.quote = quote
- super(CreateSchema, self).__init__(name, **kw)
-
-
-class DropSchema(_CreateDropBase):
- """Represent a DROP SCHEMA statement.
-
- The argument here is the string name of the schema.
-
- .. versionadded:: 0.7.4
-
- """
-
- __visit_name__ = "drop_schema"
-
- def __init__(self, name, quote=None, cascade=False, **kw):
- """Create a new :class:`.DropSchema` construct."""
-
- self.quote = quote
- self.cascade = cascade
- super(DropSchema, self).__init__(name, **kw)
-
-
-class CreateTable(_CreateDropBase):
- """Represent a CREATE TABLE statement."""
-
- __visit_name__ = "create_table"
-
- def __init__(self, element, on=None, bind=None):
- """Create a :class:`.CreateTable` construct.
-
- :param element: a :class:`.Table` that's the subject
- of the CREATE
- :param on: See the description for 'on' in :class:`.DDL`.
- :param bind: See the description for 'bind' in :class:`.DDL`.
-
- """
- super(CreateTable, self).__init__(element, on=on, bind=bind)
- self.columns = [CreateColumn(column)
- for column in element.columns
- ]
-
-
-class _DropView(_CreateDropBase):
- """Semi-public 'DROP VIEW' construct.
-
- Used by the test suite for dialect-agnostic drops of views.
- This object will eventually be part of a public "view" API.
-
- """
- __visit_name__ = "drop_view"
-
-
-class CreateColumn(_DDLCompiles):
- """Represent a :class:`.Column` as rendered in a CREATE TABLE statement,
- via the :class:`.CreateTable` construct.
-
- This is provided to support custom column DDL within the generation
- of CREATE TABLE statements, by using the
- compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
- to extend :class:`.CreateColumn`.
-
- Typical integration is to examine the incoming :class:`.Column`
- object, and to redirect compilation if a particular flag or condition
- is found::
-
- from sqlalchemy import schema
- from sqlalchemy.ext.compiler import compiles
-
- @compiles(schema.CreateColumn)
- def compile(element, compiler, **kw):
- column = element.element
-
- if "special" not in column.info:
- return compiler.visit_create_column(element, **kw)
-
- text = "%s SPECIAL DIRECTIVE %s" % (
- column.name,
- compiler.type_compiler.process(column.type)
- )
- default = compiler.get_column_default_string(column)
- if default is not None:
- text += " DEFAULT " + default
-
- if not column.nullable:
- text += " NOT NULL"
-
- if column.constraints:
- text += " ".join(
- compiler.process(const)
- for const in column.constraints)
- return text
-
- The above construct can be applied to a :class:`.Table` as follows::
-
-        from sqlalchemy import Table, MetaData, Column, Integer, String
- from sqlalchemy import schema
-
- metadata = MetaData()
-
-        table = Table('mytable', metadata,
- Column('x', Integer, info={"special":True}, primary_key=True),
- Column('y', String(50)),
- Column('z', String(20), info={"special":True})
- )
-
- metadata.create_all(conn)
-
- Above, the directives we've added to the :attr:`.Column.info` collection
- will be detected by our custom compilation scheme::
-
- CREATE TABLE mytable (
- x SPECIAL DIRECTIVE INTEGER NOT NULL,
- y VARCHAR(50),
- z SPECIAL DIRECTIVE VARCHAR(20),
- PRIMARY KEY (x)
- )
-
- .. versionadded:: 0.8 The :class:`.CreateColumn` construct was added
- to support custom column creation styles.
-
- """
- __visit_name__ = 'create_column'
-
- def __init__(self, element):
- self.element = element
-
-
-class DropTable(_CreateDropBase):
- """Represent a DROP TABLE statement."""
-
- __visit_name__ = "drop_table"
-
-
-class CreateSequence(_CreateDropBase):
- """Represent a CREATE SEQUENCE statement."""
-
- __visit_name__ = "create_sequence"
-
-
-class DropSequence(_CreateDropBase):
- """Represent a DROP SEQUENCE statement."""
-
- __visit_name__ = "drop_sequence"
-
-
-class CreateIndex(_CreateDropBase):
- """Represent a CREATE INDEX statement."""
-
- __visit_name__ = "create_index"
-
-
-class DropIndex(_CreateDropBase):
- """Represent a DROP INDEX statement."""
-
- __visit_name__ = "drop_index"
-
-
-class AddConstraint(_CreateDropBase):
- """Represent an ALTER TABLE ADD CONSTRAINT statement."""
-
- __visit_name__ = "add_constraint"
-
- def __init__(self, element, *args, **kw):
- super(AddConstraint, self).__init__(element, *args, **kw)
- element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
-
-
-class DropConstraint(_CreateDropBase):
- """Represent an ALTER TABLE DROP CONSTRAINT statement."""
-
- __visit_name__ = "drop_constraint"
-
- def __init__(self, element, cascade=False, **kw):
- self.cascade = cascade
- super(DropConstraint, self).__init__(element, **kw)
- element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
-
-
-def _bind_or_error(schemaitem, msg=None):
- bind = schemaitem.bind
- if not bind:
- name = schemaitem.__class__.__name__
- label = getattr(schemaitem, 'fullname',
- getattr(schemaitem, 'name', None))
- if label:
- item = '%s %r' % (name, label)
- else:
- item = name
- if isinstance(schemaitem, (MetaData, DDL)):
- bindable = "the %s's .bind" % name
- else:
- bindable = "this %s's .metadata.bind" % name
-
- if msg is None:
- msg = "The %s is not bound to an Engine or Connection. "\
- "Execution can not proceed without a database to execute "\
- "against. Either execute with an explicit connection or "\
- "assign %s to enable implicit execution." % \
- (item, bindable)
- raise exc.UnboundExecutionError(msg)
- return bind
+from .sql.base import (
+ SchemaVisitor
+ )
+
+
+from .sql.schema import (
+ CheckConstraint,
+ Column,
+ ColumnDefault,
+ Constraint,
+ DefaultClause,
+ DefaultGenerator,
+ FetchedValue,
+ ForeignKey,
+ ForeignKeyConstraint,
+ Index,
+ MetaData,
+ PassiveDefault,
+ PrimaryKeyConstraint,
+ SchemaItem,
+ Sequence,
+ Table,
+ ThreadLocalMetaData,
+ UniqueConstraint,
+ _get_table_key,
+ ColumnCollectionConstraint,
+ )
+
+
+from .sql.ddl import (
+ DDL,
+ CreateTable,
+ DropTable,
+ CreateSequence,
+ DropSequence,
+ CreateIndex,
+ DropIndex,
+ CreateSchema,
+ DropSchema,
+ _DropView,
+ CreateColumn,
+ AddConstraint,
+ DropConstraint,
+ DDLBase,
+ DDLElement,
+ _CreateDropBase,
+ _DDLCompiles
+)
diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py
index 9700f26a0..9ed6049af 100644
--- a/lib/sqlalchemy/sql/__init__.py
+++ b/lib/sqlalchemy/sql/__init__.py
@@ -1,5 +1,5 @@
# sql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -35,6 +35,7 @@ from .expression import (
exists,
extract,
false,
+ False_,
func,
insert,
intersect,
@@ -55,6 +56,7 @@ from .expression import (
table,
text,
true,
+ True_,
tuple_,
type_coerce,
union,
@@ -64,5 +66,24 @@ from .expression import (
from .visitors import ClauseVisitor
-__tmp = list(locals().keys())
-__all__ = sorted([i for i in __tmp if not i.startswith('__')])
+
+def __go(lcls):
+ global __all__
+ from .. import util as _sa_util
+
+ import inspect as _inspect
+
+ __all__ = sorted(name for name, obj in lcls.items()
+ if not (name.startswith('_') or _inspect.ismodule(obj)))
+
+ from .annotation import _prepare_annotations, Annotated
+ from .elements import AnnotatedColumnElement, ClauseList
+ from .selectable import AnnotatedFromClause
+ _prepare_annotations(ColumnElement, AnnotatedColumnElement)
+ _prepare_annotations(FromClause, AnnotatedFromClause)
+ _prepare_annotations(ClauseList, Annotated)
+
+ _sa_util.dependencies.resolve_all("sqlalchemy.sql")
+
+__go(locals())
+
diff --git a/lib/sqlalchemy/sql/annotation.py b/lib/sqlalchemy/sql/annotation.py
new file mode 100644
index 000000000..b5b7849d2
--- /dev/null
+++ b/lib/sqlalchemy/sql/annotation.py
@@ -0,0 +1,182 @@
+# sql/annotation.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""The :class:`.Annotated` class and related routines; creates hash-equivalent
+copies of SQL constructs which contain context-specific markers and associations.
+
+"""
+
+from .. import util
+from . import operators
+
+class Annotated(object):
+ """clones a ClauseElement and applies an 'annotations' dictionary.
+
+ Unlike regular clones, this clone also mimics __hash__() and
+ __cmp__() of the original element so that it takes its place
+ in hashed collections.
+
+ A reference to the original element is maintained, for the important
+    reason of keeping its hash value current. If the original element were
+    garbage collected, its hash value could be reused, causing conflicts.
+
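+    E.g., a sketch of the internal usage (``some_column`` is a
+    hypothetical ClauseElement)::
+
+        annotated = Annotated(some_column, {'some_key': 'some_value'})
+        assert hash(annotated) == hash(some_column)
+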
+ """
+
+ def __new__(cls, *args):
+ if not args:
+ # clone constructor
+ return object.__new__(cls)
+ else:
+ element, values = args
+ # pull appropriate subclass from registry of annotated
+ # classes
+ try:
+ cls = annotated_classes[element.__class__]
+ except KeyError:
+ cls = _new_annotation_type(element.__class__, cls)
+ return object.__new__(cls)
+
+ def __init__(self, element, values):
+ self.__dict__ = element.__dict__.copy()
+ self.__element = element
+ self._annotations = values
+
+ def _annotate(self, values):
+ _values = self._annotations.copy()
+ _values.update(values)
+ return self._with_annotations(_values)
+
+ def _with_annotations(self, values):
+ clone = self.__class__.__new__(self.__class__)
+ clone.__dict__ = self.__dict__.copy()
+ clone._annotations = values
+ return clone
+
+ def _deannotate(self, values=None, clone=True):
+ if values is None:
+ return self.__element
+ else:
+ _values = self._annotations.copy()
+ for v in values:
+ _values.pop(v, None)
+ return self._with_annotations(_values)
+
+ def _compiler_dispatch(self, visitor, **kw):
+ return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
+
+ @property
+ def _constructor(self):
+ return self.__element._constructor
+
+ def _clone(self):
+ clone = self.__element._clone()
+ if clone is self.__element:
+ # detect immutable, don't change anything
+ return self
+ else:
+ # update the clone with any changes that have occurred
+ # to this object's __dict__.
+ clone.__dict__.update(self.__dict__)
+ return self.__class__(clone, self._annotations)
+
+ def __hash__(self):
+ return hash(self.__element)
+
+ def __eq__(self, other):
+ if isinstance(self.__element, operators.ColumnOperators):
+ return self.__element.__class__.__eq__(self, other)
+ else:
+ return hash(other) == hash(self)
+
+
+
+# hard-generate Annotated subclasses. this technique
+# is used instead of on-the-fly types (i.e. type.__new__())
+# so that the resulting objects are pickleable.
+annotated_classes = {}
+
+
+
+def _deep_annotate(element, annotations, exclude=None):
+ """Deep copy the given ClauseElement, annotating each element
+ with the given annotations dictionary.
+
+ Elements within the exclude collection will be cloned but not annotated.
+
+ """
+ def clone(elem):
+ if exclude and \
+ hasattr(elem, 'proxy_set') and \
+ elem.proxy_set.intersection(exclude):
+ newelem = elem._clone()
+ elif annotations != elem._annotations:
+ newelem = elem._annotate(annotations)
+ else:
+ newelem = elem
+ newelem._copy_internals(clone=clone)
+ return newelem
+
+ if element is not None:
+ element = clone(element)
+ return element
+
+
+def _deep_deannotate(element, values=None):
+ """Deep copy the given element, removing annotations."""
+
+ cloned = util.column_dict()
+
+ def clone(elem):
+ # if a values dict is given,
+ # the elem must be cloned each time it appears,
+ # as there may be different annotations in source
+ # elements that are remaining. if totally
+ # removing all annotations, can assume the same
+ # slate...
+ if values or elem not in cloned:
+ newelem = elem._deannotate(values=values, clone=True)
+ newelem._copy_internals(clone=clone)
+ if not values:
+ cloned[elem] = newelem
+ return newelem
+ else:
+ return cloned[elem]
+
+ if element is not None:
+ element = clone(element)
+ return element
+
+
+def _shallow_annotate(element, annotations):
+ """Annotate the given ClauseElement and copy its internals so that
+ internal objects refer to the new annotated object.
+
+    Basically used to apply a "don't traverse" annotation to a
+ selectable, without digging throughout the whole
+ structure wasting time.
+ """
+ element = element._annotate(annotations)
+ element._copy_internals()
+ return element
+
+def _new_annotation_type(cls, base_cls):
+ if issubclass(cls, Annotated):
+ return cls
+ elif cls in annotated_classes:
+ return annotated_classes[cls]
+ annotated_classes[cls] = anno_cls = type(
+ "Annotated%s" % cls.__name__,
+ (base_cls, cls), {})
+ globals()["Annotated%s" % cls.__name__] = anno_cls
+ return anno_cls
+
+def _prepare_annotations(target_hierarchy, base_cls):
+ stack = [target_hierarchy]
+ while stack:
+ cls = stack.pop()
+ stack.extend(cls.__subclasses__())
+
+ _new_annotation_type(cls, base_cls)
diff --git a/lib/sqlalchemy/sql/base.py b/lib/sqlalchemy/sql/base.py
new file mode 100644
index 000000000..4a7dd65d3
--- /dev/null
+++ b/lib/sqlalchemy/sql/base.py
@@ -0,0 +1,460 @@
+# sql/base.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Foundational utilities common to many sql modules.
+
+"""
+
+
+from .. import util, exc
+import itertools
+from .visitors import ClauseVisitor
+import re
+import collections
+
+PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
+NO_ARG = util.symbol('NO_ARG')
+
+class Immutable(object):
+ """mark a ClauseElement as 'immutable' when expressions are cloned."""
+
+ def unique_params(self, *optionaldict, **kwargs):
+ raise NotImplementedError("Immutable objects do not support copying")
+
+ def params(self, *optionaldict, **kwargs):
+ raise NotImplementedError("Immutable objects do not support copying")
+
+ def _clone(self):
+ return self
+
+
+
+def _from_objects(*elements):
+ return itertools.chain(*[element._from_objects for element in elements])
+
+@util.decorator
+def _generative(fn, *args, **kw):
+ """Mark a method as generative."""
+
+ self = args[0]._generate()
+ fn(self, *args[1:], **kw)
+ return self
+
+
+class DialectKWArgs(object):
+ """Establish the ability for a class to have dialect-specific arguments
+ with defaults and validation.
+
+ """
+
+ @util.memoized_property
+ def dialect_kwargs(self):
+ """A collection of keyword arguments specified as dialect-specific
+ options to this construct.
+
+ The arguments are present here in their original ``<dialect>_<kwarg>``
+        format. Only arguments that were actually passed are included,
+        unlike the :attr:`.DialectKWArgs.dialect_options` collection, which
+        contains all options known by this dialect, including defaults.
+
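+        For example (illustrative; assumes a hypothetical ``Index``
+        created with a dialect-specific keyword)::
+
+            idx = Index('my_idx', tbl.c.data, mysql_length=10)
+            idx.dialect_kwargs['mysql_length']   # 10
+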
+ .. versionadded:: 0.9.2
+
+ .. seealso::
+
+ :attr:`.DialectKWArgs.dialect_options` - nested dictionary form
+
+ """
+
+ return util.immutabledict()
+
+ @property
+ def kwargs(self):
+        """Deprecated; see :attr:`.DialectKWArgs.dialect_kwargs`."""
+ return self.dialect_kwargs
+
+ @util.dependencies("sqlalchemy.dialects")
+ def _kw_reg_for_dialect(dialects, dialect_name):
+ dialect_cls = dialects.registry.load(dialect_name)
+ if dialect_cls.construct_arguments is None:
+ return None
+ return dict(dialect_cls.construct_arguments)
+ _kw_registry = util.PopulateDict(_kw_reg_for_dialect)
+
+ def _kw_reg_for_dialect_cls(self, dialect_name):
+ construct_arg_dictionary = DialectKWArgs._kw_registry[dialect_name]
+ if construct_arg_dictionary is None:
+ return {"*": None}
+ else:
+ d = {}
+ for cls in reversed(self.__class__.__mro__):
+ if cls in construct_arg_dictionary:
+ d.update(construct_arg_dictionary[cls])
+ return d
+
+ @util.memoized_property
+ def dialect_options(self):
+ """A collection of keyword arguments specified as dialect-specific
+ options to this construct.
+
+ This is a two-level nested registry, keyed to ``<dialect_name>``
+ and ``<argument_name>``. For example, the ``postgresql_where`` argument
+ would be locatable as::
+
+ arg = my_object.dialect_options['postgresql']['where']
+
+ .. versionadded:: 0.9.2
+
+ .. seealso::
+
+ :attr:`.DialectKWArgs.dialect_kwargs` - flat dictionary form
+
+ """
+
+ return util.PopulateDict(
+ util.portable_instancemethod(self._kw_reg_for_dialect_cls)
+ )
+
+ def _validate_dialect_kwargs(self, kwargs):
+ # validate remaining kwargs that they all specify DB prefixes
+
+ if not kwargs:
+ return
+
+ self.dialect_kwargs = self.dialect_kwargs.union(kwargs)
+
+ for k in kwargs:
+ m = re.match('^(.+?)_(.+)$', k)
+ if m is None:
+ raise TypeError("Additional arguments should be "
+ "named <dialectname>_<argument>, got '%s'" % k)
+ dialect_name, arg_name = m.group(1, 2)
+
+ try:
+ construct_arg_dictionary = self.dialect_options[dialect_name]
+ except exc.NoSuchModuleError:
+ util.warn(
+ "Can't validate argument %r; can't "
+ "locate any SQLAlchemy dialect named %r" %
+ (k, dialect_name))
+ self.dialect_options[dialect_name] = {
+ "*": None,
+ arg_name: kwargs[k]}
+ else:
+ if "*" not in construct_arg_dictionary and \
+ arg_name not in construct_arg_dictionary:
+ raise exc.ArgumentError(
+ "Argument %r is not accepted by "
+ "dialect %r on behalf of %r" % (
+ k,
+ dialect_name, self.__class__
+ ))
+ else:
+ construct_arg_dictionary[arg_name] = kwargs[k]
+
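+
+# Illustrative sketch (hypothetical constructs): a dialect kwarg must carry
+# a ``<dialectname>_`` prefix and is checked against that dialect's declared
+# ``construct_arguments``::
+#
+#     Index('ix', tbl.c.q, nonsense=True)                  # TypeError
+#     Index('ix', tbl.c.q, postgresql_nonsense=1)          # ArgumentError
+#     Index('ix', tbl.c.q, postgresql_where=tbl.c.q > 5)   # accepted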
+
+class Generative(object):
+ """Allow a ClauseElement to generate itself via the
+ @_generative decorator.
+
+ """
+
+ def _generate(self):
+ s = self.__class__.__new__(self.__class__)
+ s.__dict__ = self.__dict__.copy()
+ return s
+
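+# Illustrative sketch: a @_generative method mutates the shallow copy
+# produced by Generative._generate(), and the decorator returns that copy,
+# leaving the original untouched, e.g.::
+#
+#     class Options(Generative):
+#         _opts = util.immutabledict()
+#
+#         @_generative
+#         def option(self, **kw):
+#             self._opts = self._opts.union(kw)
+#
+#     o2 = Options().option(x=1)    # new instance; original unchanged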
+
+class Executable(Generative):
+ """Mark a ClauseElement as supporting execution.
+
+ :class:`.Executable` is a superclass for all "statement" types
+ of objects, including :func:`select`, :func:`delete`, :func:`update`,
+ :func:`insert`, :func:`text`.
+
+ """
+
+ supports_execution = True
+ _execution_options = util.immutabledict()
+ _bind = None
+
+ @_generative
+ def execution_options(self, **kw):
+        """Set non-SQL options for the statement which take effect during
+ execution.
+
+ Execution options can be set on a per-statement or
+ per :class:`.Connection` basis. Additionally, the
+ :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide
+ access to execution options which they in turn configure upon
+ connections.
+
+ The :meth:`execution_options` method is generative. A new
+ instance of this statement is returned that contains the options::
+
+ statement = select([table.c.x, table.c.y])
+ statement = statement.execution_options(autocommit=True)
+
+ Note that only a subset of possible execution options can be applied
+ to a statement - these include "autocommit" and "stream_results",
+ but not "isolation_level" or "compiled_cache".
+ See :meth:`.Connection.execution_options` for a full list of
+ possible options.
+
+ .. seealso::
+
+ :meth:`.Connection.execution_options()`
+
+ :meth:`.Query.execution_options()`
+
+ """
+ if 'isolation_level' in kw:
+ raise exc.ArgumentError(
+ "'isolation_level' execution option may only be specified "
+ "on Connection.execution_options(), or "
+ "per-engine using the isolation_level "
+ "argument to create_engine()."
+ )
+ if 'compiled_cache' in kw:
+ raise exc.ArgumentError(
+ "'compiled_cache' execution option may only be specified "
+ "on Connection.execution_options(), not per statement."
+ )
+ self._execution_options = self._execution_options.union(kw)
+
+ def execute(self, *multiparams, **params):
+ """Compile and execute this :class:`.Executable`."""
+ e = self.bind
+ if e is None:
+ label = getattr(self, 'description', self.__class__.__name__)
+            msg = ('This %s is not directly bound to a Connection or '
+                   'Engine. Use the .execute() method of a Connection '
+                   'or Engine to execute this construct.' % label)
+ raise exc.UnboundExecutionError(msg)
+ return e._execute_clauseelement(self, multiparams, params)
+
+ def scalar(self, *multiparams, **params):
+ """Compile and execute this :class:`.Executable`, returning the
+ result's scalar representation.
+
+ """
+ return self.execute(*multiparams, **params).scalar()
+
+ @property
+ def bind(self):
+ """Returns the :class:`.Engine` or :class:`.Connection` to
+ which this :class:`.Executable` is bound, or None if none found.
+
+ This is a traversal which checks locally, then
+ checks among the "from" clauses of associated objects
+ until a bound engine or connection is found.
+
+ """
+ if self._bind is not None:
+ return self._bind
+
+ for f in _from_objects(self):
+ if f is self:
+ continue
+ engine = f.bind
+ if engine is not None:
+ return engine
+ else:
+ return None
+
+
+class SchemaEventTarget(object):
+ """Base class for elements that are the targets of :class:`.DDLEvents`
+ events.
+
+ This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
+
+ """
+
+ def _set_parent(self, parent):
+ """Associate with this SchemaEvent's parent object."""
+
+ raise NotImplementedError()
+
+ def _set_parent_with_dispatch(self, parent):
+ self.dispatch.before_parent_attach(self, parent)
+ self._set_parent(parent)
+ self.dispatch.after_parent_attach(self, parent)
+
+class SchemaVisitor(ClauseVisitor):
+ """Define the visiting for ``SchemaItem`` objects."""
+
+ __traverse_options__ = {'schema_visitor': True}
+
+class ColumnCollection(util.OrderedProperties):
+ """An ordered dictionary that stores a list of ColumnElement
+ instances.
+
+ Overrides the ``__eq__()`` method to produce SQL clauses between
+ sets of correlated columns.
+
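+    For example (illustrative)::
+
+        cc = ColumnCollection(tbl.c.x, tbl.c.y)
+        'x' in cc                      # string-keyed membership
+        cc.contains_column(tbl.c.x)    # identity-based membership
+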
+ """
+
+ def __init__(self, *cols):
+ super(ColumnCollection, self).__init__()
+ self._data.update((c.key, c) for c in cols)
+ self.__dict__['_all_cols'] = util.column_set(self)
+
+ def __str__(self):
+ return repr([str(c) for c in self])
+
+ def replace(self, column):
+        """Add the given column to this collection, removing unaliased
+        versions of this column as well as existing columns with the
+        same key.
+
+ e.g.::
+
+ t = Table('sometable', metadata, Column('col1', Integer))
+ t.columns.replace(Column('col1', Integer, key='columnone'))
+
+        will remove the original 'col1' from the collection, and add
+        the new column under the name 'columnone'.
+
+ Used by schema.Column to override columns during table reflection.
+
+ """
+ if column.name in self and column.key != column.name:
+ other = self[column.name]
+ if other.name == other.key:
+ del self._data[other.name]
+ self._all_cols.remove(other)
+ if column.key in self._data:
+ self._all_cols.remove(self._data[column.key])
+ self._all_cols.add(column)
+ self._data[column.key] = column
+
+ def add(self, column):
+ """Add a column to this collection.
+
+ The key attribute of the column will be used as the hash key
+ for this dictionary.
+
+ """
+ self[column.key] = column
+
+ def __delitem__(self, key):
+ raise NotImplementedError()
+
+ def __setattr__(self, key, object):
+ raise NotImplementedError()
+
+ def __setitem__(self, key, value):
+ if key in self:
+
+ # this warning is primarily to catch select() statements
+ # which have conflicting column names in their exported
+ # columns collection
+
+ existing = self[key]
+ if not existing.shares_lineage(value):
+ util.warn('Column %r on table %r being replaced by '
+ '%r, which has the same key. Consider '
+ 'use_labels for select() statements.' % (key,
+ getattr(existing, 'table', None), value))
+ self._all_cols.remove(existing)
+ # pop out memoized proxy_set as this
+ # operation may very well be occurring
+ # in a _make_proxy operation
+ util.memoized_property.reset(value, "proxy_set")
+ self._all_cols.add(value)
+ self._data[key] = value
+
+ def clear(self):
+ self._data.clear()
+ self._all_cols.clear()
+
+ def remove(self, column):
+ del self._data[column.key]
+ self._all_cols.remove(column)
+
+ def update(self, value):
+ self._data.update(value)
+ self._all_cols.clear()
+ self._all_cols.update(self._data.values())
+
+ def extend(self, iter):
+ self.update((c.key, c) for c in iter)
+
+ __hash__ = None
+
+ @util.dependencies("sqlalchemy.sql.elements")
+ def __eq__(self, elements, other):
+ l = []
+ for c in other:
+ for local in self:
+ if c.shares_lineage(local):
+ l.append(c == local)
+ return elements.and_(*l)
+
+ def __contains__(self, other):
+ if not isinstance(other, util.string_types):
+ raise exc.ArgumentError("__contains__ requires a string argument")
+ return util.OrderedProperties.__contains__(self, other)
+
+ def __setstate__(self, state):
+ self.__dict__['_data'] = state['_data']
+ self.__dict__['_all_cols'] = util.column_set(self._data.values())
+
+ def contains_column(self, col):
+ # this has to be done via set() membership
+ return col in self._all_cols
+
+ def as_immutable(self):
+ return ImmutableColumnCollection(self._data, self._all_cols)
+
+
+class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
+ def __init__(self, data, colset):
+ util.ImmutableProperties.__init__(self, data)
+ self.__dict__['_all_cols'] = colset
+
+ extend = remove = util.ImmutableProperties._immutable
+
+
+class ColumnSet(util.ordered_column_set):
+ def contains_column(self, col):
+ return col in self
+
+ def extend(self, cols):
+ for col in cols:
+ self.add(col)
+
+ def __add__(self, other):
+ return list(self) + list(other)
+
+ @util.dependencies("sqlalchemy.sql.elements")
+ def __eq__(self, elements, other):
+ l = []
+ for c in other:
+ for local in self:
+ if c.shares_lineage(local):
+ l.append(c == local)
+ return elements.and_(*l)
+
+ def __hash__(self):
+ return hash(tuple(x for x in self))
+
+def _bind_or_error(schemaitem, msg=None):
+ bind = schemaitem.bind
+ if not bind:
+ name = schemaitem.__class__.__name__
+ label = getattr(schemaitem, 'fullname',
+ getattr(schemaitem, 'name', None))
+ if label:
+ item = '%s object %r' % (name, label)
+ else:
+ item = '%s object' % name
+ if msg is None:
+ msg = "%s is not bound to an Engine or Connection. "\
+ "Execution can not proceed without a database to execute "\
+ "against." % item
+ raise exc.UnboundExecutionError(msg)
+ return bind
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index a5f545de9..4448f7c7b 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -1,5 +1,5 @@
# sql/compiler.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -23,13 +23,12 @@ To generate user-defined SQL strings, see
"""
import re
-import sys
-from .. import schema, engine, util, exc, types
-from . import (
- operators, functions, util as sql_util, visitors, expression as sql
-)
+from . import schema, sqltypes, operators, functions, \
+ util as sql_util, visitors, elements, selectable, base
+from .. import util, exc
import decimal
import itertools
+import operator
RESERVED_WORDS = set([
'all', 'analyse', 'analyze', 'and', 'any', 'array',
@@ -115,6 +114,7 @@ OPERATORS = {
operators.asc_op: ' ASC',
operators.nullsfirst_op: ' NULLS FIRST',
operators.nullslast_op: ' NULLS LAST',
+
}
FUNCTIONS = {
@@ -150,14 +150,122 @@ EXTRACT_MAP = {
}
COMPOUND_KEYWORDS = {
- sql.CompoundSelect.UNION: 'UNION',
- sql.CompoundSelect.UNION_ALL: 'UNION ALL',
- sql.CompoundSelect.EXCEPT: 'EXCEPT',
- sql.CompoundSelect.EXCEPT_ALL: 'EXCEPT ALL',
- sql.CompoundSelect.INTERSECT: 'INTERSECT',
- sql.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL'
+ selectable.CompoundSelect.UNION: 'UNION',
+ selectable.CompoundSelect.UNION_ALL: 'UNION ALL',
+ selectable.CompoundSelect.EXCEPT: 'EXCEPT',
+ selectable.CompoundSelect.EXCEPT_ALL: 'EXCEPT ALL',
+ selectable.CompoundSelect.INTERSECT: 'INTERSECT',
+ selectable.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL'
}
+class Compiled(object):
+ """Represent a compiled SQL or DDL expression.
+
+ The ``__str__`` method of the ``Compiled`` object should produce
+ the actual text of the statement. ``Compiled`` objects are
+ specific to their underlying database dialect, and also may
+ or may not be specific to the columns referenced within a
+ particular set of bind parameters. In no case should the
+ ``Compiled`` object be dependent on the actual values of those
+ bind parameters, even though it may reference those values as
+ defaults.
+ """
+
+ def __init__(self, dialect, statement, bind=None,
+ compile_kwargs=util.immutabledict()):
+ """Construct a new ``Compiled`` object.
+
+ :param dialect: ``Dialect`` to compile against.
+
+ :param statement: ``ClauseElement`` to be compiled.
+
+ :param bind: Optional Engine or Connection to compile this
+ statement against.
+
+ :param compile_kwargs: additional kwargs that will be
+ passed to the initial call to :meth:`.Compiled.process`.
+
+ .. versionadded:: 0.8
+
+ """
+
+ self.dialect = dialect
+ self.bind = bind
+ if statement is not None:
+ self.statement = statement
+ self.can_execute = statement.supports_execution
+ self.string = self.process(self.statement, **compile_kwargs)
+
+ @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
+ "within the constructor.")
+ def compile(self):
+ """Produce the internal string representation of this element.
+ """
+ pass
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_compiled(self, multiparams, params)
+
+ @property
+ def sql_compiler(self):
+ """Return a Compiled that is capable of processing SQL expressions.
+
+        If this compiler is itself a SQL expression compiler, it will
+        likely just return 'self'.
+
+ """
+
+ raise NotImplementedError()
+
+ def process(self, obj, **kwargs):
+ return obj._compiler_dispatch(self, **kwargs)
+
+ def __str__(self):
+ """Return the string text of the generated SQL or DDL."""
+
+ return self.string or ''
+
+ def construct_params(self, params=None):
+ """Return the bind params for this compiled object.
+
+ :param params: a dict of string/object pairs whose values will
+                       override bind values compiled into the
+ statement.
+ """
+
+ raise NotImplementedError()
+
+ @property
+ def params(self):
+ """Return the bind params for this compiled object."""
+ return self.construct_params()
+
+ def execute(self, *multiparams, **params):
+ """Execute this compiled object."""
+
+ e = self.bind
+ if e is None:
+ raise exc.UnboundExecutionError(
+ "This Compiled object is not bound to any Engine "
+ "or Connection.")
+ return e._execute_compiled(self, multiparams, params)
+
+ def scalar(self, *multiparams, **params):
+ """Execute this compiled object and return the result's
+ scalar value."""
+
+ return self.execute(*multiparams, **params).scalar()
+
+
+class TypeCompiler(object):
+ """Produces DDL specification for TypeEngine objects."""
+
+ def __init__(self, dialect):
+ self.dialect = dialect
+
+ def process(self, type_):
+ return type_._compiler_dispatch(self)
+
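+# Illustrative sketch: concrete type compilers implement visit_<TYPE>
+# methods; e.g. GenericTypeCompiler(dialect).process(sqltypes.INTEGER())
+# returns "INTEGER" via visit_INTEGER().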
+
class _CompileLabel(visitors.Visitable):
"""lightweight label object which acts as an expression.Label."""
@@ -178,12 +286,8 @@ class _CompileLabel(visitors.Visitable):
def type(self):
return self.element.type
- @property
- def quote(self):
- return self.element.quote
-
-class SQLCompiler(engine.Compiled):
+class SQLCompiler(Compiled):
"""Default implementation of Compiled.
Compiles ClauseElements into SQL strings. Uses a similar visit
@@ -284,7 +388,7 @@ class SQLCompiler(engine.Compiled):
# a map which tracks "truncated" names based on
# dialect.label_length or dialect.max_identifier_length
self.truncated_names = {}
- engine.Compiled.__init__(self, dialect, statement, **kwargs)
+ Compiled.__init__(self, dialect, statement, **kwargs)
if self.positional and dialect.paramstyle == 'numeric':
self._apply_numbered_params()
@@ -397,7 +501,7 @@ class SQLCompiler(engine.Compiled):
render_label_only = render_label_as_label is label
if render_label_only or render_label_with_as:
- if isinstance(label.name, sql._truncated_label):
+ if isinstance(label.name, elements._truncated_label):
labelname = self._truncated_identifier("colident", label.name)
else:
labelname = label.name
@@ -432,7 +536,7 @@ class SQLCompiler(engine.Compiled):
"its 'name' is assigned.")
is_literal = column.is_literal
- if not is_literal and isinstance(name, sql._truncated_label):
+ if not is_literal and isinstance(name, elements._truncated_label):
name = self._truncated_identifier("colident", name)
if add_to_result_map is not None:
@@ -446,24 +550,22 @@ class SQLCompiler(engine.Compiled):
if is_literal:
name = self.escape_literal_column(name)
else:
- name = self.preparer.quote(name, column.quote)
+ name = self.preparer.quote(name)
table = column.table
if table is None or not include_table or not table.named_with_column:
return name
else:
if table.schema:
- schema_prefix = self.preparer.quote_schema(
- table.schema,
- table.quote_schema) + '.'
+ schema_prefix = self.preparer.quote_schema(table.schema) + '.'
else:
schema_prefix = ''
tablename = table.name
- if isinstance(tablename, sql._truncated_label):
+ if isinstance(tablename, elements._truncated_label):
tablename = self._truncated_identifier("alias", tablename)
return schema_prefix + \
- self.preparer.quote(tablename, table.quote) + \
+ self.preparer.quote(tablename) + \
"." + name
def escape_literal_column(self, text):
@@ -484,20 +586,13 @@ class SQLCompiler(engine.Compiled):
def post_process_text(self, text):
return text
- def visit_textclause(self, textclause, **kwargs):
- if textclause.typemap is not None:
- for colname, type_ in textclause.typemap.items():
- self.result_map[colname
- if self.dialect.case_sensitive
- else colname.lower()] = \
- (colname, None, type_)
-
+ def visit_textclause(self, textclause, **kw):
def do_bindparam(m):
name = m.group(1)
- if name in textclause.bindparams:
- return self.process(textclause.bindparams[name])
+ if name in textclause._bindparams:
+ return self.process(textclause._bindparams[name], **kw)
else:
- return self.bindparam_string(name, **kwargs)
+ return self.bindparam_string(name, **kw)
# un-escape any \:params
return BIND_PARAMS_ESC.sub(lambda m: m.group(1),
@@ -505,14 +600,47 @@ class SQLCompiler(engine.Compiled):
self.post_process_text(textclause.text))
)
+ def visit_text_as_from(self, taf, iswrapper=False,
+ compound_index=0, force_result_map=False,
+ asfrom=False,
+ parens=True, **kw):
+
+ toplevel = not self.stack
+ entry = self._default_stack_entry if toplevel else self.stack[-1]
+
+ populate_result_map = force_result_map or (
+ compound_index == 0 and (
+ toplevel or \
+ entry['iswrapper']
+ )
+ )
+
+ if populate_result_map:
+ for c in taf.c:
+ self._add_to_result_map(
+ c.key, c.key, (c,), c.type
+ )
+
+ text = self.process(taf.element, **kw)
+ if asfrom and parens:
+ text = "(%s)" % text
+ return text
+
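+    # Illustrative note: this supports constructs along the lines of
+    # text("select a, b from t").columns(t.c.a, t.c.b), whose column
+    # collection drives the result map when used as a SELECT.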
+
def visit_null(self, expr, **kw):
return 'NULL'
def visit_true(self, expr, **kw):
- return 'true'
+ if self.dialect.supports_native_boolean:
+ return 'true'
+ else:
+ return "1"
def visit_false(self, expr, **kw):
- return 'false'
+ if self.dialect.supports_native_boolean:
+ return 'false'
+ else:
+ return "0"
def visit_clauselist(self, clauselist, order_by_select=None, **kw):
if order_by_select is not None:
@@ -619,6 +747,7 @@ class SQLCompiler(engine.Compiled):
def function_argspec(self, func, **kwargs):
return func.clause_expr._compiler_dispatch(self, **kwargs)
+
def visit_compound_select(self, cs, asfrom=False,
parens=True, compound_index=0, **kwargs):
toplevel = not self.stack
@@ -684,11 +813,23 @@ class SQLCompiler(engine.Compiled):
raise exc.CompileError(
"Unary expression has no operator or modifier")
+ def visit_istrue_unary_operator(self, element, operator, **kw):
+ if self.dialect.supports_native_boolean:
+ return self.process(element.element, **kw)
+ else:
+ return "%s = 1" % self.process(element.element, **kw)
+
+ def visit_isfalse_unary_operator(self, element, operator, **kw):
+ if self.dialect.supports_native_boolean:
+ return "NOT %s" % self.process(element.element, **kw)
+ else:
+ return "%s = 0" % self.process(element.element, **kw)
+
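+    # Illustrative note: with native booleans, "expr IS true" reduces to
+    # the bare expression and "IS false" to "NOT expr"; otherwise they
+    # render as "expr = 1" / "expr = 0".
+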
def visit_binary(self, binary, **kw):
# don't allow "? = ?" to render
if self.ansi_bind_rules and \
- isinstance(binary.left, sql.BindParameter) and \
- isinstance(binary.right, sql.BindParameter):
+ isinstance(binary.left, elements.BindParameter) and \
+ isinstance(binary.right, elements.BindParameter):
kw['literal_binds'] = True
operator = binary.operator
@@ -728,7 +869,7 @@ class SQLCompiler(engine.Compiled):
@util.memoized_property
def _like_percent_literal(self):
- return sql.literal_column("'%'", type_=types.String())
+ return elements.literal_column("'%'", type_=sqltypes.STRINGTYPE)
def visit_contains_op_binary(self, binary, operator, **kw):
binary = binary._clone()
@@ -772,39 +913,49 @@ class SQLCompiler(engine.Compiled):
def visit_like_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
+
+ # TODO: use ternary here, not "and"/ "or"
return '%s LIKE %s' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notlike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT LIKE %s' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) LIKE lower(%s)' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return 'lower(%s) NOT LIKE lower(%s)' % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_bindparam(self, bindparam, within_columns_clause=False,
literal_binds=False,
@@ -820,8 +971,9 @@ class SQLCompiler(engine.Compiled):
(within_columns_clause and \
self.ansi_bind_rules):
if bindparam.value is None:
- raise exc.CompileError("Bind parameter without a "
- "renderable value not allowed here.")
+ raise exc.CompileError("Bind parameter '%s' without a "
+ "renderable value not allowed here."
+ % bindparam.key)
return self.render_literal_bindparam(bindparam,
within_columns_clause=True, **kwargs)
@@ -851,13 +1003,10 @@ class SQLCompiler(engine.Compiled):
self.binds[bindparam.key] = self.binds[name] = bindparam
- return self.bindparam_string(name, quote=bindparam.quote, **kwargs)
+ return self.bindparam_string(name, **kwargs)
def render_literal_bindparam(self, bindparam, **kw):
value = bindparam.value
- processor = bindparam.type._cached_bind_processor(self.dialect)
- if processor:
- value = processor(value)
return self.render_literal_value(value, bindparam.type)
def render_literal_value(self, value, type_):
@@ -870,15 +1019,10 @@ class SQLCompiler(engine.Compiled):
of the DBAPI.
"""
- if isinstance(value, util.string_types):
- value = value.replace("'", "''")
- return "'%s'" % value
- elif value is None:
- return "NULL"
- elif isinstance(value, (float, ) + util.int_types):
- return repr(value)
- elif isinstance(value, decimal.Decimal):
- return str(value)
+
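+        # literal quoting is now delegated to each type's "literal
+        # processor" (e.g. String doubles single quotes), replacing
+        # the ad-hoc isinstance checks removed above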
+ processor = type_._cached_literal_processor(self.dialect)
+ if processor:
+ return processor(value)
else:
raise NotImplementedError(
"Don't know how to literal-quote value %r" % value)
@@ -888,7 +1032,7 @@ class SQLCompiler(engine.Compiled):
return self.bind_names[bindparam]
bind_name = bindparam.key
- if isinstance(bind_name, sql._truncated_label):
+ if isinstance(bind_name, elements._truncated_label):
bind_name = self._truncated_identifier("bindparam", bind_name)
# add to bind_names for translation
@@ -921,8 +1065,7 @@ class SQLCompiler(engine.Compiled):
self.anon_map[derived] = anonymous_counter + 1
return derived + "_" + str(anonymous_counter)
- def bindparam_string(self, name, quote=None,
- positional_names=None, **kw):
+ def bindparam_string(self, name, positional_names=None, **kw):
if self.positional:
if positional_names is not None:
positional_names.append(name)
@@ -937,7 +1080,7 @@ class SQLCompiler(engine.Compiled):
if self.positional:
kwargs['positional_names'] = self.cte_positional
- if isinstance(cte.name, sql._truncated_label):
+ if isinstance(cte.name, elements._truncated_label):
cte_name = self._truncated_identifier("alias", cte.name)
else:
cte_name = cte.name
@@ -947,7 +1090,7 @@ class SQLCompiler(engine.Compiled):
# we've generated a same-named CTE that we are enclosed in,
# or this is the same CTE. just return the name.
if cte in existing_cte._restates or cte is existing_cte:
- return cte_name
+ return self.preparer.format_alias(cte, cte_name)
elif existing_cte in cte._restates:
# we've generated a same-named CTE that is
# enclosed in us - we take precedence, so
@@ -961,19 +1104,24 @@ class SQLCompiler(engine.Compiled):
self.ctes_by_name[cte_name] = cte
- if cte.cte_alias:
- if isinstance(cte.cte_alias, sql._truncated_label):
- cte_alias = self._truncated_identifier("alias", cte.cte_alias)
- else:
- cte_alias = cte.cte_alias
- if not cte.cte_alias and cte not in self.ctes:
+ if cte._cte_alias is not None:
+ orig_cte = cte._cte_alias
+ if orig_cte not in self.ctes:
+ self.visit_cte(orig_cte)
+ cte_alias_name = cte._cte_alias.name
+ if isinstance(cte_alias_name, elements._truncated_label):
+ cte_alias_name = self._truncated_identifier("alias", cte_alias_name)
+ else:
+ orig_cte = cte
+ cte_alias_name = None
+ if not cte_alias_name and cte not in self.ctes:
if cte.recursive:
self.ctes_recursive = True
text = self.preparer.format_alias(cte, cte_name)
if cte.recursive:
- if isinstance(cte.original, sql.Select):
+ if isinstance(cte.original, selectable.Select):
col_source = cte.original
- elif isinstance(cte.original, sql.CompoundSelect):
+ elif isinstance(cte.original, selectable.CompoundSelect):
col_source = cte.original.selects[0]
else:
assert False
@@ -989,9 +1137,10 @@ class SQLCompiler(engine.Compiled):
self, asfrom=True, **kwargs
)
self.ctes[cte] = text
+
if asfrom:
- if cte.cte_alias:
- text = self.preparer.format_alias(cte, cte_alias)
+ if cte_alias_name:
+ text = self.preparer.format_alias(cte, cte_alias_name)
text += " AS " + cte_name
else:
return self.preparer.format_alias(cte, cte_name)
@@ -1001,7 +1150,7 @@ class SQLCompiler(engine.Compiled):
iscrud=False,
fromhints=None, **kwargs):
if asfrom or ashint:
- if isinstance(alias.name, sql._truncated_label):
+ if isinstance(alias.name, elements._truncated_label):
alias_name = self._truncated_identifier("alias", alias.name)
else:
alias_name = alias.name
@@ -1059,7 +1208,7 @@ class SQLCompiler(engine.Compiled):
if not within_columns_clause:
result_expr = col_expr
- elif isinstance(column, sql.Label):
+ elif isinstance(column, elements.Label):
if col_expr is not column:
result_expr = _CompileLabel(
col_expr,
@@ -1078,23 +1227,23 @@ class SQLCompiler(engine.Compiled):
elif \
asfrom and \
- isinstance(column, sql.ColumnClause) and \
+ isinstance(column, elements.ColumnClause) and \
not column.is_literal and \
column.table is not None and \
- not isinstance(column.table, sql.Select):
+ not isinstance(column.table, selectable.Select):
result_expr = _CompileLabel(col_expr,
- sql._as_truncated(column.name),
+ elements._as_truncated(column.name),
alt_names=(column.key,))
elif not isinstance(column,
- (sql.UnaryExpression, sql.TextClause)) \
+ (elements.UnaryExpression, elements.TextClause)) \
and (not hasattr(column, 'name') or \
- isinstance(column, sql.Function)):
+ isinstance(column, functions.Function)):
result_expr = _CompileLabel(col_expr, column.anon_label)
elif col_expr is not column:
# TODO: are we sure "column" has a .name and .key here ?
- # assert isinstance(column, sql.ColumnClause)
+ # assert isinstance(column, elements.ColumnClause)
result_expr = _CompileLabel(col_expr,
- sql._as_truncated(column.name),
+ elements._as_truncated(column.name),
alt_names=(column.key,))
else:
result_expr = col_expr
@@ -1137,8 +1286,8 @@ class SQLCompiler(engine.Compiled):
# as this whole system won't work for custom Join/Select
# subclasses where compilation routines
# call down to compiler.visit_join(), compiler.visit_select()
- join_name = sql.Join.__visit_name__
- select_name = sql.Select.__visit_name__
+ join_name = selectable.Join.__visit_name__
+ select_name = selectable.Select.__visit_name__
def visit(element, **kw):
if element in column_translate[-1]:
@@ -1150,24 +1299,27 @@ class SQLCompiler(engine.Compiled):
newelem = cloned[element] = element._clone()
if newelem.__visit_name__ is join_name and \
- isinstance(newelem.right, sql.FromGrouping):
+ isinstance(newelem.right, selectable.FromGrouping):
newelem._reset_exported()
newelem.left = visit(newelem.left, **kw)
right = visit(newelem.right, **kw)
- selectable = sql.select(
+ selectable_ = selectable.Select(
[right.element],
use_labels=True).alias()
- for c in selectable.c:
- c._label = c._key_label = c.name
+ for c in selectable_.c:
+ c._key_label = c.key
+ c._label = c.name
+
translate_dict = dict(
- zip(right.element.c, selectable.c)
- )
- translate_dict[right.element.left] = selectable
- translate_dict[right.element.right] = selectable
+ zip(newelem.right.element.c, selectable_.c)
+ )
+
+ translate_dict[right.element.left] = selectable_
+ translate_dict[right.element.right] = selectable_
# propagate translations that we've gained
# from nested visit(newelem.right) outwards
@@ -1183,7 +1335,8 @@ class SQLCompiler(engine.Compiled):
column_translate[-1].update(translate_dict)
- newelem.right = selectable
+ newelem.right = selectable_
+
newelem.onclause = visit(newelem.onclause, **kw)
elif newelem.__visit_name__ is select_name:
column_translate.append({})
@@ -1199,6 +1352,7 @@ class SQLCompiler(engine.Compiled):
def _transform_result_map_for_nested_joins(self, select, transformed_select):
inner_col = dict((c._key_label, c) for
c in transformed_select.inner_columns)
+
d = dict(
(inner_col[c._key_label], c)
for c in select.inner_columns
@@ -1291,7 +1445,7 @@ class SQLCompiler(engine.Compiled):
explicit_correlate_froms=correlate_froms,
implicit_correlate_froms=asfrom_froms)
- new_correlate_froms = set(sql._from_objects(*froms))
+ new_correlate_froms = set(selectable._from_objects(*froms))
all_correlate_froms = new_correlate_froms.union(correlate_froms)
new_entry = {
@@ -1382,9 +1536,11 @@ class SQLCompiler(engine.Compiled):
text += self.order_by_clause(select,
order_by_select=order_by_select, **kwargs)
+
if select._limit is not None or select._offset is not None:
text += self.limit_clause(select)
- if select.for_update:
+
+ if select._for_update_arg is not None:
text += self.for_update_clause(select)
if self.ctes and \
@@ -1440,10 +1596,7 @@ class SQLCompiler(engine.Compiled):
return ""
def for_update_clause(self, select):
- if select.for_update:
- return " FOR UPDATE"
- else:
- return ""
+ return " FOR UPDATE"
def returning_clause(self, stmt, returning_cols):
raise exc.CompileError(
@@ -1453,23 +1606,21 @@ class SQLCompiler(engine.Compiled):
def limit_clause(self, select):
text = ""
if select._limit is not None:
- text += "\n LIMIT " + self.process(sql.literal(select._limit))
+ text += "\n LIMIT " + self.process(elements.literal(select._limit))
if select._offset is not None:
if select._limit is None:
text += "\n LIMIT -1"
- text += " OFFSET " + self.process(sql.literal(select._offset))
+ text += " OFFSET " + self.process(elements.literal(select._offset))
return text
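+
+    # Illustrative note: an OFFSET without a LIMIT renders "LIMIT -1",
+    # a convention understood by backends such as SQLite.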
def visit_table(self, table, asfrom=False, iscrud=False, ashint=False,
fromhints=None, **kwargs):
if asfrom or ashint:
if getattr(table, "schema", None):
- ret = self.preparer.quote_schema(table.schema,
- table.quote_schema) + \
- "." + self.preparer.quote(table.name,
- table.quote)
+ ret = self.preparer.quote_schema(table.schema) + \
+ "." + self.preparer.quote(table.name)
else:
- ret = self.preparer.quote(table.name, table.quote)
+ ret = self.preparer.quote(table.name)
if fromhints and table in fromhints:
ret = self.format_from_hint_text(ret, table,
fromhints[table], iscrud)
@@ -1488,7 +1639,7 @@ class SQLCompiler(engine.Compiled):
def visit_insert(self, insert_stmt, **kw):
self.isinsert = True
- colparams = self._get_colparams(insert_stmt)
+ colparams = self._get_colparams(insert_stmt, **kw)
if not colparams and \
not self.dialect.supports_default_values and \
@@ -1621,7 +1772,7 @@ class SQLCompiler(engine.Compiled):
table_text = self.update_tables_clause(update_stmt, update_stmt.table,
extra_froms, **kw)
- colparams = self._get_colparams(update_stmt, extra_froms)
+ colparams = self._get_colparams(update_stmt, **kw)
if update_stmt._hints:
dialect_hints = dict([
@@ -1651,11 +1802,12 @@ class SQLCompiler(engine.Compiled):
'=' + c[1] for c in colparams
)
- if update_stmt._returning:
- self.returning = update_stmt._returning
+ if self.returning or update_stmt._returning:
+ if not self.returning:
+ self.returning = update_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, update_stmt._returning)
+ update_stmt, self.returning)
if extra_froms:
extra_from_text = self.update_from_clause(
@@ -1675,7 +1827,7 @@ class SQLCompiler(engine.Compiled):
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
- update_stmt, update_stmt._returning)
+ update_stmt, self.returning)
self.stack.pop(-1)
@@ -1684,13 +1836,45 @@ class SQLCompiler(engine.Compiled):
def _create_crud_bind_param(self, col, value, required=False, name=None):
if name is None:
name = col.key
- bindparam = sql.bindparam(name, value,
- type_=col.type, required=required,
- quote=col.quote)
+ bindparam = elements.BindParameter(name, value,
+ type_=col.type, required=required)
bindparam._is_crud = True
return bindparam._compiler_dispatch(self)
- def _get_colparams(self, stmt, extra_tables=None):
+ @util.memoized_property
+ def _key_getters_for_crud_column(self):
+ if self.isupdate and self.statement._extra_froms:
+ # when extra tables are present, refer to the columns
+ # in those extra tables as table-qualified, including in
+ # dictionaries and when rendering bind param names.
+ # the "main" table of the statement remains unqualified,
+ # allowing the most compatibility with a non-multi-table
+ # statement.
+ _et = set(self.statement._extra_froms)
+ def _column_as_key(key):
+ str_key = elements._column_as_key(key)
+ if hasattr(key, 'table') and key.table in _et:
+ return (key.table.name, str_key)
+ else:
+ return str_key
+ def _getattr_col_key(col):
+ if col.table in _et:
+ return (col.table.name, col.key)
+ else:
+ return col.key
+ def _col_bind_name(col):
+ if col.table in _et:
+ return "%s_%s" % (col.table.name, col.key)
+ else:
+ return col.key
+
+ else:
+ _column_as_key = elements._column_as_key
+ _getattr_col_key = _col_bind_name = operator.attrgetter("key")
+
+ return _column_as_key, _getattr_col_key, _col_bind_name
+
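+    # Illustrative sketch (assumed table names): in a multi-table UPDATE
+    # such as  main.update().values({other.c.x: 5}) , columns of the
+    # extra table are keyed ('other', 'x') and bound as "other_x", while
+    # the main table's columns keep their plain .key.
+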
+ def _get_colparams(self, stmt, **kw):
"""create a set of tuples representing column/string pairs for use
in an INSERT or UPDATE statement.
@@ -1719,12 +1903,18 @@ class SQLCompiler(engine.Compiled):
else:
stmt_parameters = stmt.parameters
+ # getters - these are normally just column.key,
+ # but in the case of mysql multi-table update, the rules for
+ # .key must conditionally take tablename into account
+ _column_as_key, _getattr_col_key, _col_bind_name = \
+ self._key_getters_for_crud_column
+
# if we have statement parameters - set defaults in the
# compiled params
if self.column_keys is None:
parameters = {}
else:
- parameters = dict((sql._column_as_key(key), REQUIRED)
+ parameters = dict((_column_as_key(key), REQUIRED)
for key in self.column_keys
if not stmt_parameters or
key not in stmt_parameters)
@@ -1734,17 +1924,19 @@ class SQLCompiler(engine.Compiled):
if stmt_parameters is not None:
for k, v in stmt_parameters.items():
- colkey = sql._column_as_key(k)
+ colkey = _column_as_key(k)
if colkey is not None:
parameters.setdefault(colkey, v)
else:
# a non-Column expression on the left side;
# add it to values() in an "as-is" state,
# coercing right side to bound param
- if sql._is_literal(v):
- v = self.process(sql.bindparam(None, v, type_=k.type))
+ if elements._is_literal(v):
+ v = self.process(
+ elements.BindParameter(None, v, type_=k.type),
+ **kw)
else:
- v = self.process(v.self_group())
+ v = self.process(v.self_group(), **kw)
values.append((k, v))
@@ -1756,30 +1948,44 @@ class SQLCompiler(engine.Compiled):
self.dialect.implicit_returning and \
stmt.table.implicit_returning
+ if self.isinsert:
+ implicit_return_defaults = implicit_returning and stmt._return_defaults
+ elif self.isupdate:
+ implicit_return_defaults = self.dialect.implicit_returning and \
+ stmt.table.implicit_returning and \
+ stmt._return_defaults
+
+ if implicit_return_defaults:
+ if stmt._return_defaults is True:
+ implicit_return_defaults = set(stmt.table.c)
+ else:
+ implicit_return_defaults = set(stmt._return_defaults)
+
postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid
check_columns = {}
+
# special logic that only occurs for multi-table UPDATE
# statements
- if extra_tables and stmt_parameters:
+ if self.isupdate and stmt._extra_froms and stmt_parameters:
normalized_params = dict(
- (sql._clause_element_as_expr(c), param)
+ (elements._clause_element_as_expr(c), param)
for c, param in stmt_parameters.items()
)
- assert self.isupdate
affected_tables = set()
- for t in extra_tables:
+ for t in stmt._extra_froms:
for c in t.c:
if c in normalized_params:
affected_tables.add(t)
- check_columns[c.key] = c
+ check_columns[_getattr_col_key(c)] = c
value = normalized_params[c]
- if sql._is_literal(value):
+ if elements._is_literal(value):
value = self._create_crud_bind_param(
- c, value, required=value is REQUIRED)
+ c, value, required=value is REQUIRED,
+ name=_col_bind_name(c))
else:
self.postfetch.append(c)
- value = self.process(value.self_group())
+ value = self.process(value.self_group(), **kw)
values.append((c, value))
# determine tables which are actually
# to be updated - process onupdate and
@@ -1791,36 +1997,60 @@ class SQLCompiler(engine.Compiled):
elif c.onupdate is not None and not c.onupdate.is_sequence:
if c.onupdate.is_clause_element:
values.append(
- (c, self.process(c.onupdate.arg.self_group()))
+ (c, self.process(
+ c.onupdate.arg.self_group(),
+ **kw)
+ )
)
self.postfetch.append(c)
else:
values.append(
- (c, self._create_crud_bind_param(c, None))
+ (c, self._create_crud_bind_param(
+ c, None, name=_col_bind_name(c)
+ )
+ )
)
self.prefetch.append(c)
elif c.server_onupdate is not None:
self.postfetch.append(c)
- # iterating through columns at the top to maintain ordering.
- # otherwise we might iterate through individual sets of
- # "defaults", "primary key cols", etc.
- for c in stmt.table.columns:
- if c.key in parameters and c.key not in check_columns:
- value = parameters.pop(c.key)
- if sql._is_literal(value):
+ if self.isinsert and stmt.select_names:
+ # for an insert from select, we can only use names that
+ # are given, so only select for those names.
+ cols = (stmt.table.c[_column_as_key(name)]
+ for name in stmt.select_names)
+ else:
+ # iterate through all table columns to maintain
+ # ordering, even for those cols that aren't included
+ cols = stmt.table.columns
+
+ for c in cols:
+ col_key = _getattr_col_key(c)
+ if col_key in parameters and col_key not in check_columns:
+ value = parameters.pop(col_key)
+ if elements._is_literal(value):
value = self._create_crud_bind_param(
c, value, required=value is REQUIRED,
- name=c.key
+ name=_col_bind_name(c)
if not stmt._has_multi_parameters
- else "%s_0" % c.key
+ else "%s_0" % _col_bind_name(c)
)
- elif c.primary_key and implicit_returning:
- self.returning.append(c)
- value = self.process(value.self_group())
else:
- self.postfetch.append(c)
- value = self.process(value.self_group())
+ if isinstance(value, elements.BindParameter) and \
+ value.type._isnull:
+ value = value._clone()
+ value.type = c.type
+
+ if c.primary_key and implicit_returning:
+ self.returning.append(c)
+ value = self.process(value.self_group(), **kw)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ value = self.process(value.self_group(), **kw)
+ else:
+ self.postfetch.append(c)
+ value = self.process(value.self_group(), **kw)
values.append((c, value))
elif self.isinsert:
@@ -1838,13 +2068,13 @@ class SQLCompiler(engine.Compiled):
if self.dialect.supports_sequences and \
(not c.default.optional or \
not self.dialect.sequences_optional):
- proc = self.process(c.default)
+ proc = self.process(c.default, **kw)
values.append((c, proc))
self.returning.append(c)
elif c.default.is_clause_element:
values.append(
(c,
- self.process(c.default.arg.self_group()))
+ self.process(c.default.arg.self_group(), **kw))
)
self.returning.append(c)
else:
@@ -1855,7 +2085,13 @@ class SQLCompiler(engine.Compiled):
else:
self.returning.append(c)
else:
- if c.default is not None or \
+ if (
+ c.default is not None and
+ (
+ not c.default.is_sequence or
+ self.dialect.supports_sequences
+ )
+ ) or \
c is stmt.table._autoincrement_column and (
self.dialect.supports_sequences or
self.dialect.preexecute_autoincrement_sequences
@@ -1872,16 +2108,22 @@ class SQLCompiler(engine.Compiled):
if self.dialect.supports_sequences and \
(not c.default.optional or \
not self.dialect.sequences_optional):
- proc = self.process(c.default)
+ proc = self.process(c.default, **kw)
values.append((c, proc))
- if not c.primary_key:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ elif not c.primary_key:
self.postfetch.append(c)
elif c.default.is_clause_element:
values.append(
- (c, self.process(c.default.arg.self_group()))
+ (c, self.process(c.default.arg.self_group(), **kw))
)
- if not c.primary_key:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ elif not c.primary_key:
                            # don't add primary key column to postfetch
self.postfetch.append(c)
else:
@@ -1890,32 +2132,49 @@ class SQLCompiler(engine.Compiled):
)
self.prefetch.append(c)
elif c.server_default is not None:
- if not c.primary_key:
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ elif not c.primary_key:
self.postfetch.append(c)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
elif self.isupdate:
if c.onupdate is not None and not c.onupdate.is_sequence:
if c.onupdate.is_clause_element:
values.append(
- (c, self.process(c.onupdate.arg.self_group()))
+ (c, self.process(c.onupdate.arg.self_group(), **kw))
)
- self.postfetch.append(c)
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ else:
+ self.postfetch.append(c)
else:
values.append(
(c, self._create_crud_bind_param(c, None))
)
self.prefetch.append(c)
elif c.server_onupdate is not None:
- self.postfetch.append(c)
+ if implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
+ else:
+ self.postfetch.append(c)
+ elif implicit_return_defaults and \
+ c in implicit_return_defaults:
+ self.returning.append(c)
if parameters and stmt_parameters:
check = set(parameters).intersection(
- sql._column_as_key(k) for k in stmt.parameters
+ _column_as_key(k) for k in stmt.parameters
).difference(check_columns)
if check:
raise exc.CompileError(
"Unconsumed column names: %s" %
- (", ".join(check))
+ (", ".join("%s" % c for c in check))
)
if stmt._has_multi_parameters:
@@ -1924,17 +2183,17 @@ class SQLCompiler(engine.Compiled):
values.extend(
[
- (
- c,
- self._create_crud_bind_param(
- c, row[c.key],
- name="%s_%d" % (c.key, i + 1)
- )
- if c.key in row else param
- )
- for (c, param) in values_0
- ]
- for i, row in enumerate(stmt.parameters[1:])
+ (
+ c,
+ self._create_crud_bind_param(
+ c, row[c.key],
+ name="%s_%d" % (c.key, i + 1)
+ )
+ if c.key in row else param
+ )
+ for (c, param) in values_0
+ ]
+ for i, row in enumerate(stmt.parameters[1:])
)
return values
@@ -2005,7 +2264,7 @@ class SQLCompiler(engine.Compiled):
self.preparer.format_savepoint(savepoint_stmt)
-class DDLCompiler(engine.Compiled):
+class DDLCompiler(Compiled):
@util.memoized_property
def sql_compiler(self):
@@ -2042,11 +2301,11 @@ class DDLCompiler(engine.Compiled):
return self.sql_compiler.post_process_text(ddl.statement % context)
def visit_create_schema(self, create):
- schema = self.preparer.format_schema(create.element, create.quote)
+ schema = self.preparer.format_schema(create.element)
return "CREATE SCHEMA " + schema
def visit_drop_schema(self, drop):
- schema = self.preparer.format_schema(drop.element, drop.quote)
+ schema = self.preparer.format_schema(drop.element)
text = "DROP SCHEMA " + schema
if drop.cascade:
text += " CASCADE"
@@ -2068,11 +2327,13 @@ class DDLCompiler(engine.Compiled):
for create_column in create.columns:
column = create_column.element
try:
- text += separator
- separator = ", \n"
- text += "\t" + self.process(create_column,
+ processed = self.process(create_column,
first_pk=column.primary_key
and not first_pk)
+ if processed is not None:
+ text += separator
+ separator = ", \n"
+ text += "\t" + processed
if column.primary_key:
first_pk = True
except exc.CompileError as ce:
@@ -2093,6 +2354,9 @@ class DDLCompiler(engine.Compiled):
def visit_create_column(self, create, first_pk=False):
column = create.element
+ if column.system:
+ return None
+
text = self.get_column_specification(
column,
first_pk=first_pk
@@ -2156,7 +2420,7 @@ class DDLCompiler(engine.Compiled):
use_schema=include_table_schema),
', '.join(
self.sql_compiler.process(expr,
- include_table=False) for
+ include_table=False, literal_binds=True) for
expr in index.expressions)
)
return text
@@ -2169,13 +2433,12 @@ class DDLCompiler(engine.Compiled):
def _prepared_index_name(self, index, include_schema=False):
if include_schema and index.table is not None and index.table.schema:
schema = index.table.schema
- schema_name = self.preparer.quote_schema(schema,
- index.table.quote_schema)
+ schema_name = self.preparer.quote_schema(schema)
else:
schema_name = None
ident = index.name
- if isinstance(ident, sql._truncated_label):
+ if isinstance(ident, elements._truncated_label):
max_ = self.dialect.max_index_name_length or \
self.dialect.max_identifier_length
if len(ident) > max_:
@@ -2184,9 +2447,7 @@ class DDLCompiler(engine.Compiled):
else:
self.dialect.validate_identifier(ident)
- index_name = self.preparer.quote(
- ident,
- index.quote)
+ index_name = self.preparer.quote(ident)
if schema_name:
index_name = schema_name + "." + index_name
@@ -2246,8 +2507,9 @@ class DDLCompiler(engine.Compiled):
if constraint.name is not None:
text += "CONSTRAINT %s " % \
self.preparer.format_constraint(constraint)
- sqltext = sql_util.expression_as_ddl(constraint.sqltext)
- text += "CHECK (%s)" % self.sql_compiler.process(sqltext)
+ text += "CHECK (%s)" % self.sql_compiler.process(constraint.sqltext,
+ include_table=False,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
@@ -2268,7 +2530,7 @@ class DDLCompiler(engine.Compiled):
text += "CONSTRAINT %s " % \
self.preparer.format_constraint(constraint)
text += "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
for c in constraint)
text += self.define_constraint_deferrability(constraint)
return text
@@ -2281,11 +2543,11 @@ class DDLCompiler(engine.Compiled):
preparer.format_constraint(constraint)
remote_table = list(constraint._elements.values())[0].column.table
text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
+ ', '.join(preparer.quote(f.parent.name)
for f in constraint._elements.values()),
self.define_constraint_remote_table(
constraint, remote_table, preparer),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
+ ', '.join(preparer.quote(f.column.name)
for f in constraint._elements.values())
)
text += self.define_constraint_match(constraint)
@@ -2299,12 +2561,14 @@ class DDLCompiler(engine.Compiled):
return preparer.format_table(table)
def visit_unique_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
text = ""
if constraint.name is not None:
text += "CONSTRAINT %s " % \
self.preparer.format_constraint(constraint)
text += "UNIQUE (%s)" % (
- ', '.join(self.preparer.quote(c.name, c.quote)
+ ', '.join(self.preparer.quote(c.name)
for c in constraint))
text += self.define_constraint_deferrability(constraint)
return text
@@ -2335,7 +2599,7 @@ class DDLCompiler(engine.Compiled):
return text
-class GenericTypeCompiler(engine.TypeCompiler):
+class GenericTypeCompiler(TypeCompiler):
def visit_FLOAT(self, type_):
return "FLOAT"
@@ -2558,15 +2822,25 @@ class IdentifierPreparer(object):
or not self.legal_characters.match(util.text_type(value))
or (lc_value != value))
- def quote_schema(self, schema, force):
- """Quote a schema.
+ def quote_schema(self, schema, force=None):
+ """Conditionally quote a schema.
+
+ Subclasses can override this to provide database-dependent
+ quoting behavior for schema names.
+
+        The 'force' flag should be considered deprecated.
- Subclasses should override this to provide database-dependent
- quoting behavior.
"""
return self.quote(schema, force)
- def quote(self, ident, force):
+ def quote(self, ident, force=None):
+ """Conditionally quote an identifier.
+
+        The 'force' flag should be considered deprecated.
+ """
+
+ force = getattr(ident, "quote", None)
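+        # quoting preference now travels on the identifier itself (e.g.
+        # a quoted_name); any 'force' flag passed by callers is superseded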
+
if force is None:
if ident in self._strings:
return self._strings[ident]
@@ -2582,38 +2856,35 @@ class IdentifierPreparer(object):
return ident
def format_sequence(self, sequence, use_schema=True):
- name = self.quote(sequence.name, sequence.quote)
- if not self.omit_schema and use_schema and \
- sequence.schema is not None:
- name = self.quote_schema(sequence.schema, sequence.quote) + \
- "." + name
+ name = self.quote(sequence.name)
+ if not self.omit_schema and use_schema and sequence.schema is not None:
+ name = self.quote_schema(sequence.schema) + "." + name
return name
def format_label(self, label, name=None):
- return self.quote(name or label.name, label.quote)
+ return self.quote(name or label.name)
def format_alias(self, alias, name=None):
- return self.quote(name or alias.name, alias.quote)
+ return self.quote(name or alias.name)
def format_savepoint(self, savepoint, name=None):
- return self.quote(name or savepoint.ident, savepoint.quote)
+ return self.quote(name or savepoint.ident)
def format_constraint(self, constraint):
- return self.quote(constraint.name, constraint.quote)
+ return self.quote(constraint.name)
def format_table(self, table, use_schema=True, name=None):
"""Prepare a quoted table and schema name."""
if name is None:
name = table.name
- result = self.quote(name, table.quote)
+ result = self.quote(name)
if not self.omit_schema and use_schema \
and getattr(table, "schema", None):
- result = self.quote_schema(table.schema, table.quote_schema) + \
- "." + result
+ result = self.quote_schema(table.schema) + "." + result
return result
- def format_schema(self, name, quote):
+ def format_schema(self, name, quote=None):
"""Prepare a quoted schema name."""
return self.quote(name, quote)
@@ -2628,10 +2899,9 @@ class IdentifierPreparer(object):
if use_table:
return self.format_table(
column.table, use_schema=False,
- name=table_name) + "." + \
- self.quote(name, column.quote)
+ name=table_name) + "." + self.quote(name)
else:
- return self.quote(name, column.quote)
+ return self.quote(name)
else:
# literal textual elements get stuck into ColumnClause a lot,
# which shouldn't get quoted
@@ -2651,7 +2921,7 @@ class IdentifierPreparer(object):
if not self.omit_schema and use_schema and \
getattr(table, 'schema', None):
- return (self.quote_schema(table.schema, table.quote_schema),
+ return (self.quote_schema(table.schema),
self.format_table(table, use_schema=False))
else:
return (self.format_table(table, use_schema=False), )
diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py
new file mode 100644
index 000000000..bda876502
--- /dev/null
+++ b/lib/sqlalchemy/sql/ddl.py
@@ -0,0 +1,864 @@
+# sql/ddl.py
+# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+Provides the hierarchy of DDL-defining schema items as well as routines
+to invoke them for a create/drop call.
+
+"""
+
+from .. import util
+from .elements import ClauseElement
+from .visitors import traverse
+from .base import Executable, _generative, SchemaVisitor, _bind_or_error
+from ..util import topological
+from .. import event
+from .. import exc
+
+class _DDLCompiles(ClauseElement):
+ def _compiler(self, dialect, **kw):
+ """Return a compiler appropriate for this ClauseElement, given a
+ Dialect."""
+
+ return dialect.ddl_compiler(dialect, self, **kw)
+
+
+class DDLElement(Executable, _DDLCompiles):
+ """Base class for DDL expression constructs.
+
+ This class is the base for the general purpose :class:`.DDL` class,
+ as well as the various create/drop clause constructs such as
+ :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
+ etc.
+
+ :class:`.DDLElement` integrates closely with SQLAlchemy events,
+ introduced in :ref:`event_toplevel`. An instance of one is
+    itself an event-receiving callable::
+
+ event.listen(
+ users,
+ 'after_create',
+ AddConstraint(constraint).execute_if(dialect='postgresql')
+ )
+
+ .. seealso::
+
+ :class:`.DDL`
+
+ :class:`.DDLEvents`
+
+ :ref:`event_toplevel`
+
+ :ref:`schema_ddl_sequences`
+
+ """
+
+ _execution_options = Executable.\
+ _execution_options.union({'autocommit': True})
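+    # note: DDL statements autocommit by default when executed outside an
+    # enclosing transaction, via the 'autocommit' execution option above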
+
+ target = None
+ on = None
+ dialect = None
+ callable_ = None
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_ddl(self, multiparams, params)
+
+ def execute(self, bind=None, target=None):
+ """Execute this DDL immediately.
+
+        Executes the DDL statement in isolation using the supplied
+        :class:`.Connectable`, or the :class:`.Connectable` assigned
+        to the ``.bind`` property if none is supplied. If the DDL has
+        a conditional ``on`` criterion, it will be invoked with None
+        as the event.
+
+ :param bind:
+ Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
+ :class:`.Connectable` must be present in the
+ ``.bind`` property.
+
+ :param target:
+ Optional, defaults to None. The target SchemaItem for the
+ execute call. Will be passed to the ``on`` callable if any,
+ and may also provide string expansion data for the
+ statement. See ``execute_at`` for more information.
+
+ """
+
+ if bind is None:
+ bind = _bind_or_error(self)
+
+ if self._should_execute(target, bind):
+ return bind.execute(self.against(target))
+ else:
+ bind.engine.logger.info(
+ "DDL execution skipped, criteria not met.")
+
+ @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
+ ":meth:`.DDLElement.execute_if`.")
+ def execute_at(self, event_name, target):
+ """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
+
+ Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
+ executing it when that schema item is created or dropped. The DDL
+ statement will be executed using the same Connection and transactional
+ context as the Table create/drop itself. The ``.bind`` property of
+ this statement is ignored.
+
+        :param event_name:
+ One of the events defined in the schema item's ``.ddl_events``;
+ e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
+
+ :param target:
+          The Table or MetaData instance with which this DDLElement will
+          be associated.
+
+ A DDLElement instance can be linked to any number of schema items.
+
+ ``execute_at`` builds on the ``append_ddl_listener`` interface of
+ :class:`.MetaData` and :class:`.Table` objects.
+
+        Caveat: Creating or dropping a Table in isolation will also trigger
+        any DDL set to ``execute_at`` on that Table's MetaData. This may change
+ in a future release.
+
+ """
+
+ def call_event(target, connection, **kw):
+ if self._should_execute_deprecated(event_name,
+ target, connection, **kw):
+ return connection.execute(self.against(target))
+
+        event.listen(target, event_name.replace('-', '_'), call_event)
+
+ @_generative
+ def against(self, target):
+ """Return a copy of this DDL against a specific schema item."""
+
+ self.target = target
+
+ @_generative
+ def execute_if(self, dialect=None, callable_=None, state=None):
+ """Return a callable that will execute this
+ DDLElement conditionally.
+
+ Used to provide a wrapper for event listening::
+
+ event.listen(
+ metadata,
+ 'before_create',
+ DDL("my_ddl").execute_if(dialect='postgresql')
+ )
+
+ :param dialect: May be a string, tuple or a callable
+ predicate. If a string, it will be compared to the name of the
+ executing database dialect::
+
+ DDL('something').execute_if(dialect='postgresql')
+
+ If a tuple, specifies multiple dialect names::
+
+ DDL('something').execute_if(dialect=('postgresql', 'mysql'))
+
+ :param callable_: A callable, which will be invoked with
+          three positional arguments as well as optional keyword
+ arguments:
+
+ :ddl:
+ This DDL element.
+
+ :target:
+ The :class:`.Table` or :class:`.MetaData` object which is the
+ target of this event. May be None if the DDL is executed
+ explicitly.
+
+ :bind:
+ The :class:`.Connection` being used for DDL execution
+
+ :tables:
+ Optional keyword argument - a list of Table objects which are to
+ be created/ dropped within a MetaData.create_all() or drop_all()
+ method call.
+
+ :state:
+ Optional keyword argument - will be the ``state`` argument
+ passed to this function.
+
+ :checkfirst:
+ Keyword argument, will be True if the 'checkfirst' flag was
+ set during the call to ``create()``, ``create_all()``,
+ ``drop()``, ``drop_all()``.
+
+ If the callable returns a true value, the DDL statement will be
+ executed.
+
+ :param state: any value which will be passed to the callable\_
+ as the ``state`` keyword argument.
+
+ .. seealso::
+
+ :class:`.DDLEvents`
+
+ :ref:`event_toplevel`
+
+ """
+ self.dialect = dialect
+ self.callable_ = callable_
+ self.state = state
+
+ def _should_execute(self, target, bind, **kw):
+ if self.on is not None and \
+ not self._should_execute_deprecated(None, target, bind, **kw):
+ return False
+
+ if isinstance(self.dialect, util.string_types):
+ if self.dialect != bind.engine.name:
+ return False
+ elif isinstance(self.dialect, (tuple, list, set)):
+ if bind.engine.name not in self.dialect:
+ return False
+ if self.callable_ is not None and \
+ not self.callable_(self, target, bind, state=self.state, **kw):
+ return False
+
+ return True
+
+ def _should_execute_deprecated(self, event, target, bind, **kw):
+ if self.on is None:
+ return True
+ elif isinstance(self.on, util.string_types):
+ return self.on == bind.engine.name
+ elif isinstance(self.on, (tuple, list, set)):
+ return bind.engine.name in self.on
+ else:
+ return self.on(self, event, target, bind, **kw)
+
+ def __call__(self, target, bind, **kw):
+ """Execute the DDL as a ddl_listener."""
+
+ if self._should_execute(target, bind, **kw):
+ return bind.execute(self.against(target))
+
+ def _check_ddl_on(self, on):
+ if (on is not None and
+ (not isinstance(on, util.string_types + (tuple, list, set)) and
+ not util.callable(on))):
+ raise exc.ArgumentError(
+ "Expected the name of a database dialect, a tuple "
+ "of names, or a callable for "
+ "'on' criteria, got type '%s'." % type(on).__name__)
+
+ def bind(self):
+ if self._bind:
+ return self._bind
+
+ def _set_bind(self, bind):
+ self._bind = bind
+ bind = property(bind, _set_bind)
+
+ def _generate(self):
+ s = self.__class__.__new__(self.__class__)
+ s.__dict__ = self.__dict__.copy()
+ return s
+
+
+class DDL(DDLElement):
+ """A literal DDL statement.
+
+ Specifies literal SQL DDL to be executed by the database. DDL objects
+ function as DDL event listeners, and can be subscribed to those events
+ listed in :class:`.DDLEvents`, using either :class:`.Table` or
+ :class:`.MetaData` objects as targets. Basic templating support allows
+ a single DDL instance to handle repetitive tasks for multiple tables.
+
+ Examples::
+
+ from sqlalchemy import event, DDL
+
+ tbl = Table('users', metadata, Column('uid', Integer))
+ event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
+
+ spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
+ event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
+
+ drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
+ connection.execute(drop_spow)
+
+ When operating on Table events, the following ``statement``
+    string substitutions are available::
+
+ %(table)s - the Table name, with any required quoting applied
+ %(schema)s - the schema name, with any required quoting applied
+ %(fullname)s - the Table name including schema, quoted if needed
+
+ The DDL's "context", if any, will be combined with the standard
+    substitutions noted above. Keys present in the context will override
+ the standard substitutions.
+
+ """
+
+ __visit_name__ = "ddl"
+
+ def __init__(self, statement, on=None, context=None, bind=None):
+ """Create a DDL statement.
+
+ :param statement:
+ A string or unicode string to be executed. Statements will be
+ processed with Python's string formatting operator. See the
+ ``context`` argument and the ``execute_at`` method.
+
+ A literal '%' in a statement must be escaped as '%%'.
+
+ SQL bind parameters are not available in DDL statements.
+
+ :param on:
+ .. deprecated:: 0.7
+ See :meth:`.DDLElement.execute_if`.
+
+ Optional filtering criteria. May be a string, tuple or a callable
+ predicate. If a string, it will be compared to the name of the
+ executing database dialect::
+
+ DDL('something', on='postgresql')
+
+ If a tuple, specifies multiple dialect names::
+
+ DDL('something', on=('postgresql', 'mysql'))
+
+ If a callable, it will be invoked with four positional arguments
+ as well as optional keyword arguments:
+
+ :ddl:
+ This DDL element.
+
+ :event:
+ The name of the event that has triggered this DDL, such as
+          'after-create'. Will be None if the DDL is executed explicitly.
+
+ :target:
+ The ``Table`` or ``MetaData`` object which is the target of
+ this event. May be None if the DDL is executed explicitly.
+
+ :connection:
+ The ``Connection`` being used for DDL execution
+
+ :tables:
+ Optional keyword argument - a list of Table objects which are to
+ be created/ dropped within a MetaData.create_all() or drop_all()
+ method call.
+
+
+ If the callable returns a true value, the DDL statement will be
+ executed.
+
+ :param context:
+ Optional dictionary, defaults to None. These values will be
+ available for use in string substitutions on the DDL statement.
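+
+          For example, a sketch combining a context key with the standard
+          ``%(table)s`` substitution::
+
+              DDL("ALTER TABLE %(table)s ADD %(col)s INTEGER",
+                  context={'col': 'extra_data'})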
+
+ :param bind:
+ Optional. A :class:`.Connectable`, used by
+ default when ``execute()`` is invoked without a bind argument.
+
+
+ .. seealso::
+
+ :class:`.DDLEvents`
+
+ :mod:`sqlalchemy.event`
+
+ """
+
+ if not isinstance(statement, util.string_types):
+ raise exc.ArgumentError(
+ "Expected a string or unicode SQL statement, got '%r'" %
+ statement)
+
+ self.statement = statement
+ self.context = context or {}
+
+ self._check_ddl_on(on)
+ self.on = on
+ self._bind = bind
+
+ def __repr__(self):
+ return '<%s@%s; %s>' % (
+ type(self).__name__, id(self),
+ ', '.join([repr(self.statement)] +
+ ['%s=%r' % (key, getattr(self, key))
+ for key in ('on', 'context')
+ if getattr(self, key)]))
+
+
+
+class _CreateDropBase(DDLElement):
+ """Base class for DDL constucts that represent CREATE and DROP or
+ equivalents.
+
+ The common theme of _CreateDropBase is a single
+ ``element`` attribute which refers to the element
+ to be created or dropped.
+
+ """
+
+ def __init__(self, element, on=None, bind=None):
+ self.element = element
+ self._check_ddl_on(on)
+ self.on = on
+ self.bind = bind
+
+ def _create_rule_disable(self, compiler):
+ """Allow disable of _create_rule using a callable.
+
+ Pass to _create_rule using
+ util.portable_instancemethod(self._create_rule_disable)
+ to retain serializability.
+
+ """
+ return False
+
+
+class CreateSchema(_CreateDropBase):
+ """Represent a CREATE SCHEMA statement.
+
+ .. versionadded:: 0.7.4
+
+ The argument here is the string name of the schema.
+
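+    E.g., a sketch; with the default compiler this prints
+    ``CREATE SCHEMA analytics``::
+
+        print(CreateSchema('analytics'))
+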
+ """
+
+ __visit_name__ = "create_schema"
+
+ def __init__(self, name, quote=None, **kw):
+ """Create a new :class:`.CreateSchema` construct."""
+
+ self.quote = quote
+ super(CreateSchema, self).__init__(name, **kw)
+
+
+class DropSchema(_CreateDropBase):
+ """Represent a DROP SCHEMA statement.
+
+ The argument here is the string name of the schema.
+
+ .. versionadded:: 0.7.4
+
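+    E.g., a sketch; ``cascade=True`` adds a trailing ``CASCADE`` keyword
+    on backends that accept it::
+
+        print(DropSchema('analytics', cascade=True))
+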
+ """
+
+ __visit_name__ = "drop_schema"
+
+ def __init__(self, name, quote=None, cascade=False, **kw):
+ """Create a new :class:`.DropSchema` construct."""
+
+ self.quote = quote
+ self.cascade = cascade
+ super(DropSchema, self).__init__(name, **kw)
+
+
+class CreateTable(_CreateDropBase):
+ """Represent a CREATE TABLE statement."""
+
+ __visit_name__ = "create_table"
+
+ def __init__(self, element, on=None, bind=None):
+ """Create a :class:`.CreateTable` construct.
+
+ :param element: a :class:`.Table` that's the subject
+ of the CREATE
+ :param on: See the description for 'on' in :class:`.DDL`.
+ :param bind: See the description for 'bind' in :class:`.DDL`.
+
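+        E.g., a sketch that emits the CREATE TABLE string for a
+        hypothetical ``mytable`` :class:`.Table`::
+
+            print(CreateTable(mytable))
+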
+ """
+ super(CreateTable, self).__init__(element, on=on, bind=bind)
+ self.columns = [CreateColumn(column)
+ for column in element.columns
+ ]
+
+
+class _DropView(_CreateDropBase):
+ """Semi-public 'DROP VIEW' construct.
+
+ Used by the test suite for dialect-agnostic drops of views.
+ This object will eventually be part of a public "view" API.
+
+ """
+ __visit_name__ = "drop_view"
+
+
+class CreateColumn(_DDLCompiles):
+ """Represent a :class:`.Column` as rendered in a CREATE TABLE statement,
+ via the :class:`.CreateTable` construct.
+
+ This is provided to support custom column DDL within the generation
+ of CREATE TABLE statements, by using the
+ compiler extension documented in :ref:`sqlalchemy.ext.compiler_toplevel`
+ to extend :class:`.CreateColumn`.
+
+ Typical integration is to examine the incoming :class:`.Column`
+ object, and to redirect compilation if a particular flag or condition
+ is found::
+
+ from sqlalchemy import schema
+ from sqlalchemy.ext.compiler import compiles
+
+ @compiles(schema.CreateColumn)
+ def compile(element, compiler, **kw):
+ column = element.element
+
+ if "special" not in column.info:
+ return compiler.visit_create_column(element, **kw)
+
+ text = "%s SPECIAL DIRECTIVE %s" % (
+ column.name,
+ compiler.type_compiler.process(column.type)
+ )
+ default = compiler.get_column_default_string(column)
+ if default is not None:
+ text += " DEFAULT " + default
+
+ if not column.nullable:
+ text += " NOT NULL"
+
+ if column.constraints:
+ text += " ".join(
+ compiler.process(const)
+ for const in column.constraints)
+ return text
+
+ The above construct can be applied to a :class:`.Table` as follows::
+
+        from sqlalchemy import Table, MetaData, Column, Integer, String
+ from sqlalchemy import schema
+
+ metadata = MetaData()
+
+        table = Table('mytable', metadata,
+ Column('x', Integer, info={"special":True}, primary_key=True),
+ Column('y', String(50)),
+ Column('z', String(20), info={"special":True})
+ )
+
+ metadata.create_all(conn)
+
+ Above, the directives we've added to the :attr:`.Column.info` collection
+ will be detected by our custom compilation scheme::
+
+ CREATE TABLE mytable (
+ x SPECIAL DIRECTIVE INTEGER NOT NULL,
+ y VARCHAR(50),
+ z SPECIAL DIRECTIVE VARCHAR(20),
+ PRIMARY KEY (x)
+ )
+
+ The :class:`.CreateColumn` construct can also be used to skip certain
+ columns when producing a ``CREATE TABLE``. This is accomplished by
+ creating a compilation rule that conditionally returns ``None``.
+ This is essentially how to produce the same effect as using the
+ ``system=True`` argument on :class:`.Column`, which marks a column
+ as an implicitly-present "system" column.
+
+ For example, suppose we wish to produce a :class:`.Table` which skips
+ rendering of the Postgresql ``xmin`` column against the Postgresql backend,
+ but on other backends does render it, in anticipation of a triggered rule.
+ A conditional compilation rule could skip this name only on Postgresql::
+
+ from sqlalchemy.schema import CreateColumn
+
+ @compiles(CreateColumn, "postgresql")
+ def skip_xmin(element, compiler, **kw):
+ if element.element.name == 'xmin':
+ return None
+ else:
+ return compiler.visit_create_column(element, **kw)
+
+
+ my_table = Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('xmin', Integer)
+ )
+
+ Above, a :class:`.CreateTable` construct will generate a ``CREATE TABLE``
+ which only includes the ``id`` column in the string; the ``xmin`` column
+ will be omitted, but only against the Postgresql backend.
+
+ .. versionadded:: 0.8.3 The :class:`.CreateColumn` construct supports
+ skipping of columns by returning ``None`` from a custom compilation rule.
+
+ .. versionadded:: 0.8 The :class:`.CreateColumn` construct was added
+ to support custom column creation styles.
+
+ """
+ __visit_name__ = 'create_column'
+
+ def __init__(self, element):
+ self.element = element
+
+
+class DropTable(_CreateDropBase):
+ """Represent a DROP TABLE statement."""
+
+ __visit_name__ = "drop_table"
+
+
+class CreateSequence(_CreateDropBase):
+ """Represent a CREATE SEQUENCE statement."""
+
+ __visit_name__ = "create_sequence"
+
+
+class DropSequence(_CreateDropBase):
+ """Represent a DROP SEQUENCE statement."""
+
+ __visit_name__ = "drop_sequence"
+
+
+class CreateIndex(_CreateDropBase):
+ """Represent a CREATE INDEX statement."""
+
+ __visit_name__ = "create_index"
+
+
+class DropIndex(_CreateDropBase):
+ """Represent a DROP INDEX statement."""
+
+ __visit_name__ = "drop_index"
+
+
+class AddConstraint(_CreateDropBase):
+ """Represent an ALTER TABLE ADD CONSTRAINT statement."""
+
+ __visit_name__ = "add_constraint"
+
+ def __init__(self, element, *args, **kw):
+ super(AddConstraint, self).__init__(element, *args, **kw)
+ element._create_rule = util.portable_instancemethod(
+ self._create_rule_disable)
+
+
+class DropConstraint(_CreateDropBase):
+ """Represent an ALTER TABLE DROP CONSTRAINT statement."""
+
+ __visit_name__ = "drop_constraint"
+
+ def __init__(self, element, cascade=False, **kw):
+ self.cascade = cascade
+ super(DropConstraint, self).__init__(element, **kw)
+ element._create_rule = util.portable_instancemethod(
+ self._create_rule_disable)
+
+
+class DDLBase(SchemaVisitor):
+ def __init__(self, connection):
+ self.connection = connection
+
+
+class SchemaGenerator(DDLBase):
+
+ def __init__(self, dialect, connection, checkfirst=False,
+ tables=None, **kwargs):
+ super(SchemaGenerator, self).__init__(connection, **kwargs)
+ self.checkfirst = checkfirst
+ self.tables = tables
+ self.preparer = dialect.identifier_preparer
+ self.dialect = dialect
+ self.memo = {}
+
+ def _can_create_table(self, table):
+ self.dialect.validate_identifier(table.name)
+ if table.schema:
+ self.dialect.validate_identifier(table.schema)
+ return not self.checkfirst or \
+ not self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
+
+ def _can_create_sequence(self, sequence):
+ return self.dialect.supports_sequences and \
+ (
+ (not self.dialect.sequences_optional or
+ not sequence.optional) and
+ (
+ not self.checkfirst or
+ not self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema)
+ )
+ )
+
+ def visit_metadata(self, metadata):
+ if self.tables is not None:
+ tables = self.tables
+ else:
+ tables = list(metadata.tables.values())
+ collection = [t for t in sort_tables(tables)
+ if self._can_create_table(t)]
+ seq_coll = [s for s in metadata._sequences.values()
+ if s.column is None and self._can_create_sequence(s)]
+
+ metadata.dispatch.before_create(metadata, self.connection,
+ tables=collection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ for seq in seq_coll:
+ self.traverse_single(seq, create_ok=True)
+
+ for table in collection:
+ self.traverse_single(table, create_ok=True)
+
+ metadata.dispatch.after_create(metadata, self.connection,
+ tables=collection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ def visit_table(self, table, create_ok=False):
+ if not create_ok and not self._can_create_table(table):
+ return
+
+ table.dispatch.before_create(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ for column in table.columns:
+ if column.default is not None:
+ self.traverse_single(column.default)
+
+ self.connection.execute(CreateTable(table))
+
+ if hasattr(table, 'indexes'):
+ for index in table.indexes:
+ self.traverse_single(index)
+
+ table.dispatch.after_create(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ def visit_sequence(self, sequence, create_ok=False):
+ if not create_ok and not self._can_create_sequence(sequence):
+ return
+ self.connection.execute(CreateSequence(sequence))
+
+ def visit_index(self, index):
+ self.connection.execute(CreateIndex(index))
+
+
+class SchemaDropper(DDLBase):
+
+ def __init__(self, dialect, connection, checkfirst=False,
+ tables=None, **kwargs):
+ super(SchemaDropper, self).__init__(connection, **kwargs)
+ self.checkfirst = checkfirst
+ self.tables = tables
+ self.preparer = dialect.identifier_preparer
+ self.dialect = dialect
+ self.memo = {}
+
+ def visit_metadata(self, metadata):
+ if self.tables is not None:
+ tables = self.tables
+ else:
+ tables = list(metadata.tables.values())
+
+ collection = [
+ t
+ for t in reversed(sort_tables(tables))
+ if self._can_drop_table(t)
+ ]
+
+ seq_coll = [
+ s
+ for s in metadata._sequences.values()
+ if s.column is None and self._can_drop_sequence(s)
+ ]
+
+ metadata.dispatch.before_drop(
+ metadata, self.connection, tables=collection,
+ checkfirst=self.checkfirst, _ddl_runner=self)
+
+ for table in collection:
+ self.traverse_single(table, drop_ok=True)
+
+ for seq in seq_coll:
+ self.traverse_single(seq, drop_ok=True)
+
+ metadata.dispatch.after_drop(
+ metadata, self.connection, tables=collection,
+ checkfirst=self.checkfirst, _ddl_runner=self)
+
+ def _can_drop_table(self, table):
+ self.dialect.validate_identifier(table.name)
+ if table.schema:
+ self.dialect.validate_identifier(table.schema)
+ return not self.checkfirst or self.dialect.has_table(self.connection,
+ table.name, schema=table.schema)
+
+ def _can_drop_sequence(self, sequence):
+ return self.dialect.supports_sequences and \
+ ((not self.dialect.sequences_optional or
+ not sequence.optional) and
+ (not self.checkfirst or
+ self.dialect.has_sequence(
+ self.connection,
+ sequence.name,
+ schema=sequence.schema))
+ )
+
+ def visit_index(self, index):
+ self.connection.execute(DropIndex(index))
+
+ def visit_table(self, table, drop_ok=False):
+ if not drop_ok and not self._can_drop_table(table):
+ return
+
+ table.dispatch.before_drop(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ for column in table.columns:
+ if column.default is not None:
+ self.traverse_single(column.default)
+
+ self.connection.execute(DropTable(table))
+
+ table.dispatch.after_drop(table, self.connection,
+ checkfirst=self.checkfirst,
+ _ddl_runner=self)
+
+ def visit_sequence(self, sequence, drop_ok=False):
+ if not drop_ok and not self._can_drop_sequence(sequence):
+ return
+ self.connection.execute(DropSequence(sequence))
+
+def sort_tables(tables, skip_fn=None, extra_dependencies=None):
+ """sort a collection of Table objects in order of
+ their foreign-key dependency."""
+
+ tables = list(tables)
+ tuples = []
+ if extra_dependencies is not None:
+ tuples.extend(extra_dependencies)
+
+ def visit_foreign_key(fkey):
+ if fkey.use_alter:
+ return
+ elif skip_fn and skip_fn(fkey):
+ return
+ parent_table = fkey.column.table
+ if parent_table in tables:
+ child_table = fkey.parent.table
+ if parent_table is not child_table:
+ tuples.append((parent_table, child_table))
+
+ for table in tables:
+ traverse(table,
+ {'schema_visitor': True},
+ {'foreign_key': visit_foreign_key})
+
+ tuples.extend(
+ [parent, table] for parent in table._extra_dependencies
+ )
+
+ return list(topological.sort(tuples, tables))
+
diff --git a/lib/sqlalchemy/sql/default_comparator.py b/lib/sqlalchemy/sql/default_comparator.py
new file mode 100644
index 000000000..c39dce9c6
--- /dev/null
+++ b/lib/sqlalchemy/sql/default_comparator.py
@@ -0,0 +1,278 @@
+# sql/default_comparator.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Default implementation of SQL comparison operations.
+"""
+
+from .. import exc, util
+from . import operators
+from . import type_api
+from .elements import BindParameter, True_, False_, BinaryExpression, \
+ Null, _const_expr, _clause_element_as_expr, \
+ ClauseList, ColumnElement, TextClause, UnaryExpression, \
+ collate, _is_literal, _literal_as_text
+from .selectable import SelectBase, Alias, Selectable, ScalarSelect
+
+class _DefaultColumnComparator(operators.ColumnOperators):
+ """Defines comparison and math operations.
+
+ See :class:`.ColumnOperators` and :class:`.Operators` for descriptions
+ of all operations.
+
+ """
+
+ @util.memoized_property
+ def type(self):
+ return self.expr.type
+
+ def operate(self, op, *other, **kwargs):
+ o = self.operators[op.__name__]
+ return o[0](self, self.expr, op, *(other + o[1:]), **kwargs)
+
+ def reverse_operate(self, op, other, **kwargs):
+ o = self.operators[op.__name__]
+ return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs)
+
+ def _adapt_expression(self, op, other_comparator):
+ """evaluate the return type of <self> <op> <othertype>,
+ and apply any adaptations to the given operator.
+
+ This method determines the type of a resulting binary expression
+ given two source types and an operator. For example, two
+ :class:`.Column` objects, both of the type :class:`.Integer`, will
+ produce a :class:`.BinaryExpression` that also has the type
+ :class:`.Integer` when compared via the addition (``+``) operator.
+ However, using the addition operator with an :class:`.Integer`
+ and a :class:`.Date` object will produce a :class:`.Date`, assuming
+ "days delta" behavior by the database (in reality, most databases
+ other than Postgresql don't accept this particular operation).
+
+ The method returns a tuple of the form <operator>, <type>.
+ The resulting operator and type will be those applied to the
+ resulting :class:`.BinaryExpression` as the final operator and the
+ right-hand side of the expression.
+
+        Note that only a subset of operators make use of
+ :meth:`._adapt_expression`,
+ including math operators and user-defined operators, but not
+ boolean comparison or special SQL keywords like MATCH or BETWEEN.
+
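+        For example, a sketch; ``some_int`` and ``some_date`` are assumed
+        :class:`.Column` objects of type :class:`.Integer` and
+        :class:`.Date`::
+
+            (some_int + some_int).type   # Integer
+            (some_int + some_date).type  # Date
+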
+ """
+ return op, other_comparator.type
+
+ def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
+ _python_is_types=(util.NoneType, bool),
+ **kwargs):
+
+ if isinstance(obj, _python_is_types + (Null, True_, False_)):
+
+ # allow x ==/!= True/False to be treated as a literal.
+ # this comes out to "== / != true/false" or "1/0" if those
+            # constants aren't supported, and works on all platforms
+ if op in (operators.eq, operators.ne) and \
+ isinstance(obj, (bool, True_, False_)):
+ return BinaryExpression(expr,
+ _literal_as_text(obj),
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
+ else:
+ # all other None/True/False uses IS, IS NOT
+ if op in (operators.eq, operators.is_):
+ return BinaryExpression(expr, _const_expr(obj),
+ operators.is_,
+ negate=operators.isnot)
+ elif op in (operators.ne, operators.isnot):
+ return BinaryExpression(expr, _const_expr(obj),
+ operators.isnot,
+ negate=operators.is_)
+ else:
+ raise exc.ArgumentError(
+ "Only '=', '!=', 'is_()', 'isnot()' operators can "
+ "be used with None/True/False")
+ else:
+ obj = self._check_literal(expr, op, obj)
+
+ if reverse:
+ return BinaryExpression(obj,
+ expr,
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
+ else:
+ return BinaryExpression(expr,
+ obj,
+ op,
+ type_=type_api.BOOLEANTYPE,
+ negate=negate, modifiers=kwargs)
+
+ def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
+ **kw):
+ obj = self._check_literal(expr, op, obj)
+
+ if reverse:
+ left, right = obj, expr
+ else:
+ left, right = expr, obj
+
+ if result_type is None:
+ op, result_type = left.comparator._adapt_expression(
+ op, right.comparator)
+
+ return BinaryExpression(left, right, op, type_=result_type)
+
+ def _scalar(self, expr, op, fn, **kw):
+ return fn(expr)
+
+ def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw):
+ seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
+
+ if isinstance(seq_or_selectable, ScalarSelect):
+ return self._boolean_compare(expr, op, seq_or_selectable,
+ negate=negate_op)
+ elif isinstance(seq_or_selectable, SelectBase):
+
+ # TODO: if we ever want to support (x, y, z) IN (select x,
+ # y, z from table), we would need a multi-column version of
+            # as_scalar() to produce a multi-column selectable that
+ # does not export itself as a FROM clause
+
+ return self._boolean_compare(
+ expr, op, seq_or_selectable.as_scalar(),
+ negate=negate_op, **kw)
+ elif isinstance(seq_or_selectable, (Selectable, TextClause)):
+ return self._boolean_compare(expr, op, seq_or_selectable,
+ negate=negate_op, **kw)
+
+ # Handle non selectable arguments as sequences
+ args = []
+ for o in seq_or_selectable:
+ if not _is_literal(o):
+ if not isinstance(o, operators.ColumnOperators):
+                    raise exc.InvalidRequestError(
+                        'in() function accepts either a list of '
+                        'non-selectable values, or a selectable: %r' % o)
+ elif o is None:
+ o = Null()
+ else:
+ o = expr._bind_param(op, o)
+ args.append(o)
+ if len(args) == 0:
+
+ # Special case handling for empty IN's, behave like
+ # comparison against zero row selectable. We use != to
+ # build the contradiction as it handles NULL values
+ # appropriately, i.e. "not (x IN ())" should not return NULL
+ # values for x.
+
+ util.warn('The IN-predicate on "%s" was invoked with an '
+ 'empty sequence. This results in a '
+ 'contradiction, which nonetheless can be '
+ 'expensive to evaluate. Consider alternative '
+ 'strategies for improved performance.' % expr)
+ if op is operators.in_op:
+ return expr != expr
+ else:
+ return expr == expr
+
+ return self._boolean_compare(expr, op,
+ ClauseList(*args).self_group(against=op),
+ negate=negate_op)
+
+ def _unsupported_impl(self, expr, op, *arg, **kw):
+ raise NotImplementedError("Operator '%s' is not supported on "
+ "this expression" % op.__name__)
+
+ def _neg_impl(self, expr, op, **kw):
+ """See :meth:`.ColumnOperators.__neg__`."""
+ return UnaryExpression(expr, operator=operators.neg)
+
+ def _match_impl(self, expr, op, other, **kw):
+ """See :meth:`.ColumnOperators.match`."""
+ return self._boolean_compare(expr, operators.match_op,
+ self._check_literal(expr, operators.match_op,
+ other))
+
+ def _distinct_impl(self, expr, op, **kw):
+ """See :meth:`.ColumnOperators.distinct`."""
+ return UnaryExpression(expr, operator=operators.distinct_op,
+ type_=expr.type)
+
+ def _between_impl(self, expr, op, cleft, cright, **kw):
+ """See :meth:`.ColumnOperators.between`."""
+ return BinaryExpression(
+ expr,
+ ClauseList(
+ self._check_literal(expr, operators.and_, cleft),
+ self._check_literal(expr, operators.and_, cright),
+ operator=operators.and_,
+ group=False, group_contents=False),
+ operators.between_op)
+
+ def _collate_impl(self, expr, op, other, **kw):
+ return collate(expr, other)
+
+ # a mapping of operators with the method they use, along with
+ # their negated operator for comparison operators
+ operators = {
+ "add": (_binary_operate,),
+ "mul": (_binary_operate,),
+ "sub": (_binary_operate,),
+ "div": (_binary_operate,),
+ "mod": (_binary_operate,),
+ "truediv": (_binary_operate,),
+ "custom_op": (_binary_operate,),
+ "concat_op": (_binary_operate,),
+ "lt": (_boolean_compare, operators.ge),
+ "le": (_boolean_compare, operators.gt),
+ "ne": (_boolean_compare, operators.eq),
+ "gt": (_boolean_compare, operators.le),
+ "ge": (_boolean_compare, operators.lt),
+ "eq": (_boolean_compare, operators.ne),
+ "like_op": (_boolean_compare, operators.notlike_op),
+ "ilike_op": (_boolean_compare, operators.notilike_op),
+ "notlike_op": (_boolean_compare, operators.like_op),
+ "notilike_op": (_boolean_compare, operators.ilike_op),
+ "contains_op": (_boolean_compare, operators.notcontains_op),
+ "startswith_op": (_boolean_compare, operators.notstartswith_op),
+ "endswith_op": (_boolean_compare, operators.notendswith_op),
+ "desc_op": (_scalar, UnaryExpression._create_desc),
+ "asc_op": (_scalar, UnaryExpression._create_asc),
+ "nullsfirst_op": (_scalar, UnaryExpression._create_nullsfirst),
+ "nullslast_op": (_scalar, UnaryExpression._create_nullslast),
+ "in_op": (_in_impl, operators.notin_op),
+ "notin_op": (_in_impl, operators.in_op),
+ "is_": (_boolean_compare, operators.is_),
+ "isnot": (_boolean_compare, operators.isnot),
+ "collate": (_collate_impl,),
+ "match_op": (_match_impl,),
+ "distinct_op": (_distinct_impl,),
+ "between_op": (_between_impl, ),
+ "neg": (_neg_impl,),
+ "getitem": (_unsupported_impl,),
+ "lshift": (_unsupported_impl,),
+ "rshift": (_unsupported_impl,),
+ }
+
+ def _check_literal(self, expr, operator, other):
+ if isinstance(other, (ColumnElement, TextClause)):
+ if isinstance(other, BindParameter) and \
+ other.type._isnull:
+ other = other._clone()
+ other.type = expr.type
+ return other
+ elif hasattr(other, '__clause_element__'):
+ other = other.__clause_element__()
+ elif isinstance(other, type_api.TypeEngine.Comparator):
+ other = other.expr
+
+ if isinstance(other, (SelectBase, Alias)):
+ return other.as_scalar()
+ elif not isinstance(other, (ColumnElement, TextClause)):
+ return expr._bind_param(operator, other)
+ else:
+ return other
+
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
new file mode 100644
index 000000000..854b894ee
--- /dev/null
+++ b/lib/sqlalchemy/sql/dml.py
@@ -0,0 +1,769 @@
+# sql/dml.py
+# Copyright (C) 2009-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`.
+
+"""
+
+from .base import Executable, _generative, _from_objects, DialectKWArgs
+from .elements import ClauseElement, _literal_as_text, Null, and_, _clone
+from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes
+from .. import util
+from .. import exc
+
+class UpdateBase(DialectKWArgs, HasPrefixes, Executable, ClauseElement):
+ """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
+
+ """
+
+ __visit_name__ = 'update_base'
+
+ _execution_options = \
+ Executable._execution_options.union({'autocommit': True})
+ _hints = util.immutabledict()
+ _prefixes = ()
+
+ def _process_colparams(self, parameters):
+ def process_single(p):
+ if isinstance(p, (list, tuple)):
+ return dict(
+ (c.key, pval)
+ for c, pval in zip(self.table.c, p)
+ )
+ else:
+ return p
+
+ if isinstance(parameters, (list, tuple)) and \
+ isinstance(parameters[0], (list, tuple, dict)):
+
+ if not self._supports_multi_parameters:
+ raise exc.InvalidRequestError(
+ "This construct does not support "
+ "multiple parameter sets.")
+
+ return [process_single(p) for p in parameters], True
+ else:
+ return process_single(parameters), False
+
+ def params(self, *arg, **kw):
+ """Set the parameters for the statement.
+
+ This method raises ``NotImplementedError`` on the base class,
+ and is overridden by :class:`.ValuesBase` to provide the
+ SET/VALUES clause of UPDATE and INSERT.
+
+ """
+ raise NotImplementedError(
+ "params() is not supported for INSERT/UPDATE/DELETE statements."
+ " To set the values for an INSERT or UPDATE statement, use"
+ " stmt.values(**parameters).")
+
+ def bind(self):
+ """Return a 'bind' linked to this :class:`.UpdateBase`
+ or a :class:`.Table` associated with it.
+
+ """
+ return self._bind or self.table.bind
+
+ def _set_bind(self, bind):
+ self._bind = bind
+ bind = property(bind, _set_bind)
+
+ @_generative
+ def returning(self, *cols):
+ """Add a :term:`RETURNING` or equivalent clause to this statement.
+
+ e.g.::
+
+ stmt = table.update().\\
+ where(table.c.data == 'value').\\
+ values(status='X').\\
+ returning(table.c.server_flag, table.c.updated_timestamp)
+
+ for server_flag, updated_timestamp in connection.execute(stmt):
+ print(server_flag, updated_timestamp)
+
+ The given collection of column expressions should be derived from
+ the table that is
+ the target of the INSERT, UPDATE, or DELETE. While :class:`.Column`
+ objects are typical, the elements can also be expressions::
+
+ stmt = table.insert().returning(
+ (table.c.first_name + " " + table.c.last_name).label('fullname')
+ )
+
+ Upon compilation, a RETURNING clause, or database equivalent,
+ will be rendered within the statement. For INSERT and UPDATE,
+ the values are the newly inserted/updated values. For DELETE,
+ the values are those of the rows which were deleted.
+
+ Upon execution, the values of the columns to be returned
+ are made available via the result set and can be iterated
+ using :meth:`.ResultProxy.fetchone` and similar. For DBAPIs which do not
+        natively support returning values (e.g. cx_oracle),
+ SQLAlchemy will approximate this behavior at the result level
+ so that a reasonable amount of behavioral neutrality is
+ provided.
+
+ Note that not all databases/DBAPIs
+ support RETURNING. For those backends with no support,
+ an exception is raised upon compilation and/or execution.
+ For those who do support it, the functionality across backends
+ varies greatly, including restrictions on executemany()
+ and other statements which return multiple rows. Please
+ read the documentation notes for the database in use in
+ order to determine the availability of RETURNING.
+
+ .. seealso::
+
+ :meth:`.ValuesBase.return_defaults` - an alternative method tailored
+ towards efficient fetching of server-side defaults and triggers
+ for single-row INSERTs or UPDATEs.
+
+
+ """
+ self._returning = cols
+
+
+ @_generative
+ def with_hint(self, text, selectable=None, dialect_name="*"):
+ """Add a table hint for a single table to this
+ INSERT/UPDATE/DELETE statement.
+
+ .. note::
+
+ :meth:`.UpdateBase.with_hint` currently applies only to
+ Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use
+ :meth:`.UpdateBase.prefix_with`.
+
+ The text of the hint is rendered in the appropriate
+ location for the database backend in use, relative
+ to the :class:`.Table` that is the subject of this
+ statement, or optionally to that of the given
+ :class:`.Table` passed as the ``selectable`` argument.
+
+ The ``dialect_name`` option will limit the rendering of a particular
+        hint to a particular backend. For example, to add a hint
+ that only takes effect for SQL Server::
+
+ mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql")
+
+ .. versionadded:: 0.7.6
+
+ :param text: Text of the hint.
+ :param selectable: optional :class:`.Table` that specifies
+ an element of the FROM clause within an UPDATE or DELETE
+ to be the subject of the hint - applies only to certain backends.
+ :param dialect_name: defaults to ``*``, if specified as the name
+ of a particular dialect, will apply these hints only when
+ that dialect is in use.
+ """
+ if selectable is None:
+ selectable = self.table
+
+ self._hints = self._hints.union(
+ {(selectable, dialect_name): text})
+
+
+class ValuesBase(UpdateBase):
+ """Supplies support for :meth:`.ValuesBase.values` to
+ INSERT and UPDATE constructs."""
+
+ __visit_name__ = 'values_base'
+
+ _supports_multi_parameters = False
+ _has_multi_parameters = False
+ select = None
+
+ def __init__(self, table, values, prefixes):
+ self.table = _interpret_as_from(table)
+ self.parameters, self._has_multi_parameters = \
+ self._process_colparams(values)
+ if prefixes:
+ self._setup_prefixes(prefixes)
+
+ @_generative
+ def values(self, *args, **kwargs):
+ """specify a fixed VALUES clause for an INSERT statement, or the SET
+ clause for an UPDATE.
+
+ Note that the :class:`.Insert` and :class:`.Update` constructs support
+ per-execution time formatting of the VALUES and/or SET clauses,
+ based on the arguments passed to :meth:`.Connection.execute`. However,
+ the :meth:`.ValuesBase.values` method can be used to "fix" a particular
+ set of parameters into the statement.
+
+ Multiple calls to :meth:`.ValuesBase.values` will produce a new
+ construct, each one with the parameter list modified to include
+ the new parameters sent. In the typical case of a single
+ dictionary of parameters, the newly passed keys will replace
+ the same keys in the previous construct. In the case of a list-based
+ "multiple values" construct, each new list of values is extended
+ onto the existing list of values.
+
+ :param \**kwargs: key value pairs representing the string key
+ of a :class:`.Column` mapped to the value to be rendered into the
+ VALUES or SET clause::
+
+ users.insert().values(name="some name")
+
+ users.update().where(users.c.id==5).values(name="some name")
+
+ :param \*args: Alternatively, a dictionary, tuple or list
+ of dictionaries or tuples can be passed as a single positional
+ argument in order to form the VALUES or
+ SET clause of the statement. The single dictionary form
+ works the same as the kwargs form::
+
+ users.insert().values({"name": "some name"})
+
+ If a tuple is passed, the tuple should contain the same number
+ of columns as the target :class:`.Table`::
+
+ users.insert().values((5, "some name"))
+
+        The :class:`.Insert` construct also supports a multiple-VALUES
+ construct, for those backends which support this SQL syntax
+ (SQLite, Postgresql, MySQL). This mode is indicated by passing a list
+ of one or more dictionaries/tuples::
+
+ users.insert().values([
+ {"name": "some name"},
+ {"name": "some other name"},
+ {"name": "yet another name"},
+ ])
+
+ In the case of an :class:`.Update`
+ construct, only the single dictionary/tuple form is accepted,
+        otherwise an exception is raised. It is also an error to
+        attempt to mix the single- and multiple-value styles together,
+ either through multiple :meth:`.ValuesBase.values` calls
+ or by sending a list + kwargs at the same time.
+
+ .. note::
+
+            Passing a multiple values list to :meth:`.ValuesBase.values` is
+            *not* the same as passing one to the :meth:`.Connection.execute`
+ method. Passing a list of parameter sets to :meth:`.ValuesBase.values`
+ produces a construct of this form::
+
+ INSERT INTO table (col1, col2, col3) VALUES
+ (col1_0, col2_0, col3_0),
+ (col1_1, col2_1, col3_1),
+ ...
+
+ whereas a multiple list passed to :meth:`.Connection.execute`
+ has the effect of using the DBAPI
+ `executemany() <http://www.python.org/dev/peps/pep-0249/#id18>`_
+ method, which provides a high-performance system of invoking
+ a single-row INSERT statement many times against a series
+ of parameter sets. The "executemany" style is supported by
+ all database backends, as it does not depend on a special SQL
+ syntax.
+
+ .. versionadded:: 0.8
+ Support for multiple-VALUES INSERT statements.
+
+
+ .. seealso::
+
+ :ref:`inserts_and_updates` - SQL Expression
+ Language Tutorial
+
+ :func:`~.expression.insert` - produce an ``INSERT`` statement
+
+ :func:`~.expression.update` - produce an ``UPDATE`` statement
+
+ """
+ if self.select is not None:
+ raise exc.InvalidRequestError(
+ "This construct already inserts from a SELECT")
+ if self._has_multi_parameters and kwargs:
+ raise exc.InvalidRequestError(
+ "This construct already has multiple parameter sets.")
+
+ if args:
+ if len(args) > 1:
+ raise exc.ArgumentError(
+ "Only a single dictionary/tuple or list of "
+ "dictionaries/tuples is accepted positionally.")
+ v = args[0]
+ else:
+ v = {}
+
+ if self.parameters is None:
+ self.parameters, self._has_multi_parameters = \
+ self._process_colparams(v)
+ else:
+ if self._has_multi_parameters:
+ self.parameters = list(self.parameters)
+ p, self._has_multi_parameters = self._process_colparams(v)
+ if not self._has_multi_parameters:
+ raise exc.ArgumentError(
+ "Can't mix single-values and multiple values "
+ "formats in one statement")
+
+ self.parameters.extend(p)
+ else:
+ self.parameters = self.parameters.copy()
+ p, self._has_multi_parameters = self._process_colparams(v)
+ if self._has_multi_parameters:
+ raise exc.ArgumentError(
+ "Can't mix single-values and multiple values "
+ "formats in one statement")
+ self.parameters.update(p)
+
+ if kwargs:
+ if self._has_multi_parameters:
+ raise exc.ArgumentError(
+ "Can't pass kwargs and multiple parameter sets "
+ "simultaenously")
+ else:
+ self.parameters.update(kwargs)
+
+ @_generative
+ def return_defaults(self, *cols):
+ """Make use of a :term:`RETURNING` clause for the purpose
+ of fetching server-side expressions and defaults.
+
+ E.g.::
+
+ stmt = table.insert().values(data='newdata').return_defaults()
+
+ result = connection.execute(stmt)
+
+ server_created_at = result.returned_defaults['created_at']
+
+ When used against a backend that supports RETURNING, all column
+        values generated by SQL expression or server-side default will be added
+ to any existing RETURNING clause, provided that
+ :meth:`.UpdateBase.returning` is not used simultaneously. The column values
+ will then be available on the result using the
+ :attr:`.ResultProxy.returned_defaults` accessor as a
+ dictionary, referring to values keyed to the :class:`.Column` object
+ as well as its ``.key``.
+
+ This method differs from :meth:`.UpdateBase.returning` in these ways:
+
+ 1. :meth:`.ValuesBase.return_defaults` is only intended for use with
+ an INSERT or an UPDATE statement that matches exactly one row.
+ While the RETURNING construct in the general sense supports multiple
+ rows for a multi-row UPDATE or DELETE statement, or for special
+ cases of INSERT that return multiple rows (e.g. INSERT from SELECT,
+ multi-valued VALUES clause), :meth:`.ValuesBase.return_defaults`
+ is intended only
+ for an "ORM-style" single-row INSERT/UPDATE statement. The row
+      returned by the statement is also consumed implicitly when
+ :meth:`.ValuesBase.return_defaults` is used. By contrast,
+ :meth:`.UpdateBase.returning` leaves the RETURNING result-set intact
+ with a collection of any number of rows.
+
+ 2. It is compatible with the existing logic to fetch auto-generated
+ primary key values, also known as "implicit returning". Backends that
+ support RETURNING will automatically make use of RETURNING in order
+ to fetch the value of newly generated primary keys; while the
+ :meth:`.UpdateBase.returning` method circumvents this behavior,
+ :meth:`.ValuesBase.return_defaults` leaves it intact.
+
+ 3. It can be called against any backend. Backends that don't support
+ RETURNING will skip the usage of the feature, rather than raising
+ an exception. The return value of :attr:`.ResultProxy.returned_defaults`
+       will be ``None``.
+
+ :meth:`.ValuesBase.return_defaults` is used by the ORM to provide
+ an efficient implementation for the ``eager_defaults`` feature of
+ :func:`.mapper`.
+
+ :param cols: optional list of column key names or :class:`.Column`
+        objects. If omitted, all column expressions evaluated on the server
+ are added to the returning list.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :meth:`.UpdateBase.returning`
+
+ :attr:`.ResultProxy.returned_defaults`
+
+ """
+ self._return_defaults = cols or True
+
+
+class Insert(ValuesBase):
+ """Represent an INSERT construct.
+
+ The :class:`.Insert` object is created using the
+ :func:`~.expression.insert()` function.
+
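+    E.g., a sketch against a hypothetical ``users`` table and
+    ``connection``::
+
+        stmt = users.insert().values(name='some name')
+        connection.execute(stmt)
+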
+ .. seealso::
+
+ :ref:`coretutorial_insert_expressions`
+
+ """
+ __visit_name__ = 'insert'
+
+ _supports_multi_parameters = True
+
+ def __init__(self,
+ table,
+ values=None,
+ inline=False,
+ bind=None,
+ prefixes=None,
+ returning=None,
+ return_defaults=False,
+ **dialect_kw):
+ """Construct an :class:`.Insert` object.
+
+ Similar functionality is available via the
+ :meth:`~.TableClause.insert` method on
+ :class:`~.schema.Table`.
+
+ :param table: :class:`.TableClause` which is the subject of the insert.
+
+ :param values: collection of values to be inserted; see
+ :meth:`.Insert.values` for a description of allowed formats here.
+ Can be omitted entirely; a :class:`.Insert` construct will also
+ dynamically render the VALUES clause at execution time based on
+ the parameters passed to :meth:`.Connection.execute`.
+
+ :param inline: if True, SQL defaults will be compiled 'inline' into the
+ statement and not pre-executed.
+
+ If both `values` and compile-time bind parameters are present, the
+ compile-time bind parameters override the information specified
+ within `values` on a per-key basis.
+
+ The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
+ objects or their string identifiers. Each key may reference one of:
+
+ * a literal data value (i.e. string, number, etc.);
+ * a Column object;
+ * a SELECT statement.
+
+ If a ``SELECT`` statement is specified which references this
+ ``INSERT`` statement's table, the statement will be correlated
+ against the ``INSERT`` statement.
+
+ .. seealso::
+
+ :ref:`coretutorial_insert_expressions` - SQL Expression Tutorial
+
+ :ref:`inserts_and_updates` - SQL Expression Tutorial
+
+ """
+ ValuesBase.__init__(self, table, values, prefixes)
+ self._bind = bind
+ self.select = self.select_names = None
+ self.inline = inline
+ self._returning = returning
+ self._validate_dialect_kwargs(dialect_kw)
+ self._return_defaults = return_defaults
+
+ def get_children(self, **kwargs):
+ if self.select is not None:
+ return self.select,
+ else:
+ return ()
+
+ @_generative
+ def from_select(self, names, select):
+ """Return a new :class:`.Insert` construct which represents
+ an ``INSERT...FROM SELECT`` statement.
+
+ e.g.::
+
+ sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
+ ins = table2.insert().from_select(['a', 'b'], sel)
+
+ :param names: a sequence of string column names or :class:`.Column`
+ objects representing the target columns.
+ :param select: a :func:`.select` construct, :class:`.FromClause`
+ or other construct which resolves into a :class:`.FromClause`,
+ such as an ORM :class:`.Query` object, etc. The order of
+ columns returned from this FROM clause should correspond to the
+ order of columns sent as the ``names`` parameter; while this
+ is not checked before passing along to the database, the database
+ would normally raise an exception if these column lists don't
+ correspond.
+
+ .. note::
+
+ Depending on backend, it may be necessary for the :class:`.Insert`
+ statement to be constructed using the ``inline=True`` flag; this
+ flag will prevent the implicit usage of ``RETURNING`` when the
+ ``INSERT`` statement is rendered, which isn't supported on a backend
+ such as Oracle in conjunction with an ``INSERT..SELECT`` combination::
+
+ sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
+ ins = table2.insert(inline=True).from_select(['a', 'b'], sel)
+
+ .. note::
+
+           An INSERT..SELECT construct in SQL has no VALUES clause. Therefore
+ :class:`.Column` objects which utilize Python-side defaults
+ (e.g. as described at :ref:`metadata_defaults_toplevel`)
+ will **not** take effect when using :meth:`.Insert.from_select`.
+
+ .. versionadded:: 0.8.3
+
+ """
+ if self.parameters:
+ raise exc.InvalidRequestError(
+ "This construct already inserts value expressions")
+
+ self.parameters, self._has_multi_parameters = \
+ self._process_colparams(dict((n, Null()) for n in names))
+
+ self.select_names = names
+ self.select = _interpret_as_select(select)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # TODO: coverage
+ self.parameters = self.parameters.copy()
+ if self.select is not None:
+            self.select = clone(self.select, **kw)
+
+
+class Update(ValuesBase):
+ """Represent an Update construct.
+
+ The :class:`.Update` object is created using the :func:`update()` function.
+
+ """
+ __visit_name__ = 'update'
+
+ def __init__(self,
+ table,
+ whereclause=None,
+ values=None,
+ inline=False,
+ bind=None,
+ prefixes=None,
+ returning=None,
+ return_defaults=False,
+ **dialect_kw):
+ """Construct an :class:`.Update` object.
+
+ E.g.::
+
+ from sqlalchemy import update
+
+ stmt = update(users).where(users.c.id==5).\\
+ values(name='user #5')
+
+ Similar functionality is available via the
+ :meth:`~.TableClause.update` method on
+ :class:`.Table`::
+
+ stmt = users.update().\\
+ where(users.c.id==5).\\
+ values(name='user #5')
+
+ :param table: A :class:`.Table` object representing the database
+ table to be updated.
+
+ :param whereclause: Optional SQL expression describing the ``WHERE``
+ condition of the ``UPDATE`` statement. Modern applications
+ may prefer to use the generative :meth:`~Update.where()`
+ method to specify the ``WHERE`` clause.
+
+ The WHERE clause can refer to multiple tables.
+ For databases which support this, an ``UPDATE FROM`` clause will
+ be generated, or on MySQL, a multi-table update. The statement
+ will fail on databases that don't have support for multi-table
+ update statements. A SQL-standard method of referring to
+ additional tables in the WHERE clause is to use a correlated
+ subquery::
+
+ users.update().values(name='ed').where(
+ users.c.name==select([addresses.c.email_address]).\\
+ where(addresses.c.user_id==users.c.id).\\
+ as_scalar()
+ )
+
+ .. versionchanged:: 0.7.4
+ The WHERE clause can refer to multiple tables.
+
+ :param values:
+ Optional dictionary which specifies the ``SET`` conditions of the
+ ``UPDATE``. If left as ``None``, the ``SET``
+ conditions are determined from those parameters passed to the
+ statement during the execution and/or compilation of the
+ statement. When compiled standalone without any parameters,
+          the ``SET`` clause is generated for all columns.
+
+ Modern applications may prefer to use the generative
+ :meth:`.Update.values` method to set the values of the
+ UPDATE statement.
+
+ :param inline:
+ if True, SQL defaults present on :class:`.Column` objects via
+ the ``default`` keyword will be compiled 'inline' into the statement
+ and not pre-executed. This means that their values will not
+ be available in the dictionary returned from
+ :meth:`.ResultProxy.last_updated_params`.
+
+ If both ``values`` and compile-time bind parameters are present, the
+ compile-time bind parameters override the information specified
+ within ``values`` on a per-key basis.
+
+ The keys within ``values`` can be either :class:`.Column`
+ objects or their string identifiers (specifically the "key" of the
+ :class:`.Column`, normally but not necessarily equivalent to
+ its "name"). Normally, the
+ :class:`.Column` objects used here are expected to be
+ part of the target :class:`.Table` that is the table
+ to be updated. However when using MySQL, a multiple-table
+ UPDATE statement can refer to columns from any of
+ the tables referred to in the WHERE clause.
+
+ The values referred to in ``values`` are typically:
+
+ * a literal data value (i.e. string, number, etc.)
+ * a SQL expression, such as a related :class:`.Column`,
+ a scalar-returning :func:`.select` construct,
+ etc.
+
+ When combining :func:`.select` constructs within the values
+ clause of an :func:`.update` construct,
+ the subquery represented by the :func:`.select` should be
+ *correlated* to the parent table, that is, providing criterion
+ which links the table inside the subquery to the outer table
+ being updated::
+
+ users.update().values(
+ name=select([addresses.c.email_address]).\\
+ where(addresses.c.user_id==users.c.id).\\
+ as_scalar()
+ )
+
+ .. seealso::
+
+ :ref:`inserts_and_updates` - SQL Expression
+ Language Tutorial
+
+
+ """
+ ValuesBase.__init__(self, table, values, prefixes)
+ self._bind = bind
+ self._returning = returning
+ if whereclause is not None:
+ self._whereclause = _literal_as_text(whereclause)
+ else:
+ self._whereclause = None
+ self.inline = inline
+ self._validate_dialect_kwargs(dialect_kw)
+ self._return_defaults = return_defaults
+
+
+ def get_children(self, **kwargs):
+ if self._whereclause is not None:
+ return self._whereclause,
+ else:
+ return ()
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # TODO: coverage
+ self._whereclause = clone(self._whereclause, **kw)
+ self.parameters = self.parameters.copy()
+
+ @_generative
+ def where(self, whereclause):
+ """return a new update() construct with the given expression added to
+ its WHERE clause, joined to the existing clause via AND, if any.
+
+ """
+ if self._whereclause is not None:
+ self._whereclause = and_(self._whereclause,
+ _literal_as_text(whereclause))
+ else:
+ self._whereclause = _literal_as_text(whereclause)
+
+ @property
+ def _extra_froms(self):
+ # TODO: this could be made memoized
+ # if the memoization is reset on each generative call.
+ froms = []
+ seen = set([self.table])
+
+ if self._whereclause is not None:
+ for item in _from_objects(self._whereclause):
+ if not seen.intersection(item._cloned_set):
+ froms.append(item)
+ seen.update(item._cloned_set)
+
+ return froms
+
+
+class Delete(UpdateBase):
+ """Represent a DELETE construct.
+
+ The :class:`.Delete` object is created using the :func:`delete()` function.
+
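+    E.g., a sketch against a hypothetical ``users`` table and
+    ``connection``::
+
+        stmt = users.delete().where(users.c.id == 5)
+        connection.execute(stmt)
+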
+ """
+
+ __visit_name__ = 'delete'
+
+ def __init__(self,
+ table,
+ whereclause=None,
+ bind=None,
+ returning=None,
+ prefixes=None,
+ **dialect_kw):
+ """Construct :class:`.Delete` object.
+
+ Similar functionality is available via the
+ :meth:`~.TableClause.delete` method on
+ :class:`~.schema.Table`.
+
+        :param table: The table to be deleted from.
+
+        :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
+          condition of the ``DELETE`` statement. Note that the
+ :meth:`~Delete.where()` generative method may be used instead.
+
+ .. seealso::
+
+ :ref:`deletes` - SQL Expression Tutorial
+
+ """
+ self._bind = bind
+ self.table = _interpret_as_from(table)
+ self._returning = returning
+
+ if prefixes:
+ self._setup_prefixes(prefixes)
+
+ if whereclause is not None:
+ self._whereclause = _literal_as_text(whereclause)
+ else:
+ self._whereclause = None
+
+ self._validate_dialect_kwargs(dialect_kw)
+
+ def get_children(self, **kwargs):
+ if self._whereclause is not None:
+ return self._whereclause,
+ else:
+ return ()
+
+ @_generative
+ def where(self, whereclause):
+ """Add the given WHERE clause to a newly returned delete construct."""
+
+ if self._whereclause is not None:
+ self._whereclause = and_(self._whereclause,
+ _literal_as_text(whereclause))
+ else:
+ self._whereclause = _literal_as_text(whereclause)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # TODO: coverage
+ self._whereclause = clone(self._whereclause, **kw)
+
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
new file mode 100644
index 000000000..0e888fcf7
--- /dev/null
+++ b/lib/sqlalchemy/sql/elements.py
@@ -0,0 +1,2880 @@
+# sql/elements.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Core SQL expression elements, including :class:`.ClauseElement`,
+:class:`.ColumnElement`, and derived classes.
+
+"""
+
+from __future__ import unicode_literals
+
+from .. import util, exc, inspection
+from . import type_api
+from . import operators
+from .visitors import Visitable, cloned_traverse, traverse
+from .annotation import Annotated
+import itertools
+from .base import Executable, PARSE_AUTOCOMMIT, Immutable, NO_ARG
+from .base import _generative, Generative
+
+import re
+import operator
+
+def _clone(element, **kw):
+ return element._clone()
+
+def collate(expression, collation):
+ """Return the clause ``expression COLLATE collation``.
+
+ e.g.::
+
+ collate(mycolumn, 'utf8_bin')
+
+ produces::
+
+ mycolumn COLLATE utf8_bin
+
+ """
+
+ expr = _literal_as_binds(expression)
+ return BinaryExpression(
+ expr,
+ _literal_as_text(collation),
+ operators.collate, type_=expr.type)
+
+def between(ctest, cleft, cright):
+ """Return a ``BETWEEN`` predicate clause.
+
+ Equivalent of SQL ``clausetest BETWEEN clauseleft AND clauseright``.
+
+ The :meth:`.ColumnOperators.between` method on all
+ :class:`.ColumnElement` subclasses provides
+ similar functionality.
+
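+ E.g., a minimal sketch, assuming a table ``mytable`` with an ``id`` column::
+
+ from sqlalchemy import between, select
+ stmt = select([mytable]).where(between(mytable.c.id, 5, 10))
+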
+ """
+ ctest = _literal_as_binds(ctest)
+ return ctest.between(cleft, cright)
+
+def literal(value, type_=None):
+ """Return a literal clause, bound to a bind parameter.
+
+ Literal clauses are created automatically when non- :class:`.ClauseElement`
+ objects (such as strings, ints, dates, etc.) are used in a comparison
+ operation with a :class:`.ColumnElement`
+ subclass, such as a :class:`~sqlalchemy.schema.Column` object.
+ Use this function to force the
+ generation of a literal clause, which will be created as a
+ :class:`BindParameter` with a bound value.
+
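+ E.g., a minimal sketch::
+
+ from sqlalchemy import literal, select, String
+ stmt = select([literal('some value', type_=String).label('x')])
+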
+ :param value: the value to be bound. Can be any Python object supported by
+ the underlying DB-API, or is translatable via the given type argument.
+
+ :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
+ will provide bind-parameter translation for this literal.
+
+ """
+ return BindParameter(None, value, type_=type_, unique=True)
+
+
+
+def type_coerce(expression, type_):
+ """Coerce the given expression into the given type,
+ on the Python side only.
+
+ :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
+ "CAST" expression is rendered - the given type is only applied towards
+ expression typing and against received result values.
+
+ e.g.::
+
+ from sqlalchemy.types import TypeDecorator
+ import uuid
+
+ class AsGuid(TypeDecorator):
+ impl = String
+
+ def process_bind_param(self, value, dialect):
+ if value is not None:
+ return str(value)
+ else:
+ return None
+
+ def process_result_value(self, value, dialect):
+ if value is not None:
+ return uuid.UUID(value)
+ else:
+ return None
+
+ conn.execute(
+ select([type_coerce(mytable.c.ident, AsGuid)]).\\
+ where(
+ type_coerce(mytable.c.ident, AsGuid) ==
+ uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
+ )
+ )
+
+ :param expression: Column-oriented expression.
+ :param type_: A :class:`.TypeEngine` class or instance indicating
+ the type to which the expression is coerced.
+
+ .. seealso::
+
+ :func:`.cast`
+
+ """
+ type_ = type_api.to_instance(type_)
+
+ if hasattr(expression, '__clause_element__'):
+ return type_coerce(expression.__clause_element__(), type_)
+ elif isinstance(expression, BindParameter):
+ bp = expression._clone()
+ bp.type = type_
+ return bp
+ elif not isinstance(expression, Visitable):
+ if expression is None:
+ return Null()
+ else:
+ return literal(expression, type_=type_)
+ else:
+ return Label(None, expression, type_=type_)
+
+
+
+
+
+def outparam(key, type_=None):
+ """Create an 'OUT' parameter for usage in functions (stored procedures),
+ for databases which support them.
+
+ The ``outparam`` can be used like a regular function parameter.
+ The "output" value will be available from the
+ :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters``
+ attribute, which returns a dictionary containing the values.
+
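+ E.g., a minimal sketch, assuming an Oracle stored procedure ``foo``
+ and a :class:`~sqlalchemy.engine.Connection` named ``connection``::
+
+ from sqlalchemy import bindparam, outparam, text, Integer, Float
+ stmt = text("begin foo(:x_in, :x_out); end;").bindparams(
+ bindparam('x_in', type_=Float), outparam('x_out', type_=Integer))
+ result = connection.execute(stmt, x_in=5)
+ print(result.out_parameters['x_out'])
+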
+ """
+ return BindParameter(
+ key, None, type_=type_, unique=False, isoutparam=True)
+
+
+
+
+def not_(clause):
+ """Return a negation of the given clause, i.e. ``NOT(clause)``.
+
+ The ``~`` operator is also overloaded on all
+ :class:`.ColumnElement` subclasses to produce the
+ same result.
+
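+ E.g., a minimal sketch, assuming a table ``users``::
+
+ from sqlalchemy import not_, select
+ stmt = select([users]).where(not_(users.c.name == 'ed'))
+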
+ """
+ return operators.inv(_literal_as_binds(clause))
+
+
+
+@inspection._self_inspects
+class ClauseElement(Visitable):
+ """Base class for elements of a programmatically constructed SQL
+ expression.
+
+ """
+ __visit_name__ = 'clause'
+
+ _annotations = {}
+ supports_execution = False
+ _from_objects = []
+ bind = None
+ _is_clone_of = None
+ is_selectable = False
+ is_clause_element = True
+
+ _order_by_label_element = None
+
+ def _clone(self):
+ """Create a shallow copy of this ClauseElement.
+
+ This method may be used by a generative API. It's also used as
+ part of the "deep" copy afforded by a traversal that combines
+ this with the _copy_internals() method.
+
+ """
+ c = self.__class__.__new__(self.__class__)
+ c.__dict__ = self.__dict__.copy()
+ ClauseElement._cloned_set._reset(c)
+ ColumnElement.comparator._reset(c)
+
+ # this is a marker that helps to "equate" clauses to each other
+ # when a Select returns its list of FROM clauses. the cloning
+ # process leaves around a lot of remnants of the previous clause
+ # typically in the form of column expressions still attached to the
+ # old table.
+ c._is_clone_of = self
+
+ return c
+
+ @property
+ def _constructor(self):
+ """return the 'constructor' for this ClauseElement.
+
+ This is for the purpose of creating a new object of
+ this type. Usually, it's just the element's __class__.
+ However, the "Annotated" version of the object overrides
+ to return the class of its proxied element.
+
+ """
+ return self.__class__
+
+ @util.memoized_property
+ def _cloned_set(self):
+ """Return the set consisting all cloned ancestors of this
+ ClauseElement.
+
+ Includes this ClauseElement. This accessor tends to be used for
+ FromClause objects to identify 'equivalent' FROM clauses, regardless
+ of transformative operations.
+
+ """
+ s = util.column_set()
+ f = self
+ while f is not None:
+ s.add(f)
+ f = f._is_clone_of
+ return s
+
+ def __getstate__(self):
+ d = self.__dict__.copy()
+ d.pop('_is_clone_of', None)
+ return d
+
+ def _annotate(self, values):
+ """return a copy of this ClauseElement with annotations
+ updated by the given dictionary.
+
+ """
+ return Annotated(self, values)
+
+ def _with_annotations(self, values):
+ """return a copy of this ClauseElement with annotations
+ replaced by the given dictionary.
+
+ """
+ return Annotated(self, values)
+
+ def _deannotate(self, values=None, clone=False):
+ """return a copy of this :class:`.ClauseElement` with annotations
+ removed.
+
+ :param values: optional tuple of individual values
+ to remove.
+
+ """
+ if clone:
+ # clone is used when we are also copying
+ # the expression for a deep deannotation
+ return self._clone()
+ else:
+ # if no clone, since we have no annotations we return
+ # self
+ return self
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_clauseelement(self, multiparams, params)
+
+ def unique_params(self, *optionaldict, **kwargs):
+ """Return a copy with :func:`bindparam()` elements replaced.
+
+ Same functionality as ``params()``, except adds ``unique=True``
+ to affected bind parameters so that multiple statements can be
+ used.
+
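+ E.g., a minimal sketch::
+
+ from sqlalchemy import bindparam, column
+ clause = column('x') == bindparam('foo')
+ c1 = clause.unique_params(foo=5)
+ c2 = clause.unique_params(foo=10)
+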
+ """
+ return self._params(True, optionaldict, kwargs)
+
+ def params(self, *optionaldict, **kwargs):
+ """Return a copy with :func:`bindparam()` elements replaced.
+
+ Returns a copy of this ClauseElement with :func:`bindparam()`
+ elements replaced with values taken from the given dictionary::
+
+ >>> clause = column('x') + bindparam('foo')
+ >>> print clause.compile().params
+ {'foo':None}
+ >>> print clause.params({'foo':7}).compile().params
+ {'foo':7}
+
+ """
+ return self._params(False, optionaldict, kwargs)
+
+ def _params(self, unique, optionaldict, kwargs):
+ if len(optionaldict) == 1:
+ kwargs.update(optionaldict[0])
+ elif len(optionaldict) > 1:
+ raise exc.ArgumentError(
+ "params() takes zero or one positional dictionary argument")
+
+ def visit_bindparam(bind):
+ if bind.key in kwargs:
+ bind.value = kwargs[bind.key]
+ bind.required = False
+ if unique:
+ bind._convert_to_unique()
+ return cloned_traverse(self, {}, {'bindparam': visit_bindparam})
+
+ def compare(self, other, **kw):
+ """Compare this ClauseElement to the given ClauseElement.
+
+ Subclasses should override the default behavior, which is a
+ straight identity comparison.
+
+ \**kw are arguments consumed by subclass compare() methods and
+ may be used to modify the criteria for comparison.
+ (see :class:`.ColumnElement`)
+
+ """
+ return self is other
+
+ def _copy_internals(self, clone=_clone, **kw):
+ """Reassign internal elements to be clones of themselves.
+
+ Called during a copy-and-traverse operation on newly
+ shallow-copied elements to create a deep copy.
+
+ The given clone function should be used, which may be applying
+ additional transformations to the element (i.e. replacement
+ traversal, cloned traversal, annotations).
+
+ """
+ pass
+
+ def get_children(self, **kwargs):
+ """Return immediate child elements of this :class:`.ClauseElement`.
+
+ This is used for visit traversal.
+
+ \**kwargs may contain flags that change the collection that is
+ returned, for example to return a subset of items in order to
+ cut down on larger traversals, or to return child items from a
+ different context (such as schema-level collections instead of
+ clause-level).
+
+ """
+ return []
+
+ def self_group(self, against=None):
+ """Apply a 'grouping' to this :class:`.ClauseElement`.
+
+ This method is overridden by subclasses to return a
+ "grouping" construct, i.e. parenthesis. In particular
+ it's used by "binary" expressions to provide a grouping
+ around themselves when placed into a larger expression,
+ as well as by :func:`.select` constructs when placed into
+ the FROM clause of another :func:`.select`. (Note that
+ subqueries should be normally created using the
+ :meth:`.Select.alias` method, as many platforms require
+ nested SELECT statements to be named).
+
+ As expressions are composed together, the application of
+ :meth:`self_group` is automatic - end-user code should never
+ need to use this method directly. Note that SQLAlchemy's
+ clause constructs take operator precedence into account -
+ so parentheses might not be needed, for example, in
+ an expression like ``x OR (y AND z)`` - AND takes precedence
+ over OR.
+
+ The base :meth:`self_group` method of :class:`.ClauseElement`
+ just returns self.
+ """
+ return self
+
+ @util.dependencies("sqlalchemy.engine.default")
+ def compile(self, default, bind=None, dialect=None, **kw):
+ """Compile this SQL expression.
+
+ The return value is a :class:`~.Compiled` object.
+ Calling ``str()`` or ``unicode()`` on the returned value will yield a
+ string representation of the result. The
+ :class:`~.Compiled` object also can return a
+ dictionary of bind parameter names and values
+ using the ``params`` accessor.
+
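+ E.g., a minimal sketch, compiling a statement ``stmt`` against a
+ specific dialect::
+
+ from sqlalchemy.dialects import postgresql
+ print(stmt.compile(dialect=postgresql.dialect()))
+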
+ :param bind: An ``Engine`` or ``Connection`` from which a
+ ``Compiled`` will be acquired. This argument takes precedence over
+ this :class:`.ClauseElement`'s bound engine, if any.
+
+ :param column_keys: Used for INSERT and UPDATE statements, a list of
+ column names which should be present in the VALUES clause of the
+ compiled statement. If ``None``, all columns from the target table
+ object are rendered.
+
+ :param dialect: A ``Dialect`` instance from which a ``Compiled``
+ will be acquired. This argument takes precedence over the `bind`
+ argument as well as this :class:`.ClauseElement`'s bound engine, if
+ any.
+
+ :param inline: Used for INSERT statements, for a dialect which does
+ not support inline retrieval of newly generated primary key
+ columns, will force the expression used to create the new primary
+ key value to be rendered inline within the INSERT statement's
+ VALUES clause. This typically refers to Sequence execution but may
+ also refer to any server-side default generation function
+ associated with a primary key `Column`.
+
+ """
+
+ if not dialect:
+ if bind:
+ dialect = bind.dialect
+ elif self.bind:
+ dialect = self.bind.dialect
+ bind = self.bind
+ else:
+ dialect = default.DefaultDialect()
+ return self._compiler(dialect, bind=bind, **kw)
+
+ def _compiler(self, dialect, **kw):
+ """Return a compiler appropriate for this ClauseElement, given a
+ Dialect."""
+
+ return dialect.statement_compiler(dialect, self, **kw)
+
+ def __str__(self):
+ if util.py3k:
+ return str(self.compile())
+ else:
+ return unicode(self.compile()).encode('ascii', 'backslashreplace')
+
+ def __and__(self, other):
+ return and_(self, other)
+
+ def __or__(self, other):
+ return or_(self, other)
+
+ def __invert__(self):
+ if hasattr(self, 'negation_clause'):
+ return self.negation_clause
+ else:
+ return self._negate()
+
+ def __bool__(self):
+ raise TypeError("Boolean value of this clause is not defined")
+
+ __nonzero__ = __bool__
+
+ def _negate(self):
+ return UnaryExpression(
+ self.self_group(against=operators.inv),
+ operator=operators.inv,
+ negate=None)
+
+ def __repr__(self):
+ friendly = getattr(self, 'description', None)
+ if friendly is None:
+ return object.__repr__(self)
+ else:
+ return '<%s.%s at 0x%x; %s>' % (
+ self.__module__, self.__class__.__name__, id(self), friendly)
+
+
+
+class ColumnElement(ClauseElement, operators.ColumnOperators):
+ """Represent a column-oriented SQL expression suitable for usage in the
+ "columns" clause, WHERE clause etc. of a statement.
+
+ While the most familiar kind of :class:`.ColumnElement` is the
+ :class:`.Column` object, :class:`.ColumnElement` serves as the basis
+ for any unit that may be present in a SQL expression, including
+ the expressions themselves, SQL functions, bound parameters,
+ literal expressions, keywords such as ``NULL``, etc.
+ :class:`.ColumnElement` is the ultimate base class for all such elements.
+
+ A :class:`.ColumnElement` provides the ability to generate new
+ :class:`.ColumnElement`
+ objects using Python expressions. This means that Python operators
+ such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
+ and allow the instantiation of further :class:`.ColumnElement` instances
+ which are composed from other, more fundamental :class:`.ColumnElement`
+ objects. For example, two :class:`.ColumnClause` objects can be added
+ together with the addition operator ``+`` to produce
+ a :class:`.BinaryExpression`.
+ Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
+ of :class:`.ColumnElement`::
+
+ >>> from sqlalchemy.sql import column
+ >>> column('a') + column('b')
+ <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
+ >>> print column('a') + column('b')
+ a + b
+
+ :class:`.ColumnElement` supports the ability to be a *proxy* element,
+ which indicates that the :class:`.ColumnElement` may be associated with
+ a :class:`.Selectable` which was derived from another :class:`.Selectable`.
+ An example of a "derived" :class:`.Selectable` is an :class:`.Alias` of a
+ :class:`~sqlalchemy.schema.Table`. For the ambitious, an in-depth
+ discussion of this concept can be found at
+ `Expression Transformations <http://techspot.zzzeek.org/2008/01/23/expression-transformations/>`_.
+
+ """
+
+ __visit_name__ = 'column'
+ primary_key = False
+ foreign_keys = []
+ _label = None
+ _key_label = None
+ _alt_names = ()
+
+ def self_group(self, against=None):
+ if against in (operators.and_, operators.or_, operators._asbool) and \
+ self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+ return AsBoolean(self, operators.istrue, operators.isfalse)
+ else:
+ return self
+
+ def _negate(self):
+ if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
+ return AsBoolean(self, operators.isfalse, operators.istrue)
+ else:
+ return super(ColumnElement, self)._negate()
+
+ @util.memoized_property
+ def type(self):
+ return type_api.NULLTYPE
+
+ @util.memoized_property
+ def comparator(self):
+ return self.type.comparator_factory(self)
+
+ def __getattr__(self, key):
+ try:
+ return getattr(self.comparator, key)
+ except AttributeError:
+ raise AttributeError(
+ 'Neither %r object nor %r object has an attribute %r' % (
+ type(self).__name__,
+ type(self.comparator).__name__,
+ key)
+ )
+
+ def operate(self, op, *other, **kwargs):
+ return op(self.comparator, *other, **kwargs)
+
+ def reverse_operate(self, op, other, **kwargs):
+ return op(other, self.comparator, **kwargs)
+
+ def _bind_param(self, operator, obj):
+ return BindParameter(None, obj,
+ _compared_to_operator=operator,
+ _compared_to_type=self.type, unique=True)
+
+ @property
+ def expression(self):
+ """Return a column expression.
+
+ Part of the inspection interface; returns self.
+
+ """
+ return self
+
+ @property
+ def _select_iterable(self):
+ return (self, )
+
+ @util.memoized_property
+ def base_columns(self):
+ return util.column_set(c for c in self.proxy_set
+ if not hasattr(c, '_proxies'))
+
+ @util.memoized_property
+ def proxy_set(self):
+ s = util.column_set([self])
+ if hasattr(self, '_proxies'):
+ for c in self._proxies:
+ s.update(c.proxy_set)
+ return s
+
+ def shares_lineage(self, othercolumn):
+ """Return True if the given :class:`.ColumnElement`
+ has a common ancestor to this :class:`.ColumnElement`."""
+
+ return bool(self.proxy_set.intersection(othercolumn.proxy_set))
+
+ def _compare_name_for_result(self, other):
+ """Return True if the given column element compares to this one
+ when targeting within a result row."""
+
+ return hasattr(other, 'name') and hasattr(self, 'name') and \
+ other.name == self.name
+
+ def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw):
+ """Create a new :class:`.ColumnElement` representing this
+ :class:`.ColumnElement` as it appears in the select list of a
+ descending selectable.
+
+ """
+ if name is None:
+ name = self.anon_label
+ try:
+ key = str(self)
+ except exc.UnsupportedCompilationError:
+ key = self.anon_label
+ else:
+ key = name
+ co = ColumnClause(
+ _as_truncated(name) if name_is_truncatable else name,
+ type_=getattr(self, 'type', None),
+ _selectable=selectable
+ )
+ co._proxies = [self]
+ if selectable._is_clone_of is not None:
+ co._is_clone_of = \
+ selectable._is_clone_of.columns.get(key)
+ selectable._columns[key] = co
+ return co
+
+ def compare(self, other, use_proxies=False, equivalents=None, **kw):
+ """Compare this ColumnElement to another.
+
+ Special arguments understood:
+
+ :param use_proxies: when True, consider two columns that
+ share a common base column as equivalent (i.e. shares_lineage())
+
+ :param equivalents: a dictionary of columns as keys mapped to sets
+ of columns. If the given "other" column is present in this
+ dictionary, if any of the columns in the corresponding set() pass the
+ comparison test, the result is True. This is used to expand the
+ comparison to other columns that may be known to be equivalent to
+ this one via foreign key or other criterion.
+
+ """
+ to_compare = (other, )
+ if equivalents and other in equivalents:
+ to_compare = equivalents[other].union(to_compare)
+
+ for oth in to_compare:
+ if use_proxies and self.shares_lineage(oth):
+ return True
+ elif hash(oth) == hash(self):
+ return True
+ else:
+ return False
+
+ def label(self, name):
+ """Produce a column label, i.e. ``<columnname> AS <name>``.
+
+ This is a shortcut to the :func:`~.expression.label` function.
+
+ If 'name' is None, an anonymous label name will be generated.
+
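+ E.g., a minimal sketch, assuming a table ``users``::
+
+ stmt = select([users.c.name.label('fullname')])
+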
+ """
+ return Label(name, self, self.type)
+
+ @util.memoized_property
+ def anon_label(self):
+ """provides a constant 'anonymous label' for this ColumnElement.
+
+ This is a label() expression which will be named at compile time.
+ The same label() is returned each time anon_label is called so
+ that expressions can reference anon_label multiple times, producing
+ the same label name at compile time.
+
+ the compiler uses this function automatically at compile time
+ for expressions that are known to be 'unnamed' like binary
+ expressions and function calls.
+
+ """
+ return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self,
+ 'name', 'anon')))
+
+
+
+class BindParameter(ColumnElement):
+ """Represent a bound parameter value.
+
+ """
+
+ __visit_name__ = 'bindparam'
+
+ _is_crud = False
+
+ def __init__(self, key, value=NO_ARG, type_=None,
+ unique=False, required=NO_ARG,
+ quote=None, callable_=None,
+ isoutparam=False,
+ _compared_to_operator=None,
+ _compared_to_type=None):
+ """Construct a new :class:`.BindParameter`.
+
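+ E.g., via the public :func:`.bindparam` function, in a minimal sketch
+ assuming a table ``users``::
+
+ from sqlalchemy import bindparam, select
+ stmt = select([users]).where(users.c.name == bindparam('username'))
+ result = connection.execute(stmt, username='jack')
+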
+ :param key:
+ the key for this bind param. Will be used in the generated
+ SQL statement for dialects that use named parameters. This
+ value may be modified when part of a compilation operation,
+ if other :class:`BindParameter` objects exist with the same
+ key, or if its length is too long and truncation is
+ required.
+
+ :param value:
+ Initial value for this bind param. This value may be
+ overridden by the dictionary of parameters sent to statement
+ compilation/execution.
+
+ Defaults to ``None``; however, if neither ``value`` nor
+ ``callable`` is passed explicitly, the ``required`` flag will be
+ set to ``True``, which has the effect of requiring a value to be
+ present when the statement is actually executed.
+
+ .. versionchanged:: 0.8 The ``required`` flag is set to ``True``
+ automatically if ``value`` or ``callable`` is not passed.
+
+ :param callable\_:
+ A callable function that takes the place of "value". The function
+ will be called at statement execution time to determine the
+ ultimate value. Used for scenarios where the actual bind
+ value cannot be determined at the point at which the clause
+ construct is created, but embedded bind values are still desirable.
+
+ :param type\_:
+ A ``TypeEngine`` object that will be used to pre-process the
+ value corresponding to this :class:`BindParameter` at
+ execution time.
+
+ :param unique:
+ if True, the key name of this BindParamClause will be
+ modified if another :class:`BindParameter` of the same name
+ already has been located within the containing
+ :class:`.ClauseElement`.
+
+ :param required:
+ If ``True``, a value is required at execution time. If not passed,
+ it is set to ``True`` or ``False`` based on whether or not
+ one of ``value`` or ``callable`` was passed.
+
+ .. versionchanged:: 0.8 If the ``required`` flag is not specified,
+ it will be set automatically to ``True`` or ``False`` depending
+ on whether or not the ``value`` or ``callable`` parameters
+ were specified.
+
+ :param quote:
+ True if this parameter name requires quoting and is not
+ currently known as a SQLAlchemy reserved word; this currently
+ only applies to the Oracle backend.
+
+ :param isoutparam:
+ if True, the parameter should be treated like a stored procedure
+ "OUT" parameter.
+
+ .. seealso::
+
+ :func:`.outparam`
+
+
+
+ """
+ if isinstance(key, ColumnClause):
+ type_ = key.type
+ key = key.name
+ if required is NO_ARG:
+ required = (value is NO_ARG and callable_ is None)
+ if value is NO_ARG:
+ value = None
+
+ if quote is not None:
+ key = quoted_name(key, quote)
+
+ if unique:
+ self.key = _anonymous_label('%%(%d %s)s' % (id(self), key
+ or 'param'))
+ else:
+ self.key = key or _anonymous_label('%%(%d param)s'
+ % id(self))
+
+ # identifying key that won't change across
+ # clones, used to identify the bind's logical
+ # identity
+ self._identifying_key = self.key
+
+ # key that was passed in the first place, used to
+ # generate new keys
+ self._orig_key = key or 'param'
+
+ self.unique = unique
+ self.value = value
+ self.callable = callable_
+ self.isoutparam = isoutparam
+ self.required = required
+ if type_ is None:
+ if _compared_to_type is not None:
+ self.type = \
+ _compared_to_type.coerce_compared_value(
+ _compared_to_operator, value)
+ else:
+ self.type = type_api._type_map.get(type(value),
+ type_api.NULLTYPE)
+ elif isinstance(type_, type):
+ self.type = type_()
+ else:
+ self.type = type_
+
+ def _with_value(self, value):
+ """Return a copy of this :class:`.BindParameter` with the given value set."""
+ cloned = self._clone()
+ cloned.value = value
+ cloned.callable = None
+ cloned.required = False
+ if cloned.type is type_api.NULLTYPE:
+ cloned.type = type_api._type_map.get(type(value),
+ type_api.NULLTYPE)
+ return cloned
+
+ @property
+ def effective_value(self):
+ """Return the value of this bound parameter,
+ taking into account if the ``callable`` parameter
+ was set.
+
+ The ``callable`` value will be evaluated
+ and returned if present, else ``value``.
+
+ """
+ if self.callable:
+ return self.callable()
+ else:
+ return self.value
+
+ def _clone(self):
+ c = ClauseElement._clone(self)
+ if self.unique:
+ c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key
+ or 'param'))
+ return c
+
+ def _convert_to_unique(self):
+ if not self.unique:
+ self.unique = True
+ self.key = _anonymous_label('%%(%d %s)s' % (id(self),
+ self._orig_key or 'param'))
+
+ def compare(self, other, **kw):
+ """Compare this :class:`BindParameter` to the given
+ clause."""
+
+ return isinstance(other, BindParameter) \
+ and self.type._compare_type_affinity(other.type) \
+ and self.value == other.value
+
+ def __getstate__(self):
+ """execute a deferred value for serialization purposes."""
+
+ d = self.__dict__.copy()
+ v = self.value
+ if self.callable:
+ v = self.callable()
+ d['callable'] = None
+ d['value'] = v
+ return d
+
+ def __repr__(self):
+ return 'BindParameter(%r, %r, type_=%r)' % (self.key,
+ self.value, self.type)
+
+
+class TypeClause(ClauseElement):
+ """Handle a type keyword in a SQL statement.
+
+ Used by the ``Case`` statement.
+
+ """
+
+ __visit_name__ = 'typeclause'
+
+ def __init__(self, type):
+ self.type = type
+
+
+class TextClause(Executable, ClauseElement):
+ """Represent a literal SQL text fragment.
+
+ Public constructor is the :func:`text()` function.
+
+ """
+
+ __visit_name__ = 'textclause'
+
+ _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)
+ _execution_options = \
+ Executable._execution_options.union(
+ {'autocommit': PARSE_AUTOCOMMIT})
+
+ @property
+ def _select_iterable(self):
+ return (self,)
+
+ @property
+ def selectable(self):
+ return self
+
+ _hide_froms = []
+
+ def __init__(
+ self,
+ text,
+ bind=None):
+ self._bind = bind
+ self._bindparams = {}
+
+ def repl(m):
+ self._bindparams[m.group(1)] = BindParameter(m.group(1))
+ return ':%s' % m.group(1)
+
+ # scan the string and search for bind parameter names, add them
+ # to the list of bindparams
+ self.text = self._bind_params_regex.sub(repl, text)
+
+ @classmethod
+ def _create_text(cls, text, bind=None, bindparams=None,
+ typemap=None, autocommit=None):
+ """Construct a new :class:`.TextClause` clause, representing
+ a textual SQL string directly.
+
+ E.g.::
+
+ from sqlalchemy import text
+
+ t = text("SELECT * FROM users")
+ result = connection.execute(t)
+
+ The advantages :func:`.text` provides over a plain string are
+ backend-neutral support for bind parameters, per-statement
+ execution options, as well as
+ bind parameter and result-column typing behavior, allowing
+ SQLAlchemy type constructs to play a role when executing
+ a statement that is specified literally. The construct can also
+ be provided with a ``.c`` collection of column elements, allowing
+ it to be embedded in other SQL expression constructs as a subquery.
+
+ Bind parameters are specified by name, using the format ``:name``.
+ E.g.::
+
+ t = text("SELECT * FROM users WHERE id=:user_id")
+ result = connection.execute(t, user_id=12)
+
+ For SQL statements where a colon is required verbatim, as within
+ an inline string, use a backslash to escape::
+
+ t = text("SELECT * FROM users WHERE name='\\:username'")
+
+ The :class:`.TextClause` construct includes methods which can
+ provide information about the bound parameters as well as the column
+ values which would be returned from the textual statement, assuming
+ it's an executable SELECT type of statement. The :meth:`.TextClause.bindparams`
+ method is used to provide bound parameter detail, and the
+ :meth:`.TextClause.columns` method allows specification of
+ return columns including names and types::
+
+ t = text("SELECT * FROM users WHERE id=:user_id").\\
+ bindparams(user_id=7).\\
+ columns(id=Integer, name=String)
+
+ for id, name in connection.execute(t):
+ print(id, name)
+
+ The :func:`.text` construct is used internally in cases when
+ a literal string is specified for part of a larger query, such as
+ when a string is specified to the :meth:`.Select.where` method of
+ :class:`.Select`. In those cases, the same
+ bind parameter syntax is applied::
+
+ s = select([users.c.id, users.c.name]).where("id=:user_id")
+ result = connection.execute(s, user_id=12)
+
+ Using :func:`.text` explicitly usually implies the construction
+ of a full, standalone statement. As such, SQLAlchemy refers
+ to it as an :class:`.Executable` object, and it supports
+ the :meth:`Executable.execution_options` method. For example,
+ a :func:`.text` construct that should be subject to "autocommit"
+ can be set explicitly so using the ``autocommit`` option::
+
+ t = text("EXEC my_procedural_thing()").\\
+ execution_options(autocommit=True)
+
+ Note that SQLAlchemy's usual "autocommit" behavior applies to
+ :func:`.text` constructs implicitly - that is, statements which begin
+ with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
+ or a variety of other phrases specific to certain backends, will
+ be eligible for autocommit if no transaction is in progress.
+
+ :param text:
+ the text of the SQL statement to be created. Use ``:<param>``
+ to specify bind parameters; they will be compiled to their
+ engine-specific format.
+
+ :param autocommit:
+ Deprecated. Use .execution_options(autocommit=<True|False>)
+ to set the autocommit option.
+
+ :param bind:
+ an optional connection or engine to be used for this text query.
+
+ :param bindparams:
+ Deprecated. A list of :func:`.bindparam` instances used to
+ provide information about parameters embedded in the statement.
+ This argument now invokes the :meth:`.TextClause.bindparams`
+ method on the construct before returning it. E.g.::
+
+ stmt = text("SELECT * FROM table WHERE id=:id",
+ bindparams=[bindparam('id', value=5, type_=Integer)])
+
+ Is equivalent to::
+
+ stmt = text("SELECT * FROM table WHERE id=:id").\\
+ bindparams(bindparam('id', value=5, type_=Integer))
+
+ .. deprecated:: 0.9.0 the :meth:`.TextClause.bindparams` method
+ supersedes the ``bindparams`` argument to :func:`.text`.
+
+ :param typemap:
+ Deprecated. A dictionary mapping the names of columns
+ represented in the columns clause of a ``SELECT`` statement
+ to type objects,
+ which will be used to perform post-processing on columns within
+ the result set. This parameter now invokes the :meth:`.TextClause.columns`
+ method, which returns a :class:`.TextAsFrom` construct that gains
+ a ``.c`` collection and can be embedded in other expressions. E.g.::
+
+ stmt = text("SELECT * FROM table",
+ typemap={'id': Integer, 'name': String},
+ )
+
+ Is equivalent to::
+
+ stmt = text("SELECT * FROM table").columns(id=Integer, name=String)
+
+ Or alternatively::
+
+ from sqlalchemy.sql import column
+ stmt = text("SELECT * FROM table").columns(
+ column('id', Integer),
+ column('name', String)
+ )
+
+ .. deprecated:: 0.9.0 the :meth:`.TextClause.columns` method
+ supersedes the ``typemap`` argument to :func:`.text`.
+
+ """
+ stmt = TextClause(text, bind=bind)
+ if bindparams:
+ stmt = stmt.bindparams(*bindparams)
+ if typemap:
+ stmt = stmt.columns(**typemap)
+ if autocommit is not None:
+ util.warn_deprecated('autocommit on text() is deprecated. '
+ 'Use .execution_options(autocommit=True)')
+ stmt = stmt.execution_options(autocommit=autocommit)
+
+ return stmt
+
+ @_generative
+ def bindparams(self, *binds, **names_to_values):
+ """Establish the values and/or types of bound parameters within
+ this :class:`.TextClause` construct.
+
+ Given a text construct such as::
+
+ from sqlalchemy import text
+ stmt = text("SELECT id, name FROM user WHERE name=:name "
+ "AND timestamp=:timestamp")
+
+ the :meth:`.TextClause.bindparams` method can be used to establish
+ the initial value of ``:name`` and ``:timestamp``,
+ using simple keyword arguments::
+
+ stmt = stmt.bindparams(name='jack',
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))
+
+ Where above, new :class:`.BindParameter` objects
+ will be generated with the names ``name`` and ``timestamp``, and
+ values of ``jack`` and ``datetime.datetime(2012, 10, 8, 15, 12, 5)``,
+ respectively. The types will be
+ inferred from the values given, in this case :class:`.String` and
+ :class:`.DateTime`.
+
+ When specific typing behavior is needed, the positional ``*binds``
+ argument can be used in which to specify :func:`.bindparam` constructs
+ directly. These constructs must include at least the ``key`` argument,
+ then an optional value and type::
+
+ from sqlalchemy import bindparam
+ stmt = stmt.bindparams(
+ bindparam('name', value='jack', type_=String),
+ bindparam('timestamp', type_=DateTime)
+ )
+
+ Above, we specified the type of :class:`.DateTime` for the ``timestamp``
+ bind, and the type of :class:`.String` for the ``name`` bind. In
+ the case of ``name`` we also set the default value of ``"jack"``.
+
+ Additional bound parameters can be supplied at statement execution
+ time, e.g.::
+
+ result = connection.execute(stmt,
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))
+
+ The :meth:`.TextClause.bindparams` method can be called repeatedly, where
+ it will re-use existing :class:`.BindParameter` objects to add new information.
+ For example, we can call :meth:`.TextClause.bindparams` first with
+ typing information, and a second time with value information, and it
+ will be combined::
+
+ stmt = text("SELECT id, name FROM user WHERE name=:name "
+ "AND timestamp=:timestamp")
+ stmt = stmt.bindparams(
+ bindparam('name', type_=String),
+ bindparam('timestamp', type_=DateTime)
+ )
+ stmt = stmt.bindparams(
+ name='jack',
+ timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
+ )
+
+
+ .. versionadded:: 0.9.0 The :meth:`.TextClause.bindparams` method supersedes
+ the argument ``bindparams`` passed to :func:`~.expression.text`.
+
+
+ """
+ self._bindparams = new_params = self._bindparams.copy()
+
+ for bind in binds:
+ try:
+ existing = new_params[bind.key]
+ except KeyError:
+ raise exc.ArgumentError(
+ "This text() construct doesn't define a "
+ "bound parameter named %r" % bind.key)
+ else:
+ new_params[existing.key] = bind
+
+ for key, value in names_to_values.items():
+ try:
+ existing = new_params[key]
+ except KeyError:
+ raise exc.ArgumentError(
+ "This text() construct doesn't define a "
+ "bound parameter named %r" % key)
+ else:
+ new_params[key] = existing._with_value(value)
+
+
+
+ @util.dependencies('sqlalchemy.sql.selectable')
+ def columns(self, selectable, *cols, **types):
+ """Turn this :class:`.TextClause` object into a :class:`.TextAsFrom`
+ object that can be embedded into another statement.
+
+ This function essentially bridges the gap between an entirely
+ textual SELECT statement and the SQL expression language concept
+ of a "selectable"::
+
+ from sqlalchemy.sql import column, text
+
+ stmt = text("SELECT id, name FROM some_table")
+ stmt = stmt.columns(column('id'), column('name')).alias('st')
+
+ stmt = select([mytable]).\\
+ select_from(
+ mytable.join(stmt, mytable.c.name == stmt.c.name)
+ ).where(stmt.c.id > 5)
+
+ Above, we used untyped :func:`.column` elements. These can also have
+ types specified, which will impact how the column behaves in expressions
+ as well as determining result set behavior::
+
+ stmt = text("SELECT id, name, timestamp FROM some_table")
+ stmt = stmt.columns(
+ column('id', Integer),
+ column('name', Unicode),
+ column('timestamp', DateTime)
+ )
+
+ for id, name, timestamp in connection.execute(stmt):
+ print(id, name, timestamp)
+
+ Keyword arguments allow just the names and types of columns to be specified,
+ where the :func:`.column` elements will be generated automatically::
+
+ stmt = text("SELECT id, name, timestamp FROM some_table")
+ stmt = stmt.columns(
+ id=Integer,
+ name=Unicode,
+ timestamp=DateTime
+ )
+
+ for id, name, timestamp in connection.execute(stmt):
+ print(id, name, timestamp)
+
+ The :meth:`.TextClause.columns` method provides a direct
+ route to calling :meth:`.FromClause.alias` as well as :meth:`.SelectBase.cte`
+ against a textual SELECT statement::
+
+ stmt = stmt.columns(id=Integer, name=String).cte('st')
+
+ stmt = select([sometable]).where(sometable.c.id == stmt.c.id)
+
+ .. versionadded:: 0.9.0 :func:`.text` can now be converted into a fully
+ featured "selectable" construct using the :meth:`.TextClause.columns`
+ method. This method supersedes the ``typemap`` argument to
+ :func:`.text`.
+
+ """
+
+ col_by_name = dict(
+ (col.key, col) for col in cols
+ )
+ for key, type_ in types.items():
+ col_by_name[key] = ColumnClause(key, type_)
+
+ return selectable.TextAsFrom(self, list(col_by_name.values()))
+
+ @property
+ def type(self):
+ return type_api.NULLTYPE
+
+ @property
+ def comparator(self):
+ return self.type.comparator_factory(self)
+
+ def self_group(self, against=None):
+ if against is operators.in_op:
+ return Grouping(self)
+ else:
+ return self
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self._bindparams = dict((b.key, clone(b, **kw))
+ for b in self._bindparams.values())
+
+ def get_children(self, **kwargs):
+ return list(self._bindparams.values())
+
+
+class Null(ColumnElement):
+ """Represent the NULL keyword in a SQL statement.
+
+ :class:`.Null` is accessed as a constant via the
+ :func:`.null` function.
+
+ """
+
+ __visit_name__ = 'null'
+
+ @util.memoized_property
+ def type(self):
+ return type_api.NULLTYPE
+
+ @classmethod
+ def _singleton(cls):
+ """Return a constant :class:`.Null` construct."""
+
+ return NULL
+
+ def compare(self, other):
+ return isinstance(other, Null)
+
+
+class False_(ColumnElement):
+ """Represent the ``false`` keyword, or equivalent, in a SQL statement.
+
+ :class:`.False_` is accessed as a constant via the
+ :func:`.false` function.
+
+ """
+
+ __visit_name__ = 'false'
+
+ @util.memoized_property
+ def type(self):
+ return type_api.BOOLEANTYPE
+
+ def _negate(self):
+ return TRUE
+
+ @classmethod
+ def _singleton(cls):
+ """Return a constant :class:`.False_` construct.
+
+ E.g.::
+
+ >>> from sqlalchemy import false
+ >>> print select([t.c.x]).where(false())
+ SELECT x FROM t WHERE false
+
+ A backend which does not support true/false constants will render as
+ an expression against 1 or 0::
+
+ >>> print select([t.c.x]).where(false())
+ SELECT x FROM t WHERE 0 = 1
+
+ The :func:`.true` and :func:`.false` constants also feature
+ "short circuit" operation within an :func:`.and_` or :func:`.or_`
+ conjunction::
+
+ >>> print select([t.c.x]).where(or_(t.c.x > 5, true()))
+ SELECT x FROM t WHERE true
+
+ >>> print select([t.c.x]).where(and_(t.c.x > 5, false()))
+ SELECT x FROM t WHERE false
+
+ .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
+ better integrated behavior within conjunctions and on dialects
+ that don't support true/false constants.
+
+ .. seealso::
+
+ :func:`.true`
+
+ """
+
+ return FALSE
+
+ def compare(self, other):
+ return isinstance(other, False_)
+
+class True_(ColumnElement):
+ """Represent the ``true`` keyword, or equivalent, in a SQL statement.
+
+ :class:`.True_` is accessed as a constant via the
+ :func:`.true` function.
+
+ """
+
+ __visit_name__ = 'true'
+
+ @util.memoized_property
+ def type(self):
+ return type_api.BOOLEANTYPE
+
+ def _negate(self):
+ return FALSE
+
+ @classmethod
+ def _ifnone(cls, other):
+ if other is None:
+ return cls._singleton()
+ else:
+ return other
+
+ @classmethod
+ def _singleton(cls):
+ """Return a constant :class:`.True_` construct.
+
+ E.g.::
+
+ >>> from sqlalchemy import true
+ >>> print select([t.c.x]).where(true())
+ SELECT x FROM t WHERE true
+
+ A backend which does not support true/false constants will render as
+ an expression against 1 or 0::
+
+ >>> print select([t.c.x]).where(true())
+ SELECT x FROM t WHERE 1 = 1
+
+ The :func:`.true` and :func:`.false` constants also feature
+ "short circuit" operation within an :func:`.and_` or :func:`.or_`
+ conjunction::
+
+ >>> print select([t.c.x]).where(or_(t.c.x > 5, true()))
+ SELECT x FROM t WHERE true
+
+ >>> print select([t.c.x]).where(and_(t.c.x > 5, false()))
+ SELECT x FROM t WHERE false
+
+ .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
+ better integrated behavior within conjunctions and on dialects
+ that don't support true/false constants.
+
+ .. seealso::
+
+ :func:`.false`
+
+ """
+
+ return TRUE
+
+ def compare(self, other):
+ return isinstance(other, True_)
+
+NULL = Null()
+FALSE = False_()
+TRUE = True_()
+
+class ClauseList(ClauseElement):
+ """Describe a list of clauses, separated by an operator.
+
+ By default, is comma-separated, such as a column listing.
+
+ """
+ __visit_name__ = 'clauselist'
+
+ def __init__(self, *clauses, **kwargs):
+ self.operator = kwargs.pop('operator', operators.comma_op)
+ self.group = kwargs.pop('group', True)
+ self.group_contents = kwargs.pop('group_contents', True)
+ if self.group_contents:
+ self.clauses = [
+ _literal_as_text(clause).self_group(against=self.operator)
+ for clause in clauses]
+ else:
+ self.clauses = [
+ _literal_as_text(clause)
+ for clause in clauses]
+
+ def __iter__(self):
+ return iter(self.clauses)
+
+ def __len__(self):
+ return len(self.clauses)
+
+ @property
+ def _select_iterable(self):
+ return iter(self)
+
+ def append(self, clause):
+ if self.group_contents:
+ self.clauses.append(_literal_as_text(clause).\
+ self_group(against=self.operator))
+ else:
+ self.clauses.append(_literal_as_text(clause))
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.clauses = [clone(clause, **kw) for clause in self.clauses]
+
+ def get_children(self, **kwargs):
+ return self.clauses
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(*[c._from_objects for c in self.clauses]))
+
+ def self_group(self, against=None):
+ if self.group and operators.is_precedent(self.operator, against):
+ return Grouping(self)
+ else:
+ return self
+
+ def compare(self, other, **kw):
+ """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`,
+ including a comparison of all the clause items.
+
+ """
+ if not isinstance(other, ClauseList) and len(self.clauses) == 1:
+ return self.clauses[0].compare(other, **kw)
+ elif isinstance(other, ClauseList) and \
+ len(self.clauses) == len(other.clauses):
+ for i in range(0, len(self.clauses)):
+ if not self.clauses[i].compare(other.clauses[i], **kw):
+ return False
+ else:
+ return self.operator == other.operator
+ else:
+ return False
+
+
+
+class BooleanClauseList(ClauseList, ColumnElement):
+ __visit_name__ = 'clauselist'
+
+ def __init__(self, *arg, **kw):
+ raise NotImplementedError(
+ "BooleanClauseList has a private constructor")
+
+ @classmethod
+ def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
+ convert_clauses = []
+
+ clauses = util.coerce_generator_arg(clauses)
+ for clause in clauses:
+ clause = _literal_as_text(clause)
+
+ if isinstance(clause, continue_on):
+ continue
+ elif isinstance(clause, skip_on):
+ return clause.self_group(against=operators._asbool)
+
+ convert_clauses.append(clause)
+
+ if len(convert_clauses) == 1:
+ return convert_clauses[0].self_group(against=operators._asbool)
+ elif not convert_clauses and clauses:
+ return clauses[0].self_group(against=operators._asbool)
+
+ convert_clauses = [c.self_group(against=operator)
+ for c in convert_clauses]
+
+ self = cls.__new__(cls)
+ self.clauses = convert_clauses
+ self.group = True
+ self.operator = operator
+ self.group_contents = True
+ self.type = type_api.BOOLEANTYPE
+ return self
+
+ @classmethod
+ def and_(cls, *clauses):
+ """Join a list of clauses together using the ``AND`` operator.
+
+ The ``&`` operator is also overloaded on all :class:`.ColumnElement`
+ subclasses to produce the
+ same result.
+
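+ E.g., a minimal sketch, assuming a table ``users``::
+
+ from sqlalchemy import and_, select
+ stmt = select([users]).where(
+ and_(users.c.name == 'ed', users.c.id > 5))
+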
+ """
+ return cls._construct(operators.and_, True_, False_, *clauses)
+
+ @classmethod
+ def or_(cls, *clauses):
+ """Join a list of clauses together using the ``OR`` operator.
+
+ The ``|`` operator is also overloaded on all
+ :class:`.ColumnElement` subclasses to produce the
+ same result.
+
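+ E.g., a minimal sketch, assuming a table ``users``::
+
+ from sqlalchemy import or_, select
+ stmt = select([users]).where(
+ or_(users.c.name == 'ed', users.c.name == 'jack'))
+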
+ """
+ return cls._construct(operators.or_, False_, True_, *clauses)
+
+ @property
+ def _select_iterable(self):
+ return (self, )
+
+ def self_group(self, against=None):
+ if not self.clauses:
+ return self
+ else:
+ return super(BooleanClauseList, self).self_group(against=against)
+
+ def _negate(self):
+ return ClauseList._negate(self)
+
+
+and_ = BooleanClauseList.and_
+or_ = BooleanClauseList.or_
+
+class Tuple(ClauseList, ColumnElement):
+ """Represent a SQL tuple."""
+
+ def __init__(self, *clauses, **kw):
+ """Return a :class:`.Tuple`.
+
+ Main usage is to produce a composite IN construct::
+
+ from sqlalchemy import tuple_
+
+ tuple_(table.c.col1, table.c.col2).in_(
+ [(1, 2), (5, 12), (10, 19)]
+ )
+
+ .. warning::
+
+ The composite IN construct is not supported by all backends,
+ and is currently known to work on Postgresql and MySQL,
+ but not SQLite. Unsupported backends will raise
+ a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such
+ an expression is invoked.
+
+ """
+
+ clauses = [_literal_as_binds(c) for c in clauses]
+ self.type = kw.pop('type_', None)
+ if self.type is None:
+ self.type = _type_from_args(clauses)
+ super(Tuple, self).__init__(*clauses, **kw)
+
+ @property
+ def _select_iterable(self):
+ return (self, )
+
+ def _bind_param(self, operator, obj):
+ return Tuple(*[
+ BindParameter(None, o, _compared_to_operator=operator,
+ _compared_to_type=self.type, unique=True)
+ for o in obj
+ ]).self_group()
+
+
+class Case(ColumnElement):
+ """Represent a SQL ``CASE`` construct.
+
+
+ """
+ __visit_name__ = 'case'
+
+ def __init__(self, whens, value=None, else_=None):
+ """Produce a :class:`.Case` object.
+
+ :param whens: A sequence of pairs, or alternatively a dict,
+ to be translated into "WHEN / THEN" clauses.
+
+ :param value: Optional for simple case statements, produces
+ a column expression as in "CASE <expr> WHEN ..."
+
+ :param else\_: Optional as well, for case defaults produces
+ the "ELSE" portion of the "CASE" statement.
+
+ The expressions used for THEN and ELSE,
+ when specified as strings, will be interpreted
+ as bound values. To specify textual SQL expressions
+ for these, use the :func:`literal_column`
+ construct.
+
+ The expressions used for the WHEN criterion
+ may only be literal strings when "value" is
+ present, i.e. CASE table.somecol WHEN "x" THEN "y".
+ Otherwise, literal strings are not accepted
+ in this position, and either the text(<string>)
+ or literal(<string>) constructs must be used to
+ interpret raw string values.
+
+ Usage examples::
+
+ case([(orderline.c.qty > 100, item.c.specialprice),
+ (orderline.c.qty > 10, item.c.bulkprice)
+ ], else_=item.c.regularprice)
+
+ case(value=emp.c.type, whens={
+ 'engineer': emp.c.salary * 1.1,
+ 'manager': emp.c.salary * 3,
+ })
+
+ Use :func:`.literal_column()` to allow for databases that
+ do not support bind parameters in the ``then`` clause. The type
+ can be specified, which determines the type of the :func:`case()`
+ construct overall::
+
+ case([(orderline.c.qty > 100,
+ literal_column("'greaterthan100'", String)),
+ (orderline.c.qty > 10, literal_column("'greaterthan10'",
+ String))
+ ], else_=literal_column("'lethan10'", String))
+
+ """
+
+ try:
+ whens = util.dictlike_iteritems(whens)
+ except TypeError:
+ pass
+
+ if value is not None:
+ whenlist = [
+ (_literal_as_binds(c).self_group(),
+ _literal_as_binds(r)) for (c, r) in whens
+ ]
+ else:
+ whenlist = [
+ (_no_literals(c).self_group(),
+ _literal_as_binds(r)) for (c, r) in whens
+ ]
+
+ if whenlist:
+ type_ = list(whenlist[-1])[-1].type
+ else:
+ type_ = None
+
+ if value is None:
+ self.value = None
+ else:
+ self.value = _literal_as_binds(value)
+
+ self.type = type_
+ self.whens = whenlist
+ if else_ is not None:
+ self.else_ = _literal_as_binds(else_)
+ else:
+ self.else_ = None
+
+ def _copy_internals(self, clone=_clone, **kw):
+ if self.value is not None:
+ self.value = clone(self.value, **kw)
+ self.whens = [(clone(x, **kw), clone(y, **kw))
+ for x, y in self.whens]
+ if self.else_ is not None:
+ self.else_ = clone(self.else_, **kw)
+
+ def get_children(self, **kwargs):
+ if self.value is not None:
+ yield self.value
+ for x, y in self.whens:
+ yield x
+ yield y
+ if self.else_ is not None:
+ yield self.else_
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(*[x._from_objects for x in
+ self.get_children()]))
+
+
+def literal_column(text, type_=None):
+ """Return a textual column expression, as would be in the columns
+ clause of a ``SELECT`` statement.
+
+ The object returned supports further expressions in the same way as any
+ other column object, including comparison, math and string operations.
+ The type\_ parameter is important to determine proper expression behavior
+ (such as, '+' means string concatenation or numerical addition based on
+ the type).
+
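+ E.g., a minimal sketch; the text renders verbatim in the columns clause::
+
+ from sqlalchemy import literal_column, select, String
+ stmt = select([literal_column("'constant'", String).label('c')])
+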
+ :param text: the text of the expression; can be any SQL expression.
+ Quoting rules will not be applied. To specify a column-name expression
+ which should be subject to quoting rules, use the :func:`column`
+ function.
+
+ :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
+ object which will
+ provide result-set translation and additional expression semantics for
+ this column. If left as None, the type will be NullType.
+
+ """
+ return ColumnClause(text, type_=type_, is_literal=True)
+
+
+
+class Cast(ColumnElement):
+ """Represent the SQL ``CAST`` construct."""
+
+ __visit_name__ = 'cast'
+
+ def __init__(self, expression, type_):
+ """Return a :class:`.Cast` object.
+
+ Equivalent of SQL ``CAST(clause AS totype)``.
+
+ E.g.::
+
+ cast(table.c.unit_price * table.c.qty, Numeric(10,4))
+
+ or::
+
+ cast(table.c.timestamp, DATE)
+
+ :param expression: Column-oriented expression.
+ :param type_: A :class:`.TypeEngine` class or instance indicating
+ the type to which the CAST should apply.
+
+ .. seealso::
+
+ :func:`.type_coerce` - Python-side type coercion without emitting
+ CAST.
+
+ """
+ self.type = type_api.to_instance(type_)
+ self.clause = _literal_as_binds(expression, type_=self.type)
+ self.typeclause = TypeClause(self.type)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.clause = clone(self.clause, **kw)
+ self.typeclause = clone(self.typeclause, **kw)
+
+ def get_children(self, **kwargs):
+ return self.clause, self.typeclause
+
+ @property
+ def _from_objects(self):
+ return self.clause._from_objects
+
+
+class Extract(ColumnElement):
+ """Represent a SQL EXTRACT clause, ``extract(field FROM expr)``."""
+
+ __visit_name__ = 'extract'
+
+ def __init__(self, field, expr, **kwargs):
+ """Return a :class:`.Extract` construct.
+
+ This is typically available as :func:`.extract`
+ as well as ``func.extract`` from the
+ :data:`.func` namespace.
+
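+ E.g., a minimal sketch, assuming a table ``mytable`` with a
+ ``timestamp`` column::
+
+ from sqlalchemy import extract, select
+ stmt = select([extract('year', mytable.c.timestamp)])
+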
+ """
+ self.type = type_api.INTEGERTYPE
+ self.field = field
+ self.expr = _literal_as_binds(expr, None)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.expr = clone(self.expr, **kw)
+
+ def get_children(self, **kwargs):
+ return self.expr,
+
+ @property
+ def _from_objects(self):
+ return self.expr._from_objects
+
+
+class UnaryExpression(ColumnElement):
+ """Define a 'unary' expression.
+
+ A unary expression has a single column expression
+ and an operator. The operator can be placed on the left
+ (where it is called the 'operator') or right (where it is called the
+ 'modifier') of the column expression.
+
+ """
+ __visit_name__ = 'unary'
+
+ def __init__(self, element, operator=None, modifier=None,
+ type_=None, negate=None):
+ self.operator = operator
+ self.modifier = modifier
+ self.element = element.self_group(against=self.operator or self.modifier)
+ self.type = type_api.to_instance(type_)
+ self.negate = negate
+
+ @classmethod
+ def _create_nullsfirst(cls, column):
+ """Return a NULLS FIRST ``ORDER BY`` clause element.
+
+ e.g.::
+
+ someselect.order_by(desc(table1.mycol).nullsfirst())
+
+ produces::
+
+ ORDER BY mycol DESC NULLS FIRST
+
+ """
+ return UnaryExpression(
+ _literal_as_text(column), modifier=operators.nullsfirst_op)
+
+
+ @classmethod
+ def _create_nullslast(cls, column):
+ """Return a NULLS LAST ``ORDER BY`` clause element.
+
+ e.g.::
+
+ someselect.order_by(desc(table1.mycol).nullslast())
+
+ produces::
+
+ ORDER BY mycol DESC NULLS LAST
+
+ """
+ return UnaryExpression(
+ _literal_as_text(column), modifier=operators.nullslast_op)
+
+
+ @classmethod
+ def _create_desc(cls, column):
+ """Return a descending ``ORDER BY`` clause element.
+
+ e.g.::
+
+ someselect.order_by(desc(table1.mycol))
+
+ produces::
+
+ ORDER BY mycol DESC
+
+ """
+ return UnaryExpression(
+ _literal_as_text(column), modifier=operators.desc_op)
+
+ @classmethod
+ def _create_asc(cls, column):
+ """Return an ascending ``ORDER BY`` clause element.
+
+ e.g.::
+
+ someselect.order_by(asc(table1.mycol))
+
+ produces::
+
+ ORDER BY mycol ASC
+
+ """
+ return UnaryExpression(
+ _literal_as_text(column), modifier=operators.asc_op)
+
+ @classmethod
+ def _create_distinct(cls, expr):
+ """Return a ``DISTINCT`` clause.
+
+ e.g.::
+
+ distinct(a)
+
+ renders::
+
+ DISTINCT a
+
+ """
+ expr = _literal_as_binds(expr)
+ return UnaryExpression(expr,
+ operator=operators.distinct_op, type_=expr.type)
+
+ @util.memoized_property
+ def _order_by_label_element(self):
+ if self.modifier in (operators.desc_op, operators.asc_op):
+ return self.element._order_by_label_element
+ else:
+ return None
+
+ @property
+ def _from_objects(self):
+ return self.element._from_objects
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.element = clone(self.element, **kw)
+
+ def get_children(self, **kwargs):
+ return self.element,
+
+ def compare(self, other, **kw):
+ """Compare this :class:`UnaryExpression` against the given
+ :class:`.ClauseElement`."""
+
+ return (
+ isinstance(other, UnaryExpression) and
+ self.operator == other.operator and
+ self.modifier == other.modifier and
+ self.element.compare(other.element, **kw)
+ )
+
+ def _negate(self):
+ if self.negate is not None:
+ return UnaryExpression(
+ self.element,
+ operator=self.negate,
+ negate=self.operator,
+ modifier=self.modifier,
+ type_=self.type)
+ else:
+ return ClauseElement._negate(self)
+
+ def self_group(self, against=None):
+ if self.operator and operators.is_precedent(self.operator, against):
+ return Grouping(self)
+ else:
+ return self
+
+
+class AsBoolean(UnaryExpression):
+
+ def __init__(self, element, operator, negate):
+ self.element = element
+ self.type = type_api.BOOLEANTYPE
+ self.operator = operator
+ self.negate = negate
+ self.modifier = None
+
+ def self_group(self, against=None):
+ return self
+
+ def _negate(self):
+ return self.element._negate()
+
+
+class BinaryExpression(ColumnElement):
+ """Represent an expression that is ``LEFT <operator> RIGHT``.
+
+ A :class:`.BinaryExpression` is generated automatically
+ whenever two column expressions are used in a Python binary expression::
+
+ >>> from sqlalchemy.sql import column
+ >>> column('a') + column('b')
+ <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
+ >>> print column('a') + column('b')
+ a + b
+
+ """
+
+ __visit_name__ = 'binary'
+
+ def __init__(self, left, right, operator, type_=None,
+ negate=None, modifiers=None):
+ # allow compatibility with libraries that
+ # refer to BinaryExpression directly and pass strings
+ if isinstance(operator, util.string_types):
+ operator = operators.custom_op(operator)
+ self._orig = (left, right)
+ self.left = left.self_group(against=operator)
+ self.right = right.self_group(against=operator)
+ self.operator = operator
+ self.type = type_api.to_instance(type_)
+ self.negate = negate
+
+ if modifiers is None:
+ self.modifiers = {}
+ else:
+ self.modifiers = modifiers
+
+ def __bool__(self):
+ if self.operator in (operator.eq, operator.ne):
+ return self.operator(hash(self._orig[0]), hash(self._orig[1]))
+ else:
+ raise TypeError("Boolean value of this clause is not defined")
+
+ __nonzero__ = __bool__
+
+ @property
+ def is_comparison(self):
+ return operators.is_comparison(self.operator)
+
+ @property
+ def _from_objects(self):
+ return self.left._from_objects + self.right._from_objects
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.left = clone(self.left, **kw)
+ self.right = clone(self.right, **kw)
+
+ def get_children(self, **kwargs):
+ return self.left, self.right
+
+ def compare(self, other, **kw):
+ """Compare this :class:`BinaryExpression` against the
+ given :class:`BinaryExpression`."""
+
+ return (
+ isinstance(other, BinaryExpression) and
+ self.operator == other.operator and
+ (
+ self.left.compare(other.left, **kw) and
+ self.right.compare(other.right, **kw) or
+ (
+ operators.is_commutative(self.operator) and
+ self.left.compare(other.right, **kw) and
+ self.right.compare(other.left, **kw)
+ )
+ )
+ )
+
+ def self_group(self, against=None):
+ if operators.is_precedent(self.operator, against):
+ return Grouping(self)
+ else:
+ return self
+
+ def _negate(self):
+ if self.negate is not None:
+ return BinaryExpression(
+ self.left,
+ self.right,
+ self.negate,
+ negate=self.operator,
+ type_=type_api.BOOLEANTYPE,
+ modifiers=self.modifiers)
+ else:
+ return super(BinaryExpression, self)._negate()
+
+
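# [Editor's illustrative sketch -- not part of the diff. Two column
# expressions combined through a Python operator produce the
# BinaryExpression above; _negate() swaps in the operator's negation.]
from sqlalchemy.sql import column

expr = column('a') == column('b')
print(expr)                  # a = b
print(~expr)                 # a != b
print(expr.is_comparison)    # True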
+class Grouping(ColumnElement):
+ """Represent a grouping within a column expression"""
+
+ __visit_name__ = 'grouping'
+
+ def __init__(self, element):
+ self.element = element
+ self.type = getattr(element, 'type', type_api.NULLTYPE)
+
+ def self_group(self, against=None):
+ return self
+
+ @property
+ def _label(self):
+ return getattr(self.element, '_label', None) or self.anon_label
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.element = clone(self.element, **kw)
+
+ def get_children(self, **kwargs):
+ return self.element,
+
+ @property
+ def _from_objects(self):
+ return self.element._from_objects
+
+ def __getattr__(self, attr):
+ return getattr(self.element, attr)
+
+ def __getstate__(self):
+ return {'element': self.element, 'type': self.type}
+
+ def __setstate__(self, state):
+ self.element = state['element']
+ self.type = state['type']
+
+ def compare(self, other, **kw):
+ return isinstance(other, Grouping) and \
+ self.element.compare(other.element)
+
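# [Editor's illustrative sketch -- not part of the diff. self_group()
# wraps lower-precedence sub-expressions in Grouping, preserving
# parenthesization through compilation.]
from sqlalchemy.sql import column

print((column('a') + column('b')) * column('c'))   # (a + b) * c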
+
+class Over(ColumnElement):
+ """Represent an OVER clause.
+
+ This is a special operator against a so-called
+ "window" function, as well as any aggregate function,
+ which produces results relative to the result set
+ itself. It's supported only by certain database
+ backends.
+
+ """
+ __visit_name__ = 'over'
+
+ order_by = None
+ partition_by = None
+
+ def __init__(self, func, partition_by=None, order_by=None):
+ """Produce an :class:`.Over` object against a function.
+
+ Used against aggregate or so-called "window" functions,
+ for database backends that support window functions.
+
+ E.g.::
+
+ from sqlalchemy import over
+ over(func.row_number(), order_by='x')
+
+ Would produce "ROW_NUMBER() OVER(ORDER BY x)".
+
+ :param func: a :class:`.FunctionElement` construct, typically
+ generated by :data:`~.expression.func`.
+ :param partition_by: a column element or string, or a list
+ of such, that will be used as the PARTITION BY clause
+ of the OVER construct.
+ :param order_by: a column element or string, or a list
+ of such, that will be used as the ORDER BY clause
+ of the OVER construct.
+
+ This function is also available from the :data:`~.expression.func`
+ construct itself via the :meth:`.FunctionElement.over` method.
+
+ .. versionadded:: 0.7
+
+ """
+ self.func = func
+ if order_by is not None:
+ self.order_by = ClauseList(*util.to_list(order_by))
+ if partition_by is not None:
+ self.partition_by = ClauseList(*util.to_list(partition_by))
+
+ @util.memoized_property
+ def type(self):
+ return self.func.type
+
+ def get_children(self, **kwargs):
+ return [c for c in
+ (self.func, self.partition_by, self.order_by)
+ if c is not None]
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.func = clone(self.func, **kw)
+ if self.partition_by is not None:
+ self.partition_by = clone(self.partition_by, **kw)
+ if self.order_by is not None:
+ self.order_by = clone(self.order_by, **kw)
+
+ @property
+ def _from_objects(self):
+ return list(itertools.chain(
+ *[c._from_objects for c in
+ (self.func, self.partition_by, self.order_by)
+ if c is not None]
+ ))
+
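# [Editor's illustrative sketch -- not part of the diff, assuming a
# backend with window-function support.]
from sqlalchemy import func, over
from sqlalchemy.sql import column

print(over(func.row_number(), order_by=column('x')))
# row_number() OVER (ORDER BY x)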
+
+class Label(ColumnElement):
+ """Represents a column label (AS).
+
+ Represent a label, as typically applied to any column-level
+ element using the ``AS`` sql keyword.
+
+ """
+
+ __visit_name__ = 'label'
+
+ def __init__(self, name, element, type_=None):
+ """Return a :class:`Label` object for the
+ given :class:`.ColumnElement`.
+
+ A label changes the name of an element in the columns clause of a
+ ``SELECT`` statement, typically via the ``AS`` SQL keyword.
+
+ This functionality is more conveniently available via the
+ :meth:`.ColumnElement.label` method on :class:`.ColumnElement`.
+
+ :param name: label name
+
+ :param element: a :class:`.ColumnElement`.
+
+ """
+ while isinstance(element, Label):
+ element = element.element
+ if name:
+ self.name = name
+ else:
+ self.name = _anonymous_label('%%(%d %s)s' % (id(self),
+ getattr(element, 'name', 'anon')))
+ self.key = self._label = self._key_label = self.name
+ self._element = element
+ self._type = type_
+ self._proxies = [element]
+
+ def __reduce__(self):
+ return self.__class__, (self.name, self._element, self._type)
+
+ @util.memoized_property
+ def _order_by_label_element(self):
+ return self
+
+ @util.memoized_property
+ def type(self):
+ return type_api.to_instance(
+ self._type or getattr(self._element, 'type', None)
+ )
+
+ @util.memoized_property
+ def element(self):
+ return self._element.self_group(against=operators.as_)
+
+ def self_group(self, against=None):
+ sub_element = self._element.self_group(against=against)
+ if sub_element is not self._element:
+ return Label(self.name,
+ sub_element,
+ type_=self._type)
+ else:
+ return self
+
+ @property
+ def primary_key(self):
+ return self.element.primary_key
+
+ @property
+ def foreign_keys(self):
+ return self.element.foreign_keys
+
+ def get_children(self, **kwargs):
+ return self.element,
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.element = clone(self.element, **kw)
+
+ @property
+ def _from_objects(self):
+ return self.element._from_objects
+
+ def _make_proxy(self, selectable, name=None, **kw):
+ e = self.element._make_proxy(selectable,
+ name=name if name else self.name)
+ e._proxies.append(self)
+ if self._type is not None:
+ e.type = self._type
+ return e
+
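# [Editor's illustrative sketch -- not part of the diff. A Label renders
# with AS in the columns clause; passing the same Label to order_by()
# resolves back to the label name via _order_by_label_element.]
from sqlalchemy.sql import column, select

total = (column('a') + column('b')).label('total')
print(select([total]).order_by(total))
# SELECT a + b AS total ORDER BY total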
+
+class ColumnClause(Immutable, ColumnElement):
+ """Represents a generic column expression from any textual string.
+
+ This includes columns associated with tables, aliases and select
+ statements, but also any arbitrary text. May or may not be bound
+ to an underlying :class:`.Selectable`.
+
+ :class:`.ColumnClause` is typically constructed on its own via
+ the :func:`~.expression.column` function. It may be placed directly
+ into constructs such as :func:`.select`::
+
+ from sqlalchemy.sql import column, select
+
+ c1, c2 = column("c1"), column("c2")
+ s = select([c1, c2]).where(c1==5)
+
+ There is also a variant on :func:`~.expression.column` known
+ as :func:`~.expression.literal_column` - the difference is that
+ in the latter case, the string value is assumed to be an exact
+ expression, rather than a column name, so that no quoting rules
+ or similar are applied::
+
+ from sqlalchemy.sql import literal_column, select
+
+ s = select([literal_column("5 + 7")])
+
+ :class:`.ColumnClause` can also be used in a table-like
+ fashion by combining the :func:`~.expression.column` function
+ with the :func:`~.expression.table` function, to produce
+ a "lightweight" form of table metadata::
+
+ from sqlalchemy.sql import table, column
+
+ user = table("user",
+ column("id"),
+ column("name"),
+ column("description"),
+ )
+
+ The above construct can be created in an ad-hoc fashion and is
+ not associated with any :class:`.schema.MetaData`, unlike its
+ more full-fledged :class:`.schema.Table` counterpart.
+
+ """
+ __visit_name__ = 'column'
+
+ onupdate = default = server_default = server_onupdate = None
+
+ _memoized_property = util.group_expirable_memoized_property()
+
+ def __init__(self, text, type_=None, is_literal=False, _selectable=None):
+ """Construct a :class:`.ColumnClause` object.
+
+ :param text: the name of the column. Quoting rules will be applied
+ to the clause like any other column name. For textual column constructs
+ that are not to be quoted, use the :func:`literal_column` function.
+
+ :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object
+ which will provide result-set translation for this column.
+
+ :param is_literal: if True, the :class:`.ColumnClause` is assumed to
+ be an exact expression that will be delivered to the output with no
+ quoting rules applied regardless of case sensitive settings. The
+ :func:`literal_column()` function is usually used to create such a
+ :class:`.ColumnClause`.
+
+ """
+
+ self.key = self.name = text
+ self.table = _selectable
+ self.type = type_api.to_instance(type_)
+ self.is_literal = is_literal
+
+ def _compare_name_for_result(self, other):
+ if self.is_literal or \
+ self.table is None or \
+ not hasattr(other, 'proxy_set') or (
+ isinstance(other, ColumnClause) and other.is_literal
+ ):
+ return super(ColumnClause, self).\
+ _compare_name_for_result(other)
+ else:
+ return other.proxy_set.intersection(self.proxy_set)
+
+ def _get_table(self):
+ return self.__dict__['table']
+
+ def _set_table(self, table):
+ self._memoized_property.expire_instance(self)
+ self.__dict__['table'] = table
+ table = property(_get_table, _set_table)
+
+ @_memoized_property
+ def _from_objects(self):
+ t = self.table
+ if t is not None:
+ return [t]
+ else:
+ return []
+
+ @util.memoized_property
+ def description(self):
+ if util.py3k:
+ return self.name
+ else:
+ return self.name.encode('ascii', 'backslashreplace')
+
+ @_memoized_property
+ def _key_label(self):
+ if self.key != self.name:
+ return self._gen_label(self.key)
+ else:
+ return self._label
+
+ @_memoized_property
+ def _label(self):
+ return self._gen_label(self.name)
+
+ def _gen_label(self, name):
+ t = self.table
+
+ if self.is_literal:
+ return None
+
+ elif t is not None and t.named_with_column:
+ if getattr(t, 'schema', None):
+ label = t.schema.replace('.', '_') + "_" + \
+ t.name + "_" + name
+ else:
+ label = t.name + "_" + name
+
+ # propagate name quoting rules for labels.
+ if getattr(name, "quote", None) is not None:
+ if isinstance(label, quoted_name):
+ label.quote = name.quote
+ else:
+ label = quoted_name(label, name.quote)
+ elif getattr(t.name, "quote", None) is not None:
+ # can't get this situation to occur, so let's
+ # assert false on it for now
+ assert not isinstance(label, quoted_name)
+ label = quoted_name(label, t.name.quote)
+
+ # ensure the label name doesn't conflict with that
+ # of an existing column
+ if label in t.c:
+ _label = label
+ counter = 1
+ while _label in t.c:
+ _label = label + "_" + str(counter)
+ counter += 1
+ label = _label
+
+ return _as_truncated(label)
+
+ else:
+ return name
+
+ def _bind_param(self, operator, obj):
+ return BindParameter(self.name, obj,
+ _compared_to_operator=operator,
+ _compared_to_type=self.type,
+ unique=True)
+
+ def _make_proxy(self, selectable, name=None, attach=True,
+ name_is_truncatable=False, **kw):
+ # propagate the "is_literal" flag only if we are keeping our name,
+ # otherwise it's considered to be a label
+ is_literal = self.is_literal and (name is None or name == self.name)
+ c = self._constructor(
+ _as_truncated(name or self.name) if \
+ name_is_truncatable else \
+ (name or self.name),
+ type_=self.type,
+ _selectable=selectable,
+ is_literal=is_literal
+ )
+ if name is None:
+ c.key = self.key
+ c._proxies = [self]
+ if selectable._is_clone_of is not None:
+ c._is_clone_of = \
+ selectable._is_clone_of.columns.get(c.key)
+
+ if attach:
+ selectable._columns[c.key] = c
+ return c
+
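# [Editor's illustrative sketch -- not part of the diff. A lightweight
# table()/column() pair; reserved names such as "user" are quoted by
# the normal identifier rules described above.]
from sqlalchemy.sql import column, select, table

user = table("user", column("id"), column("name"))
print(select([user.c.id]).where(user.c.name == 'ed'))
# SELECT "user".id FROM "user" WHERE "user".name = :name_1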
+
+class _IdentifiedClause(Executable, ClauseElement):
+
+ __visit_name__ = 'identified'
+ _execution_options = \
+ Executable._execution_options.union({'autocommit': False})
+
+ def __init__(self, ident):
+ self.ident = ident
+
+
+class SavepointClause(_IdentifiedClause):
+ __visit_name__ = 'savepoint'
+
+
+class RollbackToSavepointClause(_IdentifiedClause):
+ __visit_name__ = 'rollback_to_savepoint'
+
+
+class ReleaseSavepointClause(_IdentifiedClause):
+ __visit_name__ = 'release_savepoint'
+
+
+class quoted_name(util.text_type):
+ """Represent a SQL identifier combined with quoting preferences.
+
+ :class:`.quoted_name` is a Python unicode/str subclass which
+ represents a particular identifier name along with a
+ ``quote`` flag. This ``quote`` flag, when set to
+ ``True`` or ``False``, overrides automatic quoting behavior
+ for this identifier in order to either unconditionally quote
+ or to not quote the name. If left at its default of ``None``,
+ quoting behavior is applied to the identifier on a per-backend basis
+ based on an examination of the token itself.
+
+ A :class:`.quoted_name` object with ``quote=True`` is also
+ prevented from being modified in the case of a so-called
+ "name normalize" option. Certain database backends, such as
+ Oracle, Firebird, and DB2 "normalize" case-insensitive names
+ as uppercase. The SQLAlchemy dialects for these backends
+ convert from SQLAlchemy's lower-case-means-insensitive convention
+ to the upper-case-means-insensitive conventions of those backends.
+ The ``quote=True`` flag here will prevent this conversion from occurring
+ to support an identifier that's quoted as all lower case against
+ such a backend.
+
+ The :class:`.quoted_name` object is normally created automatically
+ when specifying the name for key schema constructs such as :class:`.Table`,
+ :class:`.Column`, and others. The class can also be passed explicitly
+ as the name to any function that receives a name which can be quoted,
+ such as when using the :meth:`.Engine.has_table` method with an
+ unconditionally quoted name::
+
+ from sqlalchemy import create_engine
+ from sqlalchemy.sql.elements import quoted_name
+
+ engine = create_engine("oracle+cx_oracle://some_dsn")
+ engine.has_table(quoted_name("some_table", True))
+
+ The above runs the "has table" check against the Oracle backend,
+ passing the name exactly as ``"some_table"`` without converting to
+ upper case.
+
+ .. versionadded:: 0.9.0
+
+ """
+
+ def __new__(cls, value, quote):
+ if value is None:
+ return None
+ # experimental - don't bother with quoted_name
+ # if quote flag is None. doesn't seem to make any dent
+ # in performance however
+ # elif not sprcls and quote is None:
+ # return value
+ elif isinstance(value, cls) and (
+ quote is None or value.quote == quote
+ ):
+ return value
+ self = super(quoted_name, cls).__new__(cls, value)
+ self.quote = quote
+ return self
+
+ def __reduce__(self):
+ return quoted_name, (util.text_type(self), self.quote)
+
+ @util.memoized_instancemethod
+ def lower(self):
+ if self.quote:
+ return self
+ else:
+ return util.text_type(self).lower()
+
+ @util.memoized_instancemethod
+ def upper(self):
+ if self.quote:
+ return self
+ else:
+ return util.text_type(self).upper()
+
+ def __repr__(self):
+ backslashed = self.encode('ascii', 'backslashreplace')
+ if not util.py2k:
+ backslashed = backslashed.decode('ascii')
+ return "'%s'" % backslashed
+
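# [Editor's illustrative sketch -- not part of the diff. With quote=True,
# the memoized lower()/upper() above return the name unchanged, which is
# what blocks backend "name normalization".]
from sqlalchemy.sql.elements import quoted_name

print(quoted_name("some_table", True).upper())   # some_table
print(quoted_name("some_table", None).upper())   # SOME_TABLE
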
+class _truncated_label(quoted_name):
+ """A unicode subclass used to identify symbolic
+ names that may require truncation."""
+
+ def __new__(cls, value, quote=None):
+ quote = getattr(value, "quote", quote)
+ #return super(_truncated_label, cls).__new__(cls, value, quote, True)
+ return super(_truncated_label, cls).__new__(cls, value, quote)
+
+ def __reduce__(self):
+ return self.__class__, (util.text_type(self), self.quote)
+
+ def apply_map(self, map_):
+ return self
+
+# for backwards compatibility in case
+# someone is re-implementing the
+# _truncated_identifier() sequence in a custom
+# compiler
+_generated_label = _truncated_label
+
+
+class _anonymous_label(_truncated_label):
+ """A unicode subclass used to identify anonymously
+ generated names."""
+
+ def __add__(self, other):
+ return _anonymous_label(
+ quoted_name(
+ util.text_type.__add__(self, util.text_type(other)),
+ self.quote)
+ )
+
+ def __radd__(self, other):
+ return _anonymous_label(
+ quoted_name(
+ util.text_type.__add__(util.text_type(other), self),
+ self.quote)
+ )
+
+ def apply_map(self, map_):
+ if self.quote is not None:
+ # preserve quoting only if necessary
+ return quoted_name(self % map_, self.quote)
+ else:
+ # else skip the constructor call
+ return self % map_
+
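# [Editor's illustrative sketch -- not part of the diff. label(None)
# produces an _anonymous_label; apply_map() resolves it to a stable
# "anon_N" name at compile time.]
from sqlalchemy.sql import column, select

print(select([(column('a') + column('b')).label(None)]))
# SELECT a + b AS anon_1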
+
+def _as_truncated(value):
+ """coerce the given value to :class:`._truncated_label`.
+
+ Existing :class:`._truncated_label` and
+ :class:`._anonymous_label` objects are passed
+ unchanged.
+ """
+
+ if isinstance(value, _truncated_label):
+ return value
+ else:
+ return _truncated_label(value)
+
+
+def _string_or_unprintable(element):
+ if isinstance(element, util.string_types):
+ return element
+ else:
+ try:
+ return str(element)
+ except Exception:
+ return "unprintable element %r" % element
+
+
+def _expand_cloned(elements):
+ """expand the given set of ClauseElements to be the set of all 'cloned'
+ predecessors.
+
+ """
+ return itertools.chain(*[x._cloned_set for x in elements])
+
+
+def _select_iterables(elements):
+ """expand tables into individual columns in the
+ given list of column expressions.
+
+ """
+ return itertools.chain(*[c._select_iterable for c in elements])
+
+
+def _cloned_intersection(a, b):
+ """return the intersection of sets a and b, counting
+ any overlap between 'cloned' predecessors.
+
+ The returned set is in terms of the entities present within 'a'.
+
+ """
+ all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
+ return set(elem for elem in a
+ if all_overlap.intersection(elem._cloned_set))
+
+
+def _cloned_difference(a, b):
+ """return the difference of sets a and b, counting
+ any overlap between 'cloned' predecessors.
+
+ The returned set is in terms of the entities present within 'a'.
+
+ """
+ all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
+ return set(elem for elem in a
+ if not all_overlap.intersection(elem._cloned_set))
+
+
+def _labeled(element):
+ if not hasattr(element, 'name'):
+ return element.label(None)
+ else:
+ return element
+
+
+def _is_column(col):
+ """True if ``col`` is an instance of :class:`.ColumnElement`."""
+
+ return isinstance(col, ColumnElement)
+
+
+def _find_columns(clause):
+ """locate Column objects within the given expression."""
+
+ cols = util.column_set()
+ traverse(clause, {}, {'column': cols.add})
+ return cols
+
+
+# there is some inconsistency here between the usage of
+# inspect() vs. checking for Visitable and __clause_element__.
+# Ideally all functions here would derive from inspect(),
+# however the inspect() versions add significant callcount
+# overhead for critical functions like _interpret_as_column_or_from().
+# Generally, the column-based functions are more performance critical
+# and are fine just checking for __clause_element__(). it's only
+# _interpret_as_from() where we'd like to be able to receive ORM entities
+# that have no defined namespace, hence inspect() is needed there.
+
+
+def _column_as_key(element):
+ if isinstance(element, util.string_types):
+ return element
+ if hasattr(element, '__clause_element__'):
+ element = element.__clause_element__()
+ try:
+ return element.key
+ except AttributeError:
+ return None
+
+
+def _clause_element_as_expr(element):
+ if hasattr(element, '__clause_element__'):
+ return element.__clause_element__()
+ else:
+ return element
+
+
+def _literal_as_text(element):
+ if isinstance(element, Visitable):
+ return element
+ elif hasattr(element, '__clause_element__'):
+ return element.__clause_element__()
+ elif isinstance(element, util.string_types):
+ return TextClause(util.text_type(element))
+ elif isinstance(element, (util.NoneType, bool)):
+ return _const_expr(element)
+ else:
+ raise exc.ArgumentError(
+ "SQL expression object or string expected."
+ )
+
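# [Editor's illustrative sketch -- not part of the diff. Plain strings
# passed where a clause is expected are coerced to TextClause by
# _literal_as_text(), as in this 0.9-era where() call.]
from sqlalchemy.sql import column, select

print(select([column('x')]).where("x > 5"))   # SELECT x WHERE x > 5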
+
+def _no_literals(element):
+ if hasattr(element, '__clause_element__'):
+ return element.__clause_element__()
+ elif not isinstance(element, Visitable):
+ raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' "
+ "function to indicate a SQL expression "
+ "literal, or 'literal()' to indicate a "
+ "bound value." % element)
+ else:
+ return element
+
+
+def _is_literal(element):
+ return not isinstance(element, Visitable) and \
+ not hasattr(element, '__clause_element__')
+
+
+def _only_column_elements_or_none(element, name):
+ if element is None:
+ return None
+ else:
+ return _only_column_elements(element, name)
+
+
+def _only_column_elements(element, name):
+ if hasattr(element, '__clause_element__'):
+ element = element.__clause_element__()
+ if not isinstance(element, ColumnElement):
+ raise exc.ArgumentError(
+ "Column-based expression object expected for argument "
+ "'%s'; got: '%s', type %s" % (name, element, type(element)))
+ return element
+
+
+def _literal_as_binds(element, name=None, type_=None):
+ if hasattr(element, '__clause_element__'):
+ return element.__clause_element__()
+ elif not isinstance(element, Visitable):
+ if element is None:
+ return Null()
+ else:
+ return BindParameter(name, element, type_=type_, unique=True)
+ else:
+ return element
+
+
+def _interpret_as_column_or_from(element):
+ if isinstance(element, Visitable):
+ return element
+ elif hasattr(element, '__clause_element__'):
+ return element.__clause_element__()
+
+ insp = inspection.inspect(element, raiseerr=False)
+ if insp is None:
+ if isinstance(element, (util.NoneType, bool)):
+ return _const_expr(element)
+ elif hasattr(insp, "selectable"):
+ return insp.selectable
+
+ return ColumnClause(str(element), is_literal=True)
+
+
+def _const_expr(element):
+ if isinstance(element, (Null, False_, True_)):
+ return element
+ elif element is None:
+ return Null()
+ elif element is False:
+ return False_()
+ elif element is True:
+ return True_()
+ else:
+ raise exc.ArgumentError(
+ "Expected None, False, or True"
+ )
+
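# [Editor's illustrative sketch -- not part of the diff. Python None is
# coerced to the Null construct, so comparison against None compiles
# to IS NULL.]
from sqlalchemy.sql import column

print(column('x') == None)   # x IS NULL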
+
+def _type_from_args(args):
+ for a in args:
+ if not a.type._isnull:
+ return a.type
+ else:
+ return type_api.NULLTYPE
+
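# [Editor's illustrative sketch -- not part of the diff. Functions such
# as coalesce() derive their result type from the first argument whose
# type is not NULL, which is the loop above.]
from sqlalchemy import Integer, func
from sqlalchemy.sql import column

print(func.coalesce(column('x', Integer), column('y')).type)   # INTEGER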
+
+def _corresponding_column_or_error(fromclause, column,
+ require_embedded=False):
+ c = fromclause.corresponding_column(column,
+ require_embedded=require_embedded)
+ if c is None:
+ raise exc.InvalidRequestError(
+ "Given column '%s', attached to table '%s', "
+ "failed to locate a corresponding column from table '%s'"
+ %
+ (column,
+ getattr(column, 'table', None),
+ fromclause.description)
+ )
+ return c
+
+
+class AnnotatedColumnElement(Annotated):
+ def __init__(self, element, values):
+ Annotated.__init__(self, element, values)
+ ColumnElement.comparator._reset(self)
+ for attr in ('name', 'key', 'table'):
+ if self.__dict__.get(attr, False) is None:
+ self.__dict__.pop(attr)
+
+ def _with_annotations(self, values):
+ clone = super(AnnotatedColumnElement, self)._with_annotations(values)
+ ColumnElement.comparator._reset(clone)
+ return clone
+
+ @util.memoized_property
+ def name(self):
+ """pull 'name' from parent, if not present"""
+ return self._Annotated__element.name
+
+ @util.memoized_property
+ def table(self):
+ """pull 'table' from parent, if not present"""
+ return self._Annotated__element.table
+
+ @util.memoized_property
+ def key(self):
+ """pull 'key' from parent, if not present"""
+ return self._Annotated__element.key
+
+ @util.memoized_property
+ def info(self):
+ return self._Annotated__element.info
+
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 08ef20a89..c99665b42 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -1,47 +1,18 @@
# sql/expression.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""Defines the base components of SQL expression trees.
+"""Defines the public namespace for SQL expression constructs.
-All components are derived from a common base class
-:class:`.ClauseElement`. Common behaviors are organized
-based on class hierarchies, in some cases via mixins.
-
-All object construction from this package occurs via functions which
-in some cases will construct composite :class:`.ClauseElement` structures
-together, and in other cases simply return a single :class:`.ClauseElement`
-constructed directly. The function interface affords a more "DSL-ish"
-feel to constructing SQL expressions and also allows future class
-reorganizations.
-
-Even though classes are not constructed directly from the outside,
-most classes which have additional public methods are considered to be
-public (i.e. have no leading underscore). Other classes which are
-"semi-public" are marked with a single leading underscore; these
-classes usually have few or no public methods and are less guaranteed
-to stay the same in future releases.
+Prior to version 0.9, this module contained all of "elements", "dml",
+"default_comparator" and "selectable". The module was broken up
+and most "factory" functions were moved to be grouped with their associated
+class.
"""
-from __future__ import unicode_literals
-import itertools
-import re
-from operator import attrgetter
-
-from .. import util, exc, inspection
-from . import operators
-from .operators import ColumnOperators
-from .visitors import Visitable, cloned_traverse
-import operator
-
-functions = util.importlater("sqlalchemy.sql", "functions")
-sqlutil = util.importlater("sqlalchemy.sql", "util")
-sqltypes = util.importlater("sqlalchemy", "types")
-default = util.importlater("sqlalchemy.engine", "default")
-
__all__ = [
'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
@@ -52,6579 +23,88 @@ __all__ = [
'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast',
'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery',
'table', 'text',
- 'tuple_', 'type_coerce', 'union', 'union_all', 'update', ]
-
-PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
-NO_ARG = util.symbol('NO_ARG')
-
-
-def nullsfirst(column):
- """Return a NULLS FIRST ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(desc(table1.mycol).nullsfirst())
-
- produces::
-
- ORDER BY mycol DESC NULLS FIRST
-
- """
- return UnaryExpression(column, modifier=operators.nullsfirst_op)
-
-
-def nullslast(column):
- """Return a NULLS LAST ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(desc(table1.mycol).nullslast())
-
- produces::
-
- ORDER BY mycol DESC NULLS LAST
-
- """
- return UnaryExpression(column, modifier=operators.nullslast_op)
-
-
-def desc(column):
- """Return a descending ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(desc(table1.mycol))
-
- produces::
-
- ORDER BY mycol DESC
-
- """
- return UnaryExpression(column, modifier=operators.desc_op)
-
-
-def asc(column):
- """Return an ascending ``ORDER BY`` clause element.
-
- e.g.::
-
- someselect.order_by(asc(table1.mycol))
-
- produces::
-
- ORDER BY mycol ASC
-
- """
- return UnaryExpression(column, modifier=operators.asc_op)
-
-
-def outerjoin(left, right, onclause=None):
- """Return an ``OUTER JOIN`` clause element.
-
- The returned object is an instance of :class:`.Join`.
-
- Similar functionality is also available via the
- :meth:`~.FromClause.outerjoin()` method on any
- :class:`.FromClause`.
-
- :param left: The left side of the join.
-
- :param right: The right side of the join.
-
- :param onclause: Optional criterion for the ``ON`` clause, is
- derived from foreign key relationships established between
- left and right otherwise.
-
- To chain joins together, use the :meth:`.FromClause.join` or
- :meth:`.FromClause.outerjoin` methods on the resulting
- :class:`.Join` object.
-
- """
- return Join(left, right, onclause, isouter=True)
-
-
-def join(left, right, onclause=None, isouter=False):
- """Return a ``JOIN`` clause element (regular inner join).
-
- The returned object is an instance of :class:`.Join`.
-
- Similar functionality is also available via the
- :meth:`~.FromClause.join()` method on any
- :class:`.FromClause`.
-
- :param left: The left side of the join.
-
- :param right: The right side of the join.
-
- :param onclause: Optional criterion for the ``ON`` clause, is
- derived from foreign key relationships established between
- left and right otherwise.
-
- To chain joins together, use the :meth:`.FromClause.join` or
- :meth:`.FromClause.outerjoin` methods on the resulting
- :class:`.Join` object.
-
-
- """
- return Join(left, right, onclause, isouter)
-
-
-def select(columns=None, whereclause=None, from_obj=[], **kwargs):
- """Returns a ``SELECT`` clause element.
-
- Similar functionality is also available via the :func:`select()`
- method on any :class:`.FromClause`.
-
- The returned object is an instance of :class:`.Select`.
-
- All arguments which accept :class:`.ClauseElement` arguments also accept
- string arguments, which will be converted as appropriate into
- either :func:`text()` or :func:`literal_column()` constructs.
-
- .. seealso::
-
- :ref:`coretutorial_selecting` - Core Tutorial description of
- :func:`.select`.
-
- :param columns:
- A list of :class:`.ClauseElement` objects, typically
- :class:`.ColumnElement` objects or subclasses, which will form the
- columns clause of the resulting statement. For all members which are
- instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
- members of the :class:`.Selectable` will be added individually to the
- columns clause. For example, specifying a
- :class:`~sqlalchemy.schema.Table` instance will result in all the
- contained :class:`~sqlalchemy.schema.Column` objects within to be added
- to the columns clause.
-
- This argument is not present on the form of :func:`select()`
- available on :class:`~sqlalchemy.schema.Table`.
-
- :param whereclause:
- A :class:`.ClauseElement` expression which will be used to form the
- ``WHERE`` clause.
-
- :param from_obj:
- A list of :class:`.ClauseElement` objects which will be added to the
- ``FROM`` clause of the resulting statement. Note that "from" objects are
- automatically located within the columns and whereclause ClauseElements.
- Use this parameter to explicitly specify "from" objects which are not
- automatically locatable. This could include
- :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
- or :class:`.Join` objects whose presence will supersede that of the
- :class:`~sqlalchemy.schema.Table` objects already located in the other
- clauses.
-
- :param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
- to set the autocommit option.
-
- :param bind=None:
- an :class:`~.base.Engine` or :class:`~.base.Connection` instance
- to which the
- resulting :class:`.Select` object will be bound. The :class:`.Select`
- object will otherwise automatically bind to whatever
- :class:`~.base.Connectable` instances can be located within its contained
- :class:`.ClauseElement` members.
-
- :param correlate=True:
- indicates that this :class:`.Select` object should have its
- contained :class:`.FromClause` elements "correlated" to an enclosing
- :class:`.Select` object. This means that any :class:`.ClauseElement`
- instance within the "froms" collection of this :class:`.Select`
- which is also present in the "froms" collection of an
- enclosing select will not be rendered in the ``FROM`` clause
- of this select statement.
-
- :param distinct=False:
- when ``True``, applies a ``DISTINCT`` qualifier to the columns
- clause of the resulting statement.
-
- The boolean argument may also be a column expression or list
- of column expressions - this is a special calling form which
- is understood by the Postgresql dialect to render the
- ``DISTINCT ON (<columns>)`` syntax.
-
- ``distinct`` is also available via the :meth:`~.Select.distinct`
- generative method.
-
- :param for_update=False:
- when ``True``, applies ``FOR UPDATE`` to the end of the
- resulting statement.
-
- Certain database dialects also support
- alternate values for this parameter:
-
- * With the MySQL dialect, the value ``"read"`` translates to
- ``LOCK IN SHARE MODE``.
- * With the Oracle and Postgresql dialects, the value ``"nowait"``
- translates to ``FOR UPDATE NOWAIT``.
- * With the Postgresql dialect, the values "read" and ``"read_nowait"``
- translate to ``FOR SHARE`` and ``FOR SHARE NOWAIT``, respectively.
-
- .. versionadded:: 0.7.7
-
- :param group_by:
- a list of :class:`.ClauseElement` objects which will comprise the
- ``GROUP BY`` clause of the resulting select.
-
- :param having:
- a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
- of the resulting select when ``GROUP BY`` is used.
-
- :param limit=None:
- a numerical value which usually compiles to a ``LIMIT``
- expression in the resulting select. Databases that don't
- support ``LIMIT`` will attempt to provide similar
- functionality.
-
- :param offset=None:
- a numeric value which usually compiles to an ``OFFSET``
- expression in the resulting select. Databases that don't
- support ``OFFSET`` will attempt to provide similar
- functionality.
-
- :param order_by:
- a scalar or list of :class:`.ClauseElement` objects which will
- comprise the ``ORDER BY`` clause of the resulting select.
-
- :param use_labels=False:
- when ``True``, the statement will be generated using labels
- for each column in the columns clause, which qualify each
- column with its parent table's (or aliases) name so that name
- conflicts between columns in different tables don't occur.
- The format of the label is <tablename>_<column>. The "c"
- collection of the resulting :class:`.Select` object will use these
- names as well for targeting column members.
-
- use_labels is also available via the :meth:`~.SelectBase.apply_labels`
- generative method.
-
- """
- return Select(columns, whereclause=whereclause, from_obj=from_obj,
- **kwargs)
-
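# [Editor's illustrative sketch -- not part of the diff. Basic usage of
# the select() construct documented above, 0.9-era API.]
from sqlalchemy.sql import column, select, table

users = table("users", column("id"), column("name"))
print(select([users.c.id]).where(users.c.name == 'ed'))
# SELECT users.id FROM users WHERE users.name = :name_1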
-
-def subquery(alias, *args, **kwargs):
- """Return an :class:`.Alias` object derived
- from a :class:`.Select`.
-
- alias
- the alias name
-
- \*args, \**kwargs
-
- all other arguments are delivered to the
- :func:`select` function.
-
- """
- return Select(*args, **kwargs).alias(alias)
-
-
-def insert(table, values=None, inline=False, **kwargs):
- """Represent an ``INSERT`` statement via the :class:`.Insert` SQL
- construct.
-
- Similar functionality is available via the
- :meth:`~.TableClause.insert` method on
- :class:`~.schema.Table`.
-
-
- :param table: :class:`.TableClause` which is the subject of the insert.
-
- :param values: collection of values to be inserted; see
- :meth:`.Insert.values` for a description of allowed formats here.
- Can be omitted entirely; a :class:`.Insert` construct will also
- dynamically render the VALUES clause at execution time based on
- the parameters passed to :meth:`.Connection.execute`.
-
- :param inline: if True, SQL defaults will be compiled 'inline' into the
- statement and not pre-executed.
-
- If both `values` and compile-time bind parameters are present, the
- compile-time bind parameters override the information specified
- within `values` on a per-key basis.
-
- The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
- objects or their string identifiers. Each key may reference one of:
-
- * a literal data value (i.e. string, number, etc.);
- * a Column object;
- * a SELECT statement.
-
- If a ``SELECT`` statement is specified which references this
- ``INSERT`` statement's table, the statement will be correlated
- against the ``INSERT`` statement.
-
- .. seealso::
-
- :ref:`coretutorial_insert_expressions` - SQL Expression Tutorial
-
- :ref:`inserts_and_updates` - SQL Expression Tutorial
-
- """
- return Insert(table, values, inline=inline, **kwargs)
-
-
-def update(table, whereclause=None, values=None, inline=False, **kwargs):
- """Represent an ``UPDATE`` statement via the :class:`.Update` SQL
- construct.
-
- E.g.::
-
- from sqlalchemy import update
-
- stmt = update(users).where(users.c.id==5).\\
- values(name='user #5')
-
- Similar functionality is available via the
- :meth:`~.TableClause.update` method on
- :class:`.Table`::
-
-
- stmt = users.update().\\
- where(users.c.id==5).\\
- values(name='user #5')
-
- :param table: A :class:`.Table` object representing the database
- table to be updated.
-
- :param whereclause: Optional SQL expression describing the ``WHERE``
- condition of the ``UPDATE`` statement. Modern applications
- may prefer to use the generative :meth:`~Update.where()`
- method to specify the ``WHERE`` clause.
-
- The WHERE clause can refer to multiple tables.
- For databases which support this, an ``UPDATE FROM`` clause will
- be generated, or on MySQL, a multi-table update. The statement
- will fail on databases that don't have support for multi-table
- update statements. A SQL-standard method of referring to
- additional tables in the WHERE clause is to use a correlated
- subquery::
-
- users.update().values(name='ed').where(
- users.c.name==select([addresses.c.email_address]).\\
- where(addresses.c.user_id==users.c.id).\\
- as_scalar()
- )
-
- .. versionchanged:: 0.7.4
- The WHERE clause can refer to multiple tables.
-
- :param values:
- Optional dictionary which specifies the ``SET`` conditions of the
- ``UPDATE``. If left as ``None``, the ``SET``
- conditions are determined from those parameters passed to the
- statement during the execution and/or compilation of the
- statement. When compiled standalone without any parameters,
- the ``SET`` clause generates for all columns.
-
- Modern applications may prefer to use the generative
- :meth:`.Update.values` method to set the values of the
- UPDATE statement.
-
- :param inline:
- if True, SQL defaults present on :class:`.Column` objects via
- the ``default`` keyword will be compiled 'inline' into the statement
- and not pre-executed. This means that their values will not
- be available in the dictionary returned from
- :meth:`.ResultProxy.last_updated_params`.
-
- If both ``values`` and compile-time bind parameters are present, the
- compile-time bind parameters override the information specified
- within ``values`` on a per-key basis.
-
- The keys within ``values`` can be either :class:`.Column`
- objects or their string identifiers (specifically the "key" of the
- :class:`.Column`, normally but not necessarily equivalent to
- its "name"). Normally, the
- :class:`.Column` objects used here are expected to be
- part of the target :class:`.Table` that is the table
- to be updated. However when using MySQL, a multiple-table
- UPDATE statement can refer to columns from any of
- the tables referred to in the WHERE clause.
-
- The values referred to in ``values`` are typically:
-
- * a literal data value (i.e. string, number, etc.)
- * a SQL expression, such as a related :class:`.Column`,
- a scalar-returning :func:`.select` construct,
- etc.
-
- When combining :func:`.select` constructs within the values
- clause of an :func:`.update` construct,
- the subquery represented by the :func:`.select` should be
- *correlated* to the parent table, that is, providing criterion
- which links the table inside the subquery to the outer table
- being updated::
-
- users.update().values(
- name=select([addresses.c.email_address]).\\
- where(addresses.c.user_id==users.c.id).\\
- as_scalar()
- )
-
- .. seealso::
-
- :ref:`inserts_and_updates` - SQL Expression
- Language Tutorial
-
-
- """
- return Update(
- table,
- whereclause=whereclause,
- values=values,
- inline=inline,
- **kwargs)
-
-
-def delete(table, whereclause=None, **kwargs):
- """Represent a ``DELETE`` statement via the :class:`.Delete` SQL
- construct.
-
- Similar functionality is available via the
- :meth:`~.TableClause.delete` method on
- :class:`~.schema.Table`.
-
- :param table: The table to be updated.
-
- :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
- condition of the ``UPDATE`` statement. Note that the
- :meth:`~Delete.where()` generative method may be used instead.
-
- .. seealso::
-
- :ref:`deletes` - SQL Expression Tutorial
-
- """
- return Delete(table, whereclause, **kwargs)
-
-
-def and_(*clauses):
- """Join a list of clauses together using the ``AND`` operator.
-
- The ``&`` operator is also overloaded on all :class:`.ColumnElement`
- subclasses to produce the
- same result.
-
- """
- if len(clauses) == 1:
- return clauses[0]
- return BooleanClauseList(operator=operators.and_, *clauses)
-
-
-def or_(*clauses):
- """Join a list of clauses together using the ``OR`` operator.
-
- The ``|`` operator is also overloaded on all
- :class:`.ColumnElement` subclasses to produce the
- same result.
-
- """
- if len(clauses) == 1:
- return clauses[0]
- return BooleanClauseList(operator=operators.or_, *clauses)
-
-
-def not_(clause):
- """Return a negation of the given clause, i.e. ``NOT(clause)``.
-
- The ``~`` operator is also overloaded on all
- :class:`.ColumnElement` subclasses to produce the
- same result.
-
- """
- return operators.inv(_literal_as_binds(clause))
-
-
-def distinct(expr):
- """Return a ``DISTINCT`` clause.
-
- e.g.::
-
- distinct(a)
-
- renders::
-
- DISTINCT a
-
- """
- expr = _literal_as_binds(expr)
- return UnaryExpression(expr,
- operator=operators.distinct_op, type_=expr.type)
-
-
-def between(ctest, cleft, cright):
- """Return a ``BETWEEN`` predicate clause.
-
- Equivalent of SQL ``clausetest BETWEEN clauseleft AND clauseright``.
-
- The :func:`between()` method on all
- :class:`.ColumnElement` subclasses provides
- similar functionality.
-
- """
- ctest = _literal_as_binds(ctest)
- return ctest.between(cleft, cright)
-
-
-def case(whens, value=None, else_=None):
- """Produce a ``CASE`` statement.
-
- whens
- A sequence of pairs, or alternatively a dict,
- to be translated into "WHEN / THEN" clauses.
-
- value
- Optional for simple case statements, produces
- a column expression as in "CASE <expr> WHEN ..."
-
- else\_
- Optional as well, for case defaults produces
- the "ELSE" portion of the "CASE" statement.
-
- The expressions used for THEN and ELSE,
- when specified as strings, will be interpreted
- as bound values. To specify textual SQL expressions
- for these, use the :func:`literal_column`
- construct.
-
- The expressions used for the WHEN criterion
- may only be literal strings when "value" is
- present, i.e. CASE table.somecol WHEN "x" THEN "y".
- Otherwise, literal strings are not accepted
- in this position, and either the text(<string>)
- or literal(<string>) constructs must be used to
- interpret raw string values.
-
- Usage examples::
-
- case([(orderline.c.qty > 100, item.c.specialprice),
- (orderline.c.qty > 10, item.c.bulkprice)
- ], else_=item.c.regularprice)
- case(value=emp.c.type, whens={
- 'engineer': emp.c.salary * 1.1,
- 'manager': emp.c.salary * 3,
- })
-
- Using :func:`literal_column()`, to allow for databases that
- do not support bind parameters in the ``then`` clause. The type
- can be specified which determines the type of the :func:`case()` construct
- overall::
-
- case([(orderline.c.qty > 100,
- literal_column("'greaterthan100'", String)),
- (orderline.c.qty > 10, literal_column("'greaterthan10'",
- String))
- ], else_=literal_column("'lessthan10'", String))
-
- """
-
- return Case(whens, value=value, else_=else_)
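# [Editor's illustrative sketch -- not part of the diff. The searched-CASE
# form from the docstring above; string THEN/ELSE values become bound
# parameters.]
from sqlalchemy import case
from sqlalchemy.sql import column

qty = column('qty')
print(case([(qty > 100, 'special'), (qty > 10, 'bulk')], else_='regular'))
# CASE WHEN (qty > :qty_1) THEN :param_1
#      WHEN (qty > :qty_2) THEN :param_2 ELSE :param_3 END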
-
-
-def cast(clause, totype, **kwargs):
- """Return a ``CAST`` function.
-
- Equivalent of SQL ``CAST(clause AS totype)``.
-
- Use with a :class:`~sqlalchemy.types.TypeEngine` subclass, i.e::
-
- cast(table.c.unit_price * table.c.qty, Numeric(10,4))
-
- or::
-
- cast(table.c.timestamp, DATE)
-
- """
- return Cast(clause, totype, **kwargs)
-
-
-def extract(field, expr):
- """Return the clause ``extract(field FROM expr)``."""
-
- return Extract(field, expr)
-
-
-def collate(expression, collation):
- """Return the clause ``expression COLLATE collation``.
-
- e.g.::
-
- collate(mycolumn, 'utf8_bin')
-
- produces::
-
- mycolumn COLLATE utf8_bin
-
- """
-
- expr = _literal_as_binds(expression)
- return BinaryExpression(
- expr,
- _literal_as_text(collation),
- operators.collate, type_=expr.type)
-
-
-def exists(*args, **kwargs):
- """Return an ``EXISTS`` clause as applied to a :class:`.Select` object.
-
- Calling styles are of the following forms::
-
- # use on an existing select()
- s = select([table.c.col1]).where(table.c.col2==5)
- s = exists(s)
-
- # construct a select() at once
- exists(['*'], **select_arguments).where(criterion)
-
- # columns argument is optional, generates "EXISTS (SELECT *)"
- # by default.
- exists().where(table.c.col2==5)
-
- """
- return Exists(*args, **kwargs)
-
-
-def union(*selects, **kwargs):
- """Return a ``UNION`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- A similar :func:`union()` method is available on all
- :class:`.FromClause` subclasses.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
-
-
-def union_all(*selects, **kwargs):
- """Return a ``UNION ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- A similar :func:`union_all()` method is available on all
- :class:`.FromClause` subclasses.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
-
-
-def except_(*selects, **kwargs):
- """Return an ``EXCEPT`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
-
-
-def except_all(*selects, **kwargs):
- """Return an ``EXCEPT ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
-
-
-def intersect(*selects, **kwargs):
- """Return an ``INTERSECT`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
-
-
-def intersect_all(*selects, **kwargs):
- """Return an ``INTERSECT ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
-
-
-def alias(selectable, name=None, flat=False):
- """Return an :class:`.Alias` object.
-
- An :class:`.Alias` represents any :class:`.FromClause`
- with an alternate name assigned within SQL, typically using the ``AS``
- clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
-
- Similar functionality is available via the
- :meth:`~.FromClause.alias` method
- available on all :class:`.FromClause` subclasses.
-
- When an :class:`.Alias` is created from a :class:`.Table` object,
- this has the effect of the table being rendered
- as ``tablename AS aliasname`` in a SELECT statement.
-
- For :func:`.select` objects, the effect is that of creating a named
- subquery, i.e. ``(select ...) AS aliasname``.
-
- The ``name`` parameter is optional, and provides the name
- to use in the rendered SQL. If blank, an "anonymous" name
- will be deterministically generated at compile time.
- Deterministic means the name is guaranteed to be unique against
- other constructs used in the same statement, and will also be the
- same name for each successive compilation of the same statement
- object.
-
- :param selectable: any :class:`.FromClause` subclass,
- such as a table, select statement, etc.
-
- :param name: string name to be assigned as the alias.
- If ``None``, a name will be deterministically generated
- at compile time.
-
- :param flat: Will be passed through to if the given selectable
- is an instance of :class:`.Join` - see :meth:`.Join.alias`
- for details.
-
- .. versionadded:: 0.9.0
-
- """
- return selectable.alias(name=name, flat=flat)
-
-
-def literal(value, type_=None):
- """Return a literal clause, bound to a bind parameter.
-
- Literal clauses are created automatically when non- :class:`.ClauseElement`
- objects (such as strings, ints, dates, etc.) are used in a comparison
- operation with a :class:`.ColumnElement`
- subclass, such as a :class:`~sqlalchemy.schema.Column` object.
- Use this function to force the
- generation of a literal clause, which will be created as a
- :class:`BindParameter` with a bound value.
-
- :param value: the value to be bound. Can be any Python object supported by
- the underlying DB-API, or is translatable via the given type argument.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
- will provide bind-parameter translation for this literal.
-
- """
- return BindParameter(None, value, type_=type_, unique=True)
-
-
-def tuple_(*expr):
- """Return a SQL tuple.
-
- Main usage is to produce a composite IN construct::
-
- tuple_(table.c.col1, table.c.col2).in_(
- [(1, 2), (5, 12), (10, 19)]
- )
-
- .. warning::
-
- The composite IN construct is not supported by all backends,
- and is currently known to work on Postgresql and MySQL,
- but not SQLite. Unsupported backends will raise
- a subclass of :class:`~sqlalchemy.exc.DBAPIError` when such
- an expression is invoked.
-
- """
- return Tuple(*expr)
-
-
-def type_coerce(expr, type_):
- """Coerce the given expression into the given type,
- on the Python side only.
-
- :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
- "CAST" expression is rendered - the given type is only applied towards
- expression typing and against received result values.
-
- e.g.::
-
- from sqlalchemy.types import TypeDecorator
- import uuid
-
- class AsGuid(TypeDecorator):
- impl = String
-
- def process_bind_param(self, value, dialect):
- if value is not None:
- return str(value)
- else:
- return None
-
- def process_result_value(self, value, dialect):
- if value is not None:
- return uuid.UUID(value)
- else:
- return None
-
- conn.execute(
- select([type_coerce(mytable.c.ident, AsGuid)]).\\
- where(
- type_coerce(mytable.c.ident, AsGuid) ==
- uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
- )
- )
-
- """
- type_ = sqltypes.to_instance(type_)
-
- if hasattr(expr, '__clause_element__'):
- return type_coerce(expr.__clause_element__(), type_)
- elif isinstance(expr, BindParameter):
- bp = expr._clone()
- bp.type = type_
- return bp
- elif not isinstance(expr, Visitable):
- if expr is None:
- return null()
- else:
- return literal(expr, type_=type_)
- else:
- return Label(None, expr, type_=type_)
-
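# [Editor's illustrative sketch -- not part of the diff. type_coerce()
# changes only Python-side type handling; unlike cast(), no CAST is
# rendered in the SQL.]
from sqlalchemy import String, cast, type_coerce
from sqlalchemy.sql import column

print(cast(column('x'), String))         # CAST(x AS VARCHAR)
print(type_coerce(column('x'), String))  # x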
-
-def label(name, obj):
- """Return a :class:`Label` object for the
- given :class:`.ColumnElement`.
-
- A label changes the name of an element in the columns clause of a
- ``SELECT`` statement, typically via the ``AS`` SQL keyword.
-
- This functionality is more conveniently available via the
- :func:`label()` method on :class:`.ColumnElement`.
-
- name
- label name
-
- obj
- a :class:`.ColumnElement`.
-
- """
- return Label(name, obj)
-
-
-def column(text, type_=None):
- """Return a textual column clause, as would be in the columns clause of a
- ``SELECT`` statement.
-
- The object returned is an instance of :class:`.ColumnClause`, which
- represents the "syntactical" portion of the schema-level
- :class:`~sqlalchemy.schema.Column` object. It is often used directly
- within :func:`~.expression.select` constructs or with lightweight
- :func:`~.expression.table` constructs.
-
- Note that the :func:`~.expression.column` function is not part of
- the ``sqlalchemy`` namespace. It must be imported from the
- ``sql`` package::
-
- from sqlalchemy.sql import table, column
-
- :param text: the name of the column. Quoting rules will be applied
- to the clause like any other column name. For textual column constructs
- that are not to be quoted, use the :func:`literal_column` function.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object
- which will provide result-set translation for this column.
-
- See :class:`.ColumnClause` for further examples.
-
- """
- return ColumnClause(text, type_=type_)
-
-
-def literal_column(text, type_=None):
- """Return a textual column expression, as would be in the columns
- clause of a ``SELECT`` statement.
-
- The object returned supports further expressions in the same way as any
- other column object, including comparison, math and string operations.
- The type\_ parameter is important to determine proper expression behavior
- (such as, '+' means string concatenation or numerical addition based on
- the type).
-
- :param text: the text of the expression; can be any SQL expression.
- Quoting rules will not be applied. To specify a column-name expression
- which should be subject to quoting rules, use the :func:`column`
- function.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
- object which will
- provide result-set translation and additional expression semantics for
- this column. If left as None the type will be NullType.
-
- """
- return ColumnClause(text, type_=type_, is_literal=True)
-
-
-def table(name, *columns):
- """Represent a textual table clause.
-
- The object returned is an instance of :class:`.TableClause`, which
- represents the "syntactical" portion of the schema-level
- :class:`~.schema.Table` object.
- It may be used to construct lightweight table constructs.
-
- Note that the :func:`~.expression.table` function is not part of
- the ``sqlalchemy`` namespace. It must be imported from the
- ``sql`` package::
-
- from sqlalchemy.sql import table, column
-
- :param name: Name of the table.
-
- :param columns: A collection of :func:`~.expression.column` constructs.
-
- See :class:`.TableClause` for further examples.
-
- """
- return TableClause(name, *columns)
-
-
-def bindparam(key, value=NO_ARG, type_=None, unique=False, required=NO_ARG,
- quote=None, callable_=None):
- """Create a bind parameter clause with the given key.
-
- :param key:
- the key for this bind param. Will be used in the generated
- SQL statement for dialects that use named parameters. This
- value may be modified when part of a compilation operation,
- if other :class:`BindParameter` objects exist with the same
- key, or if its length is too long and truncation is
- required.
-
- :param value:
- Initial value for this bind param. This value may be
- overridden by the dictionary of parameters sent to statement
- compilation/execution.
-
- Defaults to ``None``; however, if neither ``value`` nor
- ``callable`` is passed explicitly, the ``required`` flag will be
- set to ``True`` which has the effect of requiring a value be present
- when the statement is actually executed.
-
- .. versionchanged:: 0.8 The ``required`` flag is set to ``True``
- automatically if ``value`` or ``callable`` is not passed.
-
- :param callable\_:
- A callable function that takes the place of "value". The function
- will be called at statement execution time to determine the
- ultimate value. Used for scenarios where the actual bind
- value cannot be determined at the point at which the clause
- construct is created, but embedded bind values are still desirable.
-
- :param type\_:
- A ``TypeEngine`` object that will be used to pre-process the
- value corresponding to this :class:`BindParameter` at
- execution time.
-
- :param unique:
- if True, the key name of this BindParamClause will be
- modified if another :class:`BindParameter` of the same name
- already has been located within the containing
- :class:`.ClauseElement`.
-
- :param required:
- If ``True``, a value is required at execution time. If not passed,
- is set to ``True`` or ``False`` based on whether or not
- one of ``value`` or ``callable`` was passed.
-
- .. versionchanged:: 0.8 If the ``required`` flag is not specified,
- it will be set automatically to ``True`` or ``False`` depending
- on whether or not the ``value`` or ``callable`` parameters
- were specified.
-
- :param quote:
- True if this parameter name requires quoting and is not
- currently known as a SQLAlchemy reserved word; this currently
- only applies to the Oracle backend.
-
- """
- if isinstance(key, ColumnClause):
- type_ = key.type
- key = key.name
- if required is NO_ARG:
- required = (value is NO_ARG and callable_ is None)
- if value is NO_ARG:
- value = None
- return BindParameter(key, value, type_=type_,
- callable_=callable_,
- unique=unique, required=required,
- quote=quote)
-
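- # A hedged sketch (illustrative; assumes a ``users`` table and an open
- # ``connection``): the named parameter is supplied at execution time.
- #
- #     >>> stmt = select([users]).where(users.c.name == bindparam('username'))
- #     >>> result = connection.execute(stmt, username='ed')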
-
-def outparam(key, type_=None):
- """Create an 'OUT' parameter for usage in functions (stored procedures),
- for databases which support them.
-
- The ``outparam`` can be used like a regular function parameter.
- The "output" value will be available from the
- :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters``
- attribute, which returns a dictionary containing the values.
-
- """
- return BindParameter(
- key, None, type_=type_, unique=False, isoutparam=True)
-
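- # A hedged sketch (illustrative; assumes an Oracle-style backend and a
- # hypothetical stored function ``myproc``): the OUT value is read back
- # from the result's ``out_parameters`` dictionary.
- #
- #     >>> result = connection.execute(func.myproc(outparam("x", Integer)))
- #     >>> result.out_parameters['x']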
-
-def text(text, bind=None, *args, **kwargs):
- """Create a SQL construct that is represented by a literal string.
-
- E.g.::
-
- t = text("SELECT * FROM users")
- result = connection.execute(t)
-
- The advantages :func:`text` provides over a plain string are
- backend-neutral support for bind parameters, per-statement
- execution options, and bind parameter and result-column typing
- behavior, allowing SQLAlchemy type constructs to play a role when
- executing a statement that is specified literally.
-
- Bind parameters are specified by name, using the format ``:name``.
- E.g.::
-
- t = text("SELECT * FROM users WHERE id=:user_id")
- result = connection.execute(t, user_id=12)
-
- To invoke SQLAlchemy typing logic for bind parameters, the
- ``bindparams`` list allows specification of :func:`bindparam`
- constructs which specify the type for a given name::
-
- t = text("SELECT id FROM users WHERE updated_at>:updated",
- bindparams=[bindparam('updated', DateTime())]
- )
-
- Typing during result row processing is also an important concern.
- Result column types
- are specified using the ``typemap`` dictionary, where the keys
- match the names of columns. These names are taken from what
- the DBAPI returns as ``cursor.description``::
-
- t = text("SELECT id, name FROM users",
- typemap={
- 'id':Integer,
- 'name':Unicode
- }
- )
-
- The :func:`text` construct is used internally for most cases when
- a literal string is specified for part of a larger query, such as
- within :func:`select()`, :func:`update()`,
- :func:`insert()` or :func:`delete()`. In those cases, the same
- bind parameter syntax is applied::
-
- s = select([users.c.id, users.c.name]).where("id=:user_id")
- result = connection.execute(s, user_id=12)
-
- Using :func:`text` explicitly usually implies the construction
- of a full, standalone statement. As such, SQLAlchemy refers
- to it as an :class:`.Executable` object, and it supports
- the :meth:`Executable.execution_options` method. For example,
- a :func:`text` construct that should be subject to "autocommit"
- can be set explicitly so using the ``autocommit`` option::
-
- t = text("EXEC my_procedural_thing()").\\
- execution_options(autocommit=True)
-
- Note that SQLAlchemy's usual "autocommit" behavior applies to
- :func:`text` constructs - that is, statements which begin
- with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
- or a variety of other phrases specific to certain backends, will
- be eligible for autocommit if no transaction is in progress.
-
- :param text:
- the text of the SQL statement to be created. Use ``:<param>``
- to specify bind parameters; they will be compiled to their
- engine-specific format.
-
- :param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
- to set the autocommit option.
-
- :param bind:
- an optional connection or engine to be used for this text query.
-
- :param bindparams:
- a list of :func:`bindparam()` instances which can be used to define
- the types and/or initial values for the bind parameters within
- the textual statement; the keynames of the bindparams must match
- those within the text of the statement. The types will be used
- for pre-processing on bind values.
-
- :param typemap:
- a dictionary mapping the names of columns represented in the
- columns clause of a ``SELECT`` statement to type objects,
- which will be used to perform post-processing on columns within
- the result set. This argument applies to any expression
- that returns result sets.
-
- """
- return TextClause(text, bind=bind, *args, **kwargs)
-
-
-def over(func, partition_by=None, order_by=None):
- """Produce an OVER clause against a function.
-
- Used against aggregate or so-called "window" functions,
- for database backends that support window functions.
-
- E.g.::
-
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
-
- Would produce "ROW_NUMBER() OVER(ORDER BY x)".
-
- :param func: a :class:`.FunctionElement` construct, typically
- generated by :data:`~.expression.func`.
- :param partition_by: a column element or string, or a list
- of such, that will be used as the PARTITION BY clause
- of the OVER construct.
- :param order_by: a column element or string, or a list
- of such, that will be used as the ORDER BY clause
- of the OVER construct.
-
- This function is also available from the :data:`~.expression.func`
- construct itself via the :meth:`.FunctionElement.over` method.
-
- .. versionadded:: 0.7
-
- """
- return Over(func, partition_by=partition_by, order_by=order_by)
-
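- # A hedged sketch (illustrative): PARTITION BY and ORDER BY may be
- # combined, given as column elements or strings.
- #
- #     >>> print over(func.row_number(), partition_by='dept', order_by='x')
- #     ROW_NUMBER() OVER (PARTITION BY dept ORDER BY x)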
-
-def null():
- """Return a :class:`Null` object, which compiles to ``NULL``.
-
- """
- return Null()
-
-
-def true():
- """Return a :class:`True_` object, which compiles to ``true``, or the
- boolean equivalent for the target dialect.
-
- """
- return True_()
-
-
-def false():
- """Return a :class:`False_` object, which compiles to ``false``, or the
- boolean equivalent for the target dialect.
-
- """
- return False_()
-
-
-class _FunctionGenerator(object):
- """Generate :class:`.Function` objects based on getattr calls."""
-
- def __init__(self, **opts):
- self.__names = []
- self.opts = opts
-
- def __getattr__(self, name):
- # passthru __ attributes; fixes pydoc
- if name.startswith('__'):
- try:
- return self.__dict__[name]
- except KeyError:
- raise AttributeError(name)
-
- elif name.endswith('_'):
- name = name[0:-1]
- f = _FunctionGenerator(**self.opts)
- f.__names = list(self.__names) + [name]
- return f
-
- def __call__(self, *c, **kwargs):
- o = self.opts.copy()
- o.update(kwargs)
-
- tokens = len(self.__names)
-
- if tokens == 2:
- package, fname = self.__names
- elif tokens == 1:
- package, fname = "_default", self.__names[0]
- else:
- package = None
-
- if package is not None and \
- package in functions._registry and \
- fname in functions._registry[package]:
- func = functions._registry[package][fname]
- return func(*c, **o)
-
- return Function(self.__names[-1],
- packagenames=self.__names[0:-1], *c, **o)
-
-# "func" global - i.e. func.count()
-func = _FunctionGenerator()
-"""Generate SQL function expressions.
-
- :data:`.func` is a special object instance which generates SQL
- functions based on name-based attributes, e.g.::
-
- >>> print func.count(1)
- count(:param_1)
-
- The element is a column-oriented SQL element like any other, and is
- used in that way::
-
- >>> print select([func.count(table.c.id)])
- SELECT count(sometable.id) FROM sometable
-
- Any name can be given to :data:`.func`. If the function name is unknown to
- SQLAlchemy, it will be rendered exactly as is. For common SQL functions
- which SQLAlchemy is aware of, the name may be interpreted as a *generic
- function* which will be compiled appropriately to the target database::
-
- >>> print func.current_timestamp()
- CURRENT_TIMESTAMP
-
- To call functions which are present in dot-separated packages,
- specify them in the same manner::
-
- >>> print func.stats.yield_curve(5, 10)
- stats.yield_curve(:yield_curve_1, :yield_curve_2)
-
- SQLAlchemy can be made aware of the return type of functions to enable
- type-specific lexical and result-based behavior. For example, to ensure
- that a string-based function returns a Unicode value and is similarly
- treated as a string in expressions, specify
- :class:`~sqlalchemy.types.Unicode` as the type:
-
- >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \
- ... func.my_string(u'there', type_=Unicode)
- my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
-
- The object returned by a :data:`.func` call is usually an instance of
- :class:`.Function`.
- This object meets the "column" interface, including comparison and labeling
- functions. The object can also be passed the :meth:`~.Connectable.execute`
- method of a :class:`.Connection` or :class:`.Engine`, where it will be
- wrapped inside of a SELECT statement first::
-
- print connection.execute(func.current_timestamp()).scalar()
-
- In a few exceptional cases, the :data:`.func` accessor
- will redirect a name to a built-in expression such as :func:`.cast`
- or :func:`.extract`, as these names have well-known meaning
- but are not exactly the same as "functions" from a SQLAlchemy
- perspective.
-
- .. versionadded:: 0.8 :data:`.func` can return non-function expression
- constructs for common quasi-functional names like :func:`.cast`
- and :func:`.extract`.
-
- Functions which are interpreted as "generic" functions know how to
- calculate their return type automatically. For a listing of known generic
- functions, see :ref:`generic_functions`.
-
-"""
-
-# "modifier" global - i.e. modifier.distinct
-# TODO: use UnaryExpression for this instead ?
-modifier = _FunctionGenerator(group=False)
-
-
-class _truncated_label(util.text_type):
- """A unicode subclass used to identify symbolic "
- "names that may require truncation."""
-
- def apply_map(self, map_):
- return self
-
-# for backwards compatibility in case
-# someone is re-implementing the
-# _truncated_identifier() sequence in a custom
-# compiler
-_generated_label = _truncated_label
-
-
-class _anonymous_label(_truncated_label):
- """A unicode subclass used to identify anonymously
- generated names."""
-
- def __add__(self, other):
- return _anonymous_label(
- util.text_type(self) +
- util.text_type(other))
-
- def __radd__(self, other):
- return _anonymous_label(
- util.text_type(other) +
- util.text_type(self))
-
- def apply_map(self, map_):
- return self % map_
-
-
-def _as_truncated(value):
- """coerce the given value to :class:`._truncated_label`.
-
- Existing :class:`._truncated_label` and
- :class:`._anonymous_label` objects are passed
- unchanged.
- """
-
- if isinstance(value, _truncated_label):
- return value
- else:
- return _truncated_label(value)
-
-
-def _string_or_unprintable(element):
- if isinstance(element, util.string_types):
- return element
- else:
- try:
- return str(element)
- except Exception:
- return "unprintable element %r" % element
-
-
-def _clone(element, **kw):
- return element._clone()
-
-
-def _expand_cloned(elements):
- """expand the given set of ClauseElements to be the set of all 'cloned'
- predecessors.
-
- """
- return itertools.chain(*[x._cloned_set for x in elements])
-
-
-def _select_iterables(elements):
- """expand tables into individual columns in the
- given list of column expressions.
-
- """
- return itertools.chain(*[c._select_iterable for c in elements])
-
-
-def _cloned_intersection(a, b):
- """return the intersection of sets a and b, counting
- any overlap between 'cloned' predecessors.
-
- The returned set is in terms of the entities present within 'a'.
-
- """
- all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
- return set(elem for elem in a
- if all_overlap.intersection(elem._cloned_set))
-
-def _cloned_difference(a, b):
- all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
- return set(elem for elem in a
- if not all_overlap.intersection(elem._cloned_set))
-
-def _from_objects(*elements):
- return itertools.chain(*[element._from_objects for element in elements])
-
-
-def _labeled(element):
- if not hasattr(element, 'name'):
- return element.label(None)
- else:
- return element
-
-
-# there is some inconsistency here between the usage of
-# inspect() vs. checking for Visitable and __clause_element__.
-# Ideally all functions here would derive from inspect(),
-# however the inspect() versions add significant callcount
-# overhead for critical functions like _interpret_as_column_or_from().
-# Generally, the column-based functions are more performance critical
-# and are fine just checking for __clause_element__(). it's only
-# _interpret_as_from() where we'd like to be able to receive ORM entities
-# that have no defined namespace, hence inspect() is needed there.
-
-
-def _column_as_key(element):
- if isinstance(element, util.string_types):
- return element
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- try:
- return element.key
- except AttributeError:
- return None
-
-
-def _clause_element_as_expr(element):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- else:
- return element
-
-
-def _literal_as_text(element):
- if isinstance(element, Visitable):
- return element
- elif hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif isinstance(element, util.string_types):
- return TextClause(util.text_type(element))
- elif isinstance(element, (util.NoneType, bool)):
- return _const_expr(element)
- else:
- raise exc.ArgumentError(
- "SQL expression object or string expected."
- )
-
-
-def _no_literals(element):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif not isinstance(element, Visitable):
- raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' "
- "function to indicate a SQL expression "
- "literal, or 'literal()' to indicate a "
- "bound value." % element)
- else:
- return element
-
-
-def _is_literal(element):
- return not isinstance(element, Visitable) and \
- not hasattr(element, '__clause_element__')
-
-
-def _only_column_elements_or_none(element, name):
- if element is None:
- return None
- else:
- return _only_column_elements(element, name)
-
-
-def _only_column_elements(element, name):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, ColumnElement):
- raise exc.ArgumentError(
- "Column-based expression object expected for argument "
- "'%s'; got: '%s', type %s" % (name, element, type(element)))
- return element
-
-
-def _literal_as_binds(element, name=None, type_=None):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif not isinstance(element, Visitable):
- if element is None:
- return null()
- else:
- return BindParameter(name, element, type_=type_, unique=True)
- else:
- return element
-
-
-def _interpret_as_column_or_from(element):
- if isinstance(element, Visitable):
- return element
- elif hasattr(element, '__clause_element__'):
- return element.__clause_element__()
-
- insp = inspection.inspect(element, raiseerr=False)
- if insp is None:
- if isinstance(element, (util.NoneType, bool)):
- return _const_expr(element)
- elif hasattr(insp, "selectable"):
- return insp.selectable
-
- return literal_column(str(element))
-
-
-def _interpret_as_from(element):
- insp = inspection.inspect(element, raiseerr=False)
- if insp is None:
- if isinstance(element, util.string_types):
- return TextClause(util.text_type(element))
- elif hasattr(insp, "selectable"):
- return insp.selectable
- raise exc.ArgumentError("FROM expression expected")
-
-def _interpret_as_select(element):
- element = _interpret_as_from(element)
- if isinstance(element, Alias):
- element = element.original
- if not isinstance(element, Select):
- element = element.select()
- return element
-
-
-def _const_expr(element):
- if isinstance(element, (Null, False_, True_)):
- return element
- elif element is None:
- return null()
- elif element is False:
- return false()
- elif element is True:
- return true()
- else:
- raise exc.ArgumentError(
- "Expected None, False, or True"
- )
-
-
-def _type_from_args(args):
- for a in args:
- if not isinstance(a.type, sqltypes.NullType):
- return a.type
- else:
- return sqltypes.NullType
-
-
-def _corresponding_column_or_error(fromclause, column,
- require_embedded=False):
- c = fromclause.corresponding_column(column,
- require_embedded=require_embedded)
- if c is None:
- raise exc.InvalidRequestError(
- "Given column '%s', attached to table '%s', "
- "failed to locate a corresponding column from table '%s'"
- %
- (column,
- getattr(column, 'table', None),
- fromclause.description)
- )
- return c
-
-
-@util.decorator
-def _generative(fn, *args, **kw):
- """Mark a method as generative."""
-
- self = args[0]._generate()
- fn(self, *args[1:], **kw)
- return self
-
-
-def is_column(col):
- """True if ``col`` is an instance of :class:`.ColumnElement`."""
-
- return isinstance(col, ColumnElement)
-
-
-class ClauseElement(Visitable):
- """Base class for elements of a programmatically constructed SQL
- expression.
-
- """
- __visit_name__ = 'clause'
-
- _annotations = {}
- supports_execution = False
- _from_objects = []
- bind = None
- _is_clone_of = None
- is_selectable = False
- is_clause_element = True
-
- _order_by_label_element = None
-
- def _clone(self):
- """Create a shallow copy of this ClauseElement.
-
- This method may be used by a generative API. It's also used as
- part of the "deep" copy afforded by a traversal that combines
- it with the _copy_internals() method.
-
- """
- c = self.__class__.__new__(self.__class__)
- c.__dict__ = self.__dict__.copy()
- ClauseElement._cloned_set._reset(c)
- ColumnElement.comparator._reset(c)
-
- # this is a marker that helps to "equate" clauses to each other
- # when a Select returns its list of FROM clauses. the cloning
- # process leaves around a lot of remnants of the previous clause
- # typically in the form of column expressions still attached to the
- # old table.
- c._is_clone_of = self
-
- return c
-
- @property
- def _constructor(self):
- """return the 'constructor' for this ClauseElement.
-
- This is used for creating a new object of
- this type. Usually, it's just the element's __class__.
- However, the "Annotated" version of the object overrides
- to return the class of its proxied element.
-
- """
- return self.__class__
-
- @util.memoized_property
- def _cloned_set(self):
- """Return the set consisting all cloned ancestors of this
- ClauseElement.
-
- Includes this ClauseElement. This accessor tends to be used for
- FromClause objects to identify 'equivalent' FROM clauses, regardless
- of transformative operations.
-
- """
- s = util.column_set()
- f = self
- while f is not None:
- s.add(f)
- f = f._is_clone_of
- return s
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d.pop('_is_clone_of', None)
- return d
-
- def _annotate(self, values):
- """return a copy of this ClauseElement with annotations
- updated by the given dictionary.
-
- """
- return sqlutil.Annotated(self, values)
-
- def _with_annotations(self, values):
- """return a copy of this ClauseElement with annotations
- replaced by the given dictionary.
-
- """
- return sqlutil.Annotated(self, values)
-
- def _deannotate(self, values=None, clone=False):
- """return a copy of this :class:`.ClauseElement` with annotations
- removed.
-
- :param values: optional tuple of individual values
- to remove.
-
- """
- if clone:
- # clone is used when we are also copying
- # the expression for a deep deannotation
- return self._clone()
- else:
- # if no clone, since we have no annotations we return
- # self
- return self
-
- def unique_params(self, *optionaldict, **kwargs):
- """Return a copy with :func:`bindparam()` elements replaced.
-
- Same functionality as :meth:`.params`, except adds ``unique=True``
- to affected bind parameters so that multiple statements can be
- used.
-
- """
- return self._params(True, optionaldict, kwargs)
-
- def params(self, *optionaldict, **kwargs):
- """Return a copy with :func:`bindparam()` elements replaced.
-
- Returns a copy of this ClauseElement with :func:`bindparam()`
- elements replaced with values taken from the given dictionary::
-
- >>> clause = column('x') + bindparam('foo')
- >>> print clause.compile().params
- {'foo':None}
- >>> print clause.params({'foo':7}).compile().params
- {'foo':7}
-
- """
- return self._params(False, optionaldict, kwargs)
-
- def _params(self, unique, optionaldict, kwargs):
- if len(optionaldict) == 1:
- kwargs.update(optionaldict[0])
- elif len(optionaldict) > 1:
- raise exc.ArgumentError(
- "params() takes zero or one positional dictionary argument")
-
- def visit_bindparam(bind):
- if bind.key in kwargs:
- bind.value = kwargs[bind.key]
- bind.required = False
- if unique:
- bind._convert_to_unique()
- return cloned_traverse(self, {}, {'bindparam': visit_bindparam})
-
- def compare(self, other, **kw):
- """Compare this ClauseElement to the given ClauseElement.
-
- Subclasses should override the default behavior, which is a
- straight identity comparison.
-
- \**kw are arguments consumed by subclass compare() methods and
- may be used to modify the criteria for comparison.
- (see :class:`.ColumnElement`)
-
- """
- return self is other
-
- def _copy_internals(self, clone=_clone, **kw):
- """Reassign internal elements to be clones of themselves.
-
- Called during a copy-and-traverse operation on newly
- shallow-copied elements to create a deep copy.
-
- The given clone function should be used, which may be applying
- additional transformations to the element (i.e. replacement
- traversal, cloned traversal, annotations).
-
- """
- pass
-
- def get_children(self, **kwargs):
- """Return immediate child elements of this :class:`.ClauseElement`.
-
- This is used for visit traversal.
-
- \**kwargs may contain flags that change the collection that is
- returned, for example to return a subset of items in order to
- cut down on larger traversals, or to return child items from a
- different context (such as schema-level collections instead of
- clause-level).
-
- """
- return []
-
- def self_group(self, against=None):
- """Apply a 'grouping' to this :class:`.ClauseElement`.
-
- This method is overridden by subclasses to return a
- "grouping" construct, i.e. parentheses. In particular
- it's used by "binary" expressions to provide a grouping
- around themselves when placed into a larger expression,
- as well as by :func:`.select` constructs when placed into
- the FROM clause of another :func:`.select`. (Note that
- subqueries should normally be created using the
- :meth:`.Select.alias` method, as many platforms require
- nested SELECT statements to be named.)
-
- As expressions are composed together, the application of
- :meth:`self_group` is automatic - end-user code should never
- need to use this method directly. Note that SQLAlchemy's
- clause constructs take operator precedence into account -
- so parentheses might not be needed, for example, in
- an expression like ``x OR (y AND z)`` - AND takes precedence
- over OR.
-
- The base :meth:`self_group` method of :class:`.ClauseElement`
- just returns self.
- """
- return self
-
- def compile(self, bind=None, dialect=None, **kw):
- """Compile this SQL expression.
-
- The return value is a :class:`~.Compiled` object.
- Calling ``str()`` or ``unicode()`` on the returned value will yield a
- string representation of the result. The
- :class:`~.Compiled` object also can return a
- dictionary of bind parameter names and values
- using the ``params`` accessor.
-
- :param bind: An ``Engine`` or ``Connection`` from which a
- ``Compiled`` will be acquired. This argument takes precedence over
- this :class:`.ClauseElement`'s bound engine, if any.
-
- :param column_keys: Used for INSERT and UPDATE statements, a list of
- column names which should be present in the VALUES clause of the
- compiled statement. If ``None``, all columns from the target table
- object are rendered.
-
- :param dialect: A ``Dialect`` instance from which a ``Compiled``
- will be acquired. This argument takes precedence over the `bind`
- argument as well as this :class:`.ClauseElement`'s bound engine, if
- any.
-
- :param inline: Used for INSERT statements, for a dialect which does
- not support inline retrieval of newly generated primary key
- columns, will force the expression used to create the new primary
- key value to be rendered inline within the INSERT statement's
- VALUES clause. This typically refers to Sequence execution but may
- also refer to any server-side default generation function
- associated with a primary key `Column`.
-
- """
-
- if not dialect:
- if bind:
- dialect = bind.dialect
- elif self.bind:
- dialect = self.bind.dialect
- bind = self.bind
- else:
- dialect = default.DefaultDialect()
- return self._compiler(dialect, bind=bind, **kw)
-
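- # A hedged sketch (illustrative; ``expr`` stands for any ClauseElement):
- # compiling against an explicit dialect renders that dialect's SQL
- # without requiring an engine or connection.
- #
- #     >>> from sqlalchemy.dialects import postgresql
- #     >>> print expr.compile(dialect=postgresql.dialect())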
- def _compiler(self, dialect, **kw):
- """Return a compiler appropriate for this ClauseElement, given a
- Dialect."""
-
- return dialect.statement_compiler(dialect, self, **kw)
-
- def __str__(self):
- if util.py3k:
- return str(self.compile())
- else:
- return unicode(self.compile()).encode('ascii', 'backslashreplace')
-
- def __and__(self, other):
- return and_(self, other)
-
- def __or__(self, other):
- return or_(self, other)
-
- def __invert__(self):
- return self._negate()
-
- def __bool__(self):
- raise TypeError("Boolean value of this clause is not defined")
-
- __nonzero__ = __bool__
-
- def _negate(self):
- if hasattr(self, 'negation_clause'):
- return self.negation_clause
- else:
- return UnaryExpression(
- self.self_group(against=operators.inv),
- operator=operators.inv,
- negate=None)
-
- def __repr__(self):
- friendly = getattr(self, 'description', None)
- if friendly is None:
- return object.__repr__(self)
- else:
- return '<%s.%s at 0x%x; %s>' % (
- self.__module__, self.__class__.__name__, id(self), friendly)
-
-inspection._self_inspects(ClauseElement)
-
-
-class Immutable(object):
- """mark a ClauseElement as 'immutable' when expressions are cloned."""
-
- def unique_params(self, *optionaldict, **kwargs):
- raise NotImplementedError("Immutable objects do not support copying")
-
- def params(self, *optionaldict, **kwargs):
- raise NotImplementedError("Immutable objects do not support copying")
-
- def _clone(self):
- return self
-
-
-class _DefaultColumnComparator(operators.ColumnOperators):
- """Defines comparison and math operations.
-
- See :class:`.ColumnOperators` and :class:`.Operators` for descriptions
- of all operations.
-
- """
-
- @util.memoized_property
- def type(self):
- return self.expr.type
-
- def operate(self, op, *other, **kwargs):
- o = self.operators[op.__name__]
- return o[0](self, self.expr, op, *(other + o[1:]), **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- o = self.operators[op.__name__]
- return o[0](self, self.expr, op, other, reverse=True, *o[1:], **kwargs)
-
- def _adapt_expression(self, op, other_comparator):
- """evaluate the return type of <self> <op> <othertype>,
- and apply any adaptations to the given operator.
-
- This method determines the type of a resulting binary expression
- given two source types and an operator. For example, two
- :class:`.Column` objects, both of the type :class:`.Integer`, will
- produce a :class:`.BinaryExpression` that also has the type
- :class:`.Integer` when compared via the addition (``+``) operator.
- However, using the addition operator with an :class:`.Integer`
- and a :class:`.Date` object will produce a :class:`.Date`, assuming
- "days delta" behavior by the database (in reality, most databases
- other than Postgresql don't accept this particular operation).
-
- The method returns a tuple of the form <operator>, <type>.
- The resulting operator and type will be those applied to the
- resulting :class:`.BinaryExpression` as the final operator and the
- right-hand side of the expression.
-
- Note that only a subset of operators make usage of
- :meth:`._adapt_expression`,
- including math operators and user-defined operators, but not
- boolean comparison or special SQL keywords like MATCH or BETWEEN.
-
- """
- return op, other_comparator.type
-
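- # A hedged illustration of the rule above (not part of the source):
- # adding two Integer columns yields an Integer-typed expression, per
- # the default _adapt_expression() behavior.
- #
- #     >>> (column('a', Integer) + column('b', Integer)).type
- #     Integer()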
- def _boolean_compare(self, expr, op, obj, negate=None, reverse=False,
- _python_is_types=(util.NoneType, bool),
- **kwargs):
-
- if isinstance(obj, _python_is_types + (Null, True_, False_)):
-
- # allow x ==/!= True/False to be treated as a literal.
- # this renders as "== / != true/false", or "1/0" if those
- # constants aren't supported, and works on all platforms
- if op in (operators.eq, operators.ne) and \
- isinstance(obj, (bool, True_, False_)):
- return BinaryExpression(expr,
- obj,
- op,
- type_=sqltypes.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
- else:
- # all other None/True/False uses IS, IS NOT
- if op in (operators.eq, operators.is_):
- return BinaryExpression(expr, _const_expr(obj),
- operators.is_,
- negate=operators.isnot)
- elif op in (operators.ne, operators.isnot):
- return BinaryExpression(expr, _const_expr(obj),
- operators.isnot,
- negate=operators.is_)
- else:
- raise exc.ArgumentError(
- "Only '=', '!=', 'is_()', 'isnot()' operators can "
- "be used with None/True/False")
- else:
- obj = self._check_literal(expr, op, obj)
-
- if reverse:
- return BinaryExpression(obj,
- expr,
- op,
- type_=sqltypes.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
- else:
- return BinaryExpression(expr,
- obj,
- op,
- type_=sqltypes.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
-
- def _binary_operate(self, expr, op, obj, reverse=False, result_type=None,
- **kw):
- obj = self._check_literal(expr, op, obj)
-
- if reverse:
- left, right = obj, expr
- else:
- left, right = expr, obj
-
- if result_type is None:
- op, result_type = left.comparator._adapt_expression(
- op, right.comparator)
-
- return BinaryExpression(left, right, op, type_=result_type)
-
- def _scalar(self, expr, op, fn, **kw):
- return fn(expr)
-
- def _in_impl(self, expr, op, seq_or_selectable, negate_op, **kw):
- seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
-
- if isinstance(seq_or_selectable, ScalarSelect):
- return self._boolean_compare(expr, op, seq_or_selectable,
- negate=negate_op)
- elif isinstance(seq_or_selectable, SelectBase):
-
- # TODO: if we ever want to support (x, y, z) IN (select x,
- # y, z from table), we would need a multi-column version of
- # as_scalar() to produce a multi-column selectable that
- # does not export itself as a FROM clause
-
- return self._boolean_compare(
- expr, op, seq_or_selectable.as_scalar(),
- negate=negate_op, **kw)
- elif isinstance(seq_or_selectable, (Selectable, TextClause)):
- return self._boolean_compare(expr, op, seq_or_selectable,
- negate=negate_op, **kw)
-
- # Handle non selectable arguments as sequences
- args = []
- for o in seq_or_selectable:
- if not _is_literal(o):
- if not isinstance(o, ColumnOperators):
- raise exc.InvalidRequestError('in() function accepts '
- 'either a list of non-selectable values, '
- 'or a selectable: %r' % o)
- elif o is None:
- o = null()
- else:
- o = expr._bind_param(op, o)
- args.append(o)
- if len(args) == 0:
-
- # Special case handling for empty IN's, behave like
- # comparison against zero row selectable. We use != to
- # build the contradiction as it handles NULL values
- # appropriately, i.e. "not (x IN ())" should not return NULL
- # values for x.
-
- util.warn('The IN-predicate on "%s" was invoked with an '
- 'empty sequence. This results in a '
- 'contradiction, which nonetheless can be '
- 'expensive to evaluate. Consider alternative '
- 'strategies for improved performance.' % expr)
- return expr != expr
-
- return self._boolean_compare(expr, op,
- ClauseList(*args).self_group(against=op),
- negate=negate_op)
-
- def _unsupported_impl(self, expr, op, *arg, **kw):
- raise NotImplementedError("Operator '%s' is not supported on "
- "this expression" % op.__name__)
-
- def _neg_impl(self, expr, op, **kw):
- """See :meth:`.ColumnOperators.__neg__`."""
- return UnaryExpression(expr, operator=operators.neg)
-
- def _match_impl(self, expr, op, other, **kw):
- """See :meth:`.ColumnOperators.match`."""
- return self._boolean_compare(expr, operators.match_op,
- self._check_literal(expr, operators.match_op,
- other))
-
- def _distinct_impl(self, expr, op, **kw):
- """See :meth:`.ColumnOperators.distinct`."""
- return UnaryExpression(expr, operator=operators.distinct_op,
- type_=expr.type)
-
- def _between_impl(self, expr, op, cleft, cright, **kw):
- """See :meth:`.ColumnOperators.between`."""
- return BinaryExpression(
- expr,
- ClauseList(
- self._check_literal(expr, operators.and_, cleft),
- self._check_literal(expr, operators.and_, cright),
- operator=operators.and_,
- group=False),
- operators.between_op)
-
- def _collate_impl(self, expr, op, other, **kw):
- return collate(expr, other)
-
- # a mapping of operators with the method they use, along with
- # their negated operator for comparison operators
- operators = {
- "add": (_binary_operate,),
- "mul": (_binary_operate,),
- "sub": (_binary_operate,),
- "div": (_binary_operate,),
- "mod": (_binary_operate,),
- "truediv": (_binary_operate,),
- "custom_op": (_binary_operate,),
- "concat_op": (_binary_operate,),
- "lt": (_boolean_compare, operators.ge),
- "le": (_boolean_compare, operators.gt),
- "ne": (_boolean_compare, operators.eq),
- "gt": (_boolean_compare, operators.le),
- "ge": (_boolean_compare, operators.lt),
- "eq": (_boolean_compare, operators.ne),
- "like_op": (_boolean_compare, operators.notlike_op),
- "ilike_op": (_boolean_compare, operators.notilike_op),
- "notlike_op": (_boolean_compare, operators.like_op),
- "notilike_op": (_boolean_compare, operators.ilike_op),
- "contains_op": (_boolean_compare, operators.notcontains_op),
- "startswith_op": (_boolean_compare, operators.notstartswith_op),
- "endswith_op": (_boolean_compare, operators.notendswith_op),
- "desc_op": (_scalar, desc),
- "asc_op": (_scalar, asc),
- "nullsfirst_op": (_scalar, nullsfirst),
- "nullslast_op": (_scalar, nullslast),
- "in_op": (_in_impl, operators.notin_op),
- "notin_op": (_in_impl, operators.in_op),
- "is_": (_boolean_compare, operators.is_),
- "isnot": (_boolean_compare, operators.isnot),
- "collate": (_collate_impl,),
- "match_op": (_match_impl,),
- "distinct_op": (_distinct_impl,),
- "between_op": (_between_impl, ),
- "neg": (_neg_impl,),
- "getitem": (_unsupported_impl,),
- "lshift": (_unsupported_impl,),
- "rshift": (_unsupported_impl,),
- }
-
- def _check_literal(self, expr, operator, other):
- if isinstance(other, (ColumnElement, TextClause)):
- if isinstance(other, BindParameter) and \
- isinstance(other.type, sqltypes.NullType):
- # TODO: perhaps we should not mutate the incoming
- # bindparam() here and instead make a copy of it.
- # this might be the only place that we're mutating
- # an incoming construct.
- other.type = expr.type
- return other
- elif hasattr(other, '__clause_element__'):
- other = other.__clause_element__()
- elif isinstance(other, sqltypes.TypeEngine.Comparator):
- other = other.expr
-
- if isinstance(other, (SelectBase, Alias)):
- return other.as_scalar()
- elif not isinstance(other, (ColumnElement, TextClause)):
- return expr._bind_param(operator, other)
- else:
- return other
-
-
-class ColumnElement(ClauseElement, ColumnOperators):
- """Represent a column-oriented SQL expression suitable for usage in the
- "columns" clause, WHERE clause etc. of a statement.
-
- While the most familiar kind of :class:`.ColumnElement` is the
- :class:`.Column` object, :class:`.ColumnElement` serves as the basis
- for any unit that may be present in a SQL expression, including
- the expressions themselves, SQL functions, bound parameters,
- literal expressions, keywords such as ``NULL``, etc.
- :class:`.ColumnElement` is the ultimate base class for all such elements.
-
- A :class:`.ColumnElement` provides the ability to generate new
- :class:`.ColumnElement`
- objects using Python expressions. This means that Python operators
- such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
- and allow the instantiation of further :class:`.ColumnElement` instances
- which are composed from other, more fundamental :class:`.ColumnElement`
- objects. For example, two :class:`.ColumnClause` objects can be added
- together with the addition operator ``+`` to produce
- a :class:`.BinaryExpression`.
- Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
- of :class:`.ColumnElement`::
-
- >>> from sqlalchemy.sql import column
- >>> column('a') + column('b')
- <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
- >>> print column('a') + column('b')
- a + b
-
- :class:`.ColumnElement` supports the ability to be a *proxy* element,
- which indicates that the :class:`.ColumnElement` may be associated with
- a :class:`.Selectable` which was derived from another :class:`.Selectable`.
- An example of a "derived" :class:`.Selectable` is an :class:`.Alias` of a
- :class:`~sqlalchemy.schema.Table`. For the ambitious, an in-depth
- discussion of this concept can be found at
- `Expression Transformations <http://techspot.zzzeek.org/2008/01/23/expression-transformations/>`_.
-
- """
-
- __visit_name__ = 'column'
- primary_key = False
- foreign_keys = []
- quote = None
- _label = None
- _key_label = None
- _alt_names = ()
-
- @util.memoized_property
- def type(self):
- return sqltypes.NULLTYPE
-
- @util.memoized_property
- def comparator(self):
- return self.type.comparator_factory(self)
-
- def __getattr__(self, key):
- try:
- return getattr(self.comparator, key)
- except AttributeError:
- raise AttributeError(
- 'Neither %r object nor %r object has an attribute %r' % (
- type(self).__name__,
- type(self.comparator).__name__,
- key)
- )
-
- def operate(self, op, *other, **kwargs):
- return op(self.comparator, *other, **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- return op(other, self.comparator, **kwargs)
-
- def _bind_param(self, operator, obj):
- return BindParameter(None, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
-
- @property
- def expression(self):
- """Return a column expression.
-
- Part of the inspection interface; returns self.
-
- """
- return self
-
- @property
- def _select_iterable(self):
- return (self, )
-
- @util.memoized_property
- def base_columns(self):
- return util.column_set(c for c in self.proxy_set
- if not hasattr(c, '_proxies'))
-
- @util.memoized_property
- def proxy_set(self):
- s = util.column_set([self])
- if hasattr(self, '_proxies'):
- for c in self._proxies:
- s.update(c.proxy_set)
- return s
-
- def shares_lineage(self, othercolumn):
- """Return True if the given :class:`.ColumnElement`
- has a common ancestor to this :class:`.ColumnElement`."""
-
- return bool(self.proxy_set.intersection(othercolumn.proxy_set))
-
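- # A hedged sketch (illustrative; assumes a ``users`` Table): columns of
- # an alias are proxies for the original columns, so they share lineage.
- #
- #     >>> a = users.alias()
- #     >>> a.c.id.shares_lineage(users.c.id)
- #     True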
- def _compare_name_for_result(self, other):
- """Return True if the given column element compares to this one
- when targeting within a result row."""
-
- return hasattr(other, 'name') and hasattr(self, 'name') and \
- other.name == self.name
-
- def _make_proxy(self, selectable, name=None, name_is_truncatable=False, **kw):
- """Create a new :class:`.ColumnElement` representing this
- :class:`.ColumnElement` as it appears in the select list of a
- descending selectable.
-
- """
- if name is None:
- name = self.anon_label
- try:
- key = str(self)
- except exc.UnsupportedCompilationError:
- key = self.anon_label
- else:
- key = name
- co = ColumnClause(_as_truncated(name) if name_is_truncatable else name,
- selectable,
- type_=getattr(self,
- 'type', None))
- co._proxies = [self]
- if selectable._is_clone_of is not None:
- co._is_clone_of = \
- selectable._is_clone_of.columns.get(key)
- selectable._columns[key] = co
- return co
-
- def compare(self, other, use_proxies=False, equivalents=None, **kw):
- """Compare this ColumnElement to another.
-
- Special arguments understood:
-
- :param use_proxies: when True, consider two columns that
- share a common base column as equivalent (i.e. shares_lineage())
-
- :param equivalents: a dictionary of columns as keys mapped to sets
- of columns. If the given "other" column is present in this
- dictionary, and any of the columns in the corresponding set() pass
- the comparison test, the result is True. This is used to expand the
- comparison to other columns that may be known to be equivalent to
- this one via foreign key or other criterion.
-
- """
- to_compare = (other, )
- if equivalents and other in equivalents:
- to_compare = equivalents[other].union(to_compare)
-
- for oth in to_compare:
- if use_proxies and self.shares_lineage(oth):
- return True
- elif hash(oth) == hash(self):
- return True
- else:
- return False
-
- def label(self, name):
- """Produce a column label, i.e. ``<columnname> AS <name>``.
-
- This is a shortcut to the :func:`~.expression.label` function.
-
- if 'name' is None, an anonymous label name will be generated.
-
- """
- return Label(name, self, self.type)
-
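- # A hedged sketch (illustrative; assumes a ``users`` table):
- #
- #     >>> print select([users.c.name.label('username')])
- #     SELECT users.name AS username
- #     FROM users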
- @util.memoized_property
- def anon_label(self):
- """provides a constant 'anonymous label' for this ColumnElement.
-
- This is a label() expression which will be named at compile time.
- The same label() is returned each time anon_label is called so
- that expressions can reference anon_label multiple times, producing
- the same label name at compile time.
-
- the compiler uses this function automatically at compile time
- for expressions that are known to be 'unnamed' like binary
- expressions and function calls.
-
- """
- return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self,
- 'name', 'anon')))
-
-
-class ColumnCollection(util.OrderedProperties):
- """An ordered dictionary that stores a list of ColumnElement
- instances.
-
- Overrides the ``__eq__()`` method to produce SQL clauses between
- sets of correlated columns.
-
- """
-
- def __init__(self, *cols):
- super(ColumnCollection, self).__init__()
- self._data.update((c.key, c) for c in cols)
- self.__dict__['_all_cols'] = util.column_set(self)
-
- def __str__(self):
- return repr([str(c) for c in self])
-
- def replace(self, column):
- """add the given column to this collection, removing unaliased
- versions of this column as well as existing columns with the
- same key.
-
- e.g.::
-
- t = Table('sometable', metadata, Column('col1', Integer))
- t.columns.replace(Column('col1', Integer, key='columnone'))
-
- will remove the original 'col1' from the collection, and add
- the new column under the key 'columnone'.
-
- Used by schema.Column to override columns during table reflection.
-
- """
- if column.name in self and column.key != column.name:
- other = self[column.name]
- if other.name == other.key:
- del self._data[other.name]
- self._all_cols.remove(other)
- if column.key in self._data:
- self._all_cols.remove(self._data[column.key])
- self._all_cols.add(column)
- self._data[column.key] = column
-
- def add(self, column):
- """Add a column to this collection.
-
- The key attribute of the column will be used as the hash key
- for this dictionary.
-
- """
- self[column.key] = column
-
- def __delitem__(self, key):
- raise NotImplementedError()
-
- def __setattr__(self, key, obj):
- raise NotImplementedError()
-
- def __setitem__(self, key, value):
- if key in self:
-
- # this warning is primarily to catch select() statements
- # which have conflicting column names in their exported
- # columns collection
-
- existing = self[key]
- if not existing.shares_lineage(value):
- util.warn('Column %r on table %r being replaced by '
- '%r, which has the same key. Consider '
- 'use_labels for select() statements.' % (key,
- getattr(existing, 'table', None), value))
- self._all_cols.remove(existing)
- # pop out memoized proxy_set as this
- # operation may very well be occurring
- # in a _make_proxy operation
- ColumnElement.proxy_set._reset(value)
- self._all_cols.add(value)
- self._data[key] = value
-
- def clear(self):
- self._data.clear()
- self._all_cols.clear()
-
- def remove(self, column):
- del self._data[column.key]
- self._all_cols.remove(column)
-
- def update(self, value):
- self._data.update(value)
- self._all_cols.clear()
- self._all_cols.update(self._data.values())
-
- def extend(self, iter):
- self.update((c.key, c) for c in iter)
-
- __hash__ = None
-
- def __eq__(self, other):
- elems = []
- for c in other:
- for local in self:
- if c.shares_lineage(local):
- elems.append(c == local)
- return and_(*elems)
-
- def __contains__(self, other):
- if not isinstance(other, util.string_types):
- raise exc.ArgumentError("__contains__ requires a string argument")
- return util.OrderedProperties.__contains__(self, other)
-
- def __setstate__(self, state):
- self.__dict__['_data'] = state['_data']
- self.__dict__['_all_cols'] = util.column_set(self._data.values())
-
- def contains_column(self, col):
- # this has to be done via set() membership
- return col in self._all_cols
-
- def as_immutable(self):
- return ImmutableColumnCollection(self._data, self._all_cols)
-
-
-class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
- def __init__(self, data, colset):
- util.ImmutableProperties.__init__(self, data)
- self.__dict__['_all_cols'] = colset
-
- extend = remove = util.ImmutableProperties._immutable
-
-
-class ColumnSet(util.ordered_column_set):
- def contains_column(self, col):
- return col in self
-
- def extend(self, cols):
- for col in cols:
- self.add(col)
-
- def __add__(self, other):
- return list(self) + list(other)
-
- def __eq__(self, other):
- elems = []
- for c in other:
- for local in self:
- if c.shares_lineage(local):
- elems.append(c == local)
- return and_(*elems)
-
- def __hash__(self):
- return hash(tuple(x for x in self))
-
-
-class Selectable(ClauseElement):
- """mark a class as being selectable"""
- __visit_name__ = 'selectable'
-
- is_selectable = True
-
- @property
- def selectable(self):
- return self
-
-
-class FromClause(Selectable):
- """Represent an element that can be used within the ``FROM``
- clause of a ``SELECT`` statement.
-
- The most common forms of :class:`.FromClause` are the
- :class:`.Table` and the :func:`.select` constructs. Key
- features common to all :class:`.FromClause` objects include:
-
- * a :attr:`.c` collection, which provides per-name access to a collection
- of :class:`.ColumnElement` objects.
- * a :attr:`.primary_key` attribute, which is a collection of all those
- :class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
- * Methods to generate various derivations of a "from" clause, including
- :meth:`.FromClause.alias`, :meth:`.FromClause.join`,
- :meth:`.FromClause.select`.
-
-
- """
- __visit_name__ = 'fromclause'
- named_with_column = False
- _hide_froms = []
- quote = None
- schema = None
- _memoized_property = util.group_expirable_memoized_property(["_columns"])
-
- def count(self, whereclause=None, **params):
- """return a SELECT COUNT generated against this
- :class:`.FromClause`."""
-
- if self.primary_key:
- col = list(self.primary_key)[0]
- else:
- col = list(self.columns)[0]
- return select(
- [func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
-
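- # A hedged sketch (illustrative; assumes a ``users`` Table whose
- # primary key is ``id``):
- #
- #     >>> print users.count()
- #     SELECT count(users.id) AS tbl_row_count
- #     FROM users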
- def select(self, whereclause=None, **params):
- """return a SELECT of this :class:`.FromClause`.
-
- .. seealso::
-
- :func:`~.sql.expression.select` - general purpose
- method which allows for arbitrary column lists.
-
- """
-
- return select([self], whereclause, **params)
-
- def join(self, right, onclause=None, isouter=False):
- """return a join of this :class:`.FromClause` against another
- :class:`.FromClause`."""
-
- return Join(self, right, onclause, isouter)
-
- def outerjoin(self, right, onclause=None):
- """return an outer join of this :class:`.FromClause` against another
- :class:`.FromClause`."""
-
- return Join(self, right, onclause, True)
-
- def alias(self, name=None, flat=False):
- """return an alias of this :class:`.FromClause`.
-
- This is shorthand for calling::
-
- from sqlalchemy import alias
- a = alias(self, name=name)
-
- See :func:`~.expression.alias` for details.
-
- """
-
- return Alias(self, name)
-
- def is_derived_from(self, fromclause):
- """Return True if this FromClause is 'derived' from the given
- FromClause.
-
- An example would be an Alias of a Table: the Alias is derived from that Table.
-
- """
- # this is essentially an "identity" check in the base class.
- # Other constructs override this to traverse through
- # contained elements.
- return fromclause in self._cloned_set
-
- def _is_lexical_equivalent(self, other):
- """Return True if this FromClause and the other represent
- the same lexical identity.
-
- This tests if either one is a copy of the other, or
- if they are the same via annotation identity.
-
- """
- return self._cloned_set.intersection(other._cloned_set)
-
- def replace_selectable(self, old, alias):
- """replace all occurrences of FromClause 'old' with the given Alias
- object, returning a copy of this :class:`.FromClause`.
-
- """
-
- return sqlutil.ClauseAdapter(alias).traverse(self)
-
- def correspond_on_equivalents(self, column, equivalents):
- """Return corresponding_column for the given column, or if None
- search for a match in the given dictionary.
-
- """
- col = self.corresponding_column(column, require_embedded=True)
- if col is None and column in equivalents:
- for equiv in equivalents[column]:
- nc = self.corresponding_column(equiv, require_embedded=True)
- if nc:
- return nc
- return col
-
- def corresponding_column(self, column, require_embedded=False):
- """Given a :class:`.ColumnElement`, return the exported
- :class:`.ColumnElement` object from this :class:`.Selectable`
- which corresponds to that original
- :class:`~sqlalchemy.schema.Column` via a common ancestor
- column.
-
- :param column: the target :class:`.ColumnElement` to be matched
-
- :param require_embedded: only return corresponding columns for
- the given :class:`.ColumnElement`, if the given
- :class:`.ColumnElement` is actually present within a sub-element
- of this :class:`.FromClause`. Normally the column will match if
- it merely shares a common ancestor with one of the exported
- columns of this :class:`.FromClause`.
-
- """
-
- def embedded(expanded_proxy_set, target_set):
- for t in target_set.difference(expanded_proxy_set):
- if not set(_expand_cloned([t])
- ).intersection(expanded_proxy_set):
- return False
- return True
-
- # don't dig around if the column is locally present
- if self.c.contains_column(column):
- return column
- col, intersect = None, None
- target_set = column.proxy_set
- cols = self.c
- for c in cols:
- expanded_proxy_set = set(_expand_cloned(c.proxy_set))
- i = target_set.intersection(expanded_proxy_set)
- if i and (not require_embedded
- or embedded(expanded_proxy_set, target_set)):
- if col is None:
-
- # no corresponding column yet, pick this one.
-
- col, intersect = c, i
- elif len(i) > len(intersect):
-
- # 'c' has a larger field of correspondence than
- # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
- # matches a1.c.x->table.c.x better than
- # selectable.c.x->table.c.x does.
-
- col, intersect = c, i
- elif i == intersect:
-
- # they have the same field of correspondence. see
- # which proxy_set has fewer columns in it, which
- # indicates a closer relationship with the root
- # column. Also take into account the "weight"
- # attribute which CompoundSelect() uses to give
- # higher precedence to columns based on vertical
- # position in the compound statement, and discard
- # columns that have no reference to the target
- # column (also occurs with CompoundSelect)
-
- col_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- col.proxy_set if sc.shares_lineage(column)])
- c_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- c.proxy_set if sc.shares_lineage(column)])
- if c_distance < col_distance:
- col, intersect = c, i
- return col
-
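- # A hedged sketch (illustrative; assumes a ``users`` Table): an alias
- # exports columns which correspond to the columns of the selectable
- # it derives from.
- #
- #     >>> a = users.alias()
- #     >>> a.corresponding_column(users.c.id) is a.c.id
- #     True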
- @property
- def description(self):
- """a brief description of this FromClause.
-
- Used primarily for error message formatting.
-
- """
- return getattr(self, 'name', self.__class__.__name__ + " object")
-
- def _reset_exported(self):
- """delete memoized collections when a FromClause is cloned."""
-
- self._memoized_property.expire_instance(self)
-
- @_memoized_property
- def columns(self):
- """A named-based collection of :class:`.ColumnElement` objects
- maintained by this :class:`.FromClause`.
-
- The :attr:`.columns`, or :attr:`.c` collection, is the gateway
- to the construction of SQL expressions using table-bound or
- other selectable-bound columns::
-
- select([mytable]).where(mytable.c.somecolumn == 5)
-
- """
-
- if '_columns' not in self.__dict__:
- self._init_collections()
- self._populate_column_collection()
- return self._columns.as_immutable()
-
- @_memoized_property
- def primary_key(self):
- """Return the collection of Column objects which comprise the
- primary key of this FromClause."""
-
- self._init_collections()
- self._populate_column_collection()
- return self.primary_key
-
- @_memoized_property
- def foreign_keys(self):
- """Return the collection of ForeignKey objects which this
- FromClause references."""
-
- self._init_collections()
- self._populate_column_collection()
- return self.foreign_keys
-
- c = property(attrgetter('columns'),
- doc="An alias for the :attr:`.columns` attribute.")
- _select_iterable = property(attrgetter('columns'))
-
- def _init_collections(self):
- assert '_columns' not in self.__dict__
- assert 'primary_key' not in self.__dict__
- assert 'foreign_keys' not in self.__dict__
-
- self._columns = ColumnCollection()
- self.primary_key = ColumnSet()
- self.foreign_keys = set()
-
- @property
- def _cols_populated(self):
- return '_columns' in self.__dict__
-
- def _populate_column_collection(self):
- """Called on subclasses to establish the .c collection.
-
- Each implementation has a different way of establishing
- this collection.
-
- """
-
- def _refresh_for_new_column(self, column):
- """Given a column added to the .c collection of an underlying
- selectable, produce the local version of that column, assuming this
- selectable ultimately should proxy this column.
-
- this is used to "ping" a derived selectable to add a new column
- to its .c collection when a Column has been added to one of the
- Table objects it ultimately derives from.
-
- If the given selectable hasn't populated its .c collection yet,
- it should at least pass on the message to the contained selectables,
- but it will return None.
-
- This method is currently used by Declarative to allow Table
- columns to be added to a partially constructed inheritance
- mapping that may have already produced joins. The method
- isn't public right now, as the full span of implications
- and/or caveats aren't yet clear.
-
- It's also possible that this functionality could be invoked by
- default via an event, which would require that
- selectables maintain a weak referencing collection of all
- derivations.
-
- """
- if not self._cols_populated:
- return None
- elif column.key in self.columns and self.columns[column.key] is column:
- return column
- else:
- return None
-
-
-class BindParameter(ColumnElement):
- """Represent a bind parameter.
-
- Public constructor is the :func:`bindparam()` function.
-
- """
-
- __visit_name__ = 'bindparam'
- quote = None
-
- _is_crud = False
-
- def __init__(self, key, value, type_=None, unique=False,
- callable_=None,
- isoutparam=False, required=False,
- quote=None,
- _compared_to_operator=None,
- _compared_to_type=None):
- """Construct a BindParameter.
-
- :param key:
- the key for this bind param. Will be used in the generated
- SQL statement for dialects that use named parameters. This
- value may be modified when part of a compilation operation,
- if other :class:`BindParameter` objects exist with the same
- key, or if its length is too long and truncation is
- required.
-
- :param value:
- Initial value for this bind param. This value may be
- overridden by the dictionary of parameters sent to statement
- compilation/execution.
-
- :param callable\_:
- A callable function that takes the place of "value". The function
- will be called at statement execution time to determine the
- ultimate value. Used for scenarios where the actual bind
- value cannot be determined at the point at which the clause
- construct is created, but embedded bind values are still desirable.
-
- :param type\_:
- A ``TypeEngine`` object that will be used to pre-process the
- value corresponding to this :class:`BindParameter` at
- execution time.
-
- :param unique:
- if True, the key name of this :class:`BindParameter` will be
- modified if another :class:`BindParameter` of the same name
- already has been located within the containing
- :class:`.ClauseElement`.
-
- :param quote:
- True if this parameter name requires quoting and is not
- currently known as a SQLAlchemy reserved word; this currently
- only applies to the Oracle backend.
-
- :param required:
- if True, a value is required at execution time.
-
- :param isoutparam:
- if True, the parameter should be treated like a stored procedure
- "OUT" parameter.
-
- """
- if unique:
- self.key = _anonymous_label('%%(%d %s)s' % (id(self), key
- or 'param'))
- else:
- self.key = key or _anonymous_label('%%(%d param)s'
- % id(self))
-
- # identifying key that won't change across
- # clones, used to identify the bind's logical
- # identity
- self._identifying_key = self.key
-
- # key that was passed in the first place, used to
- # generate new keys
- self._orig_key = key or 'param'
-
- self.unique = unique
- self.value = value
- self.callable = callable_
- self.isoutparam = isoutparam
- self.required = required
- self.quote = quote
- if type_ is None:
- if _compared_to_type is not None:
- self.type = \
- _compared_to_type.coerce_compared_value(
- _compared_to_operator, value)
- else:
- self.type = sqltypes._type_map.get(type(value),
- sqltypes.NULLTYPE)
- elif isinstance(type_, type):
- self.type = type_()
- else:
- self.type = type_
-
- @property
- def effective_value(self):
- """Return the value of this bound parameter,
- taking into account if the ``callable`` parameter
- was set.
-
- The ``callable`` value will be evaluated
- and returned if present, else ``value``.
-
- """
- if self.callable:
- return self.callable()
- else:
- return self.value
-
- def _clone(self):
- c = ClauseElement._clone(self)
- if self.unique:
- c.key = _anonymous_label('%%(%d %s)s' % (id(c), c._orig_key
- or 'param'))
- return c
-
- def _convert_to_unique(self):
- if not self.unique:
- self.unique = True
- self.key = _anonymous_label('%%(%d %s)s' % (id(self),
- self._orig_key or 'param'))
-
- def compare(self, other, **kw):
- """Compare this :class:`BindParameter` to the given
- clause."""
-
- return isinstance(other, BindParameter) \
- and self.type._compare_type_affinity(other.type) \
- and self.value == other.value
-
- def __getstate__(self):
- """execute a deferred value for serialization purposes."""
-
- d = self.__dict__.copy()
- v = self.value
- if self.callable:
- v = self.callable()
- d['callable'] = None
- d['value'] = v
- return d
-
- def __repr__(self):
- return 'BindParameter(%r, %r, type_=%r)' % (self.key,
- self.value, self.type)
-
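
# A usage sketch, assuming a hypothetical "users" table: bindparam()
# defers the bound value to execution time, or to a callable_.
from sqlalchemy import bindparam, select, table, column

users = table("users", column("id"), column("name"))

# named placeholder; the actual value arrives at execute() time
stmt = select([users]).where(users.c.name == bindparam("uname"))
# conn.execute(stmt, uname="ed")   # hypothetical Connection

# callable_ defers value resolution until the statement executes
stmt2 = select([users]).where(
    users.c.id == bindparam("uid", callable_=lambda: 7))
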
-
-class TypeClause(ClauseElement):
- """Handle a type keyword in a SQL statement.
-
- Used by the ``Case`` statement.
-
- """
-
- __visit_name__ = 'typeclause'
-
- def __init__(self, type):
- self.type = type
-
-
-class Generative(object):
- """Allow a ClauseElement to generate itself via the
- @_generative decorator.
-
- """
-
- def _generate(self):
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- return s
-
-
-class Executable(Generative):
- """Mark a ClauseElement as supporting execution.
-
- :class:`.Executable` is a superclass for all "statement" types
- of objects, including :func:`select`, :func:`delete`, :func:`update`,
- :func:`insert`, :func:`text`.
-
- """
-
- supports_execution = True
- _execution_options = util.immutabledict()
- _bind = None
-
- @_generative
- def execution_options(self, **kw):
- """ Set non-SQL options for the statement which take effect during
- execution.
-
- Execution options can be set on a per-statement or
- per :class:`.Connection` basis. Additionally, the
- :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide
- access to execution options which they in turn configure upon
- connections.
-
- The :meth:`execution_options` method is generative. A new
- instance of this statement is returned that contains the options::
-
- statement = select([table.c.x, table.c.y])
- statement = statement.execution_options(autocommit=True)
-
- Note that only a subset of possible execution options can be applied
- to a statement - these include "autocommit" and "stream_results",
- but not "isolation_level" or "compiled_cache".
- See :meth:`.Connection.execution_options` for a full list of
- possible options.
-
- .. seealso::
-
- :meth:`.Connection.execution_options()`
-
- :meth:`.Query.execution_options()`
-
- """
- if 'isolation_level' in kw:
- raise exc.ArgumentError(
- "'isolation_level' execution option may only be specified "
- "on Connection.execution_options(), or "
- "per-engine using the isolation_level "
- "argument to create_engine()."
- )
- if 'compiled_cache' in kw:
- raise exc.ArgumentError(
- "'compiled_cache' execution option may only be specified "
- "on Connection.execution_options(), not per statement."
- )
- self._execution_options = self._execution_options.union(kw)
-
- def execute(self, *multiparams, **params):
- """Compile and execute this :class:`.Executable`."""
- e = self.bind
- if e is None:
- label = getattr(self, 'description', self.__class__.__name__)
- msg = ('This %s is not directly bound to a Connection or Engine. '
- 'Use the .execute() method of a Connection or Engine '
- 'to execute this construct.' % label)
- raise exc.UnboundExecutionError(msg)
- return e._execute_clauseelement(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Compile and execute this :class:`.Executable`, returning the
- result's scalar representation.
-
- """
- return self.execute(*multiparams, **params).scalar()
-
- @property
- def bind(self):
- """Returns the :class:`.Engine` or :class:`.Connection` to
- which this :class:`.Executable` is bound, or None if none found.
-
- This is a traversal which checks locally, then
- checks among the "from" clauses of associated objects
- until a bound engine or connection is found.
-
- """
- if self._bind is not None:
- return self._bind
-
- for f in _from_objects(self):
- if f is self:
- continue
- engine = f.bind
- if engine is not None:
- return engine
- else:
- return None
-
-
-# legacy, some outside users may be calling this
-_Executable = Executable
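
# A sketch of the generative behavior of execution_options(), with a
# hypothetical table "t": the original statement is left unchanged.
from sqlalchemy import select, table, column

t = table("t", column("x"))
stmt = select([t.c.x])
stmt2 = stmt.execution_options(autocommit=True)
assert stmt2 is not stmt   # a new copy carries the option
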
-
-
-class TextClause(Executable, ClauseElement):
- """Represent a literal SQL text fragment.
-
- Public constructor is the :func:`text()` function.
-
- """
+ 'tuple_', 'type_coerce', 'union', 'union_all', 'update']
+
+
+from .visitors import Visitable
+from .functions import func, modifier, FunctionElement
+from ..util.langhelpers import public_factory
+from .elements import ClauseElement, ColumnElement,\
+ BindParameter, UnaryExpression, BooleanClauseList, \
+ Label, Cast, Case, ColumnClause, TextClause, Over, Null, \
+ True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \
+ Grouping, not_, \
+ collate, literal_column, between,\
+ literal, outparam, type_coerce, ClauseList
+
+from .elements import SavepointClause, RollbackToSavepointClause, \
+ ReleaseSavepointClause
+
+from .base import ColumnCollection, Generative, Executable, \
+ PARSE_AUTOCOMMIT
+
+from .selectable import Alias, Join, Select, Selectable, TableClause, \
+ CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \
+ alias, GenerativeSelect, \
+ subquery, HasPrefixes, Exists, ScalarSelect, TextAsFrom
+
+
+from .dml import Insert, Update, Delete, UpdateBase, ValuesBase
+
+# factory functions - these pull class-bound constructors and classmethods
+# from SQL elements and selectables into public functions. This allows
+# the functions to be available in the sqlalchemy.sql.* namespace and
+# to be auto-cross-documenting from the function to the class itself.
+
+and_ = public_factory(BooleanClauseList.and_, ".expression.and_")
+or_ = public_factory(BooleanClauseList.or_, ".expression.or_")
+bindparam = public_factory(BindParameter, ".expression.bindparam")
+select = public_factory(Select, ".expression.select")
+text = public_factory(TextClause._create_text, ".expression.text")
+table = public_factory(TableClause, ".expression.table")
+column = public_factory(ColumnClause, ".expression.column")
+over = public_factory(Over, ".expression.over")
+label = public_factory(Label, ".expression.label")
+case = public_factory(Case, ".expression.case")
+cast = public_factory(Cast, ".expression.cast")
+extract = public_factory(Extract, ".expression.extract")
+tuple_ = public_factory(Tuple, ".expression.tuple_")
+except_ = public_factory(CompoundSelect._create_except, ".expression.except_")
+except_all = public_factory(CompoundSelect._create_except_all, ".expression.except_all")
+intersect = public_factory(CompoundSelect._create_intersect, ".expression.intersect")
+intersect_all = public_factory(CompoundSelect._create_intersect_all, ".expression.intersect_all")
+union = public_factory(CompoundSelect._create_union, ".expression.union")
+union_all = public_factory(CompoundSelect._create_union_all, ".expression.union_all")
+exists = public_factory(Exists, ".expression.exists")
+nullsfirst = public_factory(UnaryExpression._create_nullsfirst, ".expression.nullsfirst")
+nullslast = public_factory(UnaryExpression._create_nullslast, ".expression.nullslast")
+asc = public_factory(UnaryExpression._create_asc, ".expression.asc")
+desc = public_factory(UnaryExpression._create_desc, ".expression.desc")
+distinct = public_factory(UnaryExpression._create_distinct, ".expression.distinct")
+true = public_factory(True_._singleton, ".expression.true")
+false = public_factory(False_._singleton, ".expression.false")
+null = public_factory(Null._singleton, ".expression.null")
+join = public_factory(Join._create_join, ".expression.join")
+outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin")
+insert = public_factory(Insert, ".expression.insert")
+update = public_factory(Update, ".expression.update")
+delete = public_factory(Delete, ".expression.delete")
+
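# A rough sketch of the idea behind public_factory, not the actual
# util.langhelpers implementation: a module-level callable that forwards
# to the class-bound constructor and borrows its docstring so the
# function auto-cross-documents to the class.
def _sketch_public_factory(target, location):
    # 'location' names the documented path, e.g. ".expression.select"
    def wrapper(*args, **kw):
        return target(*args, **kw)
    wrapper.__doc__ = target.__doc__
    return wrapper
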
+
+# internal functions still being called from tests and the ORM,
+# these might be better off in some other namespace
+from .base import _from_objects
+from .elements import _literal_as_text, _clause_element_as_expr,\
+ _is_column, _labeled, _only_column_elements, _string_or_unprintable, \
+ _truncated_label, _clone, _cloned_difference, _cloned_intersection,\
+ _column_as_key, _literal_as_binds, _select_iterables, \
+ _corresponding_column_or_error
+from .selectable import _interpret_as_from
- __visit_name__ = 'textclause'
- _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)
- _execution_options = \
- Executable._execution_options.union(
- {'autocommit': PARSE_AUTOCOMMIT})
-
- @property
- def _select_iterable(self):
- return (self,)
-
- @property
- def selectable(self):
- return self
-
- _hide_froms = []
-
- def __init__(
- self,
- text='',
- bind=None,
- bindparams=None,
- typemap=None,
- autocommit=None):
-
- self._bind = bind
- self.bindparams = {}
- self.typemap = typemap
- if autocommit is not None:
- util.warn_deprecated('autocommit on text() is deprecated. '
- 'Use .execution_options(autocommit=True)')
- self._execution_options = \
- self._execution_options.union(
- {'autocommit': autocommit})
- if typemap is not None:
- for key in typemap:
- typemap[key] = sqltypes.to_instance(typemap[key])
-
- def repl(m):
- self.bindparams[m.group(1)] = bindparam(m.group(1))
- return ':%s' % m.group(1)
-
- # scan the string and search for bind parameter names, add them
- # to the list of bindparams
-
- self.text = self._bind_params_regex.sub(repl, text)
- if bindparams is not None:
- for b in bindparams:
- self.bindparams[b.key] = b
-
- @property
- def type(self):
- if self.typemap is not None and len(self.typemap) == 1:
- return list(self.typemap)[0]
- else:
- return sqltypes.NULLTYPE
-
- @property
- def comparator(self):
- return self.type.comparator_factory(self)
-
- def self_group(self, against=None):
- if against is operators.in_op:
- return Grouping(self)
- else:
- return self
-
- def _copy_internals(self, clone=_clone, **kw):
- self.bindparams = dict((b.key, clone(b, **kw))
- for b in self.bindparams.values())
-
- def get_children(self, **kwargs):
- return list(self.bindparams.values())
-
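
# Usage sketch with hypothetical statement text: the regex above picks
# up ":uname" and registers it as a bind parameter on the construct.
from sqlalchemy import text

stmt = text("SELECT id FROM users WHERE name = :uname")
# conn.execute(stmt, uname="ed")   # hypothetical Connection
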
-
-class Null(ColumnElement):
- """Represent the NULL keyword in a SQL statement.
-
- Public constructor is the :func:`null()` function.
-
- """
-
- __visit_name__ = 'null'
-
- def __init__(self):
- self.type = sqltypes.NULLTYPE
-
- def compare(self, other):
- return isinstance(other, Null)
-
-
-class False_(ColumnElement):
- """Represent the ``false`` keyword in a SQL statement.
-
- Public constructor is the :func:`false()` function.
-
- """
-
- __visit_name__ = 'false'
-
- def __init__(self):
- self.type = sqltypes.BOOLEANTYPE
-
- def compare(self, other):
- return isinstance(other, False_)
-
-class True_(ColumnElement):
- """Represent the ``true`` keyword in a SQL statement.
-
- Public constructor is the :func:`true()` function.
-
- """
-
- __visit_name__ = 'true'
-
- def __init__(self):
- self.type = sqltypes.BOOLEANTYPE
-
- def compare(self, other):
- return isinstance(other, True_)
-
-
-class ClauseList(ClauseElement):
- """Describe a list of clauses, separated by an operator.
-
- By default, is comma-separated, such as a column listing.
-
- """
- __visit_name__ = 'clauselist'
-
- def __init__(self, *clauses, **kwargs):
- self.operator = kwargs.pop('operator', operators.comma_op)
- self.group = kwargs.pop('group', True)
- self.group_contents = kwargs.pop('group_contents', True)
- if self.group_contents:
- self.clauses = [
- _literal_as_text(clause).self_group(against=self.operator)
- for clause in clauses if clause is not None]
- else:
- self.clauses = [
- _literal_as_text(clause)
- for clause in clauses if clause is not None]
-
- def __iter__(self):
- return iter(self.clauses)
-
- def __len__(self):
- return len(self.clauses)
-
- @property
- def _select_iterable(self):
- return iter(self)
-
- def append(self, clause):
- # TODO: not sure if i like the 'group_contents' flag. need to
- # define the difference between a ClauseList of ClauseLists,
- # and a "flattened" ClauseList of ClauseLists. flatten()
- # method ?
- if self.group_contents:
- self.clauses.append(_literal_as_text(clause).\
- self_group(against=self.operator))
- else:
- self.clauses.append(_literal_as_text(clause))
-
- def _copy_internals(self, clone=_clone, **kw):
- self.clauses = [clone(clause, **kw) for clause in self.clauses]
-
- def get_children(self, **kwargs):
- return self.clauses
-
- @property
- def _from_objects(self):
- return list(itertools.chain(*[c._from_objects for c in self.clauses]))
-
- def self_group(self, against=None):
- if self.group and operators.is_precedent(self.operator, against):
- return Grouping(self)
- else:
- return self
-
- def compare(self, other, **kw):
- """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`,
- including a comparison of all the clause items.
-
- """
- if not isinstance(other, ClauseList) and len(self.clauses) == 1:
- return self.clauses[0].compare(other, **kw)
- elif isinstance(other, ClauseList) and \
- len(self.clauses) == len(other.clauses):
- for i in range(0, len(self.clauses)):
- if not self.clauses[i].compare(other.clauses[i], **kw):
- return False
- else:
- return self.operator == other.operator
- else:
- return False
-
-
-class BooleanClauseList(ClauseList, ColumnElement):
- __visit_name__ = 'clauselist'
-
- def __init__(self, *clauses, **kwargs):
- super(BooleanClauseList, self).__init__(*clauses, **kwargs)
- self.type = sqltypes.to_instance(kwargs.get('type_',
- sqltypes.Boolean))
-
- @property
- def _select_iterable(self):
- return (self, )
-
- def self_group(self, against=None):
- if not self.clauses:
- return self
- else:
- return super(BooleanClauseList, self).self_group(against=against)
-
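
# and_() / or_() produce BooleanClauseList instances; a sketch with a
# hypothetical table "t".
from sqlalchemy import and_, or_, table, column

t = table("t", column("a"), column("b"))
expr = or_(and_(t.c.a == 1, t.c.b > 2), t.c.b == 5)
# renders roughly: t.a = :a_1 AND t.b > :b_1 OR t.b = :b_2
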
-
-class Tuple(ClauseList, ColumnElement):
-
- def __init__(self, *clauses, **kw):
- clauses = [_literal_as_binds(c) for c in clauses]
- self.type = kw.pop('type_', None)
- if self.type is None:
- self.type = _type_from_args(clauses)
- super(Tuple, self).__init__(*clauses, **kw)
-
- @property
- def _select_iterable(self):
- return (self, )
-
- def _bind_param(self, operator, obj):
- return Tuple(*[
- BindParameter(None, o, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
- for o in obj
- ]).self_group()
-
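
# The typical use of Tuple is a composite IN comparison; _bind_param()
# above expands each candidate tuple into individual bind parameters.
# Table "t" is hypothetical.
from sqlalchemy import tuple_, table, column

t = table("t", column("a"), column("b"))
expr = tuple_(t.c.a, t.c.b).in_([(1, 2), (3, 4)])
# renders roughly: (t.a, t.b) IN ((:param_1, :param_2), (:param_3, :param_4))
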
-
-class Case(ColumnElement):
- __visit_name__ = 'case'
-
- def __init__(self, whens, value=None, else_=None):
- try:
- whens = util.dictlike_iteritems(whens)
- except TypeError:
- pass
-
- if value is not None:
- whenlist = [
- (_literal_as_binds(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
- ]
- else:
- whenlist = [
- (_no_literals(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
- ]
-
- if whenlist:
- type_ = list(whenlist[-1])[-1].type
- else:
- type_ = None
-
- if value is None:
- self.value = None
- else:
- self.value = _literal_as_binds(value)
-
- self.type = type_
- self.whens = whenlist
- if else_ is not None:
- self.else_ = _literal_as_binds(else_)
- else:
- self.else_ = None
-
- def _copy_internals(self, clone=_clone, **kw):
- if self.value is not None:
- self.value = clone(self.value, **kw)
- self.whens = [(clone(x, **kw), clone(y, **kw))
- for x, y in self.whens]
- if self.else_ is not None:
- self.else_ = clone(self.else_, **kw)
-
- def get_children(self, **kwargs):
- if self.value is not None:
- yield self.value
- for x, y in self.whens:
- yield x
- yield y
- if self.else_ is not None:
- yield self.else_
-
- @property
- def _from_objects(self):
- return list(itertools.chain(*[x._from_objects for x in
- self.get_children()]))
-
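
# The two constructor forms handled above, with a hypothetical column.
from sqlalchemy import case, table, column

t = table("t", column("x"))
# "searched" form: each WHEN is a full boolean criterion
expr = case([(t.c.x == 1, 'one'), (t.c.x == 2, 'two')], else_='many')
# "simple" form: 'value' is compared against each key
expr2 = case({1: 'one', 2: 'two'}, value=t.c.x, else_='many')
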
-
-class FunctionElement(Executable, ColumnElement, FromClause):
- """Base for SQL function-oriented constructs.
-
- .. seealso::
-
- :class:`.Function` - named SQL function.
-
- :data:`.func` - namespace which produces registered or ad-hoc
- :class:`.Function` instances.
-
- :class:`.GenericFunction` - allows creation of registered function
- types.
-
- """
-
- packagenames = ()
-
- def __init__(self, *clauses, **kwargs):
- """Construct a :class:`.FunctionElement`.
- """
- args = [_literal_as_binds(c, self.name) for c in clauses]
- self.clause_expr = ClauseList(
- operator=operators.comma_op,
- group_contents=True, *args).\
- self_group()
-
- @property
- def columns(self):
- """Fulfill the 'columns' contract of :class:`.ColumnElement`.
-
- Returns a single-element list consisting of this object.
-
- """
- return [self]
-
- @util.memoized_property
- def clauses(self):
- """Return the underlying :class:`.ClauseList` which contains
- the arguments for this :class:`.FunctionElement`.
-
- """
- return self.clause_expr.element
-
- def over(self, partition_by=None, order_by=None):
- """Produce an OVER clause against this function.
-
- Used against aggregate or so-called "window" functions,
- for database backends that support window functions.
-
- The expression::
-
- func.row_number().over(order_by='x')
-
- is shorthand for::
-
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
-
- See :func:`~.expression.over` for a full description.
-
- .. versionadded:: 0.7
-
- """
- return over(self, partition_by=partition_by, order_by=order_by)
-
- @property
- def _from_objects(self):
- return self.clauses._from_objects
-
- def get_children(self, **kwargs):
- return self.clause_expr,
-
- def _copy_internals(self, clone=_clone, **kw):
- self.clause_expr = clone(self.clause_expr, **kw)
- self._reset_exported()
- FunctionElement.clauses._reset(self)
-
- def select(self):
- """Produce a :func:`~.expression.select` construct
- against this :class:`.FunctionElement`.
-
- This is shorthand for::
-
- s = select([function_element])
-
- """
- s = select([self])
- if self._execution_options:
- s = s.execution_options(**self._execution_options)
- return s
-
- def scalar(self):
- """Execute this :class:`.FunctionElement` against an embedded
- 'bind' and return a scalar value.
-
- This first calls :meth:`~.FunctionElement.select` to
- produce a SELECT construct.
-
- Note that :class:`.FunctionElement` can be passed to
- the :meth:`.Connectable.scalar` method of :class:`.Connection`
- or :class:`.Engine`.
-
- """
- return self.select().execute().scalar()
-
- def execute(self):
- """Execute this :class:`.FunctionElement` against an embedded
- 'bind'.
-
- This first calls :meth:`~.FunctionElement.select` to
- produce a SELECT construct.
-
- Note that :class:`.FunctionElement` can be passed to
- the :meth:`.Connectable.execute` method of :class:`.Connection`
- or :class:`.Engine`.
-
- """
- return self.select().execute()
-
- def _bind_param(self, operator, obj):
- return BindParameter(None, obj, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
-
-
-class Function(FunctionElement):
- """Describe a named SQL function.
-
- See the superclass :class:`.FunctionElement` for a description
- of public methods.
-
- .. seealso::
-
- :data:`.func` - namespace which produces registered or ad-hoc
- :class:`.Function` instances.
-
- :class:`.GenericFunction` - allows creation of registered function
- types.
-
- """
-
- __visit_name__ = 'function'
-
- def __init__(self, name, *clauses, **kw):
- """Construct a :class:`.Function`.
-
- The :data:`.func` construct is normally used to construct
- new :class:`.Function` instances.
-
- """
- self.packagenames = kw.pop('packagenames', None) or []
- self.name = name
- self._bind = kw.get('bind', None)
- self.type = sqltypes.to_instance(kw.get('type_', None))
-
- FunctionElement.__init__(self, *clauses, **kw)
-
- def _bind_param(self, operator, obj):
- return BindParameter(self.name, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type,
- unique=True)
-
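
# A sketch of ad-hoc func usage, which builds Function instances via
# attribute access; names here are hypothetical.
from sqlalchemy import func, select, table, column

t = table("t", column("x"))
stmt = select([func.count(t.c.x).label("cnt")])
# unknown names render verbatim as SQL functions:
expr = func.my_custom_func(t.c.x)   # my_custom_func(t.x)
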
-
-class Cast(ColumnElement):
-
- __visit_name__ = 'cast'
-
- def __init__(self, clause, totype, **kwargs):
- self.type = sqltypes.to_instance(totype)
- self.clause = _literal_as_binds(clause, None)
- self.typeclause = TypeClause(self.type)
-
- def _copy_internals(self, clone=_clone, **kw):
- self.clause = clone(self.clause, **kw)
- self.typeclause = clone(self.typeclause, **kw)
-
- def get_children(self, **kwargs):
- return self.clause, self.typeclause
-
- @property
- def _from_objects(self):
- return self.clause._from_objects
-
-
-class Extract(ColumnElement):
-
- __visit_name__ = 'extract'
-
- def __init__(self, field, expr, **kwargs):
- self.type = sqltypes.Integer()
- self.field = field
- self.expr = _literal_as_binds(expr, None)
-
- def _copy_internals(self, clone=_clone, **kw):
- self.expr = clone(self.expr, **kw)
-
- def get_children(self, **kwargs):
- return self.expr,
-
- @property
- def _from_objects(self):
- return self.expr._from_objects
-
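
# Usage sketch for the two constructs above, with a hypothetical table.
from sqlalchemy import cast, extract, Integer, table, column

t = table("t", column("created"), column("amount"))
as_int = cast(t.c.amount, Integer)    # CAST(t.amount AS INTEGER)
year = extract("year", t.c.created)   # EXTRACT(year FROM t.created)
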
-
-class UnaryExpression(ColumnElement):
- """Define a 'unary' expression.
-
- A unary expression has a single column expression
- and an operator. The operator can be placed on the left
- (where it is called the 'operator') or right (where it is called the
- 'modifier') of the column expression.
-
- """
- __visit_name__ = 'unary'
-
- def __init__(self, element, operator=None, modifier=None,
- type_=None, negate=None):
- self.operator = operator
- self.modifier = modifier
-
- self.element = _literal_as_text(element).\
- self_group(against=self.operator or self.modifier)
- self.type = sqltypes.to_instance(type_)
- self.negate = negate
-
- @util.memoized_property
- def _order_by_label_element(self):
- if self.modifier in (operators.desc_op, operators.asc_op):
- return self.element._order_by_label_element
- else:
- return None
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def _copy_internals(self, clone=_clone, **kw):
- self.element = clone(self.element, **kw)
-
- def get_children(self, **kwargs):
- return self.element,
-
- def compare(self, other, **kw):
- """Compare this :class:`UnaryExpression` against the given
- :class:`.ClauseElement`."""
-
- return (
- isinstance(other, UnaryExpression) and
- self.operator == other.operator and
- self.modifier == other.modifier and
- self.element.compare(other.element, **kw)
- )
-
- def _negate(self):
- if self.negate is not None:
- return UnaryExpression(
- self.element,
- operator=self.negate,
- negate=self.operator,
- modifier=self.modifier,
- type_=self.type)
- else:
- return super(UnaryExpression, self)._negate()
-
- def self_group(self, against=None):
- if self.operator and operators.is_precedent(self.operator,
- against):
- return Grouping(self)
- else:
- return self
-
-
-class BinaryExpression(ColumnElement):
- """Represent an expression that is ``LEFT <operator> RIGHT``.
-
- A :class:`.BinaryExpression` is generated automatically
- whenever two column expressions are used in a Python binary expression::
-
- >>> from sqlalchemy.sql import column
- >>> column('a') + column('b')
- <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
- >>> print column('a') + column('b')
- a + b
-
- """
-
- __visit_name__ = 'binary'
-
- def __init__(self, left, right, operator, type_=None,
- negate=None, modifiers=None):
- # allow compatibility with libraries that
- # refer to BinaryExpression directly and pass strings
- if isinstance(operator, util.string_types):
- operator = operators.custom_op(operator)
- self._orig = (left, right)
- self.left = _literal_as_text(left).self_group(against=operator)
- self.right = _literal_as_text(right).self_group(against=operator)
- self.operator = operator
- self.type = sqltypes.to_instance(type_)
- self.negate = negate
-
- if modifiers is None:
- self.modifiers = {}
- else:
- self.modifiers = modifiers
-
- def __bool__(self):
- if self.operator in (operator.eq, operator.ne):
- return self.operator(hash(self._orig[0]), hash(self._orig[1]))
- else:
- raise TypeError("Boolean value of this clause is not defined")
-
- __nonzero__ = __bool__
-
- @property
- def is_comparison(self):
- return operators.is_comparison(self.operator)
-
- @property
- def _from_objects(self):
- return self.left._from_objects + self.right._from_objects
-
- def _copy_internals(self, clone=_clone, **kw):
- self.left = clone(self.left, **kw)
- self.right = clone(self.right, **kw)
-
- def get_children(self, **kwargs):
- return self.left, self.right
-
- def compare(self, other, **kw):
- """Compare this :class:`BinaryExpression` against the
- given :class:`BinaryExpression`."""
-
- return (
- isinstance(other, BinaryExpression) and
- self.operator == other.operator and
- (
- self.left.compare(other.left, **kw) and
- self.right.compare(other.right, **kw) or
- (
- operators.is_commutative(self.operator) and
- self.left.compare(other.right, **kw) and
- self.right.compare(other.left, **kw)
- )
- )
- )
-
- def self_group(self, against=None):
- if operators.is_precedent(self.operator, against):
- return Grouping(self)
- else:
- return self
-
- def _negate(self):
- if self.negate is not None:
- return BinaryExpression(
- self.left,
- self.right,
- self.negate,
- negate=self.operator,
- type_=sqltypes.BOOLEANTYPE,
- modifiers=self.modifiers)
- else:
- return super(BinaryExpression, self)._negate()
-
-
-class Exists(UnaryExpression):
- __visit_name__ = UnaryExpression.__visit_name__
- _from_objects = []
-
- def __init__(self, *args, **kwargs):
- if args and isinstance(args[0], (SelectBase, ScalarSelect)):
- s = args[0]
- else:
- if not args:
- args = ([literal_column('*')],)
- s = select(*args, **kwargs).as_scalar().self_group()
-
- UnaryExpression.__init__(self, s, operator=operators.exists,
- type_=sqltypes.Boolean)
-
- def select(self, whereclause=None, **params):
- return select([self], whereclause, **params)
-
- def correlate(self, *fromclause):
- e = self._clone()
- e.element = self.element.correlate(*fromclause).self_group()
- return e
-
- def correlate_except(self, *fromclause):
- e = self._clone()
- e.element = self.element.correlate_except(*fromclause).self_group()
- return e
-
- def select_from(self, clause):
- """return a new :class:`.Exists` construct, applying the given
- expression to the :meth:`.Select.select_from` method of the select
- statement contained.
-
- """
- e = self._clone()
- e.element = self.element.select_from(clause).self_group()
- return e
-
- def where(self, clause):
- """return a new exists() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
- """
- e = self._clone()
- e.element = self.element.where(clause).self_group()
- return e
-
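
# A sketch of the generative methods above; exists() with no arguments
# starts from "SELECT *" and gains criteria via .where(). Tables are
# hypothetical.
from sqlalchemy import exists, select, table, column

users = table("users", column("id"))
addresses = table("addresses", column("user_id"))
stmt = select([users.c.id]).where(
    exists().where(addresses.c.user_id == users.c.id))
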
-
-class Join(FromClause):
- """represent a ``JOIN`` construct between two :class:`.FromClause`
- elements.
-
- The public constructor function for :class:`.Join` is the module-level
- :func:`join()` function, as well as the :meth:`.FromClause.join` method
- available on all :class:`.FromClause` subclasses.
-
- """
- __visit_name__ = 'join'
-
- def __init__(self, left, right, onclause=None, isouter=False):
- """Construct a new :class:`.Join`.
-
- The usual entrypoint here is the :func:`~.expression.join`
- function or the :meth:`.FromClause.join` method of any
- :class:`.FromClause` object.
-
- """
- self.left = _interpret_as_from(left)
- self.right = _interpret_as_from(right).self_group()
-
- if onclause is None:
- self.onclause = self._match_primaries(self.left, self.right)
- else:
- self.onclause = onclause
-
- self.isouter = isouter
-
- @property
- def description(self):
- return "Join object on %s(%d) and %s(%d)" % (
- self.left.description,
- id(self.left),
- self.right.description,
- id(self.right))
-
- def is_derived_from(self, fromclause):
- return fromclause is self or \
- self.left.is_derived_from(fromclause) or \
- self.right.is_derived_from(fromclause)
-
- def self_group(self, against=None):
- return FromGrouping(self)
-
- def _populate_column_collection(self):
- columns = [c for c in self.left.columns] + \
- [c for c in self.right.columns]
-
- self.primary_key.extend(sqlutil.reduce_columns(
- (c for c in columns if c.primary_key), self.onclause))
- self._columns.update((col._label, col) for col in columns)
- self.foreign_keys.update(itertools.chain(
- *[col.foreign_keys for col in columns]))
-
- def _refresh_for_new_column(self, column):
- col = self.left._refresh_for_new_column(column)
- if col is None:
- col = self.right._refresh_for_new_column(column)
- if col is not None:
- if self._cols_populated:
- self._columns[col._label] = col
- self.foreign_keys.add(col)
- if col.primary_key:
- self.primary_key.add(col)
- return col
- return None
-
- def _copy_internals(self, clone=_clone, **kw):
- self._reset_exported()
- self.left = clone(self.left, **kw)
- self.right = clone(self.right, **kw)
- self.onclause = clone(self.onclause, **kw)
-
- def get_children(self, **kwargs):
- return self.left, self.right, self.onclause
-
- def _match_primaries(self, left, right):
- if isinstance(left, Join):
- left_right = left.right
- else:
- left_right = None
- return sqlutil.join_condition(left, right, a_subset=left_right)
-
- def select(self, whereclause=None, **kwargs):
- """Create a :class:`.Select` from this :class:`.Join`.
-
- The equivalent long-hand form, given a :class:`.Join` object
- ``j``, is::
-
- from sqlalchemy import select
- j = select([j.left, j.right], **kw).\\
- where(whereclause).\\
- select_from(j)
-
- :param whereclause: the WHERE criterion that will be sent to
- the :func:`select()` function
-
- :param \**kwargs: all other kwargs are sent to the
- underlying :func:`select()` function.
-
- """
- collist = [self.left, self.right]
-
- return select(collist, whereclause, from_obj=[self], **kwargs)
-
- @property
- def bind(self):
- return self.left.bind or self.right.bind
-
- def alias(self, name=None, flat=False):
- """return an alias of this :class:`.Join`.
-
- The default behavior here is to first produce a SELECT
- construct from this :class:`.Join`, then to produce a
- :class:`.Alias` from that. So given a join of the form::
-
- j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
-
- The JOIN by itself would look like::
-
- table_a JOIN table_b ON table_a.id = table_b.a_id
-
- Whereas the alias of the above, ``j.alias()``, would in a
- SELECT context look like::
-
- (SELECT table_a.id AS table_a_id, table_b.id AS table_b_id,
- table_b.a_id AS table_b_a_id
- FROM table_a
- JOIN table_b ON table_a.id = table_b.a_id) AS anon_1
-
- The equivalent long-hand form, given a :class:`.Join` object
- ``j``, is::
-
- from sqlalchemy import select, alias
- j = alias(
- select([j.left, j.right]).\\
- select_from(j).\\
- with_labels(True).\\
- correlate(False),
- name=name
- )
-
- The selectable produced by :meth:`.Join.alias` features the same
- columns as that of the two individual selectables presented under
- a single name - the individual columns are "auto-labeled", meaning
- the ``.c.`` collection of the resulting :class:`.Alias` represents
- the names of the individual columns using a ``<tablename>_<columnname>``
- scheme::
-
- j.c.table_a_id
- j.c.table_b_a_id
-
- :meth:`.Join.alias` also features an alternate
- option for aliasing joins which produces no enclosing SELECT and
- does not normally apply labels to the column names. The
- ``flat=True`` option will call :meth:`.FromClause.alias`
- against the left and right sides individually.
- Using this option, no new ``SELECT`` is produced;
- instead, starting from a construct such as::
-
- j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
- j = j.alias(flat=True)
-
- we get a result like this::
-
- table_a AS table_a_1 JOIN table_b AS table_b_1 ON
- table_a_1.id = table_b_1.a_id
-
- The ``flat=True`` argument is also propagated to the contained
- selectables, so that a composite join such as::
-
- j = table_a.join(
- table_b.join(table_c,
- table_b.c.id == table_c.c.b_id),
- table_b.c.a_id == table_a.c.id
- ).alias(flat=True)
-
- Will produce an expression like::
-
- table_a AS table_a_1 JOIN (
- table_b AS table_b_1 JOIN table_c AS table_c_1
- ON table_b_1.id = table_c_1.b_id
- ) ON table_a_1.id = table_b_1.a_id
-
- The standalone :func:`~.expression.alias` function as well as the
- base :meth:`.FromClause.alias` method also support the ``flat=True``
- argument as a no-op, so that the argument can be passed to the
- ``alias()`` method of any selectable.
-
- .. versionadded:: 0.9.0 Added the ``flat=True`` option to create
- "aliases" of joins without enclosing inside of a SELECT
- subquery.
-
- :param name: name given to the alias.
-
- :param flat: if True, produce an alias of the left and right
- sides of this :class:`.Join` and return the join of those
- two selectables. This produces join expression that does not
- include an enclosing SELECT.
-
- .. versionadded:: 0.9.0
-
- .. seealso::
-
- :func:`~.expression.alias`
-
- """
- if flat:
- assert name is None, "Can't send name argument with flat"
- left_a, right_a = self.left.alias(flat=True), \
- self.right.alias(flat=True)
- adapter = sqlutil.ClauseAdapter(left_a).\
- chain(sqlutil.ClauseAdapter(right_a))
-
- return left_a.join(right_a,
- adapter.traverse(self.onclause), isouter=self.isouter)
- else:
- return self.select(use_labels=True, correlate=False).alias(name)
-
- @property
- def _hide_froms(self):
- return itertools.chain(*[_from_objects(x.left, x.right)
- for x in self._cloned_set])
-
- @property
- def _from_objects(self):
- return [self] + \
- self.onclause._from_objects + \
- self.left._from_objects + \
- self.right._from_objects
-
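
# Construction sketch, including the flat=True aliasing form added in
# 0.9; tables are hypothetical.
from sqlalchemy import join, select, table, column

a = table("a", column("id"))
b = table("b", column("a_id"))
j = join(a, b, a.c.id == b.c.a_id)
stmt = select([a.c.id]).select_from(j)
j_alias = j.alias(flat=True)   # aliases both sides, no subquery
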
-
-class Alias(FromClause):
- """Represents an table or selectable alias (AS).
-
- Represents an alias, as typically applied to any table or
- sub-select within a SQL statement using the ``AS`` keyword (or
- without the keyword on certain databases such as Oracle).
-
- This object is constructed from the :func:`~.expression.alias` module-level
- function as well as the :meth:`.FromClause.alias` method available on all
- :class:`.FromClause` subclasses.
-
- """
-
- __visit_name__ = 'alias'
- named_with_column = True
-
- def __init__(self, selectable, name=None):
- baseselectable = selectable
- while isinstance(baseselectable, Alias):
- baseselectable = baseselectable.element
- self.original = baseselectable
- self.supports_execution = baseselectable.supports_execution
- if self.supports_execution:
- self._execution_options = baseselectable._execution_options
- self.element = selectable
- if name is None:
- if self.original.named_with_column:
- name = getattr(self.original, 'name', None)
- name = _anonymous_label('%%(%d %s)s' % (id(self), name
- or 'anon'))
- self.name = name
-
- @property
- def description(self):
- if util.py3k:
- return self.name
- else:
- return self.name.encode('ascii', 'backslashreplace')
-
- def as_scalar(self):
- try:
- return self.element.as_scalar()
- except AttributeError:
- raise AttributeError("Element %s does not support "
- "'as_scalar()'" % self.element)
-
- def is_derived_from(self, fromclause):
- if fromclause in self._cloned_set:
- return True
- return self.element.is_derived_from(fromclause)
-
- def _populate_column_collection(self):
- for col in self.element.columns:
- col._make_proxy(self)
-
- def _refresh_for_new_column(self, column):
- col = self.element._refresh_for_new_column(column)
- if col is not None:
- if not self._cols_populated:
- return None
- else:
- return col._make_proxy(self)
- else:
- return None
-
- def _copy_internals(self, clone=_clone, **kw):
- # don't apply anything to an aliased Table
- # for now. May want to drive this from
- # the given **kw.
- if isinstance(self.element, TableClause):
- return
- self._reset_exported()
- self.element = clone(self.element, **kw)
- baseselectable = self.element
- while isinstance(baseselectable, Alias):
- baseselectable = baseselectable.element
- self.original = baseselectable
-
- def get_children(self, column_collections=True, **kw):
- if column_collections:
- for c in self.c:
- yield c
- yield self.element
-
- @property
- def _from_objects(self):
- return [self]
-
- @property
- def bind(self):
- return self.element.bind
-
-
-class CTE(Alias):
- """Represent a Common Table Expression.
-
- The :class:`.CTE` object is obtained using the
- :meth:`.SelectBase.cte` method from any selectable.
- See that method for complete examples.
-
- .. versionadded:: 0.7.6
-
- """
- __visit_name__ = 'cte'
-
- def __init__(self, selectable,
- name=None,
- recursive=False,
- cte_alias=False,
- _restates=frozenset()):
- self.recursive = recursive
- self.cte_alias = cte_alias
- self._restates = _restates
- super(CTE, self).__init__(selectable, name=name)
-
- def alias(self, name=None, flat=False):
- return CTE(
- self.original,
- name=name,
- recursive=self.recursive,
- cte_alias=self.name
- )
-
- def union(self, other):
- return CTE(
- self.original.union(other),
- name=self.name,
- recursive=self.recursive,
- _restates=self._restates.union([self])
- )
-
- def union_all(self, other):
- return CTE(
- self.original.union_all(other),
- name=self.name,
- recursive=self.recursive,
- _restates=self._restates.union([self])
- )
-
-
-class Grouping(ColumnElement):
- """Represent a grouping within a column expression"""
-
- __visit_name__ = 'grouping'
-
- def __init__(self, element):
- self.element = element
- self.type = getattr(element, 'type', sqltypes.NULLTYPE)
-
- @property
- def _label(self):
- return getattr(self.element, '_label', None) or self.anon_label
-
- def _copy_internals(self, clone=_clone, **kw):
- self.element = clone(self.element, **kw)
-
- def get_children(self, **kwargs):
- return self.element,
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def __getattr__(self, attr):
- return getattr(self.element, attr)
-
- def __getstate__(self):
- return {'element': self.element, 'type': self.type}
-
- def __setstate__(self, state):
- self.element = state['element']
- self.type = state['type']
-
- def compare(self, other, **kw):
- return isinstance(other, Grouping) and \
- self.element.compare(other.element)
-
-
-class FromGrouping(FromClause):
- """Represent a grouping of a FROM clause"""
- __visit_name__ = 'grouping'
-
- def __init__(self, element):
- self.element = element
-
- def _init_collections(self):
- pass
-
- @property
- def columns(self):
- return self.element.columns
-
- @property
- def primary_key(self):
- return self.element.primary_key
-
- @property
- def foreign_keys(self):
- return self.element.foreign_keys
-
- def is_derived_from(self, element):
- return self.element.is_derived_from(element)
-
- def alias(self, **kw):
- return FromGrouping(self.element.alias(**kw))
-
- @property
- def _hide_froms(self):
- return self.element._hide_froms
-
- def get_children(self, **kwargs):
- return self.element,
-
- def _copy_internals(self, clone=_clone, **kw):
- self.element = clone(self.element, **kw)
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def __getattr__(self, attr):
- return getattr(self.element, attr)
-
- def __getstate__(self):
- return {'element': self.element}
-
- def __setstate__(self, state):
- self.element = state['element']
-
-
-class Over(ColumnElement):
- """Represent an OVER clause.
-
- This is a special operator against a so-called
- "window" function, as well as any aggregate function,
- which produces results relative to the result set
- itself. It's supported only by certain database
- backends.
-
- """
- __visit_name__ = 'over'
-
- order_by = None
- partition_by = None
-
- def __init__(self, func, partition_by=None, order_by=None):
- self.func = func
- if order_by is not None:
- self.order_by = ClauseList(*util.to_list(order_by))
- if partition_by is not None:
- self.partition_by = ClauseList(*util.to_list(partition_by))
-
- @util.memoized_property
- def type(self):
- return self.func.type
-
- def get_children(self, **kwargs):
- return [c for c in
- (self.func, self.partition_by, self.order_by)
- if c is not None]
-
- def _copy_internals(self, clone=_clone, **kw):
- self.func = clone(self.func, **kw)
- if self.partition_by is not None:
- self.partition_by = clone(self.partition_by, **kw)
- if self.order_by is not None:
- self.order_by = clone(self.order_by, **kw)
-
- @property
- def _from_objects(self):
- return list(itertools.chain(
- *[c._from_objects for c in
- (self.func, self.partition_by, self.order_by)
- if c is not None]
- ))
-
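
# Usage sketch, equivalent to the FunctionElement.over() shorthand
# shown earlier; table "t" is hypothetical.
from sqlalchemy import func, over, table, column

t = table("t", column("x"), column("y"))
expr = over(func.row_number(), partition_by=t.c.x, order_by=t.c.y)
# renders: row_number() OVER (PARTITION BY t.x ORDER BY t.y)
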
-
-class Label(ColumnElement):
- """Represents a column label (AS).
-
- Represent a label, as typically applied to any column-level
- element using the ``AS`` SQL keyword.
-
- This object is constructed from the :func:`label()` module-level
- function as well as the :meth:`.ColumnElement.label` method available
- on all :class:`.ColumnElement` subclasses.
-
- """
-
- __visit_name__ = 'label'
-
- def __init__(self, name, element, type_=None):
- while isinstance(element, Label):
- element = element.element
- if name:
- self.name = name
- else:
- self.name = _anonymous_label('%%(%d %s)s' % (id(self),
- getattr(element, 'name', 'anon')))
- self.key = self._label = self._key_label = self.name
- self._element = element
- self._type = type_
- self.quote = element.quote
- self._proxies = [element]
-
- @util.memoized_property
- def _order_by_label_element(self):
- return self
-
- @util.memoized_property
- def type(self):
- return sqltypes.to_instance(
- self._type or getattr(self._element, 'type', None)
- )
-
- @util.memoized_property
- def element(self):
- return self._element.self_group(against=operators.as_)
-
- def self_group(self, against=None):
- sub_element = self._element.self_group(against=against)
- if sub_element is not self._element:
- return Label(self.name,
- sub_element,
- type_=self._type)
- else:
- return self
-
- @property
- def primary_key(self):
- return self.element.primary_key
-
- @property
- def foreign_keys(self):
- return self.element.foreign_keys
-
- def get_children(self, **kwargs):
- return self.element,
-
- def _copy_internals(self, clone=_clone, **kw):
- self.element = clone(self.element, **kw)
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def _make_proxy(self, selectable, name=None, **kw):
- e = self.element._make_proxy(selectable,
- name=name if name else self.name)
- e._proxies.append(self)
- if self._type is not None:
- e.type = self._type
- return e
-
-
-class ColumnClause(Immutable, ColumnElement):
- """Represents a generic column expression from any textual string.
-
- This includes columns associated with tables, aliases and select
- statements, but also any arbitrary text. May or may not be bound
- to an underlying :class:`.Selectable`.
-
- :class:`.ColumnClause` is constructed by itself typically via
- the :func:`~.expression.column` function. It may be placed directly
- into constructs such as :func:`.select` constructs::
-
- from sqlalchemy.sql import column, select
-
- c1, c2 = column("c1"), column("c2")
- s = select([c1, c2]).where(c1==5)
-
- There is also a variant on :func:`~.expression.column` known
- as :func:`~.expression.literal_column` - the difference is that
- in the latter case, the string value is assumed to be an exact
- expression, rather than a column name, so that no quoting rules
- or similar are applied::
-
- from sqlalchemy.sql import literal_column, select
-
- s = select([literal_column("5 + 7")])
-
- :class:`.ColumnClause` can also be used in a table-like
- fashion by combining the :func:`~.expression.column` function
- with the :func:`~.expression.table` function, to produce
- a "lightweight" form of table metadata::
-
- from sqlalchemy.sql import table, column
-
- user = table("user",
- column("id"),
- column("name"),
- column("description"),
- )
-
- The above construct can be created in an ad-hoc fashion and is
- not associated with any :class:`.schema.MetaData`, unlike its
- more full-fledged :class:`.schema.Table` counterpart.
-
- :param text: the text of the element.
-
- :param selectable: parent selectable.
-
- :param type: :class:`.types.TypeEngine` object which can associate
- this :class:`.ColumnClause` with a type.
-
- :param is_literal: if True, the :class:`.ColumnClause` is assumed to
- be an exact expression that will be delivered to the output with no
- quoting rules applied regardless of case sensitive settings. the
- :func:`literal_column()` function is usually used to create such a
- :class:`.ColumnClause`.
-
-
- """
- __visit_name__ = 'column'
-
- onupdate = default = server_default = server_onupdate = None
-
- _memoized_property = util.group_expirable_memoized_property()
-
- def __init__(self, text, selectable=None, type_=None, is_literal=False):
- self.key = self.name = text
- self.table = selectable
- self.type = sqltypes.to_instance(type_)
- self.is_literal = is_literal
-
- def _compare_name_for_result(self, other):
- if self.is_literal or \
- self.table is None or \
- not hasattr(other, 'proxy_set') or (
- isinstance(other, ColumnClause) and other.is_literal
- ):
- return super(ColumnClause, self).\
- _compare_name_for_result(other)
- else:
- return other.proxy_set.intersection(self.proxy_set)
-
- def _get_table(self):
- return self.__dict__['table']
-
- def _set_table(self, table):
- self._memoized_property.expire_instance(self)
- self.__dict__['table'] = table
- table = property(_get_table, _set_table)
-
- @_memoized_property
- def _from_objects(self):
- t = self.table
- if t is not None:
- return [t]
- else:
- return []
-
- @util.memoized_property
- def description(self):
- if util.py3k:
- return self.name
- else:
- return self.name.encode('ascii', 'backslashreplace')
-
- @_memoized_property
- def _key_label(self):
- if self.key != self.name:
- return self._gen_label(self.key)
- else:
- return self._label
-
- @_memoized_property
- def _label(self):
- return self._gen_label(self.name)
-
- def _gen_label(self, name):
- t = self.table
- if self.is_literal:
- return None
-
- elif t is not None and t.named_with_column:
- if getattr(t, 'schema', None):
- label = t.schema.replace('.', '_') + "_" + \
- t.name + "_" + name
- else:
- label = t.name + "_" + name
-
- # ensure the label name doesn't conflict with that
- # of an existing column
- if label in t.c:
- _label = label
- counter = 1
- while _label in t.c:
- _label = label + "_" + str(counter)
- counter += 1
- label = _label
-
- return _as_truncated(label)
-
- else:
- return name
-
- def _bind_param(self, operator, obj):
- return BindParameter(self.name, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type,
- unique=True)
-
- def _make_proxy(self, selectable, name=None, attach=True,
- name_is_truncatable=False, **kw):
- # propagate the "is_literal" flag only if we are keeping our name,
- # otherwise its considered to be a label
- is_literal = self.is_literal and (name is None or name == self.name)
- c = self._constructor(
- _as_truncated(name or self.name) if \
- name_is_truncatable else \
- (name or self.name),
- selectable=selectable,
- type_=self.type,
- is_literal=is_literal
- )
- if name is None:
- c.key = self.key
- c._proxies = [self]
- if selectable._is_clone_of is not None:
- c._is_clone_of = \
- selectable._is_clone_of.columns.get(c.key)
-
- if attach:
- selectable._columns[c.key] = c
- return c
-
-
-class TableClause(Immutable, FromClause):
- """Represents a minimal "table" construct.
-
- The constructor for :class:`.TableClause` is the
- :func:`~.expression.table` function. This produces
- a lightweight table object that has only a name and a
- collection of columns, which are typically produced
- by the :func:`~.expression.column` function::
-
- from sqlalchemy.sql import table, column
-
- user = table("user",
- column("id"),
- column("name"),
- column("description"),
- )
-
- The :class:`.TableClause` construct serves as the base for
- the more commonly used :class:`~.schema.Table` object, providing
- the usual set of :class:`~.expression.FromClause` services including
- the ``.c.`` collection and statement generation methods.
-
- It does **not** provide all the additional schema-level services
- of :class:`~.schema.Table`, including constraints, references to other
- tables, or support for :class:`.MetaData`-level services. It's useful
- on its own as an ad-hoc construct used to generate quick SQL
- statements when a more fully fledged :class:`~.schema.Table`
- is not on hand.
-
- """
-
- __visit_name__ = 'table'
-
- named_with_column = True
-
- implicit_returning = False
- """:class:`.TableClause` doesn't support having a primary key or column
- -level defaults, so implicit returning doesn't apply."""
-
- _autoincrement_column = None
- """No PK or default support so no autoincrement column."""
-
- def __init__(self, name, *columns):
- super(TableClause, self).__init__()
- self.name = self.fullname = name
- self._columns = ColumnCollection()
- self.primary_key = ColumnSet()
- self.foreign_keys = set()
- for c in columns:
- self.append_column(c)
-
- def _init_collections(self):
- pass
-
- @util.memoized_property
- def description(self):
- if util.py3k:
- return self.name
- else:
- return self.name.encode('ascii', 'backslashreplace')
-
- def append_column(self, c):
- self._columns[c.key] = c
- c.table = self
-
- def get_children(self, column_collections=True, **kwargs):
- if column_collections:
- return [c for c in self.c]
- else:
- return []
-
- def count(self, whereclause=None, **params):
- """return a SELECT COUNT generated against this
- :class:`.TableClause`."""
-
- if self.primary_key:
- col = list(self.primary_key)[0]
- else:
- col = list(self.columns)[0]
- return select(
- [func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
-
- def insert(self, values=None, inline=False, **kwargs):
- """Generate an :func:`.insert` construct against this
- :class:`.TableClause`.
-
- E.g.::
-
- table.insert().values(name='foo')
-
- See :func:`.insert` for argument and usage information.
-
- """
-
- return insert(self, values=values, inline=inline, **kwargs)
-
- def update(self, whereclause=None, values=None, inline=False, **kwargs):
- """Generate an :func:`.update` construct against this
- :class:`.TableClause`.
-
- E.g.::
-
- table.update().where(table.c.id==7).values(name='foo')
-
- See :func:`.update` for argument and usage information.
-
- """
-
- return update(self, whereclause=whereclause,
- values=values, inline=inline, **kwargs)
-
- def delete(self, whereclause=None, **kwargs):
- """Generate a :func:`.delete` construct against this
- :class:`.TableClause`.
-
- E.g.::
-
- table.delete().where(table.c.id==7)
-
- See :func:`.delete` for argument and usage information.
-
- """
-
- return delete(self, whereclause, **kwargs)
-
- @property
- def _from_objects(self):
- return [self]
-
-
-class SelectBase(Executable, FromClause):
- """Base class for :class:`.Select` and ``CompoundSelects``."""
-
- _order_by_clause = ClauseList()
- _group_by_clause = ClauseList()
- _limit = None
- _offset = None
-
- def __init__(self,
- use_labels=False,
- for_update=False,
- limit=None,
- offset=None,
- order_by=None,
- group_by=None,
- bind=None,
- autocommit=None):
- self.use_labels = use_labels
- self.for_update = for_update
- if autocommit is not None:
- util.warn_deprecated('autocommit on select() is deprecated. '
- 'Use .execution_options(autocommit=True)')
- self._execution_options = \
- self._execution_options.union(
- {'autocommit': autocommit})
- if limit is not None:
- self._limit = util.asint(limit)
- if offset is not None:
- self._offset = util.asint(offset)
- self._bind = bind
-
- if order_by is not None:
- self._order_by_clause = ClauseList(*util.to_list(order_by))
- if group_by is not None:
- self._group_by_clause = ClauseList(*util.to_list(group_by))
-
- def as_scalar(self):
- """return a 'scalar' representation of this selectable, which can be
- used as a column expression.
-
- Typically, a select statement which has only one column in its columns
- clause is eligible to be used as a scalar expression.
-
- The returned object is an instance of
- :class:`ScalarSelect`.
-
- """
- return ScalarSelect(self)
-
- @_generative
- def apply_labels(self):
- """return a new selectable with the 'use_labels' flag set to True.
-
- This will result in column expressions being generated using labels
- against their table name, such as "SELECT somecolumn AS
- tablename_somecolumn". This allows selectables which contain multiple
- FROM clauses to produce a unique set of column names regardless of
- name conflicts among the individual FROM clauses.
-
- """
- self.use_labels = True
-
- def label(self, name):
- """return a 'scalar' representation of this selectable, embedded as a
- subquery with a label.
-
- .. seealso::
-
- :meth:`~.SelectBase.as_scalar`.
-
- """
- return self.as_scalar().label(name)
-
- def cte(self, name=None, recursive=False):
- """Return a new :class:`.CTE`, or Common Table Expression instance.
-
- Common table expressions are a SQL standard whereby SELECT
- statements can draw upon secondary statements specified along
- with the primary statement, using a clause called "WITH".
- Special semantics regarding UNION can also be employed to
- allow "recursive" queries, where a SELECT statement can draw
- upon the set of rows that have previously been selected.
-
- SQLAlchemy detects :class:`.CTE` objects, which are treated
- similarly to :class:`.Alias` objects, as special elements
- to be delivered to the FROM clause of the statement as well
- as to a WITH clause at the top of the statement.
-
- .. versionadded:: 0.7.6
-
- :param name: name given to the common table expression. Like
- :meth:`.FromClause.alias`, the name can be left as ``None``
- in which case an anonymous symbol will be used at query
- compile time.
- :param recursive: if ``True``, will render ``WITH RECURSIVE``.
- A recursive common table expression is intended to be used in
- conjunction with UNION ALL in order to derive rows
- from those already selected.
-
- The following two examples are adapted from PostgreSQL's
- documentation at
- http://www.postgresql.org/docs/8.4/static/queries-with.html.
-
- Example 1, non recursive::
-
- from sqlalchemy import Table, Column, String, Integer, MetaData, \\
- select, func
-
- metadata = MetaData()
-
- orders = Table('orders', metadata,
- Column('region', String),
- Column('amount', Integer),
- Column('product', String),
- Column('quantity', Integer)
- )
-
- regional_sales = select([
- orders.c.region,
- func.sum(orders.c.amount).label('total_sales')
- ]).group_by(orders.c.region).cte("regional_sales")
-
-
- top_regions = select([regional_sales.c.region]).\\
- where(
- regional_sales.c.total_sales >
- select([
- func.sum(regional_sales.c.total_sales)/10
- ])
- ).cte("top_regions")
-
- statement = select([
- orders.c.region,
- orders.c.product,
- func.sum(orders.c.quantity).label("product_units"),
- func.sum(orders.c.amount).label("product_sales")
- ]).where(orders.c.region.in_(
- select([top_regions.c.region])
- )).group_by(orders.c.region, orders.c.product)
-
- result = conn.execute(statement).fetchall()
-
- Example 2, WITH RECURSIVE::
-
- from sqlalchemy import Table, Column, String, Integer, MetaData, \\
- select, func
-
- metadata = MetaData()
-
- parts = Table('parts', metadata,
- Column('part', String),
- Column('sub_part', String),
- Column('quantity', Integer),
- )
-
- included_parts = select([
- parts.c.sub_part,
- parts.c.part,
- parts.c.quantity]).\\
- where(parts.c.part=='our part').\\
- cte(recursive=True)
-
-
- incl_alias = included_parts.alias()
- parts_alias = parts.alias()
- included_parts = included_parts.union_all(
- select([
- parts_alias.c.part,
- parts_alias.c.sub_part,
- parts_alias.c.quantity
- ]).
- where(parts_alias.c.part==incl_alias.c.sub_part)
- )
-
- statement = select([
- included_parts.c.sub_part,
- func.sum(included_parts.c.quantity).
- label('total_quantity')
- ]).\\
- select_from(included_parts.join(parts,
- included_parts.c.part==parts.c.part)).\\
- group_by(included_parts.c.sub_part)
-
- result = conn.execute(statement).fetchall()
-
-
- .. seealso::
-
- :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`.
-
- """
- return CTE(self, name=name, recursive=recursive)
-
- @_generative
- @util.deprecated('0.6',
- message=":func:`.autocommit` is deprecated. Use "
- ":func:`.Executable.execution_options` with the "
- "'autocommit' flag.")
- def autocommit(self):
- """return a new selectable with the 'autocommit' flag set to
- True."""
-
- self._execution_options = \
- self._execution_options.union({'autocommit': True})
-
- def _generate(self):
- """Override the default _generate() method to also clear out
- exported collections."""
-
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- s._reset_exported()
- return s
-
- @_generative
- def limit(self, limit):
- """return a new selectable with the given LIMIT criterion
- applied."""
-
- self._limit = util.asint(limit)
-
- @_generative
- def offset(self, offset):
- """return a new selectable with the given OFFSET criterion
- applied."""
-
- self._offset = util.asint(offset)
-
- @_generative
- def order_by(self, *clauses):
- """return a new selectable with the given list of ORDER BY
- criterion applied.
-
- The criterion will be appended to any pre-existing ORDER BY
- criterion.
-
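- E.g., a brief sketch, assuming a hypothetical ``table1``::
-
- stmt = select([table1]).order_by(table1.c.name, table1.c.id)
-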
- """
-
- self.append_order_by(*clauses)
-
- @_generative
- def group_by(self, *clauses):
- """return a new selectable with the given list of GROUP BY
- criterion applied.
-
- The criterion will be appended to any pre-existing GROUP BY
- criterion.
-
- """
-
- self.append_group_by(*clauses)
-
- def append_order_by(self, *clauses):
- """Append the given ORDER BY criterion applied to this selectable.
-
- The criterion will be appended to any pre-existing ORDER BY criterion.
-
- This is an **in-place** mutation method; the
- :meth:`~.SelectBase.order_by` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- if len(clauses) == 1 and clauses[0] is None:
- self._order_by_clause = ClauseList()
- else:
- if getattr(self, '_order_by_clause', None) is not None:
- clauses = list(self._order_by_clause) + list(clauses)
- self._order_by_clause = ClauseList(*clauses)
-
- def append_group_by(self, *clauses):
- """Append the given GROUP BY criterion applied to this selectable.
-
- The criterion will be appended to any pre-existing GROUP BY criterion.
-
- This is an **in-place** mutation method; the
- :meth:`~.SelectBase.group_by` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- if len(clauses) == 1 and clauses[0] is None:
- self._group_by_clause = ClauseList()
- else:
- if getattr(self, '_group_by_clause', None) is not None:
- clauses = list(self._group_by_clause) + list(clauses)
- self._group_by_clause = ClauseList(*clauses)
-
- @property
- def _from_objects(self):
- return [self]
-
-
-class ScalarSelect(Generative, Grouping):
- _from_objects = []
-
- def __init__(self, element):
- self.element = element
- self.type = element._scalar_type()
-
- @property
- def columns(self):
- raise exc.InvalidRequestError('Scalar Select expression has no '
- 'columns; use this object directly within a '
- 'column-level expression.')
- c = columns
-
- @_generative
- def where(self, crit):
- """Apply a WHERE clause to the SELECT statement referred to
- by this :class:`.ScalarSelect`.
-
- """
- self.element = self.element.where(crit)
-
- def self_group(self, **kwargs):
- return self
-
-
-class CompoundSelect(SelectBase):
- """Forms the basis of ``UNION``, ``UNION ALL``, and other
- SELECT-based set operations."""
-
- __visit_name__ = 'compound_select'
-
- UNION = util.symbol('UNION')
- UNION_ALL = util.symbol('UNION ALL')
- EXCEPT = util.symbol('EXCEPT')
- EXCEPT_ALL = util.symbol('EXCEPT ALL')
- INTERSECT = util.symbol('INTERSECT')
- INTERSECT_ALL = util.symbol('INTERSECT ALL')
-
- def __init__(self, keyword, *selects, **kwargs):
- self._auto_correlate = kwargs.pop('correlate', False)
- self.keyword = keyword
- self.selects = []
-
- numcols = None
-
- # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
- for n, s in enumerate(selects):
- s = _clause_element_as_expr(s)
-
- if not numcols:
- numcols = len(s.c)
- elif len(s.c) != numcols:
- raise exc.ArgumentError('All selectables passed to '
- 'CompoundSelect must have identical numbers of '
- 'columns; select #%d has %d columns, select '
- '#%d has %d' % (1, len(self.selects[0].c), n
- + 1, len(s.c)))
-
- self.selects.append(s.self_group(self))
-
- SelectBase.__init__(self, **kwargs)
-
- def _scalar_type(self):
- return self.selects[0]._scalar_type()
-
- def self_group(self, against=None):
- return FromGrouping(self)
-
- def is_derived_from(self, fromclause):
- for s in self.selects:
- if s.is_derived_from(fromclause):
- return True
- return False
-
- def _populate_column_collection(self):
- for cols in zip(*[s.c for s in self.selects]):
-
- # this is a slightly hacky thing - the union exports a
- # column that resembles just that of the *first* selectable.
- # to get at a "composite" column, particularly foreign keys,
- # you have to dig through the proxies collection which we
- # generate below. We may want to improve upon this, such as
- # perhaps _make_proxy can accept a list of other columns
- # that are "shared" - schema.column can then copy all the
- # ForeignKeys in. this would allow the union() to have all
- # those fks too.
-
- proxy = cols[0]._make_proxy(self,
- name=cols[0]._label if self.use_labels else None,
- key=cols[0]._key_label if self.use_labels else None)
-
- # hand-construct the "_proxies" collection to include all
- # derived columns place a 'weight' annotation corresponding
- # to how low in the list of select()s the column occurs, so
- # that the corresponding_column() operation can resolve
- # conflicts
-
- proxy._proxies = [c._annotate({'weight': i + 1}) for (i,
- c) in enumerate(cols)]
-
- def _refresh_for_new_column(self, column):
- for s in self.selects:
- s._refresh_for_new_column(column)
-
- if not self._cols_populated:
- return None
-
- raise NotImplementedError("CompoundSelect constructs don't support "
- "addition of columns to underlying selectables")
-
- def _copy_internals(self, clone=_clone, **kw):
- self._reset_exported()
- self.selects = [clone(s, **kw) for s in self.selects]
- if hasattr(self, '_col_map'):
- del self._col_map
- for attr in ('_order_by_clause', '_group_by_clause'):
- if getattr(self, attr) is not None:
- setattr(self, attr, clone(getattr(self, attr), **kw))
-
- def get_children(self, column_collections=True, **kwargs):
- return (column_collections and list(self.c) or []) \
- + [self._order_by_clause, self._group_by_clause] \
- + list(self.selects)
-
- def bind(self):
- if self._bind:
- return self._bind
- for s in self.selects:
- e = s.bind
- if e:
- return e
- else:
- return None
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
-
-class HasPrefixes(object):
- _prefixes = ()
-
- @_generative
- def prefix_with(self, *expr, **kw):
- """Add one or more expressions following the statement keyword, i.e.
- SELECT, INSERT, UPDATE, or DELETE. Generative.
-
- This is used to support backend-specific prefix keywords such as those
- provided by MySQL.
-
- E.g.::
-
- stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
-
- Multiple prefixes can be specified by multiple calls
- to :meth:`.prefix_with`.
-
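- E.g., applying two MySQL-specific prefixes in succession (a
- sketch; ``table`` is assumed to be an existing :class:`.Table`)::
-
- stmt = select([table]).prefix_with("HIGH_PRIORITY", dialect="mysql")
- stmt = stmt.prefix_with("SQL_SMALL_RESULT", dialect="mysql")
-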
- :param \*expr: textual or :class:`.ClauseElement` construct which
- will be rendered following the SELECT, INSERT, UPDATE, or DELETE
- keyword.
- :param \**kw: A single keyword 'dialect' is accepted. This is an
- optional string dialect name which will
- limit rendering of this prefix to only that dialect.
-
- """
- dialect = kw.pop('dialect', None)
- if kw:
- raise exc.ArgumentError("Unsupported argument(s): %s" %
- ",".join(kw))
- self._setup_prefixes(expr, dialect)
-
- def _setup_prefixes(self, prefixes, dialect=None):
- self._prefixes = self._prefixes + tuple(
- [(_literal_as_text(p), dialect) for p in prefixes])
-
-
-class Select(HasPrefixes, SelectBase):
- """Represents a ``SELECT`` statement.
-
- .. seealso::
-
- :func:`~.expression.select` - the function which creates
- a :class:`.Select` object.
-
- :ref:`coretutorial_selecting` - Core Tutorial description
- of :func:`.select`.
-
- """
-
- __visit_name__ = 'select'
-
- _prefixes = ()
- _hints = util.immutabledict()
- _distinct = False
- _from_cloned = None
- _correlate = ()
- _correlate_except = None
- _memoized_property = SelectBase._memoized_property
-
- def __init__(self,
- columns,
- whereclause=None,
- from_obj=None,
- distinct=False,
- having=None,
- correlate=True,
- prefixes=None,
- **kwargs):
- """Construct a Select object.
-
- The public constructor for Select is the
- :func:`select` function; see that function for
- argument descriptions.
-
- Additional generative and mutator methods are available on the
- :class:`SelectBase` superclass.
-
- """
- self._auto_correlate = correlate
- if distinct is not False:
- if distinct is True:
- self._distinct = True
- else:
- self._distinct = [
- _literal_as_text(e)
- for e in util.to_list(distinct)
- ]
-
- if from_obj is not None:
- self._from_obj = util.OrderedSet(
- _interpret_as_from(f)
- for f in util.to_list(from_obj))
- else:
- self._from_obj = util.OrderedSet()
-
- try:
- cols_present = bool(columns)
- except TypeError:
- raise exc.ArgumentError("columns argument to select() must "
- "be a Python list or other iterable")
-
- if cols_present:
- self._raw_columns = []
- for c in columns:
- c = _interpret_as_column_or_from(c)
- if isinstance(c, ScalarSelect):
- c = c.self_group(against=operators.comma_op)
- self._raw_columns.append(c)
- else:
- self._raw_columns = []
-
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
-
- if having is not None:
- self._having = _literal_as_text(having)
- else:
- self._having = None
-
- if prefixes:
- self._setup_prefixes(prefixes)
-
- SelectBase.__init__(self, **kwargs)
-
- @property
- def _froms(self):
- # would love to cache this,
- # but there's just enough edge cases, particularly now that
- # declarative encourages construction of SQL expressions
- # without tables present, to just regen this each time.
- froms = []
- seen = set()
- translate = self._from_cloned
-
- def add(items):
- for item in items:
- if translate and item in translate:
- item = translate[item]
- if not seen.intersection(item._cloned_set):
- froms.append(item)
- seen.update(item._cloned_set)
-
- add(_from_objects(*self._raw_columns))
- if self._whereclause is not None:
- add(_from_objects(self._whereclause))
- add(self._from_obj)
-
- return froms
-
- def _get_display_froms(self, explicit_correlate_froms=None,
- implicit_correlate_froms=None):
- """Return the full list of 'from' clauses to be displayed.
-
- Takes into account a set of existing froms which may be
- rendered in the FROM clause of enclosing selects; this Select
- may want to leave those absent if it is automatically
- correlating.
-
- """
- froms = self._froms
-
- toremove = set(itertools.chain(*[
- _expand_cloned(f._hide_froms)
- for f in froms]))
- if toremove:
- # if we're maintaining clones of froms,
- # add the copies out to the toremove list. only include
- # clones that are lexical equivalents.
- if self._from_cloned:
- toremove.update(
- self._from_cloned[f] for f in
- toremove.intersection(self._from_cloned)
- if self._from_cloned[f]._is_lexical_equivalent(f)
- )
- # filter out to FROM clauses not in the list,
- # using a list to maintain ordering
- froms = [f for f in froms if f not in toremove]
-
- if self._correlate:
- to_correlate = self._correlate
- if to_correlate:
- froms = [
- f for f in froms if f not in
- _cloned_intersection(
- _cloned_intersection(froms, explicit_correlate_froms or ()),
- to_correlate
- )
- ]
-
- if self._correlate_except is not None:
-
- froms = [
- f for f in froms if f not in
- _cloned_difference(
- _cloned_intersection(froms, explicit_correlate_froms or ()),
- self._correlate_except
- )
- ]
-
- if self._auto_correlate and \
- implicit_correlate_froms and \
- len(froms) > 1:
-
- froms = [
- f for f in froms if f not in
- _cloned_intersection(froms, implicit_correlate_froms)
- ]
-
- if not len(froms):
- raise exc.InvalidRequestError("Select statement '%s"
- "' returned no FROM clauses due to "
- "auto-correlation; specify "
- "correlate(<tables>) to control "
- "correlation manually." % self)
-
- return froms
-
- def _scalar_type(self):
- elem = self._raw_columns[0]
- cols = list(elem._select_iterable)
- return cols[0].type
-
- @property
- def froms(self):
- """Return the displayed list of FromClause elements."""
-
- return self._get_display_froms()
-
- @_generative
- def with_hint(self, selectable, text, dialect_name='*'):
- """Add an indexing hint for the given selectable to this
- :class:`.Select`.
-
- The text of the hint is rendered in the appropriate
- location for the database backend in use, relative
- to the given :class:`.Table` or :class:`.Alias` passed as the
- ``selectable`` argument. The dialect implementation
- typically uses Python string substitution syntax
- with the token ``%(name)s`` to render the name of
- the table or alias. E.g. when using Oracle, the
- following::
-
- select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)")
-
- Would render SQL as::
-
- select /*+ index(mytable ix_mytable) */ ... from mytable
-
- The ``dialect_name`` option will limit the rendering of a particular
- hint to a particular backend. For example, to add hints for both Oracle
- and Sybase simultaneously::
-
- select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
- with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
-
- """
- self._hints = self._hints.union(
- {(selectable, dialect_name): text})
-
- @property
- def type(self):
- raise exc.InvalidRequestError("Select objects don't have a type. "
- "Call as_scalar() on this Select object "
- "to return a 'scalar' version of this Select.")
-
- @_memoized_property.method
- def locate_all_froms(self):
- """return a Set of all FromClause elements referenced by this Select.
-
- This set is a superset of that returned by the ``froms`` property,
- which is specifically for those FromClause elements that would
- actually be rendered.
-
- """
- froms = self._froms
- return froms + list(_from_objects(*froms))
-
- @property
- def inner_columns(self):
- """an iterator of all ColumnElement expressions which would
- be rendered into the columns clause of the resulting SELECT statement.
-
- """
- return _select_iterables(self._raw_columns)
-
- def is_derived_from(self, fromclause):
- if self in fromclause._cloned_set:
- return True
-
- for f in self.locate_all_froms():
- if f.is_derived_from(fromclause):
- return True
- return False
-
- def _copy_internals(self, clone=_clone, **kw):
-
- # Select() object has been cloned and probably adapted by the
- # given clone function. Apply the cloning function to internal
- # objects
-
- # 1. keep a dictionary of the froms we've cloned, and what
- # they've become. This is consulted later when we derive
- # additional froms from "whereclause" and the columns clause,
- # which may still reference the uncloned parent table.
- # as of 0.7.4 we also put the current version of _froms, which
- # gets cleared on each generation. previously we were "baking"
- # _froms into self._from_obj.
- self._from_cloned = from_cloned = dict((f, clone(f, **kw))
- for f in self._from_obj.union(self._froms))
-
- # 2. update persistent _from_obj with the cloned versions.
- self._from_obj = util.OrderedSet(from_cloned[f] for f in
- self._from_obj)
-
- # the _correlate collection is done separately; what can happen
- # here is that the same item is in _correlate as in _from_obj,
- # but the _correlate version has an annotation on it (specifically
- # RelationshipProperty.Comparator._criterion_exists() does
- # this). Also keep _correlate liberally open with its previous
- # contents, as this set is used for matching, not rendering.
- self._correlate = set(clone(f) for f in
- self._correlate).union(self._correlate)
-
- # 3. clone other things. The difficulty here is that Column
- # objects are not actually cloned, and refer to their original
- # .table, resulting in the wrong "from" parent after a clone
- # operation. Hence _from_cloned and _from_obj supersede what is
- # present here.
- self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
- for attr in '_whereclause', '_having', '_order_by_clause', \
- '_group_by_clause':
- if getattr(self, attr) is not None:
- setattr(self, attr, clone(getattr(self, attr), **kw))
-
- # erase exported column list, _froms collection,
- # etc.
- self._reset_exported()
-
- def get_children(self, column_collections=True, **kwargs):
- """return child elements as per the ClauseElement specification."""
-
- return (column_collections and list(self.columns) or []) + \
- self._raw_columns + list(self._froms) + \
- [x for x in
- (self._whereclause, self._having,
- self._order_by_clause, self._group_by_clause)
- if x is not None]
-
- @_generative
- def column(self, column):
- """return a new select() construct with the given column expression
- added to its columns clause.
-
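- E.g., a brief sketch with a hypothetical ``table1``::
-
- stmt = select([table1.c.x])
- stmt = stmt.column(table1.c.y) # SELECT table1.x, table1.y FROM table1
-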
- """
- self.append_column(column)
-
- def reduce_columns(self, only_synonyms=True):
- """Return a new :func`.select` construct with redundantly
- named, equivalently-valued columns removed from the columns clause.
-
- "Redundant" here means two columns where one refers to the
- other either based on foreign key, or via a simple equality
- comparison in the WHERE clause of the statement. The primary purpose
- of this method is to automatically construct a select statement
- with all uniquely-named columns, without the need to use
- table-qualified labels as :meth:`.apply_labels` does.
-
- When columns are omitted based on foreign key, the referred-to
- column is the one that's kept. When columns are omitted based on
- WHERE equivalence, the first column in the columns clause is the
- one that's kept.
-
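- E.g., a sketch assuming hypothetical ``people`` and ``engineers``
- tables, where ``engineers.id`` has a foreign key to ``people.id``::
-
- stmt = select([people, engineers]).\\
- where(people.c.id == engineers.c.id).\\
- reduce_columns()
-
- The resulting statement renders the ``id`` column only once,
- keeping ``people.id``; ``engineers.id`` is recognized as a
- same-named, foreign-key-equivalent column and is omitted.
-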
- :param only_synonyms: when True, limit the removal of columns
- to those which have the same name as the equivalent. Otherwise,
- all columns that are equivalent to another are removed.
-
- .. versionadded:: 0.8
-
- """
- return self.with_only_columns(
- sqlutil.reduce_columns(
- self.inner_columns,
- only_synonyms=only_synonyms,
- *(self._whereclause, ) + tuple(self._from_obj)
- )
- )
-
- @_generative
- def with_only_columns(self, columns):
- """Return a new :func:`.select` construct with its columns
- clause replaced with the given columns.
-
- .. versionchanged:: 0.7.3
- Due to a bug fix, this method has a slight
- behavioral change as of version 0.7.3.
- Prior to version 0.7.3, the FROM clause of
- a :func:`.select` was calculated upfront, and updated as new
- columns were added; in 0.7.3 and later it is calculated
- at compile time, fixing an issue regarding late binding
- of columns to parent tables. This changes the behavior of
- :meth:`.Select.with_only_columns` in that FROM clauses no
- longer represented in the new list are dropped,
- but the behavior is more consistent in
- that the FROM clauses are always derived from the
- current columns clause.
- is to allow trimming of the existing columns list to be fewer
- columns than originally present; the use case of replacing
- the columns list with an entirely different one hadn't
- been anticipated until 0.7.3 was released; the usage
- guidelines below illustrate how this should be done.
-
- This method behaves exactly as if the original
- :func:`.select` had been called with the given columns
- clause. I.e. a statement::
-
- s = select([table1.c.a, table1.c.b])
- s = s.with_only_columns([table1.c.b])
-
- should be exactly equivalent to::
-
- s = select([table1.c.b])
-
- This means that FROM clauses which are only derived
- from the column list will be discarded if the new column
- list no longer contains that FROM::
-
- >>> table1 = table('t1', column('a'), column('b'))
- >>> table2 = table('t2', column('a'), column('b'))
- >>> s1 = select([table1.c.a, table2.c.b])
- >>> print s1
- SELECT t1.a, t2.b FROM t1, t2
- >>> s2 = s1.with_only_columns([table2.c.b])
- >>> print s2
- SELECT t2.b FROM t1
-
- The preferred way to maintain a specific FROM clause
- in the construct, assuming it won't be represented anywhere
- else (i.e. not in the WHERE clause, etc.) is to set it using
- :meth:`.Select.select_from`::
-
- >>> s1 = select([table1.c.a, table2.c.b]).\\
- ... select_from(table1.join(table2,
- ... table1.c.a==table2.c.a))
- >>> s2 = s1.with_only_columns([table2.c.b])
- >>> print s2
- SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a
-
- Care should also be taken to use the correct
- set of column objects passed to :meth:`.Select.with_only_columns`.
- Since the method is essentially equivalent to calling the
- :func:`.select` construct in the first place with the given
- columns, the columns passed to :meth:`.Select.with_only_columns`
- should usually be a subset of those which were passed
- to the :func:`.select` construct, not those which are available
- from the ``.c`` collection of that :func:`.select`. That
- is::
-
- s = select([table1.c.a, table1.c.b]).select_from(table1)
- s = s.with_only_columns([table1.c.b])
-
- and **not**::
-
- # usually incorrect
- s = s.with_only_columns([s.c.b])
-
- The latter would produce the SQL::
-
- SELECT b
- FROM (SELECT t1.a AS a, t1.b AS b
- FROM t1), t1
-
- This is because the :func:`.select` construct is essentially
- being asked to select both from ``table1`` and from itself.
-
- """
- self._reset_exported()
- rc = []
- for c in columns:
- c = _interpret_as_column_or_from(c)
- if isinstance(c, ScalarSelect):
- c = c.self_group(against=operators.comma_op)
- rc.append(c)
- self._raw_columns = rc
-
- @_generative
- def where(self, whereclause):
- """return a new select() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
- """
-
- self.append_whereclause(whereclause)
-
- @_generative
- def having(self, having):
- """return a new select() construct with the given expression added to
- its HAVING clause, joined to the existing clause via AND, if any.
-
- """
- self.append_having(having)
-
- @_generative
- def distinct(self, *expr):
- """Return a new select() construct which will apply DISTINCT to its
- columns clause.
-
- :param \*expr: optional column expressions. When present,
- the Postgresql dialect will render a ``DISTINCT ON (<expressions>)``
- construct.
-
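- E.g., a sketch with a hypothetical ``table1``::
-
- stmt = select([table1.c.a, table1.c.b]).distinct(table1.c.a)
-
- The above statement renders as ``SELECT DISTINCT ON (table1.a)
- table1.a, table1.b FROM table1`` on the Postgresql backend.
-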
- """
- if expr:
- expr = [_literal_as_text(e) for e in expr]
- if isinstance(self._distinct, list):
- self._distinct = self._distinct + expr
- else:
- self._distinct = expr
- else:
- self._distinct = True
-
- @_generative
- def select_from(self, fromclause):
- """return a new :func:`.select` construct with the
- given FROM expression
- merged into its list of FROM objects.
-
- E.g.::
-
- table1 = table('t1', column('a'))
- table2 = table('t2', column('b'))
- s = select([table1.c.a]).\\
- select_from(
- table1.join(table2, table1.c.a==table2.c.b)
- )
-
- The "from" list is a unique set on the identity of each element,
- so adding an already present :class:`.Table` or other selectable
- will have no effect. Passing a :class:`.Join` that refers
- to an already present :class:`.Table` or other selectable will have
- the effect of concealing the presence of that selectable as
- an individual element in the rendered FROM list, instead
- rendering it into a JOIN clause.
-
- While the typical purpose of :meth:`.Select.select_from` is to
- replace the default, derived FROM clause with a join, it can
- also be called with individual table elements, multiple times
- if desired, in the case that the FROM clause cannot be fully
- derived from the columns clause::
-
- select([func.count('*')]).select_from(table1)
-
- """
- self.append_from(fromclause)
-
- @_generative
- def correlate(self, *fromclauses):
- """return a new :class:`.Select` which will correlate the given FROM
- clauses to that of an enclosing :class:`.Select`.
-
- Calling this method turns off the :class:`.Select` object's
- default behavior of "auto-correlation". Normally, FROM elements
- which appear in a :class:`.Select` that encloses this one via
- its :term:`WHERE clause`, ORDER BY, HAVING or
- :term:`columns clause` will be omitted from this :class:`.Select`
- object's :term:`FROM clause`.
- Setting an explicit correlation collection using the
- :meth:`.Select.correlate` method provides a fixed list of FROM objects
- that can potentially take place in this process.
-
- When :meth:`.Select.correlate` is used to apply specific FROM clauses
- for correlation, the FROM elements become candidates for
- correlation regardless of how deeply nested this :class:`.Select`
- object is, relative to an enclosing :class:`.Select` which refers to
- the same FROM object. This is in contrast to the behavior of
- "auto-correlation" which only correlates to an immediate enclosing
- :class:`.Select`. Multi-level correlation ensures that the link
- between enclosed and enclosing :class:`.Select` is always via
- at least one WHERE/ORDER BY/HAVING/columns clause in order for
- correlation to take place.
-
- If ``None`` is passed, the :class:`.Select` object will correlate
- none of its FROM entries, and all will render unconditionally
- in the local FROM clause.
-
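- E.g., a sketch of a correlated subquery, assuming hypothetical
- ``users`` and ``addresses`` tables::
-
- inner = select([addresses.c.email]).\\
- where(addresses.c.user_id == users.c.id).\\
- correlate(users)
-
- When ``inner`` is embedded in an enclosing SELECT against
- ``users``, only ``addresses`` is rendered in the inner
- statement's FROM clause.
-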
- :param \*fromclauses: a list of one or more :class:`.FromClause`
- constructs, or other compatible constructs (i.e. ORM-mapped
- classes) to become part of the correlate collection.
-
- .. versionchanged:: 0.8.0 ORM-mapped classes are accepted by
- :meth:`.Select.correlate`.
-
- .. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
- longer unconditionally removes entries from the FROM clause; instead,
- the candidate FROM entries must also be matched by a FROM entry
- located in an enclosing :class:`.Select`, which ultimately encloses
- this one as present in the WHERE clause, ORDER BY clause, HAVING
- clause, or columns clause of an enclosing :meth:`.Select`.
-
- .. versionchanged:: 0.8.2 explicit correlation takes place
- via any level of nesting of :class:`.Select` objects; in previous
- 0.8 versions, correlation would only occur relative to the immediate
- enclosing :class:`.Select` construct.
-
- .. seealso::
-
- :meth:`.Select.correlate_except`
-
- :ref:`correlated_subqueries`
-
- """
- self._auto_correlate = False
- if fromclauses and fromclauses[0] is None:
- self._correlate = ()
- else:
- self._correlate = set(self._correlate).union(
- _interpret_as_from(f) for f in fromclauses)
-
- @_generative
- def correlate_except(self, *fromclauses):
- """return a new :class:`.Select` which will omit the given FROM
- clauses from the auto-correlation process.
-
- Calling :meth:`.Select.correlate_except` turns off the
- :class:`.Select` object's default behavior of
- "auto-correlation" for the given FROM elements. An element
- specified here will unconditionally appear in the FROM list, while
- all other FROM elements remain subject to normal auto-correlation
- behaviors.
-
- .. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except`
- method was improved to fully prevent FROM clauses specified here
- from being omitted from the immediate FROM clause of this
- :class:`.Select`.
-
- If ``None`` is passed, the :class:`.Select` object will correlate
- all of its FROM entries.
-
- .. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will
- correctly auto-correlate all FROM clauses.
-
- :param \*fromclauses: a list of one or more :class:`.FromClause`
- constructs, or other compatible constructs (i.e. ORM-mapped
- classes) to become part of the correlate-exception collection.
-
- .. seealso::
-
- :meth:`.Select.correlate`
-
- :ref:`correlated_subqueries`
-
- """
-
- self._auto_correlate = False
- if fromclauses and fromclauses[0] is None:
- self._correlate_except = ()
- else:
- self._correlate_except = set(self._correlate_except or ()).union(
- _interpret_as_from(f) for f in fromclauses)
-
- def append_correlation(self, fromclause):
- """append the given correlation expression to this select()
- construct.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.correlate` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
-
- self._auto_correlate = False
- self._correlate = set(self._correlate).union(
- _interpret_as_from(f) for f in fromclause)
-
- def append_column(self, column):
- """append the given column expression to the columns clause of this
- select() construct.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.column` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- self._reset_exported()
- column = _interpret_as_column_or_from(column)
-
- if isinstance(column, ScalarSelect):
- column = column.self_group(against=operators.comma_op)
-
- self._raw_columns = self._raw_columns + [column]
-
- def append_prefix(self, clause):
- """append the given columns clause prefix expression to this select()
- construct.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.prefix_with` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- clause = _literal_as_text(clause)
- self._prefixes = self._prefixes + (clause,)
-
- def append_whereclause(self, whereclause):
- """append the given expression to this select() construct's WHERE
- criterion.
-
- The expression will be joined to existing WHERE criterion via AND.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.where` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- self._reset_exported()
- whereclause = _literal_as_text(whereclause)
-
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause, whereclause)
- else:
- self._whereclause = whereclause
-
- def append_having(self, having):
- """append the given expression to this select() construct's HAVING
- criterion.
-
- The expression will be joined to existing HAVING criterion via AND.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.having` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- if self._having is not None:
- self._having = and_(self._having, _literal_as_text(having))
- else:
- self._having = _literal_as_text(having)
-
- def append_from(self, fromclause):
- """append the given FromClause expression to this select() construct's
- FROM clause.
-
- This is an **in-place** mutation method; the
- :meth:`~.Select.select_from` method is preferred, as it provides standard
- :term:`method chaining`.
-
- """
- self._reset_exported()
- fromclause = _interpret_as_from(fromclause)
- self._from_obj = self._from_obj.union([fromclause])
-
-
- @_memoized_property
- def _columns_plus_names(self):
- if self.use_labels:
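- # labeled mode: compute each column's label, falling back
- # to its anon_label when the name collides with one
- # already seen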
- names = set()
- def name_for_col(c):
- if c._label is None:
- return (None, c)
- name = c._label
- if name in names:
- name = c.anon_label
- else:
- names.add(name)
- return name, c
-
- return [
- name_for_col(c)
- for c in util.unique_list(_select_iterables(self._raw_columns))
- ]
- else:
- return [
- (None, c)
- for c in util.unique_list(_select_iterables(self._raw_columns))
- ]
-
- def _populate_column_collection(self):
- for name, c in self._columns_plus_names:
- if not hasattr(c, '_make_proxy'):
- continue
- if name is None:
- key = None
- elif self.use_labels:
- key = c._key_label
- if key is not None and key in self.c:
- key = c.anon_label
- else:
- key = None
-
- c._make_proxy(self, key=key,
- name=name,
- name_is_truncatable=True)
-
- def _refresh_for_new_column(self, column):
- for fromclause in self._froms:
- col = fromclause._refresh_for_new_column(column)
- if col is not None:
- if col in self.inner_columns and self._cols_populated:
- our_label = col._key_label if self.use_labels else col.key
- if our_label not in self.c:
- return col._make_proxy(self,
- name=col._label if self.use_labels else None,
- key=col._key_label if self.use_labels else None,
- name_is_truncatable=True)
- return None
- return None
-
- def self_group(self, against=None):
- """return a 'grouping' construct as per the ClauseElement
- specification.
-
- This produces an element that can be embedded in an expression. Note
- that this method is called automatically as needed when constructing
- expressions and should not require explicit use.
-
- """
- if isinstance(against, CompoundSelect):
- return self
- return FromGrouping(self)
-
- def union(self, other, **kwargs):
- """return a SQL UNION of this select() construct against the given
- selectable."""
-
- return union(self, other, **kwargs)
-
- def union_all(self, other, **kwargs):
- """return a SQL UNION ALL of this select() construct against the given
- selectable.
-
- """
- return union_all(self, other, **kwargs)
-
- def except_(self, other, **kwargs):
- """return a SQL EXCEPT of this select() construct against the given
- selectable."""
-
- return except_(self, other, **kwargs)
-
- def except_all(self, other, **kwargs):
- """return a SQL EXCEPT ALL of this select() construct against the
- given selectable.
-
- """
- return except_all(self, other, **kwargs)
-
- def intersect(self, other, **kwargs):
- """return a SQL INTERSECT of this select() construct against the given
- selectable.
-
- """
- return intersect(self, other, **kwargs)
-
- def intersect_all(self, other, **kwargs):
- """return a SQL INTERSECT ALL of this select() construct against the
- given selectable.
-
- """
- return intersect_all(self, other, **kwargs)
-
- def bind(self):
- if self._bind:
- return self._bind
- froms = self._froms
- if not froms:
- for c in self._raw_columns:
- e = c.bind
- if e:
- self._bind = e
- return e
- else:
- e = list(froms)[0].bind
- if e:
- self._bind = e
- return e
-
- return None
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
-
-class UpdateBase(HasPrefixes, Executable, ClauseElement):
- """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements.
-
- """
-
- __visit_name__ = 'update_base'
-
- _execution_options = \
- Executable._execution_options.union({'autocommit': True})
- kwargs = util.immutabledict()
- _hints = util.immutabledict()
- _prefixes = ()
-
- def _process_colparams(self, parameters):
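- # normalize the incoming parameters: a tuple/list of values is
- # zipped against self.table's columns into a dictionary; a list
- # of such collections indicates the "multi-params" format, which
- # is flagged by the second return value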
- def process_single(p):
- if isinstance(p, (list, tuple)):
- return dict(
- (c.key, pval)
- for c, pval in zip(self.table.c, p)
- )
- else:
- return p
-
- if isinstance(parameters, (list, tuple)) and \
- isinstance(parameters[0], (list, tuple, dict)):
-
- if not self._supports_multi_parameters:
- raise exc.InvalidRequestError(
- "This construct does not support "
- "multiple parameter sets.")
-
- return [process_single(p) for p in parameters], True
- else:
- return process_single(parameters), False
-
- def params(self, *arg, **kw):
- """Set the parameters for the statement.
-
- This method raises ``NotImplementedError`` on the base class,
- and is overridden by :class:`.ValuesBase` to provide the
- SET/VALUES clause of UPDATE and INSERT.
-
- """
- raise NotImplementedError(
- "params() is not supported for INSERT/UPDATE/DELETE statements."
- " To set the values for an INSERT or UPDATE statement, use"
- " stmt.values(**parameters).")
-
- def bind(self):
- """Return a 'bind' linked to this :class:`.UpdateBase`
- or a :class:`.Table` associated with it.
-
- """
- return self._bind or self.table.bind
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
- @_generative
- def returning(self, *cols):
- """Add a RETURNING or equivalent clause to this statement.
-
- The given list of columns represent columns within the table that is
- the target of the INSERT, UPDATE, or DELETE. Each element can be any
- column expression. :class:`~sqlalchemy.schema.Table` objects will be
- expanded into their individual columns.
-
- Upon compilation, a RETURNING clause, or database equivalent,
- will be rendered within the statement. For INSERT and UPDATE,
- the values are the newly inserted/updated values. For DELETE,
- the values are those of the rows which were deleted.
-
- Upon execution, the values of the columns to be returned
- are made available via the result set and can be iterated
- using ``fetchone()`` and similar. For DBAPIs which do not
- natively support returning values (i.e. cx_oracle),
- SQLAlchemy will approximate this behavior at the result level
- so that a reasonable amount of behavioral neutrality is
- provided.
-
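- E.g., a sketch against a hypothetical ``users`` table, with
- ``conn`` assumed to be a :class:`.Connection`::
-
- stmt = users.update().\\
- where(users.c.name == 'jack').\\
- values(name='ed').\\
- returning(users.c.id, users.c.name)
-
- result = conn.execute(stmt)
- # each row contains the id/name of an updated row
-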
- Note that not all databases/DBAPIs
- support RETURNING. For those backends with no support,
- an exception is raised upon compilation and/or execution.
- For those who do support it, the functionality across backends
- varies greatly, including restrictions on executemany()
- and other statements which return multiple rows. Please
- read the documentation notes for the database in use in
- order to determine the availability of RETURNING.
-
- """
- self._returning = cols
-
- @_generative
- def with_hint(self, text, selectable=None, dialect_name="*"):
- """Add a table hint for a single table to this
- INSERT/UPDATE/DELETE statement.
-
- .. note::
-
- :meth:`.UpdateBase.with_hint` currently applies only to
- Microsoft SQL Server. For MySQL INSERT/UPDATE/DELETE hints, use
- :meth:`.UpdateBase.prefix_with`.
-
- The text of the hint is rendered in the appropriate
- location for the database backend in use, relative
- to the :class:`.Table` that is the subject of this
- statement, or optionally to that of the given
- :class:`.Table` passed as the ``selectable`` argument.
-
- The ``dialect_name`` option will limit the rendering of a particular
- hint to a particular backend. For example, to add a hint
- that only takes effect for SQL Server::
-
- mytable.insert().with_hint("WITH (PAGLOCK)", dialect_name="mssql")
-
- .. versionadded:: 0.7.6
-
- :param text: Text of the hint.
- :param selectable: optional :class:`.Table` that specifies
- an element of the FROM clause within an UPDATE or DELETE
- to be the subject of the hint - applies only to certain backends.
- :param dialect_name: defaults to ``*``, if specified as the name
- of a particular dialect, will apply these hints only when
- that dialect is in use.
- """
- if selectable is None:
- selectable = self.table
-
- self._hints = self._hints.union(
- {(selectable, dialect_name): text})
-
-
-class ValuesBase(UpdateBase):
- """Supplies support for :meth:`.ValuesBase.values` to
- INSERT and UPDATE constructs."""
-
- __visit_name__ = 'values_base'
-
- _supports_multi_parameters = False
- _has_multi_parameters = False
- select = None
-
- def __init__(self, table, values, prefixes):
- self.table = _interpret_as_from(table)
- self.parameters, self._has_multi_parameters = \
- self._process_colparams(values)
- if prefixes:
- self._setup_prefixes(prefixes)
-
- @_generative
- def values(self, *args, **kwargs):
- """specify a fixed VALUES clause for an INSERT statement, or the SET
- clause for an UPDATE.
-
- Note that the :class:`.Insert` and :class:`.Update` constructs support
- per-execution time formatting of the VALUES and/or SET clauses,
- based on the arguments passed to :meth:`.Connection.execute`. However,
- the :meth:`.ValuesBase.values` method can be used to "fix" a particular
- set of parameters into the statement.
-
- Multiple calls to :meth:`.ValuesBase.values` will produce a new
- construct, each one with the parameter list modified to include
- the new parameters sent. In the typical case of a single
- dictionary of parameters, the newly passed keys will replace
- the same keys in the previous construct. In the case of a list-based
- "multiple values" construct, each new list of values is extended
- onto the existing list of values.
-
- :param \**kwargs: key value pairs representing the string key
- of a :class:`.Column` mapped to the value to be rendered into the
- VALUES or SET clause::
-
- users.insert().values(name="some name")
-
- users.update().where(users.c.id==5).values(name="some name")
-
- :param \*args: Alternatively, a dictionary, tuple or list
- of dictionaries or tuples can be passed as a single positional
- argument in order to form the VALUES or
- SET clause of the statement. The single dictionary form
- works the same as the kwargs form::
-
- users.insert().values({"name": "some name"})
-
- If a tuple is passed, the tuple should contain the same number
- of columns as the target :class:`.Table`::
-
- users.insert().values((5, "some name"))
-
- The :class:`.Insert` construct also supports a multiple-VALUES
- construct, for those backends which support this SQL syntax
- (SQLite, Postgresql, MySQL). This mode is indicated by passing a list
- of one or more dictionaries/tuples::
-
- users.insert().values([
- {"name": "some name"},
- {"name": "some other name"},
- {"name": "yet another name"},
- ])
-
- In the case of an :class:`.Update`
- construct, only the single dictionary/tuple form is accepted,
- else an exception is raised. It is also an exception case to
- attempt to mix the single-/multiple- value styles together,
- either through multiple :meth:`.ValuesBase.values` calls
- or by sending a list + kwargs at the same time.
-
- .. note::
-
- Passing a multiple values list is *not* the same
- as passing a multiple values list to the :meth:`.Connection.execute`
- method. Passing a list of parameter sets to :meth:`.ValuesBase.values`
- produces a construct of this form::
-
- INSERT INTO table (col1, col2, col3) VALUES
- (col1_0, col2_0, col3_0),
- (col1_1, col2_1, col3_1),
- ...
-
- whereas a multiple list passed to :meth:`.Connection.execute`
- has the effect of using the DBAPI
- `executemany() <http://www.python.org/dev/peps/pep-0249/#id18>`_
- method, which provides a high-performance system of invoking
- a single-row INSERT statement many times against a series
- of parameter sets. The "executemany" style is supported by
- all database backends, as it does not depend on a special SQL
- syntax.
-
- .. versionadded:: 0.8
- Support for multiple-VALUES INSERT statements.
-
-
- .. seealso::
-
- :ref:`inserts_and_updates` - SQL Expression
- Language Tutorial
-
- :func:`~.expression.insert` - produce an ``INSERT`` statement
-
- :func:`~.expression.update` - produce an ``UPDATE`` statement
-
- """
- if self.select is not None:
- raise exc.InvalidRequestError(
- "This construct already inserts from a SELECT")
- if self._has_multi_parameters and kwargs:
- raise exc.InvalidRequestError(
- "This construct already has multiple parameter sets.")
-
- if args:
- if len(args) > 1:
- raise exc.ArgumentError(
- "Only a single dictionary/tuple or list of "
- "dictionaries/tuples is accepted positionally.")
- v = args[0]
- else:
- v = {}
-
- if self.parameters is None:
- self.parameters, self._has_multi_parameters = \
- self._process_colparams(v)
- else:
- if self._has_multi_parameters:
- self.parameters = list(self.parameters)
- p, self._has_multi_parameters = self._process_colparams(v)
- if not self._has_multi_parameters:
- raise exc.ArgumentError(
- "Can't mix single-values and multiple values "
- "formats in one statement")
-
- self.parameters.extend(p)
- else:
- self.parameters = self.parameters.copy()
- p, self._has_multi_parameters = self._process_colparams(v)
- if self._has_multi_parameters:
- raise exc.ArgumentError(
- "Can't mix single-values and multiple values "
- "formats in one statement")
- self.parameters.update(p)
-
- if kwargs:
- if self._has_multi_parameters:
- raise exc.ArgumentError(
- "Can't pass kwargs and multiple parameter sets "
- "simultaenously")
- else:
- self.parameters.update(kwargs)
-
-
-class Insert(ValuesBase):
- """Represent an INSERT construct.
-
- The :class:`.Insert` object is created using the
- :func:`~.expression.insert()` function.
-
- .. seealso::
-
- :ref:`coretutorial_insert_expressions`
-
- """
- __visit_name__ = 'insert'
-
- _supports_multi_parameters = True
-
- def __init__(self,
- table,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- **kwargs):
- ValuesBase.__init__(self, table, values, prefixes)
- self._bind = bind
- self.select = None
- self.inline = inline
- self._returning = returning
- self.kwargs = kwargs
-
- def get_children(self, **kwargs):
- if self.select is not None:
- return self.select,
- else:
- return ()
-
- @_generative
- def from_select(self, names, select):
- """Return a new :class:`.Insert` construct which represents
- an ``INSERT...FROM SELECT`` statement.
-
- e.g.::
-
- sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
- ins = table2.insert().from_select(['a', 'b'], sel)
-
- :param names: a sequence of string column names or :class:`.Column`
- objects representing the target columns.
- :param select: a :func:`.select` construct, :class:`.FromClause`
- or other construct which resolves into a :class:`.FromClause`,
- such as an ORM :class:`.Query` object, etc. The order of
- columns returned from this FROM clause should correspond to the
- order of columns sent as the ``names`` parameter; while this
- is not checked before passing along to the database, the database
- would normally raise an exception if these column lists don't
- correspond.
-
- .. note::
-
- Depending on backend, it may be necessary for the :class:`.Insert`
- statement to be constructed using the ``inline=True`` flag; this
- flag will prevent the implicit usage of ``RETURNING`` when the
- ``INSERT`` statement is rendered, which isn't supported on a backend
- such as Oracle in conjunction with an ``INSERT..SELECT`` combination::
-
- sel = select([table1.c.a, table1.c.b]).where(table1.c.c > 5)
- ins = table2.insert(inline=True).from_select(['a', 'b'], sel)
-
- .. versionadded:: 0.8.3
-
- """
- if self.parameters:
- raise exc.InvalidRequestError(
- "This construct already inserts value expressions")
-
- self.parameters, self._has_multi_parameters = \
- self._process_colparams(dict((n, null()) for n in names))
-
- self.select = _interpret_as_select(select)
-
- def _copy_internals(self, clone=_clone, **kw):
- # TODO: coverage
- self.parameters = self.parameters.copy()
- if self.select is not None:
- self.select = _clone(self.select)
-
-
-class Update(ValuesBase):
- """Represent an Update construct.
-
- The :class:`.Update` object is created using the :func:`update()` function.
-
- """
- __visit_name__ = 'update'
-
- def __init__(self,
- table,
- whereclause,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- **kwargs):
- ValuesBase.__init__(self, table, values, prefixes)
- self._bind = bind
- self._returning = returning
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
- self.inline = inline
- self.kwargs = kwargs
-
-
- def get_children(self, **kwargs):
- if self._whereclause is not None:
- return self._whereclause,
- else:
- return ()
-
- def _copy_internals(self, clone=_clone, **kw):
- # TODO: coverage
- self._whereclause = clone(self._whereclause, **kw)
- self.parameters = self.parameters.copy()
-
- @_generative
- def where(self, whereclause):
- """return a new update() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
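- E.g., a sketch with a hypothetical ``users`` table::
-
- stmt = users.update().values(fullname='Ed Jones').\\
- where(users.c.name == 'ed')
-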
- """
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
- else:
- self._whereclause = _literal_as_text(whereclause)
-
- @property
- def _extra_froms(self):
- # TODO: this could be made memoized
- # if the memoization is reset on each generative call.
- froms = []
- seen = set([self.table])
-
- if self._whereclause is not None:
- for item in _from_objects(self._whereclause):
- if not seen.intersection(item._cloned_set):
- froms.append(item)
- seen.update(item._cloned_set)
-
- return froms
-
-
-class Delete(UpdateBase):
- """Represent a DELETE construct.
-
- The :class:`.Delete` object is created using the :func:`delete()` function.
-
- """
-
- __visit_name__ = 'delete'
-
- def __init__(self,
- table,
- whereclause,
- bind=None,
- returning=None,
- prefixes=None,
- **kwargs):
- self._bind = bind
- self.table = _interpret_as_from(table)
- self._returning = returning
-
- if prefixes:
- self._setup_prefixes(prefixes)
-
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
-
- self.kwargs = kwargs
-
- def get_children(self, **kwargs):
- if self._whereclause is not None:
- return self._whereclause,
- else:
- return ()
-
- @_generative
- def where(self, whereclause):
- """Add the given WHERE clause to a newly returned delete construct."""
-
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
- else:
- self._whereclause = _literal_as_text(whereclause)
-
- def _copy_internals(self, clone=_clone, **kw):
- # TODO: coverage
- self._whereclause = clone(self._whereclause, **kw)
-
-
-class _IdentifiedClause(Executable, ClauseElement):
-
- __visit_name__ = 'identified'
- _execution_options = \
- Executable._execution_options.union({'autocommit': False})
- quote = None
-
- def __init__(self, ident):
- self.ident = ident
-
-
-class SavepointClause(_IdentifiedClause):
- __visit_name__ = 'savepoint'
-
-
-class RollbackToSavepointClause(_IdentifiedClause):
- __visit_name__ = 'rollback_to_savepoint'
-
-
-class ReleaseSavepointClause(_IdentifiedClause):
- __visit_name__ = 'release_savepoint'
# old names for compatibility
+_Executable = Executable
_BindParamClause = BindParameter
_Label = Label
_SelectBase = SelectBase
diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py
index 5e2d0792c..f300e2416 100644
--- a/lib/sqlalchemy/sql/functions.py
+++ b/lib/sqlalchemy/sql/functions.py
@@ -1,14 +1,19 @@
# sql/functions.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from .. import types as sqltypes, schema
-from .expression import (
- ClauseList, Function, _literal_as_binds, literal_column, _type_from_args,
- cast, extract
- )
+"""SQL function API, factories, and built-in functions.
+
+"""
+from . import sqltypes, schema
+from .base import Executable
+from .elements import ClauseList, Cast, Extract, _literal_as_binds, \
+ literal_column, _type_from_args, ColumnElement, _clone,\
+ Over, BindParameter
+from .selectable import FromClause, Select
+
from . import operators
from .visitors import VisitableType
from .. import util
@@ -29,6 +34,281 @@ def register_function(identifier, fn, package="_default"):
reg[identifier] = fn
+class FunctionElement(Executable, ColumnElement, FromClause):
+ """Base for SQL function-oriented constructs.
+
+ .. seealso::
+
+ :class:`.Function` - named SQL function.
+
+ :data:`.func` - namespace which produces registered or ad-hoc
+ :class:`.Function` instances.
+
+ :class:`.GenericFunction` - allows creation of registered function
+ types.
+
+ """
+
+ packagenames = ()
+
+ def __init__(self, *clauses, **kwargs):
+ """Construct a :class:`.FunctionElement`.
+ """
+ args = [_literal_as_binds(c, self.name) for c in clauses]
+ self.clause_expr = ClauseList(
+ operator=operators.comma_op,
+ group_contents=True, *args).\
+ self_group()
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_function(self, multiparams, params)
+
+ @property
+ def columns(self):
+ """Fulfill the 'columns' contract of :class:`.ColumnElement`.
+
+ Returns a single-element list consisting of this object.
+
+ """
+ return [self]
+
+ @util.memoized_property
+ def clauses(self):
+ """Return the underlying :class:`.ClauseList` which contains
+ the arguments for this :class:`.FunctionElement`.
+
+ """
+ return self.clause_expr.element
+
+ def over(self, partition_by=None, order_by=None):
+ """Produce an OVER clause against this function.
+
+ Used against aggregate or so-called "window" functions,
+ for database backends that support window functions.
+
+ The expression::
+
+ func.row_number().over(order_by='x')
+
+ is shorthand for::
+
+ from sqlalchemy import over
+ over(func.row_number(), order_by='x')
+
+ See :func:`~.expression.over` for a full description.
+
+ .. versionadded:: 0.7
+
+ """
+ return Over(self, partition_by=partition_by, order_by=order_by)
+
+ @property
+ def _from_objects(self):
+ return self.clauses._from_objects
+
+ def get_children(self, **kwargs):
+ return self.clause_expr,
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.clause_expr = clone(self.clause_expr, **kw)
+ self._reset_exported()
+ FunctionElement.clauses._reset(self)
+
+ def select(self):
+ """Produce a :func:`~.expression.select` construct
+ against this :class:`.FunctionElement`.
+
+ This is shorthand for::
+
+ s = select([function_element])
+
+ """
+ s = Select([self])
+ if self._execution_options:
+ s = s.execution_options(**self._execution_options)
+ return s
+
+ def scalar(self):
+ """Execute this :class:`.FunctionElement` against an embedded
+ 'bind' and return a scalar value.
+
+ This first calls :meth:`~.FunctionElement.select` to
+ produce a SELECT construct.
+
+ Note that :class:`.FunctionElement` can be passed to
+ the :meth:`.Connectable.scalar` method of :class:`.Connection`
+ or :class:`.Engine`.
+
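+ E.g., a sketch assuming ``engine`` is an existing
+ :class:`.Engine`, passed as the ``bind``::
+
+ current_time = func.now(bind=engine).scalar()
+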
+ """
+ return self.select().execute().scalar()
+
+ def execute(self):
+ """Execute this :class:`.FunctionElement` against an embedded
+ 'bind'.
+
+ This first calls :meth:`~.FunctionElement.select` to
+ produce a SELECT construct.
+
+ Note that :class:`.FunctionElement` can be passed to
+ the :meth:`.Connectable.execute` method of :class:`.Connection`
+ or :class:`.Engine`.
+
+ """
+ return self.select().execute()
+
+ def _bind_param(self, operator, obj):
+ return BindParameter(None, obj, _compared_to_operator=operator,
+ _compared_to_type=self.type, unique=True)
+
+
+class _FunctionGenerator(object):
+ """Generate :class:`.Function` objects based on getattr calls."""
+
+ def __init__(self, **opts):
+ self.__names = []
+ self.opts = opts
+
+ def __getattr__(self, name):
+ # passthru __ attributes; fixes pydoc
+ if name.startswith('__'):
+ try:
+ return self.__dict__[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ elif name.endswith('_'):
+ name = name[0:-1]
+ f = _FunctionGenerator(**self.opts)
+ f.__names = list(self.__names) + [name]
+ return f
+
+ def __call__(self, *c, **kwargs):
+ o = self.opts.copy()
+ o.update(kwargs)
+
+ tokens = len(self.__names)
+
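+ # two name tokens indicate a "package.funcname" call; a single
+ # token is looked up in the "_default" package; any other count
+ # bypasses the registry lookup entirely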
+ if tokens == 2:
+ package, fname = self.__names
+ elif tokens == 1:
+ package, fname = "_default", self.__names[0]
+ else:
+ package = None
+
+ if package is not None:
+ func = _registry[package].get(fname)
+ if func is not None:
+ return func(*c, **o)
+
+ return Function(self.__names[-1],
+ packagenames=self.__names[0:-1], *c, **o)
+
+
+func = _FunctionGenerator()
+"""Generate SQL function expressions.
+
+ :data:`.func` is a special object instance which generates SQL
+ functions based on name-based attributes, e.g.::
+
+ >>> print func.count(1)
+ count(:param_1)
+
+ The element is a column-oriented SQL element like any other, and is
+ used in that way::
+
+ >>> print select([func.count(table.c.id)])
+ SELECT count(sometable.id) FROM sometable
+
+ Any name can be given to :data:`.func`. If the function name is unknown to
+ SQLAlchemy, it will be rendered exactly as is. For common SQL functions
+ which SQLAlchemy is aware of, the name may be interpreted as a *generic
+ function* which will be compiled appropriately to the target database::
+
+ >>> print func.current_timestamp()
+ CURRENT_TIMESTAMP
+
+ To call functions which are present in dot-separated packages,
+ specify them in the same manner::
+
+ >>> print func.stats.yield_curve(5, 10)
+ stats.yield_curve(:yield_curve_1, :yield_curve_2)
+
+ SQLAlchemy can be made aware of the return type of functions to enable
+ type-specific lexical and result-based behavior. For example, to ensure
+ that a string-based function returns a Unicode value and is similarly
+ treated as a string in expressions, specify
+ :class:`~sqlalchemy.types.Unicode` as the type:
+
+ >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \
+ ... func.my_string(u'there', type_=Unicode)
+ my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
+
+ The object returned by a :data:`.func` call is usually an instance of
+ :class:`.Function`.
+ This object meets the "column" interface, including comparison and labeling
+ functions. The object can also be passed to the :meth:`~.Connectable.execute`
+ method of a :class:`.Connection` or :class:`.Engine`, where it will be
+ wrapped inside of a SELECT statement first::
+
+ print connection.execute(func.current_timestamp()).scalar()
+
+ In a few exceptional cases, the :data:`.func` accessor
+ will redirect a name to a built-in expression such as :func:`.cast`
+ or :func:`.extract`, as these names have well-known meaning
+ but are not exactly the same as "functions" from a SQLAlchemy
+ perspective.
+
+ .. versionadded:: 0.8 :data:`.func` can return non-function expression
+ constructs for common quasi-functional names like :func:`.cast`
+ and :func:`.extract`.
+
+ Functions which are interpreted as "generic" functions know how to
+ calculate their return type automatically. For a listing of known generic
+ functions, see :ref:`generic_functions`.
+
+"""
+
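+# ``modifier`` works like :data:`.func`, except that the generated
+# constructs render their argument list without enclosing parenthesis,
+# per the ``group=False`` flag.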
+modifier = _FunctionGenerator(group=False)
+
+class Function(FunctionElement):
+ """Describe a named SQL function.
+
+ See the superclass :class:`.FunctionElement` for a description
+ of public methods.
+
+ .. seealso::
+
+ :data:`.func` - namespace which produces registered or ad-hoc
+ :class:`.Function` instances.
+
+ :class:`.GenericFunction` - allows creation of registered function
+ types.
+
+ """
+
+ __visit_name__ = 'function'
+
+ def __init__(self, name, *clauses, **kw):
+ """Construct a :class:`.Function`.
+
+ The :data:`.func` construct is normally used to construct
+ new :class:`.Function` instances.
+
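+ E.g., an explicit construction roughly equivalent to
+ ``func.my_func(5, type_=Integer)`` (an illustrative sketch)::
+
+ fn = Function("my_func", 5, type_=Integer)
+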
+ """
+ self.packagenames = kw.pop('packagenames', None) or []
+ self.name = name
+ self._bind = kw.get('bind', None)
+ self.type = sqltypes.to_instance(kw.get('type_', None))
+
+ FunctionElement.__init__(self, *clauses, **kw)
+
+ def _bind_param(self, operator, obj):
+ return BindParameter(self.name, obj,
+ _compared_to_operator=operator,
+ _compared_to_type=self.type,
+ unique=True)
+
+
class _GenericMeta(VisitableType):
def __init__(cls, clsname, bases, clsdict):
cls.name = name = clsdict.get('name', clsname)
@@ -128,8 +408,8 @@ class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
kwargs.pop("type_", None) or getattr(self, 'type', None))
-register_function("cast", cast)
-register_function("extract", extract)
+register_function("cast", Cast)
+register_function("extract", Extract)
class next_value(GenericFunction):
diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py
index 128442158..d7ec977aa 100644
--- a/lib/sqlalchemy/sql/operators.py
+++ b/lib/sqlalchemy/sql/operators.py
@@ -1,5 +1,5 @@
# sql/operators.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -27,8 +27,11 @@ else:
class Operators(object):
"""Base of comparison and logical operators.
- Implements base methods :meth:`operate` and :meth:`reverse_operate`,
- as well as :meth:`__and__`, :meth:`__or__`, :meth:`__invert__`.
+ Implements base methods :meth:`~sqlalchemy.sql.operators.Operators.operate` and
+ :meth:`~sqlalchemy.sql.operators.Operators.reverse_operate`, as well as
+ :meth:`~sqlalchemy.sql.operators.Operators.__and__`,
+ :meth:`~sqlalchemy.sql.operators.Operators.__or__`,
+ :meth:`~sqlalchemy.sql.operators.Operators.__invert__`.
Usually is used via its most common subclass
:class:`.ColumnOperators`.
@@ -654,6 +657,12 @@ def exists():
raise NotImplementedError()
+def istrue(a):
+ raise NotImplementedError()
+
+def isfalse(a):
+ raise NotImplementedError()
+
def is_(a, b):
return a.is_(b)
@@ -779,6 +788,7 @@ parenthesize (a op b).
"""
+_asbool = util.symbol('_asbool', canonical=-10)
_smallest = util.symbol('_smallest', canonical=-100)
_largest = util.symbol('_largest', canonical=100)
@@ -816,12 +826,19 @@ _PRECEDENCE = {
between_op: 5,
distinct_op: 5,
inv: 5,
+ istrue: 5,
+ isfalse: 5,
and_: 3,
or_: 2,
comma_op: -1,
- collate: 7,
+
+ desc_op: 3,
+ asc_op: 3,
+ collate: 4,
+
as_: -1,
exists: 0,
+ _asbool: -10,
_smallest: _smallest,
_largest: _largest
}
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
new file mode 100644
index 000000000..ba38b5070
--- /dev/null
+++ b/lib/sqlalchemy/sql/schema.py
@@ -0,0 +1,3273 @@
+# sql/schema.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""The schema module provides the building blocks for database metadata.
+
+Each element within this module describes a database entity which can be
+created and dropped, or is otherwise part of such an entity. Examples include
+tables, columns, sequences, and indexes.
+
+All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
+defined in this module they are intended to be agnostic of any vendor-specific
+constructs.
+
+A collection of entities are grouped into a unit called
+:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
+schema elements, and can also be associated with an actual database connection
+such that operations involving the contained elements can contact the database
+as needed.
+
+Two of the elements here also build upon their "syntactic" counterparts, which
+are defined in :mod:`~sqlalchemy.sql.expression`, specifically
+:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
+Since these objects are part of the SQL expression language, they are usable
+as components in SQL expressions.
+
+"""
+
+import inspect
+from .. import exc, util, event, inspection
+from .base import SchemaEventTarget, DialectKWArgs
+from . import visitors
+from . import type_api
+from .base import _bind_or_error, ColumnCollection
+from .elements import ClauseElement, ColumnClause, _truncated_label, \
+ _as_truncated, TextClause, _literal_as_text,\
+ ColumnElement, _find_columns, quoted_name
+from .selectable import TableClause
+import collections
+import sqlalchemy
+from . import ddl
+
+RETAIN_SCHEMA = util.symbol('retain_schema')
+
+
+def _get_table_key(name, schema):
+ if schema is None:
+ return name
+ else:
+ return schema + "." + name
+
+
+
+@inspection._self_inspects
+class SchemaItem(SchemaEventTarget, visitors.Visitable):
+ """Base class for items that define a database schema."""
+
+ __visit_name__ = 'schema_item'
+
+ def _execute_on_connection(self, connection, multiparams, params):
+ return connection._execute_default(self, multiparams, params)
+
+ def _init_items(self, *args):
+ """Initialize the list of child items for this SchemaItem."""
+
+ for item in args:
+ if item is not None:
+ item._set_parent_with_dispatch(self)
+
+ def get_children(self, **kwargs):
+ """used to allow SchemaVisitor access"""
+ return []
+
+ def __repr__(self):
+ return util.generic_repr(self)
+
+ @property
+ @util.deprecated('0.9', 'Use ``<obj>.name.quote``')
+ def quote(self):
+ """Return the value of the ``quote`` flag passed
+ to this schema object, for those schema items which
+ have a ``name`` field.
+
+ """
+
+ return self.name.quote
+
+ @util.memoized_property
+ def info(self):
+ """Info dictionary associated with the object, allowing user-defined
+ data to be associated with this :class:`.SchemaItem`.
+
+ The dictionary is automatically generated when first accessed.
+ It can also be specified in the constructor of some objects,
+ such as :class:`.Table` and :class:`.Column`.
+
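+ E.g., an illustrative sketch::
+
+ t = Table('mytable', metadata, info={'owner': 'accounting'})
+ t.info['verified'] = True
+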
+ """
+ return {}
+
+ def _schema_item_copy(self, schema_item):
+ if 'info' in self.__dict__:
+ schema_item.info = self.info.copy()
+ schema_item.dispatch._update(self.dispatch)
+ return schema_item
+
+
+class Table(DialectKWArgs, SchemaItem, TableClause):
+ """Represent a table in a database.
+
+ e.g.::
+
+ mytable = Table("mytable", metadata,
+ Column('mytable_id', Integer, primary_key=True),
+ Column('value', String(50))
+ )
+
+ The :class:`.Table` object constructs a unique instance of itself based
+ on its name and optional schema name within the given
+ :class:`.MetaData` object. Calling the :class:`.Table`
+ constructor with the same name and same :class:`.MetaData` argument
+ a second time will return the *same* :class:`.Table` object - in this way
+ the :class:`.Table` constructor acts as a registry function.
+
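+ E.g., an illustrative sketch of this registry behavior::
+
+ meta = MetaData()
+ t1 = Table('mytable', meta, Column('id', Integer, primary_key=True))
+ t2 = Table('mytable', meta)
+ assert t1 is t2
+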
+ .. seealso::
+
+ :ref:`metadata_describing` - Introduction to database metadata
+
+ Constructor arguments are as follows:
+
+ :param name: The name of this table as represented in the database.
+
+ The table name, along with the value of the ``schema`` parameter,
+ forms a key which uniquely identifies this :class:`.Table` within
+ the owning :class:`.MetaData` collection.
+ Additional calls to :class:`.Table` with the same name, metadata,
+ and schema name will return the same :class:`.Table` object.
+
+ Names which contain no upper case characters
+ will be treated as case insensitive names, and will not be quoted
+ unless they are a reserved word or contain special characters.
+ A name with any number of upper case characters is considered
+ to be case sensitive, and will be sent as quoted.
+
+ To enable unconditional quoting for the table name, specify the flag
+ ``quote=True`` to the constructor, or use the :class:`.quoted_name`
+ construct to specify the name.
+
+ :param metadata: a :class:`.MetaData` object which will contain this
+ table. The metadata is used as a point of association of this table
+ with other tables which are referenced via foreign key. It also
+ may be used to associate this table with a particular
+ :class:`.Connectable`.
+
+ :param \*args: Additional positional arguments are used primarily
+ to add the list of :class:`.Column` objects contained within this
+ table. Similar to the style of a CREATE TABLE statement, other
+ :class:`.SchemaItem` constructs may be added here, including
+ :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`.
+
+ :param autoload: Defaults to ``False``: when set to ``True``, the
+ Columns for this table will be reflected from the database. Usually
+ there will be no Column objects in the constructor if this flag is set.
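+
+ E.g., a minimal reflection sketch, assuming ``engine`` is an
+ existing :class:`.Engine`::
+
+ users = Table('users', metadata, autoload=True, autoload_with=engine)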
+
+ :param autoload_replace: If ``True``, when using ``autoload=True``
+ and ``extend_existing=True``,
+ replace ``Column`` objects already present in the ``Table`` that's
+ in the ``MetaData`` registry with
+ what's reflected. Otherwise, all existing columns will be
+ excluded from the reflection process. Note that this does
+ not impact ``Column`` objects specified in the same call to ``Table``
+ which includes ``autoload``; those always take precedence.
+ Defaults to ``True``.
+
+ .. versionadded:: 0.7.5
+
+ :param autoload_with: If autoload==True, this is an optional Engine
+ or Connection instance to be used for the table reflection. If
+ ``None``, the underlying MetaData's bound connectable will be used.
+
+ :param extend_existing: When ``True``, indicates that if this
+ :class:`.Table` is already present in the given :class:`.MetaData`,
+ apply further arguments within the constructor to the existing
+ :class:`.Table`.
+
+ If ``extend_existing`` or ``keep_existing`` are not set, an error is
+ raised if additional table modifiers are specified when
+ the given :class:`.Table` is already present in the :class:`.MetaData`.
+
+ .. versionchanged:: 0.7.4
+ ``extend_existing`` will work in conjunction
+ with ``autoload=True`` to run a new reflection operation against
+ the database; new :class:`.Column` objects will be produced
+ from database metadata to replace those existing with the same
+ name, and additional :class:`.Column` objects not present
+ in the :class:`.Table` will be added.
+
+ As is always the case with ``autoload=True``, :class:`.Column`
+ objects can be specified in the same :class:`.Table` constructor,
+ which will take precedence. I.e.::
+
+ Table("mytable", metadata,
+ Column('y', Integer),
+ extend_existing=True,
+ autoload=True,
+ autoload_with=engine
+ )
+
+ The above will overwrite all columns within ``mytable`` which
+ are present in the database, except for ``y`` which will be used as is
+ from the above definition. If the ``autoload_replace`` flag
+ is set to False, no existing columns will be replaced.
+
+ :param implicit_returning: True by default - indicates that
+ RETURNING can be used by default to fetch newly inserted primary key
+ values, for backends which support this. Note that
+ create_engine() also provides an implicit_returning flag.
+
+ :param include_columns: A list of strings indicating a subset of
+ columns to be loaded via the ``autoload`` operation; table columns who
+ aren't present in this list will not be represented on the resulting
+ ``Table`` object. Defaults to ``None`` which indicates all columns
+ should be reflected.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.SchemaItem.info` attribute of this object.
+
+ :param keep_existing: When ``True``, indicates that if this Table
+ is already present in the given :class:`.MetaData`, ignore
+ further arguments within the constructor to the existing
+ :class:`.Table`, and return the :class:`.Table` object as
+ originally created. This is to allow a function that wishes
+ to define a new :class:`.Table` on first call, but on
+ subsequent calls will return the same :class:`.Table`,
+ without any of the declarations (particularly constraints)
+ being applied a second time. Also see extend_existing.
+
+ If extend_existing or keep_existing are not set, an error is
+ raised if additional table modifiers are specified when
+ the given :class:`.Table` is already present in the :class:`.MetaData`.
+
+ :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
+ which will be passed to :func:`.event.listen` upon construction.
+ This alternate hook to :func:`.event.listen` allows the establishment
+ of a listener function specific to this :class:`.Table` before
+ the "autoload" process begins. Particularly useful for
+ the :meth:`.DDLEvents.column_reflect` event::
+
+ def listen_for_reflect(inspector, table, column_info):
+ "handle the column reflection event"
+ # ...
+
+ t = Table(
+ 'sometable', metadata,
+ autoload=True,
+ listeners=[
+ ('column_reflect', listen_for_reflect)
+ ])
+
+ :param mustexist: When ``True``, indicates that this Table must already
+ be present in the given :class:`.MetaData` collection, else
+ an exception is raised.
+
+ :param prefixes:
+ A list of strings to insert after CREATE in the CREATE TABLE
+ statement. They will be separated by spaces.
+
+ :param quote: Force quoting of this table's name on or off, corresponding
+ to ``True`` or ``False``. When left at its default of ``None``,
+ the column identifier will be quoted according to whether the name is
+ case sensitive (identifiers with at least one upper case character are
+ treated as case sensitive), or if it's a reserved word. This flag
+ is only needed to force quoting of a reserved word which is not known
+ by the SQLAlchemy dialect.
+
+ :param quote_schema: same as 'quote' but applies to the schema identifier.
+
+ :param schema: The schema name for this table, which is required if
+ the table resides in a schema other than the default selected schema
+ for the engine's database connection. Defaults to ``None``.
+
+ The quoting rules for the schema name are the same as those for the
+ ``name`` parameter, in that quoting is applied for reserved words or
+ case-sensitive names; to enable unconditional quoting for the
+ schema name, specify the flag
+ ``quote_schema=True`` to the constructor, or use the :class:`.quoted_name`
+ construct to specify the name.
+
+ :param useexisting: Deprecated. Use extend_existing.
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+
+ __visit_name__ = 'table'
+
+ def __new__(cls, *args, **kw):
+ if not args:
+ # python3k pickle seems to call this
+ return object.__new__(cls)
+
+ try:
+ name, metadata, args = args[0], args[1], args[2:]
+ except IndexError:
+ raise TypeError("Table() takes at least two arguments")
+
+ schema = kw.get('schema', None)
+ if schema is None:
+ schema = metadata.schema
+ keep_existing = kw.pop('keep_existing', False)
+ extend_existing = kw.pop('extend_existing', False)
+ if 'useexisting' in kw:
+ msg = "useexisting is deprecated. Use extend_existing."
+ util.warn_deprecated(msg)
+ if extend_existing:
+ msg = "useexisting is synonymous with extend_existing."
+ raise exc.ArgumentError(msg)
+ extend_existing = kw.pop('useexisting', False)
+
+ if keep_existing and extend_existing:
+ msg = "keep_existing and extend_existing are mutually exclusive."
+ raise exc.ArgumentError(msg)
+
+ mustexist = kw.pop('mustexist', False)
+ key = _get_table_key(name, schema)
+ if key in metadata.tables:
+ if not keep_existing and not extend_existing and bool(args):
+ raise exc.InvalidRequestError(
+ "Table '%s' is already defined for this MetaData "
+ "instance. Specify 'extend_existing=True' "
+ "to redefine "
+ "options and columns on an "
+ "existing Table object." % key)
+ table = metadata.tables[key]
+ if extend_existing:
+ table._init_existing(*args, **kw)
+ return table
+ else:
+ if mustexist:
+ raise exc.InvalidRequestError(
+ "Table '%s' not defined" % (key))
+ table = object.__new__(cls)
+ table.dispatch.before_parent_attach(table, metadata)
+ metadata._add_table(name, schema, table)
+ try:
+ table._init(name, metadata, *args, **kw)
+ table.dispatch.after_parent_attach(table, metadata)
+ return table
+ except:
+ #metadata._remove_table(name, schema)
+ raise
+
+
+ @property
+ @util.deprecated('0.9', 'Use ``table.schema.quote``')
+ def quote_schema(self):
+ """Return the value of the ``quote_schema`` flag passed
+ to this :class:`.Table`.
+ """
+
+ return self.schema.quote
+
+ def __init__(self, *args, **kw):
+ """Constructor for :class:`~.schema.Table`.
+
+ This method is a no-op. See the top-level
+ documentation for :class:`~.schema.Table`
+ for constructor arguments.
+
+ """
+ # __init__ is overridden as a no-op, so that the implicit
+ # constructor call which Python makes after __new__ does not
+ # invoke the superclass constructor a second time; actual
+ # setup takes place in _init().
+
+ def _init(self, name, metadata, *args, **kwargs):
+ super(Table, self).__init__(quoted_name(name, kwargs.pop('quote', None)))
+ self.metadata = metadata
+
+ self.schema = kwargs.pop('schema', None)
+ if self.schema is None:
+ self.schema = metadata.schema
+ else:
+ quote_schema = kwargs.pop('quote_schema', None)
+ self.schema = quoted_name(self.schema, quote_schema)
+
+ self.indexes = set()
+ self.constraints = set()
+ self._columns = ColumnCollection()
+ PrimaryKeyConstraint()._set_parent_with_dispatch(self)
+ self.foreign_keys = set()
+ self._extra_dependencies = set()
+ if self.schema is not None:
+ self.fullname = "%s.%s" % (self.schema, self.name)
+ else:
+ self.fullname = self.name
+
+ autoload = kwargs.pop('autoload', False)
+ autoload_with = kwargs.pop('autoload_with', None)
+ # this argument is only used with _init_existing()
+ kwargs.pop('autoload_replace', True)
+ include_columns = kwargs.pop('include_columns', None)
+
+ self.implicit_returning = kwargs.pop('implicit_returning', True)
+
+ if 'info' in kwargs:
+ self.info = kwargs.pop('info')
+ if 'listeners' in kwargs:
+ listeners = kwargs.pop('listeners')
+ for evt, fn in listeners:
+ event.listen(self, evt, fn)
+
+ self._prefixes = kwargs.pop('prefixes', [])
+
+ self._extra_kwargs(**kwargs)
+
+ # load column definitions from the database if 'autoload' is defined
+ # we do it after the table is in the singleton dictionary to support
+ # circular foreign keys
+ if autoload:
+ self._autoload(metadata, autoload_with, include_columns)
+
+ # initialize all the column, etc. objects. done after reflection to
+ # allow user-overrides
+ self._init_items(*args)
+
+ def _autoload(self, metadata, autoload_with, include_columns,
+ exclude_columns=()):
+
+ if autoload_with:
+ autoload_with.run_callable(
+ autoload_with.dialect.reflecttable,
+ self, include_columns, exclude_columns
+ )
+ else:
+ bind = _bind_or_error(metadata,
+ msg="No engine is bound to this Table's MetaData. "
+ "Pass an engine to the Table via "
+ "autoload_with=<someengine>, "
+ "or associate the MetaData with an engine via "
+ "metadata.bind=<someengine>")
+ bind.run_callable(
+ bind.dialect.reflecttable,
+ self, include_columns, exclude_columns
+ )
+
+ @property
+ def _sorted_constraints(self):
+ """Return the set of constraints as a list, sorted by creation
+ order.
+
+ """
+ return sorted(self.constraints, key=lambda c: c._creation_order)
+
+ def _init_existing(self, *args, **kwargs):
+ autoload = kwargs.pop('autoload', False)
+ autoload_with = kwargs.pop('autoload_with', None)
+ autoload_replace = kwargs.pop('autoload_replace', True)
+ schema = kwargs.pop('schema', None)
+ if schema and schema != self.schema:
+ raise exc.ArgumentError(
+ "Can't change schema of existing table from '%s' to '%s'",
+ (self.schema, schema))
+
+ include_columns = kwargs.pop('include_columns', None)
+
+ if include_columns is not None:
+ for c in self.c:
+ if c.name not in include_columns:
+ self._columns.remove(c)
+
+ for key in ('quote', 'quote_schema'):
+ if key in kwargs:
+ raise exc.ArgumentError(
+ "Can't redefine 'quote' or 'quote_schema' arguments")
+
+ if 'info' in kwargs:
+ self.info = kwargs.pop('info')
+
+ if autoload:
+ if not autoload_replace:
+ exclude_columns = [c.name for c in self.c]
+ else:
+ exclude_columns = ()
+ self._autoload(
+ self.metadata, autoload_with, include_columns, exclude_columns)
+
+ self._extra_kwargs(**kwargs)
+ self._init_items(*args)
+
+ def _extra_kwargs(self, **kwargs):
+ self._validate_dialect_kwargs(kwargs)
+
+ def _init_collections(self):
+ pass
+
+ @util.memoized_property
+ def _autoincrement_column(self):
+ for col in self.primary_key:
+ if col.autoincrement and \
+ col.type._type_affinity is not None and \
+ issubclass(col.type._type_affinity, type_api.INTEGERTYPE._type_affinity) and \
+ (not col.foreign_keys or col.autoincrement == 'ignore_fk') and \
+ isinstance(col.default, (type(None), Sequence)) and \
+ (col.server_default is None or col.server_default.reflected):
+ return col
+
+ @property
+ def key(self):
+ """Return the 'key' for this :class:`.Table`.
+
+ This value is used as the dictionary key within the
+ :attr:`.MetaData.tables` collection. It is typically the same
+ as that of :attr:`.Table.name` for a table with no :attr:`.Table.schema`
+ set; otherwise it is typically of the form ``schemaname.tablename``.
+
+ """
+ return _get_table_key(self.name, self.schema)
+
+ def __repr__(self):
+ return "Table(%s)" % ', '.join(
+ [repr(self.name)] + [repr(self.metadata)] +
+ [repr(x) for x in self.columns] +
+ ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']])
+
+ def __str__(self):
+ return _get_table_key(self.description, self.schema)
+
+ @property
+ def bind(self):
+ """Return the connectable associated with this Table."""
+
+ return self.metadata and self.metadata.bind or None
+
+ def add_is_dependent_on(self, table):
+ """Add a 'dependency' for this Table.
+
+ This is another Table object which must be created
+ first before this one can, or dropped after this one.
+
+ Usually, dependencies between tables are determined via
+ ForeignKey objects. However, for other situations that
+ create dependencies outside of foreign keys (rules, inheriting),
+ this method can manually establish such a link.
+
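+ E.g., an illustrative sketch::
+
+ widget_table.add_is_dependent_on(gadget_table)
+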
+ """
+ self._extra_dependencies.add(table)
+
+ def append_column(self, column):
+ """Append a :class:`~.schema.Column` to this :class:`~.schema.Table`.
+
+ The "key" of the newly added :class:`~.schema.Column`, i.e. the
+ value of its ``.key`` attribute, will then be available
+ in the ``.c`` collection of this :class:`~.schema.Table`, and the
+ column definition will be included in any CREATE TABLE, SELECT,
+ UPDATE, etc. statements generated from this :class:`~.schema.Table`
+ construct.
+
+ Note that this does **not** change the definition of the table
+ as it exists within any underlying database, assuming that
+ table has already been created in the database. Relational
+ databases support the addition of columns to existing tables
+ using the SQL ALTER command, which would need to be
+ emitted for an already-existing table that doesn't contain
+ the newly added column.
+
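+ E.g., an illustrative sketch::
+
+ mytable.append_column(Column('y', Integer))
+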
+ """
+
+ column._set_parent_with_dispatch(self)
+
+ def append_constraint(self, constraint):
+ """Append a :class:`~.schema.Constraint` to this
+ :class:`~.schema.Table`.
+
+ This has the effect of the constraint being included in any
+ future CREATE TABLE statement, assuming specific DDL creation
+ events have not been associated with the given
+ :class:`~.schema.Constraint` object.
+
+ Note that this does **not** produce the constraint within the
+ relational database automatically, for a table that already exists
+ in the database. To add a constraint to an
+ existing relational database table, the SQL ALTER command must
+ be used. SQLAlchemy also provides the
+ :class:`.AddConstraint` construct which can produce this SQL when
+ invoked as an executable clause.
+
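+ E.g., an illustrative sketch::
+
+ mytable.append_constraint(UniqueConstraint('value'))
+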
+ """
+
+ constraint._set_parent_with_dispatch(self)
+
+ def append_ddl_listener(self, event_name, listener):
+ """Append a DDL event listener to this ``Table``.
+
+ .. deprecated:: 0.7
+ See :class:`.DDLEvents`.
+
+ """
+
+ def adapt_listener(target, connection, **kw):
+ listener(event_name, target, connection)
+
+ event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
+
+ def _set_parent(self, metadata):
+ metadata._add_table(self.name, self.schema, self)
+ self.metadata = metadata
+
+ def get_children(self, column_collections=True,
+ schema_visitor=False, **kw):
+ if not schema_visitor:
+ return TableClause.get_children(
+ self, column_collections=column_collections, **kw)
+ else:
+ if column_collections:
+ return list(self.columns)
+ else:
+ return []
+
+ def exists(self, bind=None):
+ """Return True if this table exists."""
+
+ if bind is None:
+ bind = _bind_or_error(self)
+
+ return bind.run_callable(bind.dialect.has_table,
+ self.name, schema=self.schema)
+
+ def create(self, bind=None, checkfirst=False):
+ """Issue a ``CREATE`` statement for this
+ :class:`.Table`, using the given :class:`.Connectable`
+ for connectivity.
+
+ .. seealso::
+
+ :meth:`.MetaData.create_all`.
+
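+ E.g., an illustrative sketch::
+
+ mytable.create(bind=engine, checkfirst=True)
+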
+ """
+
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaGenerator,
+ self,
+ checkfirst=checkfirst)
+
+ def drop(self, bind=None, checkfirst=False):
+ """Issue a ``DROP`` statement for this
+ :class:`.Table`, using the given :class:`.Connectable`
+ for connectivity.
+
+ .. seealso::
+
+ :meth:`.MetaData.drop_all`.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaDropper,
+ self,
+ checkfirst=checkfirst)
+
+ def tometadata(self, metadata, schema=RETAIN_SCHEMA):
+ """Return a copy of this :class:`.Table` associated with a different
+ :class:`.MetaData`.
+
+ E.g.::
+
+ some_engine = create_engine("sqlite:///some.db")
+
+ # create two metadata
+ meta1 = MetaData()
+ meta2 = MetaData()
+
+ # load 'users' from the sqlite engine
+ users_table = Table('users', meta1, autoload=True,
+ autoload_with=some_engine)
+
+ # create the same Table object for the plain metadata
+ users_table_2 = users_table.tometadata(meta2)
+
+ :param metadata: Target :class:`.MetaData` object.
+ :param schema: Optional string name of a target schema, or
+ ``None`` for no schema. The :class:`.Table` object will be
+ given this schema name upon copy. Defaults to the special
+ symbol :attr:`.RETAIN_SCHEMA` which indicates no change should be
+ made to the schema name of the resulting :class:`.Table`.
+
+ """
+
+ if schema is RETAIN_SCHEMA:
+ schema = self.schema
+ elif schema is None:
+ schema = metadata.schema
+ key = _get_table_key(self.name, schema)
+ if key in metadata.tables:
+ util.warn("Table '%s' already exists within the given "
+ "MetaData - not copying." % self.description)
+ return metadata.tables[key]
+
+ args = []
+ for c in self.columns:
+ args.append(c.copy(schema=schema))
+ table = Table(
+ self.name, metadata, schema=schema,
+ *args, **self.kwargs
+ )
+ for c in self.constraints:
+ table.append_constraint(c.copy(schema=schema, target_table=table))
+
+ for index in self.indexes:
+ # skip indexes that would be generated
+ # by the 'index' flag on Column
+ if len(index.columns) == 1 and \
+ list(index.columns)[0].index:
+ continue
+ Index(index.name,
+ unique=index.unique,
+ *[table.c[col] for col in index.columns.keys()],
+ **index.kwargs)
+ return self._schema_item_copy(table)
+
+
+class Column(SchemaItem, ColumnClause):
+ """Represents a column in a database table."""
+
+ __visit_name__ = 'column'
+
+ def __init__(self, *args, **kwargs):
+ """
+ Construct a new ``Column`` object.
+
+ :param name: The name of this column as represented in the database.
+ This argument may be the first positional argument, or specified
+ via keyword.
+
+ Names which contain no upper case characters
+ will be treated as case insensitive names, and will not be quoted
+ unless they are a reserved word. Names with any number of upper
+ case characters will be quoted and sent exactly. Note that this
+ behavior applies even for databases which standardize upper
+ case names as case insensitive such as Oracle.
+
+ The name field may be omitted at construction time and applied
+ later, at any time before the Column is associated with a
+ :class:`.Table`. This is to support convenient
+ usage within the :mod:`~sqlalchemy.ext.declarative` extension.
+
+ :param type\_: The column's type, indicated using an instance which
+ subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments
+ are required for the type, the class of the type can be sent
+ as well, e.g.::
+
+ # use a type with arguments
+ Column('data', String(50))
+
+ # use no arguments
+ Column('level', Integer)
+
+ The ``type`` argument may be the second positional argument
+ or specified by keyword.
+
+ If the ``type`` is ``None`` or is omitted, it will first default to the special
+ type :class:`.NullType`. If and when this :class:`.Column` is
+ made to refer to another column using :class:`.ForeignKey`
+ and/or :class:`.ForeignKeyConstraint`, the type of the remote-referenced
+ column will be copied to this column as well, at the moment that
+ the foreign key is resolved against that remote :class:`.Column`
+ object.
+
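+ E.g., the type may be omitted on a column which refers to
+ another column (an illustrative sketch)::
+
+ Column('user_id', ForeignKey('user.id'))
+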
+ .. versionchanged:: 0.9.0
+ Support for propagation of type to a :class:`.Column` from its
+ :class:`.ForeignKey` object has been improved and should be
+ more reliable and timely.
+
+ :param \*args: Additional positional arguments include various
+ :class:`.SchemaItem` derived constructs which will be applied
+ as options to the column. These include instances of
+ :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`,
+ and :class:`.Sequence`. In some cases an equivalent keyword
+ argument is available such as ``server_default``, ``default``
+ and ``unique``.
+
+ :param autoincrement: This flag may be set to ``False`` to
+ indicate an integer primary key column that should not be
+ considered to be the "autoincrement" column, that is
+ the integer primary key column which generates values
+ implicitly upon INSERT and whose value is usually returned
+ via the DBAPI cursor.lastrowid attribute. It defaults
+ to ``True`` to satisfy the common use case of a table
+ with a single integer primary key column. If the table
+ has a composite primary key consisting of more than one
+ integer column, set this flag to True only on the
+ column that should be considered "autoincrement".
+
+ The setting *only* has an effect for columns which are:
+
+ * Integer derived (i.e. INT, SMALLINT, BIGINT).
+
+ * Part of the primary key
+
+ * Not referenced by any foreign keys, unless
+ the value is specified as ``'ignore_fk'``
+
+ .. versionadded:: 0.7.4
+
+ * Have no server side or client side defaults (with the exception
+ of Postgresql SERIAL).
+
+ The setting has these two effects on columns that meet the
+ above criteria:
+
+ * DDL issued for the column will include database-specific
+ keywords intended to signify this column as an
+ "autoincrement" column, such as AUTO INCREMENT on MySQL,
+ SERIAL on Postgresql, and IDENTITY on MS-SQL. It does
+ *not* issue AUTOINCREMENT for SQLite since this is a
+ special SQLite flag that is not required for autoincrementing
+ behavior. See the SQLite dialect documentation for
+ information on SQLite's AUTOINCREMENT.
+
+ * The column will be considered to be available as
+ cursor.lastrowid or equivalent, for those dialects which
+ "post fetch" newly inserted identifiers after a row has
+ been inserted (SQLite, MySQL, MS-SQL). It does not have
+ any effect in this regard for databases that use sequences
+ to generate primary key identifiers (i.e. Firebird, Postgresql,
+ Oracle).
+
+ .. versionchanged:: 0.7.4
+ ``autoincrement`` accepts a special value ``'ignore_fk'``
+ to indicate that autoincrement should apply regardless of foreign
+ key references. This applies to certain composite foreign key
+ setups, such as the one demonstrated in the ORM documentation
+ at :ref:`post_update`.
+
+ :param default: A scalar, Python callable, or
+ :class:`.ColumnElement` expression representing the
+ *default value* for this column, which will be invoked upon insert
+ if this column is otherwise not specified in the VALUES clause of
+ the insert. This is a shortcut to using :class:`.ColumnDefault` as
+ a positional argument; see that class for full detail on the
+ structure of the argument.
+
+ Contrast this argument to ``server_default`` which creates a
+ default generator on the database side.
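+
+ E.g., a Python-side callable invoked for each INSERT (an
+ illustrative sketch)::
+
+ Column('created_at', DateTime, default=datetime.datetime.utcnow)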
+
+ :param doc: optional String that can be used by the ORM or similar
+ to document attributes. This attribute does not render SQL
+ comments (a future attribute 'comment' will achieve that).
+
+ :param key: An optional string identifier which will identify this
+ ``Column`` object on the :class:`.Table`. When a key is provided,
+ this is the only identifier referencing the ``Column`` within the
+ application, including ORM attribute mapping; the ``name`` field
+ is used only when rendering SQL.
+
+ :param index: When ``True``, indicates that the column is indexed.
+ This is a shortcut for using a :class:`.Index` construct on the
+ table. To specify indexes with explicit names or indexes that
+ contain multiple columns, use the :class:`.Index` construct
+ instead.
+
+ :param info: Optional data dictionary which will be populated into the
+ :attr:`.SchemaItem.info` attribute of this object.
+
+ :param nullable: If set to the default of ``True``, indicates the
+ column will be rendered as allowing NULL, else it's rendered as
+ NOT NULL. This parameter is only used when issuing CREATE TABLE
+ statements.
+
+ :param onupdate: A scalar, Python callable, or
+ :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
+ default value to be applied to the column within UPDATE
+ statements, which will be invoked upon update if this column is not
+ present in the SET clause of the update. This is a shortcut to
+ using :class:`.ColumnDefault` as a positional argument with
+ ``for_update=True``.
+
+ :param primary_key: If ``True``, marks this column as a primary key
+ column. Multiple columns can have this flag set to specify
+ composite primary keys. As an alternative, the primary key of a
+ :class:`.Table` can be specified via an explicit
+ :class:`.PrimaryKeyConstraint` object.
+
+ :param server_default: A :class:`.FetchedValue` instance, str, Unicode
+ or :func:`~sqlalchemy.sql.expression.text` construct representing
+ the DDL DEFAULT value for the column.
+
+ String types will be emitted as-is, surrounded by single quotes::
+
+ Column('x', Text, server_default="val")
+
+ x TEXT DEFAULT 'val'
+
+ A :func:`~sqlalchemy.sql.expression.text` expression will be
+ rendered as-is, without quotes::
+
+ Column('y', DateTime, server_default=text('NOW()'))
+
+ y DATETIME DEFAULT NOW()
+
+ Strings and text() will be converted into a :class:`.DefaultClause`
+ object upon initialization.
+
+ Use :class:`.FetchedValue` to indicate that an already-existing
+ column will generate a default value on the database side which
+ will be available to SQLAlchemy for post-fetch after inserts. This
+ construct does not specify any DDL and the implementation is left
+ to the database, such as via a trigger.
+
+ :param server_onupdate: A :class:`.FetchedValue` instance
+ representing a database-side default generation function. This
+ indicates to SQLAlchemy that a newly generated value will be
+ available after updates. This construct does not specify any DDL
+ and the implementation is left to the database, such as via a
+ trigger.
+
+ :param quote: Force quoting of this column's name on or off,
+ corresponding to ``True`` or ``False``. When left at its default
+ of ``None``, the column identifier will be quoted according to
+ whether the name is case sensitive (identifiers with at least one
+ upper case character are treated as case sensitive), or if it's a
+ reserved word. This flag is only needed to force quoting of a
+ reserved word which is not known by the SQLAlchemy dialect.
+
+ :param unique: When ``True``, indicates that this column contains a
+ unique constraint, or if ``index`` is ``True`` as well, indicates
+ that the :class:`.Index` should be created with the unique flag.
+ To specify multiple columns in the constraint/index or to specify
+ an explicit name, use the :class:`.UniqueConstraint` or
+ :class:`.Index` constructs explicitly.
+
+ :param system: When ``True``, indicates this is a "system" column,
+ that is a column which is automatically made available by the
+ database, and should not be included in the columns list for a
+ ``CREATE TABLE`` statement.
+
+ For more elaborate scenarios where columns should be conditionally
+ rendered differently on different backends, consider custom
+ compilation rules for :class:`.CreateColumn`.
+
+ .. versionadded:: 0.8.3 Added the ``system=True`` parameter to
+ :class:`.Column`.
+
+ """
+
+ name = kwargs.pop('name', None)
+ type_ = kwargs.pop('type_', None)
+ args = list(args)
+ if args:
+ if isinstance(args[0], util.string_types):
+ if name is not None:
+ raise exc.ArgumentError(
+ "May not pass name positionally and as a keyword.")
+ name = args.pop(0)
+ if args:
+ coltype = args[0]
+
+ if hasattr(coltype, "_sqla_type"):
+ if type_ is not None:
+ raise exc.ArgumentError(
+ "May not pass type_ positionally and as a keyword.")
+ type_ = args.pop(0)
+
+ if name is not None:
+ name = quoted_name(name, kwargs.pop('quote', None))
+ elif "quote" in kwargs:
+ raise exc.ArgumentError("Explicit 'name' is required when "
+ "sending 'quote' argument")
+
+ super(Column, self).__init__(name, type_)
+ self.key = kwargs.pop('key', name)
+ self.primary_key = kwargs.pop('primary_key', False)
+ self.nullable = kwargs.pop('nullable', not self.primary_key)
+ self.default = kwargs.pop('default', None)
+ self.server_default = kwargs.pop('server_default', None)
+ self.server_onupdate = kwargs.pop('server_onupdate', None)
+
+ # these default to None because .index and .unique are *not*
+ # informational flags about Column - there can still be an
+ # Index or UniqueConstraint referring to this Column.
+ self.index = kwargs.pop('index', None)
+ self.unique = kwargs.pop('unique', None)
+
+ self.system = kwargs.pop('system', False)
+ self.doc = kwargs.pop('doc', None)
+ self.onupdate = kwargs.pop('onupdate', None)
+ self.autoincrement = kwargs.pop('autoincrement', True)
+ self.constraints = set()
+ self.foreign_keys = set()
+
+ # check if this Column is proxying another column
+ if '_proxies' in kwargs:
+ self._proxies = kwargs.pop('_proxies')
+ # otherwise, add DDL-related events
+ elif isinstance(self.type, SchemaEventTarget):
+ self.type._set_parent_with_dispatch(self)
+
+ if self.default is not None:
+ if isinstance(self.default, (ColumnDefault, Sequence)):
+ args.append(self.default)
+ else:
+ if getattr(self.type, '_warn_on_bytestring', False):
+ if isinstance(self.default, util.binary_type):
+ util.warn("Unicode column received non-unicode "
+ "default value.")
+ args.append(ColumnDefault(self.default))
+
+ if self.server_default is not None:
+ if isinstance(self.server_default, FetchedValue):
+ args.append(self.server_default._as_for_update(False))
+ else:
+ args.append(DefaultClause(self.server_default))
+
+ if self.onupdate is not None:
+ if isinstance(self.onupdate, (ColumnDefault, Sequence)):
+ args.append(self.onupdate)
+ else:
+ args.append(ColumnDefault(self.onupdate, for_update=True))
+
+ if self.server_onupdate is not None:
+ if isinstance(self.server_onupdate, FetchedValue):
+ args.append(self.server_onupdate._as_for_update(True))
+ else:
+ args.append(DefaultClause(self.server_onupdate,
+ for_update=True))
+ self._init_items(*args)
+
+ util.set_creation_order(self)
+
+ if 'info' in kwargs:
+ self.info = kwargs.pop('info')
+
+ if kwargs:
+ raise exc.ArgumentError(
+ "Unknown arguments passed to Column: " + repr(list(kwargs)))
+
+# @property
+# def quote(self):
+# return getattr(self.name, "quote", None)
+
+ def __str__(self):
+ if self.name is None:
+ return "(no name)"
+ elif self.table is not None:
+ if self.table.named_with_column:
+ return (self.table.description + "." + self.description)
+ else:
+ return self.description
+ else:
+ return self.description
+
+ def references(self, column):
+ """Return True if this Column references the given column via foreign
+ key."""
+
+ for fk in self.foreign_keys:
+ if fk.column.proxy_set.intersection(column.proxy_set):
+ return True
+ else:
+ return False
+
+ def append_foreign_key(self, fk):
+ fk._set_parent_with_dispatch(self)
+
+ def __repr__(self):
+ kwarg = []
+ if self.key != self.name:
+ kwarg.append('key')
+ if self.primary_key:
+ kwarg.append('primary_key')
+ if not self.nullable:
+ kwarg.append('nullable')
+ if self.onupdate:
+ kwarg.append('onupdate')
+ if self.default:
+ kwarg.append('default')
+ if self.server_default:
+ kwarg.append('server_default')
+ return "Column(%s)" % ', '.join(
+ [repr(self.name)] + [repr(self.type)] +
+ [repr(x) for x in self.foreign_keys if x is not None] +
+ [repr(x) for x in self.constraints] +
+ [(self.table is not None and "table=<%s>" %
+ self.table.description or "table=None")] +
+ ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
+
+ def _set_parent(self, table):
+ if not self.name:
+ raise exc.ArgumentError(
+ "Column must be constructed with a non-blank name or "
+ "assign a non-blank .name before adding to a Table.")
+ if self.key is None:
+ self.key = self.name
+
+ existing = getattr(self, 'table', None)
+ if existing is not None and existing is not table:
+ raise exc.ArgumentError(
+ "Column object already assigned to Table '%s'" %
+ existing.description)
+
+ if self.key in table._columns:
+ col = table._columns.get(self.key)
+ if col is not self:
+ for fk in col.foreign_keys:
+ table.foreign_keys.remove(fk)
+ if fk.constraint in table.constraints:
+ # this might have been removed
+ # already, if it's a composite constraint
+ # and more than one col being replaced
+ table.constraints.remove(fk.constraint)
+
+ table._columns.replace(self)
+
+ if self.primary_key:
+ table.primary_key._replace(self)
+ Table._autoincrement_column._reset(table)
+ elif self.key in table.primary_key:
+ raise exc.ArgumentError(
+ "Trying to redefine primary-key column '%s' as a "
+ "non-primary-key column on table '%s'" % (
+ self.key, table.fullname))
+ self.table = table
+
+ if self.index:
+ if isinstance(self.index, util.string_types):
+ raise exc.ArgumentError(
+ "The 'index' keyword argument on Column is boolean only. "
+ "To create indexes with a specific name, create an "
+ "explicit Index object external to the Table.")
+ Index(_truncated_label('ix_%s' % self._label),
+ self, unique=bool(self.unique))
+ elif self.unique:
+ if isinstance(self.unique, util.string_types):
+ raise exc.ArgumentError(
+ "The 'unique' keyword argument on Column is boolean "
+ "only. To create unique constraints or indexes with a "
+ "specific name, append an explicit UniqueConstraint to "
+ "the Table's list of elements, or create an explicit "
+ "Index object external to the Table.")
+ table.append_constraint(UniqueConstraint(self.key))
+
+ fk_key = (table.key, self.key)
+ if fk_key in self.table.metadata._fk_memos:
+ for fk in self.table.metadata._fk_memos[fk_key]:
+ fk._set_remote_table(table)
+
+ def _on_table_attach(self, fn):
+ if self.table is not None:
+ fn(self, self.table)
+ event.listen(self, 'after_parent_attach', fn)
+
+ def copy(self, **kw):
+ """Create a copy of this ``Column``, unitialized.
+
+ This is used in ``Table.tometadata``.
+
+ """
+
+ # Constraint objects plus non-constraint-bound ForeignKey objects
+ args = \
+ [c.copy(**kw) for c in self.constraints] + \
+ [c.copy(**kw) for c in self.foreign_keys if not c.constraint]
+
+ type_ = self.type
+ if isinstance(type_, SchemaEventTarget):
+ type_ = type_.copy(**kw)
+
+ c = self._constructor(
+ name=self.name,
+ type_=type_,
+ key=self.key,
+ primary_key=self.primary_key,
+ nullable=self.nullable,
+ unique=self.unique,
+ system=self.system,
+ #quote=self.quote,
+ index=self.index,
+ autoincrement=self.autoincrement,
+ default=self.default,
+ server_default=self.server_default,
+ onupdate=self.onupdate,
+ server_onupdate=self.server_onupdate,
+ doc=self.doc,
+ *args
+ )
+ return self._schema_item_copy(c)
+
+ def _make_proxy(self, selectable, name=None, key=None,
+ name_is_truncatable=False, **kw):
+ """Create a *proxy* for this column.
+
+ This is a copy of this ``Column`` referenced by a different parent
+ (such as an alias or select statement). The column should
+ be used only in select scenarios, as its full DDL/default
+ information is not transferred.
+
+ """
+ fk = [ForeignKey(f.column, _constraint=f.constraint)
+ for f in self.foreign_keys]
+ if name is None and self.name is None:
+ raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
+ " with this Column object until it's 'name' has "
+ "been assigned.")
+ try:
+ c = self._constructor(
+ _as_truncated(name or self.name) if \
+ name_is_truncatable else (name or self.name),
+ self.type,
+ key=key if key else name if name else self.key,
+ primary_key=self.primary_key,
+ nullable=self.nullable,
+ _proxies=[self], *fk)
+ except TypeError:
+ util.raise_from_cause(
+ TypeError(
+ "Could not create a copy of this %r object. "
+ "Ensure the class includes a _constructor() "
+ "attribute or method which accepts the "
+ "standard Column constructor arguments, or "
+ "references the Column class itself." % self.__class__)
+ )
+
+ c.table = selectable
+ selectable._columns.add(c)
+ if selectable._is_clone_of is not None:
+ c._is_clone_of = selectable._is_clone_of.columns[c.key]
+ if self.primary_key:
+ selectable.primary_key.add(c)
+ c.dispatch.after_parent_attach(c, selectable)
+ return c
+
+ def get_children(self, schema_visitor=False, **kwargs):
+ if schema_visitor:
+ return [x for x in (self.default, self.onupdate)
+ if x is not None] + \
+ list(self.foreign_keys) + list(self.constraints)
+ else:
+ return ColumnClause.get_children(self, **kwargs)
+
+
+class ForeignKey(DialectKWArgs, SchemaItem):
+ """Defines a dependency between two columns.
+
+ ``ForeignKey`` is specified as an argument to a :class:`.Column` object,
+ e.g.::
+
+ t = Table("remote_table", metadata,
+ Column("remote_id", ForeignKey("main_table.id"))
+ )
+
+ Note that ``ForeignKey`` is only a marker object that defines
+ a dependency between two columns. The actual constraint
+ is in all cases represented by the :class:`.ForeignKeyConstraint`
+ object. This object will be generated automatically when
+ a ``ForeignKey`` is associated with a :class:`.Column` which
+ in turn is associated with a :class:`.Table`. Conversely,
+ when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`,
+ ``ForeignKey`` markers are automatically generated to be
+ present on each associated :class:`.Column`, which are also
+ associated with the constraint object.
+
+ Note that you cannot define a "composite" foreign key constraint,
+ that is a constraint between a grouping of multiple parent/child
+ columns, using ``ForeignKey`` objects. To define this grouping,
+ the :class:`.ForeignKeyConstraint` object must be used, and applied
+ to the :class:`.Table`. The associated ``ForeignKey`` objects
+ are created automatically.
+
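+ E.g., an illustrative composite key sketch using
+ :class:`.ForeignKeyConstraint`::
+
+ Table('invoice_item', metadata,
+ Column('invoice_id', Integer),
+ Column('ref_num', Integer),
+ ForeignKeyConstraint(
+ ['invoice_id', 'ref_num'],
+ ['invoice.invoice_id', 'invoice.ref_num'])
+ )
+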
+ The ``ForeignKey`` objects associated with an individual
+ :class:`.Column` object are available in the `foreign_keys` collection
+ of that column.
+
+ Further examples of foreign key configuration are in
+ :ref:`metadata_foreignkeys`.
+
+ """
+
+ __visit_name__ = 'foreign_key'
+
+ def __init__(self, column, _constraint=None, use_alter=False, name=None,
+ onupdate=None, ondelete=None, deferrable=None,
+ initially=None, link_to_name=False, match=None,
+ **dialect_kw):
+ """
+ Construct a column-level FOREIGN KEY.
+
+ The :class:`.ForeignKey` object when constructed generates a
+ :class:`.ForeignKeyConstraint` which is associated with the parent
+ :class:`.Table` object's collection of constraints.
+
+ :param column: A single target column for the key relationship. A
+ :class:`.Column` object or a column name as a string:
+ ``tablename.columnkey`` or ``schema.tablename.columnkey``.
+ ``columnkey`` is the ``key`` which has been assigned to the column
+ (defaults to the column name itself), unless ``link_to_name`` is
+ ``True`` in which case the rendered name of the column is used.
+
+ .. versionadded:: 0.7.4
+ Note that if the schema name is not included, and the
+ underlying :class:`.MetaData` has a "schema", that value will
+ be used.
+
+ :param name: Optional string. An in-database name for the key if
+ `constraint` is not provided.
+
+ :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+ issuing DDL for this constraint. Typical values include CASCADE,
+ SET NULL and RESTRICT.
+
+ :param ondelete: Optional string. If set, emit ON DELETE <value> when
+ issuing DDL for this constraint. Typical values include CASCADE,
+ SET NULL and RESTRICT.
+
+ :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
+ DEFERRABLE when issuing DDL for this constraint.
+
+ :param initially: Optional string. If set, emit INITIALLY <value> when
+ issuing DDL for this constraint.
+
+ :param link_to_name: if True, the string name given in ``column`` is
+ the rendered name of the referenced column, not its locally
+ assigned ``key``.
+
+ :param use_alter: passed to the underlying
+ :class:`.ForeignKeyConstraint` to indicate the constraint should be
+ generated/dropped externally from the CREATE TABLE / DROP TABLE
+ statement. See that class's constructor for details.
+
+ :param match: Optional string. If set, emit MATCH <value> when issuing
+ DDL for this constraint. Typical values include SIMPLE, PARTIAL
+ and FULL.
+
+ :param \**dialect_kw: Additional keyword arguments are dialect specific,
+ and passed in the form ``<dialectname>_<argname>``. The arguments
+ are ultimately handled by a corresponding :class:`.ForeignKeyConstraint`.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ .. versionadded:: 0.9.2
+
+ """
+
+ self._colspec = column
+ if isinstance(self._colspec, util.string_types):
+ self._table_column = None
+ else:
+ if hasattr(self._colspec, '__clause_element__'):
+ self._table_column = self._colspec.__clause_element__()
+ else:
+ self._table_column = self._colspec
+
+ if not isinstance(self._table_column, ColumnClause):
+ raise exc.ArgumentError(
+ "String, Column, or Column-bound argument "
+ "expected, got %r" % self._table_column)
+ elif not isinstance(self._table_column.table, (util.NoneType, TableClause)):
+ raise exc.ArgumentError(
+ "ForeignKey received Column not bound "
+ "to a Table, got: %r" % self._table_column.table
+ )
+
+ # the linked ForeignKeyConstraint.
+ # ForeignKey will create this when parent Column
+ # is attached to a Table, *or* ForeignKeyConstraint
+ # object passes itself in when creating ForeignKey
+ # markers.
+ self.constraint = _constraint
+ self.parent = None
+ self.use_alter = use_alter
+ self.name = name
+ self.onupdate = onupdate
+ self.ondelete = ondelete
+ self.deferrable = deferrable
+ self.initially = initially
+ self.link_to_name = link_to_name
+ self.match = match
+ self._unvalidated_dialect_kw = dialect_kw
+
+ def __repr__(self):
+ return "ForeignKey(%r)" % self._get_colspec()
+
+ def copy(self, schema=None):
+ """Produce a copy of this :class:`.ForeignKey` object.
+
+ The new :class:`.ForeignKey` will not be bound
+ to any :class:`.Column`.
+
+ This method is usually used by the internal
+ copy procedures of :class:`.Column`, :class:`.Table`,
+ and :class:`.MetaData`.
+
+ :param schema: The returned :class:`.ForeignKey` will
+ reference the original table and column name, qualified
+ by the given string schema name.
+
+ """
+
+ fk = ForeignKey(
+ self._get_colspec(schema=schema),
+ use_alter=self.use_alter,
+ name=self.name,
+ onupdate=self.onupdate,
+ ondelete=self.ondelete,
+ deferrable=self.deferrable,
+ initially=self.initially,
+ link_to_name=self.link_to_name,
+ match=self.match,
+ **self._unvalidated_dialect_kw
+ )
+ return self._schema_item_copy(fk)
+
+
+ def _get_colspec(self, schema=None):
+ """Return a string based 'column specification' for this
+ :class:`.ForeignKey`.
+
+ This is usually the equivalent of the string-based "tablename.colname"
+ argument first passed to the object's constructor.
+
+ """
+ if schema:
+ _schema, tname, colname = self._column_tokens
+ return "%s.%s.%s" % (schema, tname, colname)
+ elif self._table_column is not None:
+ return "%s.%s" % (
+ self._table_column.table.fullname, self._table_column.key)
+ else:
+ return self._colspec
+
+
+ def _table_key(self):
+ if self._table_column is not None:
+ if self._table_column.table is None:
+ return None
+ else:
+ return self._table_column.table.key
+ else:
+ schema, tname, colname = self._column_tokens
+ return _get_table_key(tname, schema)
+
+
+
+ target_fullname = property(_get_colspec)
+
+ def references(self, table):
+ """Return True if the given :class:`.Table` is referenced by this
+ :class:`.ForeignKey`."""
+
+ return table.corresponding_column(self.column) is not None
+
+ def get_referent(self, table):
+ """Return the :class:`.Column` in the given :class:`.Table`
+ referenced by this :class:`.ForeignKey`.
+
+ Returns None if this :class:`.ForeignKey` does not reference the given
+ :class:`.Table`.
+
+ """
+
+ return table.corresponding_column(self.column)
+
+ @util.memoized_property
+ def _column_tokens(self):
+ """parse a string-based _colspec into its component parts."""
+
+ m = self._colspec.split('.')
+ if m is None:
+ raise exc.ArgumentError(
+ "Invalid foreign key column specification: %s" %
+ self._colspec)
+ if (len(m) == 1):
+ tname = m.pop()
+ colname = None
+ else:
+ colname = m.pop()
+ tname = m.pop()
+
+ # A FK between column 'bar' and table 'foo' can be
+ # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
+ # 'otherdb.dbo.foo.bar'. Once we have the column name and
+ # the table name, treat everything else as the schema
+ # name. Some databases (e.g. Sybase) support
+ # inter-database foreign keys. See ticket #1341 and,
+ # indirectly related, ticket #594. This assumes that '.'
+ # will never appear *within* any component of the FK.
+
+ if (len(m) > 0):
+ schema = '.'.join(m)
+ else:
+ schema = None
+ return schema, tname, colname
+
+ def _resolve_col_tokens(self):
+ if self.parent is None:
+ raise exc.InvalidRequestError(
+ "this ForeignKey object does not yet have a "
+ "parent Column associated with it.")
+
+ elif self.parent.table is None:
+ raise exc.InvalidRequestError(
+ "this ForeignKey's parent column is not yet associated "
+ "with a Table.")
+
+ parenttable = self.parent.table
+
+ # assertion, can be commented out.
+ # basically Column._make_proxy() sends the actual
+ # target Column to the ForeignKey object, so the
+ # string resolution here is never called.
+ for c in self.parent.base_columns:
+ if isinstance(c, Column):
+ assert c.table is parenttable
+ break
+ else:
+ assert False
+ ######################
+
+ schema, tname, colname = self._column_tokens
+
+ if schema is None and parenttable.metadata.schema is not None:
+ schema = parenttable.metadata.schema
+
+ tablekey = _get_table_key(tname, schema)
+ return parenttable, tablekey, colname
+
+
+ def _link_to_col_by_colstring(self, parenttable, table, colname):
+ if not hasattr(self.constraint, '_referred_table'):
+ self.constraint._referred_table = table
+ else:
+ assert self.constraint._referred_table is table
+
+ _column = None
+ if colname is None:
+ # colname is None in the case that ForeignKey argument
+ # was specified as table name only, in which case we
+ # match the column name to the same column on the
+ # parent.
+ key = self.parent
+ _column = table.c.get(self.parent.key, None)
+ elif self.link_to_name:
+ key = colname
+            for c in table.c:
+                if c.name == colname:
+                    _column = c
+                    break
+ else:
+ key = colname
+ _column = table.c.get(colname, None)
+
+ if _column is None:
+ raise exc.NoReferencedColumnError(
+ "Could not initialize target column for ForeignKey '%s' on table '%s': "
+ "table '%s' has no column named '%s'" % (
+ self._colspec, parenttable.name, table.name, key),
+ table.name, key)
+
+ self._set_target_column(_column)
+
+ def _set_target_column(self, column):
+ # propagate TypeEngine to parent if it didn't have one
+ if self.parent.type._isnull:
+ self.parent.type = column.type
+
+ # super-edgy case, if other FKs point to our column,
+ # they'd get the type propagated out also.
+ if isinstance(self.parent.table, Table):
+ fk_key = (self.parent.table.key, self.parent.key)
+ if fk_key in self.parent.table.metadata._fk_memos:
+ for fk in self.parent.table.metadata._fk_memos[fk_key]:
+ if fk.parent.type._isnull:
+ fk.parent.type = column.type
+
+ self.column = column
+
+ @util.memoized_property
+ def column(self):
+ """Return the target :class:`.Column` referenced by this
+ :class:`.ForeignKey`.
+
+ If no target column has been established, an exception
+ is raised.
+
+ .. versionchanged:: 0.9.0
+            Foreign key target column resolution now occurs as soon as the
+            ForeignKey object and the remote Column to which it refers
+            are both associated with the same MetaData object.
+
+ """
+
+ if isinstance(self._colspec, util.string_types):
+
+ parenttable, tablekey, colname = self._resolve_col_tokens()
+
+ if tablekey not in parenttable.metadata:
+ raise exc.NoReferencedTableError(
+ "Foreign key associated with column '%s' could not find "
+ "table '%s' with which to generate a "
+ "foreign key to target column '%s'" %
+ (self.parent, tablekey, colname),
+ tablekey)
+ elif parenttable.key not in parenttable.metadata:
+ raise exc.InvalidRequestError(
+ "Table %s is no longer associated with its "
+ "parent MetaData" % parenttable)
+ else:
+ raise exc.NoReferencedColumnError(
+ "Could not initialize target column for "
+ "ForeignKey '%s' on table '%s': "
+ "table '%s' has no column named '%s'" % (
+ self._colspec, parenttable.name, tablekey, colname),
+ tablekey, colname)
+ elif hasattr(self._colspec, '__clause_element__'):
+ _column = self._colspec.__clause_element__()
+ return _column
+ else:
+ _column = self._colspec
+ return _column
+
+ def _set_parent(self, column):
+ if self.parent is not None and self.parent is not column:
+ raise exc.InvalidRequestError(
+            "This ForeignKey already has a parent!")
+ self.parent = column
+ self.parent.foreign_keys.add(self)
+ self.parent._on_table_attach(self._set_table)
+
+ def _set_remote_table(self, table):
+ parenttable, tablekey, colname = self._resolve_col_tokens()
+ self._link_to_col_by_colstring(parenttable, table, colname)
+ self.constraint._validate_dest_table(table)
+
+ def _remove_from_metadata(self, metadata):
+ parenttable, table_key, colname = self._resolve_col_tokens()
+ fk_key = (table_key, colname)
+
+ if self in metadata._fk_memos[fk_key]:
+ # TODO: no test coverage for self not in memos
+ metadata._fk_memos[fk_key].remove(self)
+
+ def _set_table(self, column, table):
+ # standalone ForeignKey - create ForeignKeyConstraint
+ # on the hosting Table when attached to the Table.
+ if self.constraint is None and isinstance(table, Table):
+ self.constraint = ForeignKeyConstraint(
+ [], [], use_alter=self.use_alter, name=self.name,
+ onupdate=self.onupdate, ondelete=self.ondelete,
+ deferrable=self.deferrable, initially=self.initially,
+ match=self.match,
+ **self._unvalidated_dialect_kw
+ )
+ self.constraint._elements[self.parent] = self
+ self.constraint._set_parent_with_dispatch(table)
+ table.foreign_keys.add(self)
+
+ # set up remote ".column" attribute, or a note to pick it
+ # up when the other Table/Column shows up
+ if isinstance(self._colspec, util.string_types):
+ parenttable, table_key, colname = self._resolve_col_tokens()
+ fk_key = (table_key, colname)
+ if table_key in parenttable.metadata.tables:
+ table = parenttable.metadata.tables[table_key]
+ try:
+ self._link_to_col_by_colstring(parenttable, table, colname)
+ except exc.NoReferencedColumnError:
+ # this is OK, we'll try later
+ pass
+ parenttable.metadata._fk_memos[fk_key].append(self)
+ elif hasattr(self._colspec, '__clause_element__'):
+ _column = self._colspec.__clause_element__()
+ self._set_target_column(_column)
+ else:
+ _column = self._colspec
+ self._set_target_column(_column)
+
+
+class _NotAColumnExpr(object):
+ def _not_a_column_expr(self):
+ raise exc.InvalidRequestError(
+ "This %s cannot be used directly "
+ "as a column expression." % self.__class__.__name__)
+
+ __clause_element__ = self_group = lambda self: self._not_a_column_expr()
+ _from_objects = property(lambda self: self._not_a_column_expr())
+
+
+class DefaultGenerator(_NotAColumnExpr, SchemaItem):
+ """Base class for column *default* values."""
+
+ __visit_name__ = 'default_generator'
+
+ is_sequence = False
+ is_server_default = False
+ column = None
+
+ def __init__(self, for_update=False):
+ self.for_update = for_update
+
+ def _set_parent(self, column):
+ self.column = column
+ if self.for_update:
+ self.column.onupdate = self
+ else:
+ self.column.default = self
+
+ def execute(self, bind=None, **kwargs):
+ if bind is None:
+ bind = _bind_or_error(self)
+ return bind._execute_default(self, **kwargs)
+
+ @property
+ def bind(self):
+ """Return the connectable associated with this default."""
+ if getattr(self, 'column', None) is not None:
+ return self.column.table.bind
+ else:
+ return None
+
+
+class ColumnDefault(DefaultGenerator):
+ """A plain default value on a column.
+
+ This could correspond to a constant, a callable function,
+ or a SQL clause.
+
+ :class:`.ColumnDefault` is generated automatically
+ whenever the ``default``, ``onupdate`` arguments of
+ :class:`.Column` are used. A :class:`.ColumnDefault`
+ can be passed positionally as well.
+
+ For example, the following::
+
+ Column('foo', Integer, default=50)
+
+ Is equivalent to::
+
+ Column('foo', Integer, ColumnDefault(50))
+
+
+ """
+
+ def __init__(self, arg, **kwargs):
+        """Construct a new :class:`.ColumnDefault`.
+
+ :param arg: argument representing the default value.
+ May be one of the following:
+
+ * a plain non-callable Python value, such as a
+ string, integer, boolean, or other simple type.
+ The default value will be used as is each time.
+          * a SQL expression, that is, one which derives from
+ :class:`.ColumnElement`. The SQL expression will
+            be rendered into the INSERT or UPDATE statement, or, in the
+            case of a primary key column when RETURNING is not used, may
+            be pre-executed via a SELECT before the INSERT.
+ * A Python callable. The function will be invoked for each
+ new row subject to an INSERT or UPDATE.
+ The callable must accept exactly
+ zero or one positional arguments. The one-argument form
+ will receive an instance of the :class:`.ExecutionContext`,
+ which provides contextual information as to the current
+ :class:`.Connection` in use as well as the current
+ statement and parameters.
+
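+          For example, a sketch of the one-argument form, which receives
+          the :class:`.ExecutionContext`; here ``counter`` is a
+          hypothetical column assumed to be present in the statement's
+          parameters::
+
+              def incremented(context):
+                  # current_parameters is the dictionary of parameters
+                  # for the statement being executed
+                  return context.current_parameters['counter'] + 12
+
+              Column('counter_plus_twelve', Integer,
+                     default=incremented, onupdate=incremented)
+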
+ """
+ super(ColumnDefault, self).__init__(**kwargs)
+ if isinstance(arg, FetchedValue):
+ raise exc.ArgumentError(
+ "ColumnDefault may not be a server-side default type.")
+ if util.callable(arg):
+ arg = self._maybe_wrap_callable(arg)
+ self.arg = arg
+
+ @util.memoized_property
+ def is_callable(self):
+ return util.callable(self.arg)
+
+ @util.memoized_property
+ def is_clause_element(self):
+ return isinstance(self.arg, ClauseElement)
+
+ @util.memoized_property
+ def is_scalar(self):
+ return not self.is_callable and \
+ not self.is_clause_element and \
+ not self.is_sequence
+
+ def _maybe_wrap_callable(self, fn):
+ """Wrap callables that don't accept a context.
+
+ The alternative here is to require that
+        a simple callable passed to "default" would need
+        to be of the form "default=lambda ctx: datetime.now()".
+ That is the more "correct" way to go, but the case
+ of using a zero-arg callable for "default" is so
+ much more prominent than the context-specific one
+ I'm having trouble justifying putting that inconvenience
+ on everyone.
+
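+        E.g., under this policy both of the following sketches are
+        accepted, assuming ``datetime`` is imported::
+
+            Column('created', DateTime, default=datetime.datetime.utcnow)
+            Column('created', DateTime,
+                   default=lambda ctx: datetime.datetime.utcnow())
+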
+ """
+ if inspect.isfunction(fn) or inspect.ismethod(fn):
+ inspectable = fn
+ elif inspect.isclass(fn):
+ inspectable = fn.__init__
+ elif hasattr(fn, '__call__'):
+ inspectable = fn.__call__
+ else:
+ # probably not inspectable, try anyways.
+ inspectable = fn
+ try:
+ argspec = inspect.getargspec(inspectable)
+ except TypeError:
+ return lambda ctx: fn()
+
+        defaulted = len(argspec[3]) if argspec[3] is not None else 0
+        positionals = len(argspec[0]) - defaulted
+
+ # Py3K compat - no unbound methods
+ if inspect.ismethod(inspectable) or inspect.isclass(fn):
+ positionals -= 1
+
+ if positionals == 0:
+ return lambda ctx: fn()
+ elif positionals == 1:
+ return fn
+ else:
+ raise exc.ArgumentError(
+ "ColumnDefault Python function takes zero or one "
+ "positional arguments")
+
+ def _visit_name(self):
+ if self.for_update:
+ return "column_onupdate"
+ else:
+ return "column_default"
+ __visit_name__ = property(_visit_name)
+
+ def __repr__(self):
+ return "ColumnDefault(%r)" % self.arg
+
+
+class Sequence(DefaultGenerator):
+ """Represents a named database sequence.
+
+ The :class:`.Sequence` object represents the name and configurational
+ parameters of a database sequence. It also represents
+ a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
+ or :class:`.Connection`, rendering the appropriate "next value" function
+ for the target database and returning a result.
+
+ The :class:`.Sequence` is typically associated with a primary key column::
+
+ some_table = Table('some_table', metadata,
+ Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
+ )
+
+ When CREATE TABLE is emitted for the above :class:`.Table`, if the
+ target platform supports sequences, a CREATE SEQUENCE statement will
+ be emitted as well. For platforms that don't support sequences,
+ the :class:`.Sequence` construct is ignored.
+
+ .. seealso::
+
+ :class:`.CreateSequence`
+
+ :class:`.DropSequence`
+
+ """
+
+ __visit_name__ = 'sequence'
+
+ is_sequence = True
+
+ def __init__(self, name, start=None, increment=None, schema=None,
+ optional=False, quote=None, metadata=None,
+ quote_schema=None,
+ for_update=False):
+ """Construct a :class:`.Sequence` object.
+
+ :param name: The name of the sequence.
+        :param start: the starting value of the sequence. This value is
+ used when the CREATE SEQUENCE command is emitted to the database
+ as the value of the "START WITH" clause. If ``None``, the
+ clause is omitted, which on most platforms indicates a starting
+ value of 1.
+ :param increment: the increment value of the sequence. This
+ value is used when the CREATE SEQUENCE command is emitted to
+ the database as the value of the "INCREMENT BY" clause. If ``None``,
+ the clause is omitted, which on most platforms indicates an
+ increment of 1.
+ :param schema: Optional schema name for the sequence, if located
+ in a schema other than the default.
+ :param optional: boolean value, when ``True``, indicates that this
+ :class:`.Sequence` object only needs to be explicitly generated
+ on backends that don't provide another way to generate primary
+ key identifiers. Currently, it essentially means, "don't create
+ this sequence on the Postgresql backend, where the SERIAL keyword
+ creates a sequence for us automatically".
+ :param quote: boolean value, when ``True`` or ``False``, explicitly
+         forces quoting of the sequence name on or off. When left at its
+ default of ``None``, normal quoting rules based on casing and reserved
+ words take place.
+ :param quote_schema: set the quoting preferences for the ``schema``
+ name.
+ :param metadata: optional :class:`.MetaData` object which will be
+ associated with this :class:`.Sequence`. A :class:`.Sequence`
+ that is associated with a :class:`.MetaData` gains access to the
+ ``bind`` of that :class:`.MetaData`, meaning the
+ :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will
+ make usage of that engine automatically.
+
+ .. versionchanged:: 0.7
+ Additionally, the appropriate CREATE SEQUENCE/
+ DROP SEQUENCE DDL commands will be emitted corresponding to this
+ :class:`.Sequence` when :meth:`.MetaData.create_all` and
+ :meth:`.MetaData.drop_all` are invoked.
+
+ Note that when a :class:`.Sequence` is applied to a :class:`.Column`,
+ the :class:`.Sequence` is automatically associated with the
+ :class:`.MetaData` object of that column's parent :class:`.Table`,
+ when that association is made. The :class:`.Sequence` will then
+ be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding
+ to when the :class:`.Table` object itself is created or dropped,
+ rather than that of the :class:`.MetaData` object overall.
+ :param for_update: Indicates this :class:`.Sequence`, when associated
+ with a :class:`.Column`, should be invoked for UPDATE statements
+ on that column's table, rather than for INSERT statements, when
+ no value is otherwise present for that column in the statement.
+
+ """
+ super(Sequence, self).__init__(for_update=for_update)
+ self.name = quoted_name(name, quote)
+ self.start = start
+ self.increment = increment
+ self.optional = optional
+ if metadata is not None and schema is None and metadata.schema:
+ self.schema = schema = metadata.schema
+ else:
+ self.schema = quoted_name(schema, quote_schema)
+ self.metadata = metadata
+ self._key = _get_table_key(name, schema)
+ if metadata:
+ self._set_metadata(metadata)
+
+ @util.memoized_property
+ def is_callable(self):
+ return False
+
+ @util.memoized_property
+ def is_clause_element(self):
+ return False
+
+ @util.dependencies("sqlalchemy.sql.functions.func")
+ def next_value(self, func):
+ """Return a :class:`.next_value` function element
+ which will render the appropriate increment function
+ for this :class:`.Sequence` within any SQL expression.
+
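+        E.g., a minimal sketch embedding the next value in a SELECT::
+
+            seq = Sequence('some_seq')
+            stmt = select([seq.next_value()])
+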
+ """
+ return func.next_value(self, bind=self.bind)
+
+ def _set_parent(self, column):
+ super(Sequence, self)._set_parent(column)
+ column._on_table_attach(self._set_table)
+
+ def _set_table(self, column, table):
+ self._set_metadata(table.metadata)
+
+ def _set_metadata(self, metadata):
+ self.metadata = metadata
+ self.metadata._sequences[self._key] = self
+
+ @property
+ def bind(self):
+ if self.metadata:
+ return self.metadata.bind
+ else:
+ return None
+
+ def create(self, bind=None, checkfirst=True):
+ """Creates this sequence in the database."""
+
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaGenerator,
+ self,
+ checkfirst=checkfirst)
+
+ def drop(self, bind=None, checkfirst=True):
+ """Drops this sequence from the database."""
+
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaDropper,
+ self,
+ checkfirst=checkfirst)
+
+ def _not_a_column_expr(self):
+ raise exc.InvalidRequestError(
+ "This %s cannot be used directly "
+ "as a column expression. Use func.next_value(sequence) "
+ "to produce a 'next value' function that's usable "
+ "as a column element."
+ % self.__class__.__name__)
+
+
+@inspection._self_inspects
+class FetchedValue(_NotAColumnExpr, SchemaEventTarget):
+ """A marker for a transparent database-side default.
+
+ Use :class:`.FetchedValue` when the database is configured
+ to provide some automatic default for a column.
+
+ E.g.::
+
+ Column('foo', Integer, FetchedValue())
+
+ Would indicate that some trigger or default generator
+ will create a new value for the ``foo`` column during an
+ INSERT.
+
+ .. seealso::
+
+ :ref:`triggered_columns`
+
+ """
+ is_server_default = True
+ reflected = False
+ has_argument = False
+
+ def __init__(self, for_update=False):
+ self.for_update = for_update
+
+ def _as_for_update(self, for_update):
+ if for_update == self.for_update:
+ return self
+ else:
+ return self._clone(for_update)
+
+ def _clone(self, for_update):
+ n = self.__class__.__new__(self.__class__)
+ n.__dict__.update(self.__dict__)
+ n.__dict__.pop('column', None)
+ n.for_update = for_update
+ return n
+
+ def _set_parent(self, column):
+ self.column = column
+ if self.for_update:
+ self.column.server_onupdate = self
+ else:
+ self.column.server_default = self
+
+ def __repr__(self):
+ return util.generic_repr(self)
+
+
+class DefaultClause(FetchedValue):
+ """A DDL-specified DEFAULT column value.
+
+ :class:`.DefaultClause` is a :class:`.FetchedValue`
+ that also generates a "DEFAULT" clause when
+ "CREATE TABLE" is emitted.
+
+ :class:`.DefaultClause` is generated automatically
+ whenever the ``server_default``, ``server_onupdate`` arguments of
+ :class:`.Column` are used. A :class:`.DefaultClause`
+ can be passed positionally as well.
+
+ For example, the following::
+
+ Column('foo', Integer, server_default="50")
+
+ Is equivalent to::
+
+ Column('foo', Integer, DefaultClause("50"))
+
+ """
+
+ has_argument = True
+
+ def __init__(self, arg, for_update=False, _reflected=False):
+ util.assert_arg_type(arg, (util.string_types[0],
+ ClauseElement,
+ TextClause), 'arg')
+ super(DefaultClause, self).__init__(for_update)
+ self.arg = arg
+ self.reflected = _reflected
+
+ def __repr__(self):
+ return "DefaultClause(%r, for_update=%r)" % \
+ (self.arg, self.for_update)
+
+
+class PassiveDefault(DefaultClause):
+ """A DDL-specified DEFAULT column value.
+
+ .. deprecated:: 0.6
+ :class:`.PassiveDefault` is deprecated.
+ Use :class:`.DefaultClause`.
+ """
+ @util.deprecated("0.6",
+ ":class:`.PassiveDefault` is deprecated. "
+ "Use :class:`.DefaultClause`.",
+ False)
+ def __init__(self, *arg, **kw):
+ DefaultClause.__init__(self, *arg, **kw)
+
+
+class Constraint(DialectKWArgs, SchemaItem):
+ """A table-level SQL constraint."""
+
+ __visit_name__ = 'constraint'
+
+ def __init__(self, name=None, deferrable=None, initially=None,
+ _create_rule=None,
+ **dialect_kw):
+ """Create a SQL constraint.
+
+ :param name:
+ Optional, the in-database name of this ``Constraint``.
+
+ :param deferrable:
+ Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
+ issuing DDL for this constraint.
+
+ :param initially:
+ Optional string. If set, emit INITIALLY <value> when issuing DDL
+ for this constraint.
+
+ :param _create_rule:
+ a callable which is passed the DDLCompiler object during
+ compilation. Returns True or False to signal inline generation of
+ this Constraint.
+
+ The AddConstraint and DropConstraint DDL constructs provide
+ DDLElement's more comprehensive "conditional DDL" approach that is
+ passed a database connection when DDL is being issued. _create_rule
+ is instead called during any CREATE TABLE compilation, where there
+ may not be any transaction/connection in progress. However, it
+ allows conditional compilation of the constraint even for backends
+ which do not support addition of constraints through ALTER TABLE,
+ which currently includes SQLite.
+
+ _create_rule is used by some types to create constraints.
+ Currently, its call signature is subject to change at any time.
+
+ :param \**dialect_kw: Additional keyword arguments are dialect specific,
+ and passed in the form ``<dialectname>_<argname>``. See the
+ documentation regarding an individual dialect at :ref:`dialect_toplevel`
+ for detail on documented arguments.
+
+ """
+
+ self.name = name
+ self.deferrable = deferrable
+ self.initially = initially
+ self._create_rule = _create_rule
+ util.set_creation_order(self)
+ self._validate_dialect_kwargs(dialect_kw)
+
+ @property
+ def table(self):
+ try:
+ if isinstance(self.parent, Table):
+ return self.parent
+ except AttributeError:
+ pass
+ raise exc.InvalidRequestError(
+ "This constraint is not bound to a table. Did you "
+ "mean to call table.append_constraint(constraint) ?")
+
+ def _set_parent(self, parent):
+ self.parent = parent
+ parent.constraints.add(self)
+
+ def copy(self, **kw):
+ raise NotImplementedError()
+
+
+def _to_schema_column(element):
+ if hasattr(element, '__clause_element__'):
+ element = element.__clause_element__()
+ if not isinstance(element, Column):
+ raise exc.ArgumentError("schema.Column object expected")
+ return element
+
+
+def _to_schema_column_or_string(element):
+ if hasattr(element, '__clause_element__'):
+ element = element.__clause_element__()
+ if not isinstance(element, util.string_types + (ColumnElement, )):
+ msg = "Element %r is not a string name or column element"
+ raise exc.ArgumentError(msg % element)
+ return element
+
+
+class ColumnCollectionMixin(object):
+ def __init__(self, *columns):
+ self.columns = ColumnCollection()
+ self._pending_colargs = [_to_schema_column_or_string(c)
+ for c in columns]
+ if self._pending_colargs and \
+ isinstance(self._pending_colargs[0], Column) and \
+ isinstance(self._pending_colargs[0].table, Table):
+ self._set_parent_with_dispatch(self._pending_colargs[0].table)
+
+ def _set_parent(self, table):
+ for col in self._pending_colargs:
+ if isinstance(col, util.string_types):
+ col = table.c[col]
+ self.columns.add(col)
+
+
+class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
+ """A constraint that proxies a ColumnCollection."""
+
+ def __init__(self, *columns, **kw):
+ """
+ :param \*columns:
+ A sequence of column names or Column objects.
+
+ :param name:
+ Optional, the in-database name of this constraint.
+
+ :param deferrable:
+ Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
+ issuing DDL for this constraint.
+
+ :param initially:
+ Optional string. If set, emit INITIALLY <value> when issuing DDL
+ for this constraint.
+
+ :param \**kw: other keyword arguments including dialect-specific
+ arguments are propagated to the :class:`.Constraint` superclass.
+
+ """
+ ColumnCollectionMixin.__init__(self, *columns)
+ Constraint.__init__(self, **kw)
+
+ def _set_parent(self, table):
+ ColumnCollectionMixin._set_parent(self, table)
+ Constraint._set_parent(self, table)
+
+ def __contains__(self, x):
+ return x in self.columns
+
+ def copy(self, **kw):
+ c = self.__class__(name=self.name, deferrable=self.deferrable,
+ initially=self.initially, *self.columns.keys())
+ return self._schema_item_copy(c)
+
+ def contains_column(self, col):
+ return self.columns.contains_column(col)
+
+ def __iter__(self):
+ # inlining of
+ # return iter(self.columns)
+ # ColumnCollection->OrderedProperties->OrderedDict
+ ordered_dict = self.columns._data
+ return (ordered_dict[key] for key in ordered_dict._list)
+
+ def __len__(self):
+ return len(self.columns._data)
+
+
+class CheckConstraint(Constraint):
+ """A table- or column-level CHECK constraint.
+
+ Can be included in the definition of a Table or Column.
+ """
+
+ def __init__(self, sqltext, name=None, deferrable=None,
+ initially=None, table=None, _create_rule=None,
+ _autoattach=True):
+ """Construct a CHECK constraint.
+
+ :param sqltext:
+ A string containing the constraint definition, which will be used
+ verbatim, or a SQL expression construct. If given as a string,
+          the object is converted to a :func:`.text` construct. If the
+          textual string includes a colon character, escape this using a
+          backslash::
+
+            CheckConstraint(r"foo ~ E'a(?\:b|c)d'")
+
+ :param name:
+ Optional, the in-database name of the constraint.
+
+ :param deferrable:
+ Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
+ issuing DDL for this constraint.
+
+ :param initially:
+ Optional string. If set, emit INITIALLY <value> when issuing DDL
+ for this constraint.
+
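+        E.g., a table-level CHECK constraint sketch, where ``col2`` and
+        ``col3`` are hypothetical columns of the parent table::
+
+            CheckConstraint('col2 > col3 + 5', name='check1')
+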
+ """
+
+ super(CheckConstraint, self).\
+ __init__(name, deferrable, initially, _create_rule)
+ self.sqltext = _literal_as_text(sqltext)
+ if table is not None:
+ self._set_parent_with_dispatch(table)
+ elif _autoattach:
+ cols = _find_columns(self.sqltext)
+ tables = set([c.table for c in cols
+ if isinstance(c.table, Table)])
+ if len(tables) == 1:
+ self._set_parent_with_dispatch(
+ tables.pop())
+
+ def __visit_name__(self):
+ if isinstance(self.parent, Table):
+ return "check_constraint"
+ else:
+ return "column_check_constraint"
+ __visit_name__ = property(__visit_name__)
+
+ def copy(self, target_table=None, **kw):
+ if target_table is not None:
+ def replace(col):
+ if self.table.c.contains_column(col):
+ return target_table.c[col.key]
+ else:
+ return None
+ sqltext = visitors.replacement_traverse(self.sqltext, {}, replace)
+ else:
+ sqltext = self.sqltext
+ c = CheckConstraint(sqltext,
+ name=self.name,
+ initially=self.initially,
+ deferrable=self.deferrable,
+ _create_rule=self._create_rule,
+ table=target_table,
+ _autoattach=False)
+ return self._schema_item_copy(c)
+
+
+class ForeignKeyConstraint(Constraint):
+ """A table-level FOREIGN KEY constraint.
+
+ Defines a single column or composite FOREIGN KEY ... REFERENCES
+ constraint. For a no-frills, single column foreign key, adding a
+ :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand
+ equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`.
+
+ Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.
+
+ """
+ __visit_name__ = 'foreign_key_constraint'
+
+ def __init__(self, columns, refcolumns, name=None, onupdate=None,
+ ondelete=None, deferrable=None, initially=None, use_alter=False,
+ link_to_name=False, match=None, table=None, **dialect_kw):
+ """Construct a composite-capable FOREIGN KEY.
+
+ :param columns: A sequence of local column names. The named columns
+ must be defined and present in the parent Table. The names should
+ match the ``key`` given to each column (defaults to the name) unless
+ ``link_to_name`` is True.
+
+ :param refcolumns: A sequence of foreign column names or Column
+ objects. The columns must all be located within the same Table.
+
+ :param name: Optional, the in-database name of the key.
+
+ :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+ issuing DDL for this constraint. Typical values include CASCADE,
+          SET NULL and RESTRICT.
+
+ :param ondelete: Optional string. If set, emit ON DELETE <value> when
+ issuing DDL for this constraint. Typical values include CASCADE,
+          SET NULL and RESTRICT.
+
+ :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
+ DEFERRABLE when issuing DDL for this constraint.
+
+ :param initially: Optional string. If set, emit INITIALLY <value> when
+ issuing DDL for this constraint.
+
+ :param link_to_name: if True, the string name given in ``column`` is
+ the rendered name of the referenced column, not its locally assigned
+ ``key``.
+
+ :param use_alter: If True, do not emit the DDL for this constraint as
+ part of the CREATE TABLE definition. Instead, generate it via an
+ ALTER TABLE statement issued after the full collection of tables
+ have been created, and drop it via an ALTER TABLE statement before
+ the full collection of tables are dropped. This is shorthand for the
+ usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied
+ as "after-create" and "before-drop" events on the MetaData object.
+ This is normally used to generate/drop constraints on objects that
+ are mutually dependent on each other.
+
+ :param match: Optional string. If set, emit MATCH <value> when issuing
+ DDL for this constraint. Typical values include SIMPLE, PARTIAL
+ and FULL.
+
+ :param \**dialect_kw: Additional keyword arguments are dialect specific,
+ and passed in the form ``<dialectname>_<argname>``. See the
+ documentation regarding an individual dialect at :ref:`dialect_toplevel`
+ for detail on documented arguments.
+
+ .. versionadded:: 0.9.2
+
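+        E.g., a composite foreign key sketch, assuming hypothetical
+        ``invoice`` and ``invoice_item`` tables::
+
+            ForeignKeyConstraint(
+                ['invoice_id', 'ref_num'],
+                ['invoice.invoice_id', 'invoice.ref_num'])
+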
+ """
+ super(ForeignKeyConstraint, self).\
+ __init__(name, deferrable, initially, **dialect_kw)
+
+ self.onupdate = onupdate
+ self.ondelete = ondelete
+ self.link_to_name = link_to_name
+ if self.name is None and use_alter:
+ raise exc.ArgumentError("Alterable Constraint requires a name")
+ self.use_alter = use_alter
+ self.match = match
+
+ self._elements = util.OrderedDict()
+
+ # standalone ForeignKeyConstraint - create
+ # associated ForeignKey objects which will be applied to hosted
+ # Column objects (in col.foreign_keys), either now or when attached
+ # to the Table for string-specified names
+ for col, refcol in zip(columns, refcolumns):
+ self._elements[col] = ForeignKey(
+ refcol,
+ _constraint=self,
+ name=self.name,
+ onupdate=self.onupdate,
+ ondelete=self.ondelete,
+ use_alter=self.use_alter,
+ link_to_name=self.link_to_name,
+ match=self.match,
+ deferrable=self.deferrable,
+ initially=self.initially,
+ **self.dialect_kwargs
+ )
+
+ if table is not None:
+ self._set_parent_with_dispatch(table)
+ elif columns and \
+ isinstance(columns[0], Column) and \
+ columns[0].table is not None:
+ self._set_parent_with_dispatch(columns[0].table)
+
+ def _validate_dest_table(self, table):
+ table_keys = set([elem._table_key() for elem in self._elements.values()])
+ if None not in table_keys and len(table_keys) > 1:
+ elem0, elem1 = sorted(table_keys)[0:2]
+ raise exc.ArgumentError(
+ 'ForeignKeyConstraint on %s(%s) refers to '
+ 'multiple remote tables: %s and %s' % (
+ table.fullname,
+ self._col_description,
+ elem0,
+ elem1
+ ))
+
+ @property
+ def _col_description(self):
+        # _elements may be keyed by Column objects as well as
+        # string names; coerce to strings for the description
+        return ", ".join(str(col) for col in self._elements)
+
+ @property
+ def columns(self):
+ return list(self._elements)
+
+ @property
+ def elements(self):
+ return list(self._elements.values())
+
+ def _set_parent(self, table):
+ super(ForeignKeyConstraint, self)._set_parent(table)
+
+ self._validate_dest_table(table)
+
+ for col, fk in self._elements.items():
+ # string-specified column names now get
+ # resolved to Column objects
+ if isinstance(col, util.string_types):
+ try:
+ col = table.c[col]
+ except KeyError:
+ raise exc.ArgumentError(
+ "Can't create ForeignKeyConstraint "
+ "on table '%s': no column "
+ "named '%s' is present." % (table.description, col))
+
+ if not hasattr(fk, 'parent') or \
+ fk.parent is not col:
+ fk._set_parent_with_dispatch(col)
+
+ if self.use_alter:
+ def supports_alter(ddl, event, schema_item, bind, **kw):
+ return table in set(kw['tables']) and \
+ bind.dialect.supports_alter
+
+ event.listen(table.metadata, "after_create",
+ ddl.AddConstraint(self, on=supports_alter))
+ event.listen(table.metadata, "before_drop",
+ ddl.DropConstraint(self, on=supports_alter))
+
+ def copy(self, schema=None, **kw):
+ fkc = ForeignKeyConstraint(
+ [x.parent.key for x in self._elements.values()],
+ [x._get_colspec(schema=schema) for x in self._elements.values()],
+ name=self.name,
+ onupdate=self.onupdate,
+ ondelete=self.ondelete,
+ use_alter=self.use_alter,
+ deferrable=self.deferrable,
+ initially=self.initially,
+ link_to_name=self.link_to_name,
+ match=self.match
+ )
+ for self_fk, other_fk in zip(
+ self._elements.values(),
+ fkc._elements.values()):
+ self_fk._schema_item_copy(other_fk)
+ return self._schema_item_copy(fkc)
+
+
+class PrimaryKeyConstraint(ColumnCollectionConstraint):
+ """A table-level PRIMARY KEY constraint.
+
+ The :class:`.PrimaryKeyConstraint` object is present automatically
+ on any :class:`.Table` object; it is assigned a set of
+ :class:`.Column` objects corresponding to those marked with
+ the :paramref:`.Column.primary_key` flag::
+
+ >>> my_table = Table('mytable', metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('version_id', Integer, primary_key=True),
+ ... Column('data', String(50))
+ ... )
+ >>> my_table.primary_key
+ PrimaryKeyConstraint(
+ Column('id', Integer(), table=<mytable>, primary_key=True, nullable=False),
+ Column('version_id', Integer(), table=<mytable>, primary_key=True, nullable=False)
+ )
+
+ The primary key of a :class:`.Table` can also be specified by using
+ a :class:`.PrimaryKeyConstraint` object explicitly; in this mode of usage,
+ the "name" of the constraint can also be specified, as well as other
+ options which may be recognized by dialects::
+
+ my_table = Table('mytable', metadata,
+ Column('id', Integer),
+ Column('version_id', Integer),
+ Column('data', String(50)),
+ PrimaryKeyConstraint('id', 'version_id', name='mytable_pk')
+ )
+
+    The two styles of column specification should generally not be mixed.
+    A warning is emitted if the columns present in the
+ :class:`.PrimaryKeyConstraint`
+ don't match the columns that were marked as ``primary_key=True``, if both
+ are present; in this case, the columns are taken strictly from the
+ :class:`.PrimaryKeyConstraint` declaration, and those columns otherwise marked
+ as ``primary_key=True`` are ignored. This behavior is intended to be
+ backwards compatible with previous behavior.
+
+ .. versionchanged:: 0.9.2 Using a mixture of columns within a
+ :class:`.PrimaryKeyConstraint` in addition to columns marked as
+ ``primary_key=True`` now emits a warning if the lists don't match.
+ The ultimate behavior of ignoring those columns marked with the flag
+ only is currently maintained for backwards compatibility; this warning
+ may raise an exception in a future release.
+
+ For the use case where specific options are to be specified on the
+ :class:`.PrimaryKeyConstraint`, but the usual style of using ``primary_key=True``
+ flags is still desirable, an empty :class:`.PrimaryKeyConstraint` may be
+ specified, which will take on the primary key column collection from
+ the :class:`.Table` based on the flags::
+
+ my_table = Table('mytable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('version_id', Integer, primary_key=True),
+ Column('data', String(50)),
+ PrimaryKeyConstraint(name='mytable_pk', mssql_clustered=True)
+ )
+
+ .. versionadded:: 0.9.2 an empty :class:`.PrimaryKeyConstraint` may now
+ be specified for the purposes of establishing keyword arguments with the
+ constraint, independently of the specification of "primary key" columns
+ within the :class:`.Table` itself; columns marked as ``primary_key=True``
+ will be gathered into the empty constraint's column collection.
+
+ """
+
+ __visit_name__ = 'primary_key_constraint'
+
+ def _set_parent(self, table):
+ super(PrimaryKeyConstraint, self)._set_parent(table)
+
+ if table.primary_key is not self:
+ table.constraints.discard(table.primary_key)
+ table.primary_key = self
+ table.constraints.add(self)
+
+ table_pks = [c for c in table.c if c.primary_key]
+ if self.columns and table_pks and \
+ set(table_pks) != set(self.columns.values()):
+ util.warn(
+ "Table '%s' specifies columns %s as primary_key=True, "
+ "not matching locally specified columns %s; setting the "
+ "current primary key columns to %s. This warning "
+ "may become an exception in a future release" %
+ (
+ table.name,
+ ", ".join("'%s'" % c.name for c in table_pks),
+ ", ".join("'%s'" % c.name for c in self.columns),
+ ", ".join("'%s'" % c.name for c in self.columns)
+ )
+ )
+ table_pks[:] = []
+
+ for c in self.columns:
+ c.primary_key = True
+ self.columns.extend(table_pks)
+
+ def _reload(self, columns):
+ """repopulate this :class:`.PrimaryKeyConstraint` given
+ a set of columns.
+
+ Existing columns in the table that are marked as primary_key=True
+ are maintained.
+
+        Also fires a new attachment event against the parent :class:`.Table`.
+
+ This is basically like putting a whole new
+ :class:`.PrimaryKeyConstraint` object on the parent
+ :class:`.Table` object without actually replacing the object.
+
+ The ordering of the given list of columns is also maintained; these
+ columns will be appended to the list of columns after any which
+ are already present.
+
+ """
+
+ # set the primary key flag on new columns.
+ # note any existing PK cols on the table also have their
+ # flag still set.
+ for col in columns:
+ col.primary_key = True
+
+ self.columns.extend(columns)
+
+ self._set_parent_with_dispatch(self.table)
+
+ def _replace(self, col):
+ self.columns.replace(col)
+
+
+class UniqueConstraint(ColumnCollectionConstraint):
+ """A table-level UNIQUE constraint.
+
+ Defines a single column or composite UNIQUE constraint. For a no-frills,
+ single column constraint, adding ``unique=True`` to the ``Column``
+ definition is a shorthand equivalent for an unnamed, single column
+ UniqueConstraint.
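+
+    E.g., a sketch of a named, composite UNIQUE constraint, where ``col2``
+    and ``col3`` are hypothetical column names::
+
+        UniqueConstraint('col2', 'col3', name='uix_1')
+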
+ """
+
+ __visit_name__ = 'unique_constraint'
+
+
+class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem):
+ """A table-level INDEX.
+
+ Defines a composite (one or more column) INDEX.
+
+ E.g.::
+
+ sometable = Table("sometable", metadata,
+ Column("name", String(50)),
+ Column("address", String(100))
+ )
+
+ Index("some_index", sometable.c.name)
+
+ For a no-frills, single column index, adding
+ :class:`.Column` also supports ``index=True``::
+
+ sometable = Table("sometable", metadata,
+ Column("name", String(50), index=True)
+ )
+
+ For a composite index, multiple columns can be specified::
+
+ Index("some_index", sometable.c.name, sometable.c.address)
+
+ Functional indexes are supported as well, keeping in mind that at least
+ one :class:`.Column` must be present::
+
+ Index("some_index", func.lower(sometable.c.name))
+
+ .. versionadded:: 0.8 support for functional and expression-based indexes.
+
+ .. seealso::
+
+ :ref:`schema_indexes` - General information on :class:`.Index`.
+
+ :ref:`postgresql_indexes` - PostgreSQL-specific options available for the
+ :class:`.Index` construct.
+
+ :ref:`mysql_indexes` - MySQL-specific options available for the
+ :class:`.Index` construct.
+
+ :ref:`mssql_indexes` - MSSQL-specific options available for the
+ :class:`.Index` construct.
+
+ """
+
+ __visit_name__ = 'index'
+
+ def __init__(self, name, *expressions, **kw):
+ """Construct an index object.
+
+ :param name:
+ The name of the index
+
+ :param \*expressions:
+ Column expressions to include in the index. The expressions
+ are normally instances of :class:`.Column`, but may also
+          be arbitrary SQL expressions which ultimately refer to a
+ :class:`.Column`.
+
+ :param unique=False:
+ Keyword only argument; if True, create a unique index.
+
+ :param quote=None:
+ Keyword only argument; whether to apply quoting to the name of
+ the index. Works in the same manner as that of
+ :paramref:`.Column.quote`.
+
+ :param \**kw: Additional keyword arguments not mentioned above are
+ dialect specific, and passed in the form ``<dialectname>_<argname>``.
+ See the documentation regarding an individual dialect at
+ :ref:`dialect_toplevel` for detail on documented arguments.
+
+ """
+ self.table = None
+
+ columns = []
+ for expr in expressions:
+ if not isinstance(expr, ClauseElement):
+ columns.append(expr)
+ else:
+ cols = []
+ visitors.traverse(expr, {}, {'column': cols.append})
+ if cols:
+ columns.append(cols[0])
+ else:
+ columns.append(expr)
+
+ self.expressions = expressions
+ self.name = quoted_name(name, kw.pop("quote", None))
+ self.unique = kw.pop('unique', False)
+ self._validate_dialect_kwargs(kw)
+
+ # will call _set_parent() if table-bound column
+ # objects are present
+ ColumnCollectionMixin.__init__(self, *columns)
+
+ def _set_parent(self, table):
+ ColumnCollectionMixin._set_parent(self, table)
+
+ if self.table is not None and table is not self.table:
+ raise exc.ArgumentError(
+ "Index '%s' is against table '%s', and "
+ "cannot be associated with table '%s'." % (
+ self.name,
+ self.table.description,
+ table.description
+ )
+ )
+ self.table = table
+ for c in self.columns:
+ if c.table != self.table:
+ raise exc.ArgumentError(
+ "Column '%s' is not part of table '%s'." %
+ (c, self.table.description)
+ )
+ table.indexes.add(self)
+
+ self.expressions = [
+ expr if isinstance(expr, ClauseElement)
+ else colexpr
+ for expr, colexpr in zip(self.expressions, self.columns)
+ ]
+
+ @property
+ def bind(self):
+ """Return the connectable associated with this Index."""
+
+ return self.table.bind
+
+ def create(self, bind=None):
+ """Issue a ``CREATE`` statement for this
+ :class:`.Index`, using the given :class:`.Connectable`
+ for connectivity.
+
+ .. seealso::
+
+ :meth:`.MetaData.create_all`.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaGenerator, self)
+ return self
+
+ def drop(self, bind=None):
+ """Issue a ``DROP`` statement for this
+ :class:`.Index`, using the given :class:`.Connectable`
+ for connectivity.
+
+ .. seealso::
+
+ :meth:`.MetaData.drop_all`.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaDropper, self)
+
+ def __repr__(self):
+ return 'Index(%s)' % (
+ ", ".join(
+ [repr(self.name)] +
+ [repr(c) for c in self.columns] +
+ (self.unique and ["unique=True"] or [])
+ ))
+
+
+class MetaData(SchemaItem):
+ """A collection of :class:`.Table` objects and their associated schema
+ constructs.
+
+ Holds a collection of :class:`.Table` objects as well as
+ an optional binding to an :class:`.Engine` or
+ :class:`.Connection`. If bound, the :class:`.Table` objects
+ in the collection and their columns may participate in implicit SQL
+ execution.
+
+ The :class:`.Table` objects themselves are stored in the
+ :attr:`.MetaData.tables` dictionary.
+
+ :class:`.MetaData` is a thread-safe object for read operations. Construction
+ of new tables within a single :class:`.MetaData` object, either explicitly
+ or via reflection, may not be completely thread-safe.
+
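+    A minimal usage sketch::
+
+        metadata = MetaData()
+        user = Table('user', metadata,
+                     Column('id', Integer, primary_key=True))
+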
+ .. seealso::
+
+ :ref:`metadata_describing` - Introduction to database metadata
+
+ """
+
+ __visit_name__ = 'metadata'
+
+ def __init__(self, bind=None, reflect=False, schema=None,
+ quote_schema=None):
+ """Create a new MetaData object.
+
+ :param bind:
+ An Engine or Connection to bind to. May also be a string or URL
+ instance, these are passed to create_engine() and this MetaData will
+ be bound to the resulting engine.
+
+ :param reflect:
+ Optional, automatically load all tables from the bound database.
+ Defaults to False. ``bind`` is required when this option is set.
+
+ .. deprecated:: 0.8
+ Please use the :meth:`.MetaData.reflect` method.
+
+ :param schema:
+ The default schema to use for the :class:`.Table`,
+ :class:`.Sequence`, and other objects associated with this
+ :class:`.MetaData`. Defaults to ``None``.
+
+ :param quote_schema:
+ Sets the ``quote_schema`` flag for those :class:`.Table`,
+ :class:`.Sequence`, and other objects which make usage of the
+ local ``schema`` name.
+
+ .. versionadded:: 0.7.4
+ ``schema`` and ``quote_schema`` parameters.
+
+ """
+ self.tables = util.immutabledict()
+ self.schema = quoted_name(schema, quote_schema)
+ self._schemas = set()
+ self._sequences = {}
+ self._fk_memos = collections.defaultdict(list)
+
+ self.bind = bind
+ if reflect:
+            util.warn("reflect=True is deprecated; please "
+                "use the reflect() method.")
+ if not bind:
+ raise exc.ArgumentError(
+ "A bind must be supplied in conjunction "
+ "with reflect=True")
+ self.reflect()
+
+ tables = None
+ """A dictionary of :class:`.Table` objects keyed to their name or "table key".
+
+ The exact key is that determined by the :attr:`.Table.key` attribute;
+ for a table with no :attr:`.Table.schema` attribute, this is the same
+ as :attr:`.Table.name`. For a table with a schema, it is typically of the
+ form ``schemaname.tablename``.
+
+ .. seealso::
+
+ :attr:`.MetaData.sorted_tables`
+
+ """
+
+ def __repr__(self):
+ return 'MetaData(bind=%r)' % self.bind
+
+ def __contains__(self, table_or_key):
+ if not isinstance(table_or_key, util.string_types):
+ table_or_key = table_or_key.key
+ return table_or_key in self.tables
+
+ def _add_table(self, name, schema, table):
+ key = _get_table_key(name, schema)
+ dict.__setitem__(self.tables, key, table)
+ if schema:
+ self._schemas.add(schema)
+
+ def _remove_table(self, name, schema):
+ key = _get_table_key(name, schema)
+ removed = dict.pop(self.tables, key, None)
+ if removed is not None:
+ for fk in removed.foreign_keys:
+ fk._remove_from_metadata(self)
+ if self._schemas:
+ self._schemas = set([t.schema
+ for t in self.tables.values()
+ if t.schema is not None])
+
+ def __getstate__(self):
+ return {'tables': self.tables,
+ 'schema': self.schema,
+ 'schemas': self._schemas,
+ 'sequences': self._sequences,
+ 'fk_memos': self._fk_memos}
+
+ def __setstate__(self, state):
+ self.tables = state['tables']
+ self.schema = state['schema']
+ self._bind = None
+ self._sequences = state['sequences']
+ self._schemas = state['schemas']
+ self._fk_memos = state['fk_memos']
+
+ def is_bound(self):
+ """True if this MetaData is bound to an Engine or Connection."""
+
+ return self._bind is not None
+
+ def bind(self):
+ """An :class:`.Engine` or :class:`.Connection` to which this
+ :class:`.MetaData` is bound.
+
+ Typically, a :class:`.Engine` is assigned to this attribute
+ so that "implicit execution" may be used, or alternatively
+ as a means of providing engine binding information to an
+ ORM :class:`.Session` object::
+
+ engine = create_engine("someurl://")
+ metadata.bind = engine
+
+ .. seealso::
+
+ :ref:`dbengine_implicit` - background on "bound metadata"
+
+ """
+ return self._bind
+
+ @util.dependencies("sqlalchemy.engine.url")
+ def _bind_to(self, url, bind):
+ """Bind this MetaData to an Engine, Connection, string or URL."""
+
+ if isinstance(bind, util.string_types + (url.URL, )):
+ self._bind = sqlalchemy.create_engine(bind)
+ else:
+ self._bind = bind
+ bind = property(bind, _bind_to)
+
+ def clear(self):
+ """Clear all Table objects from this MetaData."""
+
+ dict.clear(self.tables)
+ self._schemas.clear()
+ self._fk_memos.clear()
+
+ def remove(self, table):
+ """Remove the given Table object from this MetaData."""
+
+ self._remove_table(table.name, table.schema)
+
+ @property
+ def sorted_tables(self):
+ """Returns a list of :class:`.Table` objects sorted in order of
+ foreign key dependency.
+
+ The sorting will place :class:`.Table` objects that have dependencies
+ first, before the dependencies themselves, representing the
+ order in which they can be created. To get the order in which
+ the tables would be dropped, use the ``reversed()`` Python built-in.
+
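+        E.g., a sketch of emitting table names in drop order::
+
+            for table in reversed(metadata.sorted_tables):
+                print(table.name)
+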
+ .. seealso::
+
+ :attr:`.MetaData.tables`
+
+ :meth:`.Inspector.get_table_names`
+
+ """
+ return ddl.sort_tables(self.tables.values())
+
+ def reflect(self, bind=None, schema=None, views=False, only=None,
+ extend_existing=False,
+ autoload_replace=True):
+ """Load all available table definitions from the database.
+
+ Automatically creates ``Table`` entries in this ``MetaData`` for any
+ table available in the database but not yet present in the
+ ``MetaData``. May be called multiple times to pick up tables recently
+ added to the database, however no special action is taken if a table
+ in this ``MetaData`` no longer exists in the database.
+
+ :param bind:
+ A :class:`.Connectable` used to access the database; if None, uses
+ the existing bind on this ``MetaData``, if any.
+
+ :param schema:
+          Optional, query and reflect tables from an alternate schema.
+ If None, the schema associated with this :class:`.MetaData`
+ is used, if any.
+
+ :param views:
+ If True, also reflect views.
+
+ :param only:
+ Optional. Load only a sub-set of available named tables. May be
+ specified as a sequence of names or a callable.
+
+ If a sequence of names is provided, only those tables will be
+ reflected. An error is raised if a table is requested but not
+ available. Named tables already present in this ``MetaData`` are
+ ignored.
+
+ If a callable is provided, it will be used as a boolean predicate to
+ filter the list of potential table names. The callable is called
+ with a table name and this ``MetaData`` instance as positional
+ arguments and should return a true value for any table to reflect.
+
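+          E.g., a sketch reflecting only tables with a given prefix,
+          ``someengine`` being a hypothetical bound engine::
+
+              meta.reflect(bind=someengine,
+                           only=lambda name, meta: name.startswith('acct_'))
+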
+ :param extend_existing: Passed along to each :class:`.Table` as
+ :paramref:`.Table.extend_existing`.
+
+ .. versionadded:: 0.9.1
+
+ :param autoload_replace: Passed along to each :class:`.Table` as
+ :paramref:`.Table.autoload_replace`.
+
+ .. versionadded:: 0.9.1
+
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+
+ with bind.connect() as conn:
+
+ reflect_opts = {
+ 'autoload': True,
+ 'autoload_with': conn,
+ 'extend_existing': extend_existing,
+ 'autoload_replace': autoload_replace
+ }
+
+ if schema is None:
+ schema = self.schema
+
+ if schema is not None:
+ reflect_opts['schema'] = schema
+
+ available = util.OrderedSet(bind.engine.table_names(schema,
+ connection=conn))
+ if views:
+ available.update(
+ bind.dialect.get_view_names(conn, schema)
+ )
+
+ if schema is not None:
+ available_w_schema = util.OrderedSet(["%s.%s" % (schema, name)
+ for name in available])
+ else:
+ available_w_schema = available
+
+ current = set(self.tables)
+
+ if only is None:
+ load = [name for name, schname in
+ zip(available, available_w_schema)
+ if extend_existing or schname not in current]
+ elif util.callable(only):
+ load = [name for name, schname in
+ zip(available, available_w_schema)
+ if (extend_existing or schname not in current)
+ and only(name, self)]
+ else:
+ missing = [name for name in only if name not in available]
+ if missing:
+                    s = (" schema '%s'" % schema) if schema else ''
+ raise exc.InvalidRequestError(
+ 'Could not reflect: requested table(s) not available '
+ 'in %s%s: (%s)' %
+ (bind.engine.url, s, ', '.join(missing)))
+ load = [name for name in only if extend_existing or
+ name not in current]
+
+ for name in load:
+ Table(name, self, **reflect_opts)
+
+ def append_ddl_listener(self, event_name, listener):
+ """Append a DDL event listener to this ``MetaData``.
+
+ .. deprecated:: 0.7
+ See :class:`.DDLEvents`.
+
+ """
+        def adapt_listener(target, connection, **kw):
+            tables = kw['tables']
+            # pass the event *name* through to the legacy listener
+            # signature, not the event module
+            listener(event_name, target, connection, tables=tables)
+
+        event.listen(self, event_name.replace('-', '_'), adapt_listener)
+
+ def create_all(self, bind=None, tables=None, checkfirst=True):
+ """Create all tables stored in this metadata.
+
+ Conditional by default, will not attempt to recreate tables already
+ present in the target database.
+
+ :param bind:
+ A :class:`.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
+
+ :param tables:
+ Optional list of ``Table`` objects, which is a subset of the total
+ tables in the ``MetaData`` (others are ignored).
+
+ :param checkfirst:
+ Defaults to True, don't issue CREATEs for tables already present
+ in the target database.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaGenerator,
+ self,
+ checkfirst=checkfirst,
+ tables=tables)
+
+ def drop_all(self, bind=None, tables=None, checkfirst=True):
+ """Drop all tables stored in this metadata.
+
+ Conditional by default, will not attempt to drop tables not present in
+ the target database.
+
+ :param bind:
+ A :class:`.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
+
+ :param tables:
+ Optional list of ``Table`` objects, which is a subset of the
+ total tables in the ``MetaData`` (others are ignored).
+
+ :param checkfirst:
+ Defaults to True, only issue DROPs for tables confirmed to be
+ present in the target database.
+
+ """
+ if bind is None:
+ bind = _bind_or_error(self)
+ bind._run_visitor(ddl.SchemaDropper,
+ self,
+ checkfirst=checkfirst,
+ tables=tables)
+
+
+class ThreadLocalMetaData(MetaData):
+ """A MetaData variant that presents a different ``bind`` in every thread.
+
+ Makes the ``bind`` property of the MetaData a thread-local value, allowing
+ this collection of tables to be bound to different ``Engine``
+ implementations or connections in each thread.
+
+ The ThreadLocalMetaData starts off bound to None in each thread. Binds
+ must be made explicitly by assigning to the ``bind`` property or using
+ ``connect()``. You can also re-bind dynamically multiple times per
+ thread, just like a regular ``MetaData``.
+
+ """
+
+ __visit_name__ = 'metadata'
+
+ def __init__(self):
+ """Construct a ThreadLocalMetaData."""
+
+ self.context = util.threading.local()
+ self.__engines = {}
+ super(ThreadLocalMetaData, self).__init__()
+
+ def bind(self):
+ """The bound Engine or Connection for this thread.
+
+ This property may be assigned an Engine or Connection, or assigned a
+ string or URL to automatically create a basic Engine for this bind
+ with ``create_engine()``."""
+
+ return getattr(self.context, '_engine', None)
+
+ @util.dependencies("sqlalchemy.engine.url")
+ def _bind_to(self, url, bind):
+ """Bind to a Connectable in the caller's thread."""
+
+ if isinstance(bind, util.string_types + (url.URL, )):
+ try:
+ self.context._engine = self.__engines[bind]
+ except KeyError:
+ e = sqlalchemy.create_engine(bind)
+ self.__engines[bind] = e
+ self.context._engine = e
+ else:
+            # TODO: this is squirrelly. We shouldn't have to hold onto
+            # engines in a case like this.
+ if bind not in self.__engines:
+ self.__engines[bind] = bind
+ self.context._engine = bind
+
+ bind = property(bind, _bind_to)
+
+ def is_bound(self):
+ """True if there is a bind for this thread."""
+ return (hasattr(self.context, '_engine') and
+ self.context._engine is not None)
+
+ def dispose(self):
+ """Dispose all bound engines, in all thread contexts."""
+
+ for e in self.__engines.values():
+ if hasattr(e, 'dispose'):
+ e.dispose()
+
+
+
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
new file mode 100644
index 000000000..951268b22
--- /dev/null
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -0,0 +1,3001 @@
+# sql/selectable.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""The :class:`.FromClause` class of SQL expression elements, representing
+SQL tables and derived rowsets.
+
+"""
+
+from .elements import ClauseElement, TextClause, ClauseList, \
+ and_, Grouping, UnaryExpression, literal_column
+from .elements import _clone, \
+ _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\
+ _select_iterables, _anonymous_label, _clause_element_as_expr,\
+ _cloned_intersection, _cloned_difference, True_, _only_column_elements
+from .base import Immutable, Executable, _generative, \
+ ColumnCollection, ColumnSet, _from_objects, Generative
+from . import type_api
+from .. import inspection
+from .. import util
+from .. import exc
+from operator import attrgetter
+from . import operators
+import operator
+from .annotation import Annotated
+import itertools
+
+def _interpret_as_from(element):
+ insp = inspection.inspect(element, raiseerr=False)
+ if insp is None:
+ if isinstance(element, util.string_types):
+ return TextClause(util.text_type(element))
+ elif hasattr(insp, "selectable"):
+ return insp.selectable
+ raise exc.ArgumentError("FROM expression expected")
+
+def _interpret_as_select(element):
+ element = _interpret_as_from(element)
+ if isinstance(element, Alias):
+ element = element.original
+ if not isinstance(element, Select):
+ element = element.select()
+ return element
+
+def subquery(alias, *args, **kwargs):
+ """Return an :class:`.Alias` object derived
+ from a :class:`.Select`.
+
+ name
+ alias name
+
+ \*args, \**kwargs
+
+ all other arguments are delivered to the
+ :func:`select` function.
+
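+    E.g., a sketch assuming a hypothetical ``mytable``::
+
+        sub = subquery('sub', [mytable.c.id], mytable.c.x > 5)
+        stmt = select([sub.c.id])
+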
+ """
+ return Select(*args, **kwargs).alias(alias)
+
+
+def alias(selectable, name=None, flat=False):
+ """Return an :class:`.Alias` object.
+
+ An :class:`.Alias` represents any :class:`.FromClause`
+ with an alternate name assigned within SQL, typically using the ``AS``
+ clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
+
+ Similar functionality is available via the
+ :meth:`~.FromClause.alias` method
+ available on all :class:`.FromClause` subclasses.
+
+ When an :class:`.Alias` is created from a :class:`.Table` object,
+ this has the effect of the table being rendered
+ as ``tablename AS aliasname`` in a SELECT statement.
+
+ For :func:`.select` objects, the effect is that of creating a named
+ subquery, i.e. ``(select ...) AS aliasname``.
+
+ The ``name`` parameter is optional, and provides the name
+ to use in the rendered SQL. If blank, an "anonymous" name
+ will be deterministically generated at compile time.
+ Deterministic means the name is guaranteed to be unique against
+ other constructs used in the same statement, and will also be the
+ same name for each successive compilation of the same statement
+ object.
+
+ :param selectable: any :class:`.FromClause` subclass,
+ such as a table, select statement, etc.
+
+ :param name: string name to be assigned as the alias.
+ If ``None``, a name will be deterministically generated
+ at compile time.
+
+    :param flat: Will be passed through to :meth:`.Join.alias` if the
+        given selectable is an instance of :class:`.Join` - see
+        :meth:`.Join.alias` for details.
+
+ .. versionadded:: 0.9.0
+
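+    E.g., a brief sketch against a hypothetical ``user_table``::
+
+        user_alias = alias(user_table, name='user_2')
+        stmt = select([user_table, user_alias]).where(
+            user_table.c.id != user_alias.c.id)
+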
+ """
+ return selectable.alias(name=name, flat=flat)
+
+
+class Selectable(ClauseElement):
+ """mark a class as being selectable"""
+ __visit_name__ = 'selectable'
+
+ is_selectable = True
+
+ @property
+ def selectable(self):
+ return self
+
+
+class FromClause(Selectable):
+ """Represent an element that can be used within the ``FROM``
+ clause of a ``SELECT`` statement.
+
+ The most common forms of :class:`.FromClause` are the
+ :class:`.Table` and the :func:`.select` constructs. Key
+ features common to all :class:`.FromClause` objects include:
+
+ * a :attr:`.c` collection, which provides per-name access to a collection
+ of :class:`.ColumnElement` objects.
+ * a :attr:`.primary_key` attribute, which is a collection of all those
+ :class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
+ * Methods to generate various derivations of a "from" clause, including
+ :meth:`.FromClause.alias`, :meth:`.FromClause.join`,
+ :meth:`.FromClause.select`.
+
+
+ """
+ __visit_name__ = 'fromclause'
+ named_with_column = False
+ _hide_froms = []
+
+ schema = None
+ """Define the 'schema' attribute for this :class:`.FromClause`.
+
+ This is typically ``None`` for most objects except that of :class:`.Table`,
+ where it is taken as the value of the :paramref:`.Table.schema` argument.
+
+ """
+
+ _memoized_property = util.group_expirable_memoized_property(["_columns"])
+
+ @util.dependencies("sqlalchemy.sql.functions")
+ def count(self, functions, whereclause=None, **params):
+ """return a SELECT COUNT generated against this
+ :class:`.FromClause`."""
+
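+ # use the first primary key column if one is present,
+ # else fall back to the first column of this selectable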
+ if self.primary_key:
+ col = list(self.primary_key)[0]
+ else:
+ col = list(self.columns)[0]
+ return Select(
+ [functions.func.count(col).label('tbl_row_count')],
+ whereclause,
+ from_obj=[self],
+ **params)
+
+ def select(self, whereclause=None, **params):
+ """return a SELECT of this :class:`.FromClause`.
+
+ .. seealso::
+
+ :func:`~.sql.expression.select` - general purpose
+ method which allows for arbitrary column lists.
+
+ """
+
+ return Select([self], whereclause, **params)
+
+ def join(self, right, onclause=None, isouter=False):
+ """return a join of this :class:`.FromClause` against another
+ :class:`.FromClause`."""
+
+ return Join(self, right, onclause, isouter)
+
+ def outerjoin(self, right, onclause=None):
+ """return an outer join of this :class:`.FromClause` against another
+ :class:`.FromClause`."""
+
+ return Join(self, right, onclause, True)
+
+ def alias(self, name=None, flat=False):
+ """return an alias of this :class:`.FromClause`.
+
+ This is shorthand for calling::
+
+ from sqlalchemy import alias
+ a = alias(self, name=name)
+
+ See :func:`~.expression.alias` for details.
+
+ """
+
+ return Alias(self, name)
+
+ def is_derived_from(self, fromclause):
+ """Return True if this FromClause is 'derived' from the given
+ FromClause.
+
+ For example, an Alias of a Table is derived from that Table.
+
+ """
+ # this is essentially an "identity" check in the base class.
+ # Other constructs override this to traverse through
+ # contained elements.
+ return fromclause in self._cloned_set
+
+ def _is_lexical_equivalent(self, other):
+ """Return True if this FromClause and the other represent
+ the same lexical identity.
+
+ This tests if either one is a copy of the other, or
+ if they are the same via annotation identity.
+
+ """
+ return self._cloned_set.intersection(other._cloned_set)
+
+ @util.dependencies("sqlalchemy.sql.util")
+ def replace_selectable(self, sqlutil, old, alias):
+ """replace all occurrences of FromClause 'old' with the given Alias
+ object, returning a copy of this :class:`.FromClause`.
+
+ """
+
+ return sqlutil.ClauseAdapter(alias).traverse(self)
+
+ def correspond_on_equivalents(self, column, equivalents):
+ """Return corresponding_column for the given column, or if None
+ search for a match in the given dictionary.
+
+ """
+ col = self.corresponding_column(column, require_embedded=True)
+ if col is None and column in equivalents:
+ for equiv in equivalents[column]:
+ nc = self.corresponding_column(equiv, require_embedded=True)
+ if nc:
+ return nc
+ return col
+
+ def corresponding_column(self, column, require_embedded=False):
+ """Given a :class:`.ColumnElement`, return the exported
+ :class:`.ColumnElement` object from this :class:`.Selectable`
+ which corresponds to that original
+ :class:`~sqlalchemy.schema.Column` via a common ancestor
+ column.
+
+ :param column: the target :class:`.ColumnElement` to be matched
+
+ :param require_embedded: only return corresponding columns for
+ the given :class:`.ColumnElement`, if the given :class:`.ColumnElement`
+ is actually present within a sub-element
+ of this :class:`.FromClause`. Normally the column will match if
+ it merely shares a common ancestor with one of the exported
+ columns of this :class:`.FromClause`.
+
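+ E.g., a sketch assuming a :class:`.Table` named ``mytable`` with
+ an ``id`` column (hypothetical names)::
+
+ a = mytable.alias()
+ a.corresponding_column(mytable.c.id) is a.c.id # True
+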
+ """
+
+ def embedded(expanded_proxy_set, target_set):
+ for t in target_set.difference(expanded_proxy_set):
+ if not set(_expand_cloned([t])
+ ).intersection(expanded_proxy_set):
+ return False
+ return True
+
+ # don't dig around if the column is locally present
+ if self.c.contains_column(column):
+ return column
+ col, intersect = None, None
+ target_set = column.proxy_set
+ cols = self.c
+ for c in cols:
+ expanded_proxy_set = set(_expand_cloned(c.proxy_set))
+ i = target_set.intersection(expanded_proxy_set)
+ if i and (not require_embedded
+ or embedded(expanded_proxy_set, target_set)):
+ if col is None:
+
+ # no corresponding column yet, pick this one.
+
+ col, intersect = c, i
+ elif len(i) > len(intersect):
+
+ # 'c' has a larger field of correspondence than
+ # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
+ # matches a1.c.x->table.c.x better than
+ # selectable.c.x->table.c.x does.
+
+ col, intersect = c, i
+ elif i == intersect:
+
+ # they have the same field of correspondence. see
+ # which proxy_set has fewer columns in it, which
+ # indicates a closer relationship with the root
+ # column. Also take into account the "weight"
+ # attribute which CompoundSelect() uses to give
+ # higher precedence to columns based on vertical
+ # position in the compound statement, and discard
+ # columns that have no reference to the target
+ # column (also occurs with CompoundSelect)
+
+ col_distance = util.reduce(operator.add,
+ [sc._annotations.get('weight', 1) for sc in
+ col.proxy_set if sc.shares_lineage(column)])
+ c_distance = util.reduce(operator.add,
+ [sc._annotations.get('weight', 1) for sc in
+ c.proxy_set if sc.shares_lineage(column)])
+ if c_distance < col_distance:
+ col, intersect = c, i
+ return col
+
+ @property
+ def description(self):
+ """a brief description of this FromClause.
+
+ Used primarily for error message formatting.
+
+ """
+ return getattr(self, 'name', self.__class__.__name__ + " object")
+
+ def _reset_exported(self):
+ """delete memoized collections when a FromClause is cloned."""
+
+ self._memoized_property.expire_instance(self)
+
+ @_memoized_property
+ def columns(self):
+ """A named-based collection of :class:`.ColumnElement` objects
+ maintained by this :class:`.FromClause`.
+
+ The :attr:`.columns`, or :attr:`.c` collection, is the gateway
+ to the construction of SQL expressions using table-bound or
+ other selectable-bound columns::
+
+ select([mytable]).where(mytable.c.somecolumn == 5)
+
+ """
+
+ if '_columns' not in self.__dict__:
+ self._init_collections()
+ self._populate_column_collection()
+ return self._columns.as_immutable()
+
+ @_memoized_property
+ def primary_key(self):
+ """Return the collection of Column objects which comprise the
+ primary key of this FromClause."""
+
+ self._init_collections()
+ self._populate_column_collection()
+ return self.primary_key
+
+ @_memoized_property
+ def foreign_keys(self):
+ """Return the collection of ForeignKey objects which this
+ FromClause references."""
+
+ self._init_collections()
+ self._populate_column_collection()
+ return self.foreign_keys
+
+ c = property(attrgetter('columns'),
+ doc="An alias for the :attr:`.columns` attribute.")
+ _select_iterable = property(attrgetter('columns'))
+
+ def _init_collections(self):
+ assert '_columns' not in self.__dict__
+ assert 'primary_key' not in self.__dict__
+ assert 'foreign_keys' not in self.__dict__
+
+ self._columns = ColumnCollection()
+ self.primary_key = ColumnSet()
+ self.foreign_keys = set()
+
+ @property
+ def _cols_populated(self):
+ return '_columns' in self.__dict__
+
+ def _populate_column_collection(self):
+ """Called on subclasses to establish the .c collection.
+
+ Each implementation has a different way of establishing
+ this collection.
+
+ """
+
+ def _refresh_for_new_column(self, column):
+ """Given a column added to the .c collection of an underlying
+ selectable, produce the local version of that column, assuming this
+ selectable ultimately should proxy this column.
+
+ this is used to "ping" a derived selectable to add a new column
+ to its .c. collection when a Column has been added to one of the
+ Table objects it ultimately derives from.
+
+ If the given selectable hasn't populated its .c. collection yet,
+ it should at least pass on the message to the contained selectables,
+ but it will return None.
+
+ This method is currently used by Declarative to allow Table
+ columns to be added to a partially constructed inheritance
+ mapping that may have already produced joins. The method
+ isn't public right now, as the full span of implications
+ and/or caveats aren't yet clear.
+
+ It's also possible that this functionality could be invoked by
+ default via an event, which would require that
+ selectables maintain a weak referencing collection of all
+ derivations.
+
+ """
+ if not self._cols_populated:
+ return None
+ elif column.key in self.columns and self.columns[column.key] is column:
+ return column
+ else:
+ return None
+
+
+class Join(FromClause):
+ """represent a ``JOIN`` construct between two :class:`.FromClause`
+ elements.
+
+ The public constructor function for :class:`.Join` is the module-level
+ :func:`join()` function, as well as the :func:`join()` method available
+ off all :class:`.FromClause` subclasses.
+
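+ E.g., a sketch assuming tables ``a`` and ``b`` where ``b`` has a
+ foreign key to ``a`` (hypothetical names)::
+
+ j = a.join(b) # ON clause derived from the foreign key
+ stmt = select([a, b]).select_from(j)
+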
+ """
+ __visit_name__ = 'join'
+
+ def __init__(self, left, right, onclause=None, isouter=False):
+ """Construct a new :class:`.Join`.
+
+ The usual entrypoint here is the :func:`~.expression.join`
+ function or the :meth:`.FromClause.join` method of any
+ :class:`.FromClause` object.
+
+ """
+ self.left = _interpret_as_from(left)
+ self.right = _interpret_as_from(right).self_group()
+
+ if onclause is None:
+ self.onclause = self._match_primaries(self.left, self.right)
+ else:
+ self.onclause = onclause
+
+ self.isouter = isouter
+
+ @classmethod
+ def _create_outerjoin(cls, left, right, onclause=None):
+ """Return an ``OUTER JOIN`` clause element.
+
+ The returned object is an instance of :class:`.Join`.
+
+ Similar functionality is also available via the
+ :meth:`~.FromClause.outerjoin()` method on any
+ :class:`.FromClause`.
+
+ :param left: The left side of the join.
+
+ :param right: The right side of the join.
+
+ :param onclause: Optional criterion for the ``ON`` clause, is
+ derived from foreign key relationships established between
+ left and right otherwise.
+
+ To chain joins together, use the :meth:`.FromClause.join` or
+ :meth:`.FromClause.outerjoin` methods on the resulting
+ :class:`.Join` object.
+
+ """
+ return cls(left, right, onclause, isouter=True)
+
+
+ @classmethod
+ def _create_join(cls, left, right, onclause=None, isouter=False):
+ """Return a ``JOIN`` clause element (regular inner join).
+
+ The returned object is an instance of :class:`.Join`.
+
+ Similar functionality is also available via the
+ :meth:`~.FromClause.join()` method on any
+ :class:`.FromClause`.
+
+ :param left: The left side of the join.
+
+ :param right: The right side of the join.
+
+ :param onclause: Optional criterion for the ``ON`` clause, is
+ derived from foreign key relationships established between
+ left and right otherwise.
+
+ :param isouter: if True, produce an outer join; synonymous
+ with :func:`.outerjoin`.
+
+ To chain joins together, use the :meth:`.FromClause.join` or
+ :meth:`.FromClause.outerjoin` methods on the resulting
+ :class:`.Join` object.
+
+
+ """
+ return cls(left, right, onclause, isouter)
+
+
+ @property
+ def description(self):
+ return "Join object on %s(%d) and %s(%d)" % (
+ self.left.description,
+ id(self.left),
+ self.right.description,
+ id(self.right))
+
+ def is_derived_from(self, fromclause):
+ return fromclause is self or \
+ self.left.is_derived_from(fromclause) or \
+ self.right.is_derived_from(fromclause)
+
+ def self_group(self, against=None):
+ return FromGrouping(self)
+
+ @util.dependencies("sqlalchemy.sql.util")
+ def _populate_column_collection(self, sqlutil):
+ columns = [c for c in self.left.columns] + \
+ [c for c in self.right.columns]
+
+ self.primary_key.extend(sqlutil.reduce_columns(
+ (c for c in columns if c.primary_key), self.onclause))
+ self._columns.update((col._label, col) for col in columns)
+ self.foreign_keys.update(itertools.chain(
+ *[col.foreign_keys for col in columns]))
+
+ def _refresh_for_new_column(self, column):
+ col = self.left._refresh_for_new_column(column)
+ if col is None:
+ col = self.right._refresh_for_new_column(column)
+ if col is not None:
+ if self._cols_populated:
+ self._columns[col._label] = col
+ self.foreign_keys.add(col)
+ if col.primary_key:
+ self.primary_key.add(col)
+ return col
+ return None
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self._reset_exported()
+ self.left = clone(self.left, **kw)
+ self.right = clone(self.right, **kw)
+ self.onclause = clone(self.onclause, **kw)
+
+ def get_children(self, **kwargs):
+ return self.left, self.right, self.onclause
+
+ def _match_primaries(self, left, right):
+ if isinstance(left, Join):
+ left_right = left.right
+ else:
+ left_right = None
+ return self._join_condition(left, right, a_subset=left_right)
+
+ @classmethod
+ def _join_condition(cls, a, b, ignore_nonexistent_tables=False,
+ a_subset=None,
+ consider_as_foreign_keys=None):
+ """create a join condition between two tables or selectables.
+
+ e.g.::
+
+ join_condition(tablea, tableb)
+
+ would produce an expression along the lines of::
+
+ tablea.c.id==tableb.c.tablea_id
+
+ The join is determined based on the foreign key relationships
+ between the two selectables. If there are multiple ways
+ to join, or no way to join, an error is raised.
+
+ :param ignore_nonexistent_tables: Deprecated - this
+ flag is no longer used. Only resolution errors regarding
+ the two given tables are propagated.
+
+ :param a_subset: An optional expression that is a sub-component
+ of ``a``. An attempt will be made to join to just this sub-component
+ first before looking at the full ``a`` construct, and if found
+ will be successful even if there are other ways to join to ``a``.
+ This allows the "right side" of a join to be passed thereby
+ providing a "natural join".
+
+ """
+ crit = []
+ constraints = set()
+
+ for left in (a_subset, a):
+ if left is None:
+ continue
+ for fk in sorted(
+ b.foreign_keys,
+ key=lambda fk: fk.parent._creation_order):
+ if consider_as_foreign_keys is not None and \
+ fk.parent not in consider_as_foreign_keys:
+ continue
+ try:
+ col = fk.get_referent(left)
+ except exc.NoReferenceError as nrte:
+ if nrte.table_name == left.name:
+ raise
+ else:
+ continue
+
+ if col is not None:
+ crit.append(col == fk.parent)
+ constraints.add(fk.constraint)
+ if left is not b:
+ for fk in sorted(
+ left.foreign_keys,
+ key=lambda fk: fk.parent._creation_order):
+ if consider_as_foreign_keys is not None and \
+ fk.parent not in consider_as_foreign_keys:
+ continue
+ try:
+ col = fk.get_referent(b)
+ except exc.NoReferenceError as nrte:
+ if nrte.table_name == b.name:
+ raise
+ else:
+ # this is totally covered. can't get
+ # coverage to mark it.
+ continue
+
+ if col is not None:
+ crit.append(col == fk.parent)
+ constraints.add(fk.constraint)
+ if crit:
+ break
+
+ if len(crit) == 0:
+ if isinstance(b, FromGrouping):
+ hint = " Perhaps you meant to convert the right side to a "\
+ "subquery using alias()?"
+ else:
+ hint = ""
+ raise exc.NoForeignKeysError(
+ "Can't find any foreign key relationships "
+ "between '%s' and '%s'.%s" % (a.description, b.description, hint))
+ elif len(constraints) > 1:
+ raise exc.AmbiguousForeignKeysError(
+ "Can't determine join between '%s' and '%s'; "
+ "tables have more than one foreign key "
+ "constraint relationship between them. "
+ "Please specify the 'onclause' of this "
+ "join explicitly." % (a.description, b.description))
+ elif len(crit) == 1:
+ return (crit[0])
+ else:
+ return and_(*crit)
+
+
+ def select(self, whereclause=None, **kwargs):
+ """Create a :class:`.Select` from this :class:`.Join`.
+
+ The equivalent long-hand form, given a :class:`.Join` object
+ ``j``, is::
+
+ from sqlalchemy import select
+ j = select([j.left, j.right], **kw).\\
+ where(whereclause).\\
+ select_from(j)
+
+ :param whereclause: the WHERE criterion that will be sent to
+ the :func:`select()` function
+
+ :param \**kwargs: all other kwargs are sent to the
+ underlying :func:`select()` function.
+
+ """
+ collist = [self.left, self.right]
+
+ return Select(collist, whereclause, from_obj=[self], **kwargs)
+
+ @property
+ def bind(self):
+ return self.left.bind or self.right.bind
+
+ @util.dependencies("sqlalchemy.sql.util")
+ def alias(self, sqlutil, name=None, flat=False):
+ """return an alias of this :class:`.Join`.
+
+ The default behavior here is to first produce a SELECT
+ construct from this :class:`.Join`, then to produce a
+ :class:`.Alias` from that. So given a join of the form::
+
+ j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
+
+ The JOIN by itself would look like::
+
+ table_a JOIN table_b ON table_a.id = table_b.a_id
+
+ Whereas the alias of the above, ``j.alias()``, would in a
+ SELECT context look like::
+
+ (SELECT table_a.id AS table_a_id, table_b.id AS table_b_id,
+ table_b.a_id AS table_b_a_id
+ FROM table_a
+ JOIN table_b ON table_a.id = table_b.a_id) AS anon_1
+
+ The equivalent long-hand form, given a :class:`.Join` object
+ ``j``, is::
+
+ from sqlalchemy import select, alias
+ j = alias(
+ select([j.left, j.right]).\\
+ select_from(j).\\
+ with_labels(True).\\
+ correlate(False),
+ name=name
+ )
+
+ The selectable produced by :meth:`.Join.alias` features the same
+ columns as that of the two individual selectables presented under
+ a single name - the individual columns are "auto-labeled", meaning
+ the ``.c.`` collection of the resulting :class:`.Alias` represents
+ the names of the individual columns using a ``<tablename>_<columnname>``
+ scheme::
+
+ j.c.table_a_id
+ j.c.table_b_a_id
+
+ :meth:`.Join.alias` also features an alternate
+ option for aliasing joins which produces no enclosing SELECT and
+ does not normally apply labels to the column names. The
+ ``flat=True`` option will call :meth:`.FromClause.alias`
+ against the left and right sides individually.
+ Using this option, no new ``SELECT`` is produced;
+ instead, starting from a construct such as::
+
+ j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
+ j = j.alias(flat=True)
+
+ we get a result like this::
+
+ table_a AS table_a_1 JOIN table_b AS table_b_1 ON
+ table_a_1.id = table_b_1.a_id
+
+ The ``flat=True`` argument is also propagated to the contained
+ selectables, so that a composite join such as::
+
+ j = table_a.join(
+ table_b.join(table_c,
+ table_b.c.id == table_c.c.b_id),
+ table_b.c.a_id == table_a.c.id
+ ).alias(flat=True)
+
+ Will produce an expression like::
+
+ table_a AS table_a_1 JOIN (
+ table_b AS table_b_1 JOIN table_c AS table_c_1
+ ON table_b_1.id = table_c_1.b_id
+ ) ON table_a_1.id = table_b_1.a_id
+
+ The standalone :func:`~.expression.alias` function as well as the
+ base :meth:`.FromClause.alias` method also support the ``flat=True``
+ argument as a no-op, so that the argument can be passed to the
+ ``alias()`` method of any selectable.
+
+ .. versionadded:: 0.9.0 Added the ``flat=True`` option to create
+ "aliases" of joins without enclosing inside of a SELECT
+ subquery.
+
+ :param name: name given to the alias.
+
+ :param flat: if True, produce an alias of the left and right
+ sides of this :class:`.Join` and return the join of those
+ two selectables. This produces a join expression that does not
+ include an enclosing SELECT.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :func:`~.expression.alias`
+
+ """
+ if flat:
+ assert name is None, "Can't send name argument with flat"
+ left_a, right_a = self.left.alias(flat=True), \
+ self.right.alias(flat=True)
+ adapter = sqlutil.ClauseAdapter(left_a).\
+ chain(sqlutil.ClauseAdapter(right_a))
+
+ return left_a.join(right_a,
+ adapter.traverse(self.onclause), isouter=self.isouter)
+ else:
+ return self.select(use_labels=True, correlate=False).alias(name)
+
+ @property
+ def _hide_froms(self):
+ return itertools.chain(*[_from_objects(x.left, x.right)
+ for x in self._cloned_set])
+
+ @property
+ def _from_objects(self):
+ return [self] + \
+ self.onclause._from_objects + \
+ self.left._from_objects + \
+ self.right._from_objects
+
+
+class Alias(FromClause):
+ """Represents an table or selectable alias (AS).
+
+ Represents an alias, as typically applied to any table or
+ sub-select within a SQL statement using the ``AS`` keyword (or
+ without the keyword on certain databases such as Oracle).
+
+ This object is constructed from the :func:`~.expression.alias` module level
+ function as well as the :meth:`.FromClause.alias` method available on all
+ :class:`.FromClause` subclasses.
+
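+ E.g., a sketch assuming a :class:`.Table` named ``user`` with an
+ ``id`` column (hypothetical names)::
+
+ a = user.alias('u')
+ stmt = select([a.c.id]) # renders "... FROM user AS u"
+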
+ """
+
+ __visit_name__ = 'alias'
+ named_with_column = True
+
+ def __init__(self, selectable, name=None):
+ baseselectable = selectable
+ while isinstance(baseselectable, Alias):
+ baseselectable = baseselectable.element
+ self.original = baseselectable
+ self.supports_execution = baseselectable.supports_execution
+ if self.supports_execution:
+ self._execution_options = baseselectable._execution_options
+ self.element = selectable
+ if name is None:
+ if self.original.named_with_column:
+ name = getattr(self.original, 'name', None)
+ name = _anonymous_label('%%(%d %s)s' % (id(self), name
+ or 'anon'))
+ self.name = name
+
+ @property
+ def description(self):
+ if util.py3k:
+ return self.name
+ else:
+ return self.name.encode('ascii', 'backslashreplace')
+
+ def as_scalar(self):
+ try:
+ return self.element.as_scalar()
+ except AttributeError:
+ raise AttributeError("Element %s does not support "
+ "'as_scalar()'" % self.element)
+
+ def is_derived_from(self, fromclause):
+ if fromclause in self._cloned_set:
+ return True
+ return self.element.is_derived_from(fromclause)
+
+ def _populate_column_collection(self):
+ for col in self.element.columns:
+ col._make_proxy(self)
+
+ def _refresh_for_new_column(self, column):
+ col = self.element._refresh_for_new_column(column)
+ if col is not None:
+ if not self._cols_populated:
+ return None
+ else:
+ return col._make_proxy(self)
+ else:
+ return None
+
+ def _copy_internals(self, clone=_clone, **kw):
+ # don't apply anything to an aliased Table
+ # for now. May want to drive this from
+ # the given **kw.
+ if isinstance(self.element, TableClause):
+ return
+ self._reset_exported()
+ self.element = clone(self.element, **kw)
+ baseselectable = self.element
+ while isinstance(baseselectable, Alias):
+ baseselectable = baseselectable.element
+ self.original = baseselectable
+
+ def get_children(self, column_collections=True, **kw):
+ if column_collections:
+ for c in self.c:
+ yield c
+ yield self.element
+
+ @property
+ def _from_objects(self):
+ return [self]
+
+ @property
+ def bind(self):
+ return self.element.bind
+
+
+class CTE(Alias):
+ """Represent a Common Table Expression.
+
+ The :class:`.CTE` object is obtained using the
+ :meth:`.SelectBase.cte` method from any selectable.
+ See that method for complete examples.
+
+ .. versionadded:: 0.7.6
+
+ """
+ __visit_name__ = 'cte'
+
+ def __init__(self, selectable,
+ name=None,
+ recursive=False,
+ _cte_alias=None,
+ _restates=frozenset()):
+ self.recursive = recursive
+ self._cte_alias = _cte_alias
+ self._restates = _restates
+ super(CTE, self).__init__(selectable, name=name)
+
+ def alias(self, name=None, flat=False):
+ return CTE(
+ self.original,
+ name=name,
+ recursive=self.recursive,
+ _cte_alias=self,
+ )
+
+ def union(self, other):
+ return CTE(
+ self.original.union(other),
+ name=self.name,
+ recursive=self.recursive,
+ _restates=self._restates.union([self])
+ )
+
+ def union_all(self, other):
+ return CTE(
+ self.original.union_all(other),
+ name=self.name,
+ recursive=self.recursive,
+ _restates=self._restates.union([self])
+ )
+
+
+
+
+class FromGrouping(FromClause):
+ """Represent a grouping of a FROM clause"""
+ __visit_name__ = 'grouping'
+
+ def __init__(self, element):
+ self.element = element
+
+ def _init_collections(self):
+ pass
+
+ @property
+ def columns(self):
+ return self.element.columns
+
+ @property
+ def primary_key(self):
+ return self.element.primary_key
+
+ @property
+ def foreign_keys(self):
+ return self.element.foreign_keys
+
+ def is_derived_from(self, element):
+ return self.element.is_derived_from(element)
+
+ def alias(self, **kw):
+ return FromGrouping(self.element.alias(**kw))
+
+ @property
+ def _hide_froms(self):
+ return self.element._hide_froms
+
+ def get_children(self, **kwargs):
+ return self.element,
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self.element = clone(self.element, **kw)
+
+ @property
+ def _from_objects(self):
+ return self.element._from_objects
+
+ def __getattr__(self, attr):
+ return getattr(self.element, attr)
+
+ def __getstate__(self):
+ return {'element': self.element}
+
+ def __setstate__(self, state):
+ self.element = state['element']
+
+class TableClause(Immutable, FromClause):
+ """Represents a minimal "table" construct.
+
+ This is a lightweight table object that has only a name and a
+ collection of columns, which are typically produced
+ by the :func:`.expression.column` function::
+
+ from sqlalchemy.sql import table, column
+
+ user = table("user",
+ column("id"),
+ column("name"),
+ column("description"),
+ )
+
+ The :class:`.TableClause` construct serves as the base for
+ the more commonly used :class:`~.schema.Table` object, providing
+ the usual set of :class:`~.expression.FromClause` services including
+ the ``.c.`` collection and statement generation methods.
+
+ It does **not** provide all the additional schema-level services
+ of :class:`~.schema.Table`, including constraints, references to other
+ tables, or support for :class:`.MetaData`-level services. It's useful
+ on its own as an ad-hoc construct used to generate quick SQL
+ statements when a more fully fledged :class:`~.schema.Table`
+ is not on hand.
+
+ """
+
+ __visit_name__ = 'table'
+
+ named_with_column = True
+
+ implicit_returning = False
+ """:class:`.TableClause` doesn't support having a primary key or column
+ -level defaults, so implicit returning doesn't apply."""
+
+ _autoincrement_column = None
+ """No PK or default support so no autoincrement column."""
+
+ def __init__(self, name, *columns):
+ """Produce a new :class:`.TableClause`.
+
+ The object returned is an instance of :class:`.TableClause`, which
+ represents the "syntactical" portion of the schema-level
+ :class:`~.schema.Table` object.
+ It may be used to construct lightweight table constructs.
+
+ Note that the :func:`.expression.table` function is not part of
+ the ``sqlalchemy`` namespace. It must be imported from the
+ ``sql`` package::
+
+ from sqlalchemy.sql import table, column
+
+ :param name: Name of the table.
+
+ :param columns: A collection of :func:`.expression.column` constructs.
+
+ """
+
+ super(TableClause, self).__init__()
+ self.name = self.fullname = name
+ self._columns = ColumnCollection()
+ self.primary_key = ColumnSet()
+ self.foreign_keys = set()
+ for c in columns:
+ self.append_column(c)
+
+ def _init_collections(self):
+ pass
+
+ @util.memoized_property
+ def description(self):
+ if util.py3k:
+ return self.name
+ else:
+ return self.name.encode('ascii', 'backslashreplace')
+
+ def append_column(self, c):
+ self._columns[c.key] = c
+ c.table = self
+
+ def get_children(self, column_collections=True, **kwargs):
+ if column_collections:
+ return [c for c in self.c]
+ else:
+ return []
+
+ @util.dependencies("sqlalchemy.sql.functions")
+ def count(self, functions, whereclause=None, **params):
+ """return a SELECT COUNT generated against this
+ :class:`.TableClause`."""
+
+ if self.primary_key:
+ col = list(self.primary_key)[0]
+ else:
+ col = list(self.columns)[0]
+ return Select(
+ [functions.func.count(col).label('tbl_row_count')],
+ whereclause,
+ from_obj=[self],
+ **params)
+
+ @util.dependencies("sqlalchemy.sql.dml")
+ def insert(self, dml, values=None, inline=False, **kwargs):
+ """Generate an :func:`.insert` construct against this
+ :class:`.TableClause`.
+
+ E.g.::
+
+ table.insert().values(name='foo')
+
+ See :func:`.insert` for argument and usage information.
+
+ """
+
+ return dml.Insert(self, values=values, inline=inline, **kwargs)
+
+ @util.dependencies("sqlalchemy.sql.dml")
+ def update(self, dml, whereclause=None, values=None, inline=False, **kwargs):
+ """Generate an :func:`.update` construct against this
+ :class:`.TableClause`.
+
+ E.g.::
+
+ table.update().where(table.c.id==7).values(name='foo')
+
+ See :func:`.update` for argument and usage information.
+
+ """
+
+ return dml.Update(self, whereclause=whereclause,
+ values=values, inline=inline, **kwargs)
+
+ @util.dependencies("sqlalchemy.sql.dml")
+ def delete(self, dml, whereclause=None, **kwargs):
+ """Generate a :func:`.delete` construct against this
+ :class:`.TableClause`.
+
+ E.g.::
+
+ table.delete().where(table.c.id==7)
+
+ See :func:`.delete` for argument and usage information.
+
+ """
+
+ return dml.Delete(self, whereclause, **kwargs)
+
+ @property
+ def _from_objects(self):
+ return [self]
+
+
+class ForUpdateArg(ClauseElement):
+
+ @classmethod
+ def parse_legacy_select(self, arg):
+ """Parse the for_update arugment of :func:`.select`.
+
+ :param mode: Defines the lockmode to use.
+
+ ``None`` - translates to no lockmode
+
+ ``'update'`` - translates to ``FOR UPDATE``
+ (standard SQL, supported by most dialects)
+
+ ``'nowait'`` - translates to ``FOR UPDATE NOWAIT``
+ (supported by Oracle, PostgreSQL 8.1 upwards)
+
+ ``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
+ and ``FOR SHARE`` (for PostgreSQL)
+
+ ``'read_nowait'`` - translates to ``FOR SHARE NOWAIT``
+ (supported by PostgreSQL)
+
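+ For example, a sketch of the translation (``'read_nowait'`` sets
+ both flags)::
+
+ arg = ForUpdateArg.parse_legacy_select('read_nowait')
+ assert arg.read and arg.nowait
+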
+ """
+ if arg in (None, False):
+ return None
+
+ nowait = read = False
+ if arg == 'nowait':
+ nowait = True
+ elif arg == 'read':
+ read = True
+ elif arg == 'read_nowait':
+ read = nowait = True
+ elif arg is not True:
+ raise exc.ArgumentError("Unknown for_update argument: %r" % arg)
+
+ return ForUpdateArg(read=read, nowait=nowait)
+
+ @property
+ def legacy_for_update_value(self):
+ if self.read and not self.nowait:
+ return "read"
+ elif self.read and self.nowait:
+ return "read_nowait"
+ elif self.nowait:
+ return "nowait"
+ else:
+ return True
+
+ def _copy_internals(self, clone=_clone, **kw):
+ if self.of is not None:
+ self.of = [clone(col, **kw) for col in self.of]
+
+ def __init__(self, nowait=False, read=False, of=None):
+ """Represents arguments specified to :meth:`.Select.for_update`.
+
+ .. versionadded:: 0.9.0
+ """
+
+ self.nowait = nowait
+ self.read = read
+ if of is not None:
+ self.of = [_interpret_as_column_or_from(elem)
+ for elem in util.to_list(of)]
+ else:
+ self.of = None
+
+
+class SelectBase(Executable, FromClause):
+ """Base class for SELECT statements.
+
+
+ This includes :class:`.Select`, :class:`.CompoundSelect` and
+ :class:`.TextAsFrom`.
+
+
+ """
+
+ def as_scalar(self):
+ """return a 'scalar' representation of this selectable, which can be
+ used as a column expression.
+
+ Typically, a select statement which has only one column in its columns
+ clause is eligible to be used as a scalar expression.
+
+ The returned object is an instance of
+ :class:`ScalarSelect`.
+
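+ E.g., a sketch assuming a table ``accounts`` with ``id`` and
+ ``balance`` columns (hypothetical names)::
+
+ amount = select([accounts.c.balance]).where(accounts.c.id == 5).as_scalar()
+ stmt = select([accounts.c.id, amount])
+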
+ """
+ return ScalarSelect(self)
+
+
+ def label(self, name):
+ """return a 'scalar' representation of this selectable, embedded as a
+ subquery with a label.
+
+ .. seealso::
+
+ :meth:`~.SelectBase.as_scalar`.
+
+ """
+ return self.as_scalar().label(name)
+
+ def cte(self, name=None, recursive=False):
+ """Return a new :class:`.CTE`, or Common Table Expression instance.
+
+ Common table expressions are a SQL standard whereby SELECT
+ statements can draw upon secondary statements specified along
+ with the primary statement, using a clause called "WITH".
+ Special semantics regarding UNION can also be employed to
+ allow "recursive" queries, where a SELECT statement can draw
+ upon the set of rows that have previously been selected.
+
+ SQLAlchemy detects :class:`.CTE` objects, which are treated
+ similarly to :class:`.Alias` objects, as special elements
+ to be delivered to the FROM clause of the statement as well
+ as to a WITH clause at the top of the statement.
+
+ .. versionadded:: 0.7.6
+
+ :param name: name given to the common table expression. Like
+ :meth:`.FromClause.alias`, the name can be left as ``None``
+ in which case an anonymous symbol will be used at query
+ compile time.
+ :param recursive: if ``True``, will render ``WITH RECURSIVE``.
+ A recursive common table expression is intended to be used in
+ conjunction with UNION ALL in order to derive rows
+ from those already selected.
+
+ The following two examples are drawn from
+ Postgresql's documentation at
+ http://www.postgresql.org/docs/8.4/static/queries-with.html.
+
+ Example 1, non-recursive::
+
+ from sqlalchemy import Table, Column, String, Integer, MetaData, \\
+ select, func
+
+ metadata = MetaData()
+
+ orders = Table('orders', metadata,
+ Column('region', String),
+ Column('amount', Integer),
+ Column('product', String),
+ Column('quantity', Integer)
+ )
+
+ regional_sales = select([
+ orders.c.region,
+ func.sum(orders.c.amount).label('total_sales')
+ ]).group_by(orders.c.region).cte("regional_sales")
+
+
+ top_regions = select([regional_sales.c.region]).\\
+ where(
+ regional_sales.c.total_sales >
+ select([
+ func.sum(regional_sales.c.total_sales)/10
+ ])
+ ).cte("top_regions")
+
+ statement = select([
+ orders.c.region,
+ orders.c.product,
+ func.sum(orders.c.quantity).label("product_units"),
+ func.sum(orders.c.amount).label("product_sales")
+ ]).where(orders.c.region.in_(
+ select([top_regions.c.region])
+ )).group_by(orders.c.region, orders.c.product)
+
+ result = conn.execute(statement).fetchall()
+
+ Example 2, WITH RECURSIVE::
+
+ from sqlalchemy import Table, Column, String, Integer, MetaData, \\
+ select, func
+
+ metadata = MetaData()
+
+ parts = Table('parts', metadata,
+ Column('part', String),
+ Column('sub_part', String),
+ Column('quantity', Integer),
+ )
+
+ included_parts = select([
+ parts.c.sub_part,
+ parts.c.part,
+ parts.c.quantity]).\\
+ where(parts.c.part=='our part').\\
+ cte(recursive=True)
+
+
+ incl_alias = included_parts.alias()
+ parts_alias = parts.alias()
+ included_parts = included_parts.union_all(
+ select([
+ parts_alias.c.part,
+ parts_alias.c.sub_part,
+ parts_alias.c.quantity
+ ]).
+ where(parts_alias.c.part==incl_alias.c.sub_part)
+ )
+
+ statement = select([
+ included_parts.c.sub_part,
+ func.sum(included_parts.c.quantity).
+ label('total_quantity')
+ ]).\\
+ select_from(included_parts.join(parts,
+ included_parts.c.part==parts.c.part)).\\
+ group_by(included_parts.c.sub_part)
+
+ result = conn.execute(statement).fetchall()
+
+
+ .. seealso::
+
+ :meth:`.orm.query.Query.cte` - ORM version of :meth:`.SelectBase.cte`.
+
+ """
+ return CTE(self, name=name, recursive=recursive)
+
+ @_generative
+ @util.deprecated('0.6',
+ message="``autocommit()`` is deprecated. Use "
+ ":meth:`.Executable.execution_options` with the "
+ "'autocommit' flag.")
+ def autocommit(self):
+ """return a new selectable with the 'autocommit' flag set to
+ True.
+ """
+
+ self._execution_options = \
+ self._execution_options.union({'autocommit': True})
+
+ def _generate(self):
+ """Override the default _generate() method to also clear out
+ exported collections."""
+
+ s = self.__class__.__new__(self.__class__)
+ s.__dict__ = self.__dict__.copy()
+ s._reset_exported()
+ return s
+
+ @property
+ def _from_objects(self):
+ return [self]
+
+class GenerativeSelect(SelectBase):
+ """Base class for SELECT statements where additional elements can be
+ added.
+
+ This serves as the base for :class:`.Select` and :class:`.CompoundSelect`
+ where elements such as ORDER BY, GROUP BY can be added and column rendering
+ can be controlled. Compare to :class:`.TextAsFrom`, which, while it
+ subclasses :class:`.SelectBase` and is also a SELECT construct, represents
+ a fixed textual string which cannot be altered at this level, only
+ wrapped as a subquery.
+
+ .. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to
+ provide functionality specific to :class:`.Select` and :class:`.CompoundSelect`
+ while allowing :class:`.SelectBase` to be used for other SELECT-like
+ objects, e.g. :class:`.TextAsFrom`.
+
+ """
+ _order_by_clause = ClauseList()
+ _group_by_clause = ClauseList()
+ _limit = None
+ _offset = None
+ _for_update_arg = None
+
+ def __init__(self,
+ use_labels=False,
+ for_update=False,
+ limit=None,
+ offset=None,
+ order_by=None,
+ group_by=None,
+ bind=None,
+ autocommit=None):
+ self.use_labels = use_labels
+
+ if for_update is not False:
+ self._for_update_arg = ForUpdateArg.parse_legacy_select(for_update)
+
+ if autocommit is not None:
+ util.warn_deprecated('autocommit on select() is '
+ 'deprecated. Use .execution_options(a'
+ 'utocommit=True)')
+ self._execution_options = \
+ self._execution_options.union(
+ {'autocommit': autocommit})
+ if limit is not None:
+ self._limit = util.asint(limit)
+ if offset is not None:
+ self._offset = util.asint(offset)
+ self._bind = bind
+
+ if order_by is not None:
+ self._order_by_clause = ClauseList(*util.to_list(order_by))
+ if group_by is not None:
+ self._group_by_clause = ClauseList(*util.to_list(group_by))
+
+ @property
+ def for_update(self):
+ """Provide legacy dialect support for the ``for_update`` attribute.
+ """
+ if self._for_update_arg is not None:
+ return self._for_update_arg.legacy_for_update_value
+ else:
+ return None
+
+ @for_update.setter
+ def for_update(self, value):
+ self._for_update_arg = ForUpdateArg.parse_legacy_select(value)
+
+ @_generative
+ def with_for_update(self, nowait=False, read=False, of=None):
+ """Specify a ``FOR UPDATE`` clause for this :class:`.GenerativeSelect`.
+
+ E.g.::
+
+ stmt = select([table]).with_for_update(nowait=True)
+
+ On a database like Postgresql or Oracle, the above would render a
+ statement like::
+
+ SELECT table.a, table.b FROM table FOR UPDATE NOWAIT
+
+ on other backends, the ``nowait`` option is ignored and instead
+ would produce::
+
+ SELECT table.a, table.b FROM table FOR UPDATE
+
+ When called with no arguments, the statement will render with
+ the suffix ``FOR UPDATE``. Additional arguments can then be
+ provided which allow for common database-specific
+ variants.
+
+ :param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle and
+ Postgresql dialects.
+
+ :param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL,
+ ``FOR SHARE`` on Postgresql. On Postgresql, when combined with
+ ``nowait``, will render ``FOR SHARE NOWAIT``.
+
+ :param of: SQL expression or list of SQL expression elements
+ (typically :class:`.Column` objects or a compatible expression) which
+ will render into a ``FOR UPDATE OF`` clause; supported by PostgreSQL
+ and Oracle. May render as a table or as a column depending on
+ backend.
+
+ .. versionadded:: 0.9.0
+
+ """
+ self._for_update_arg = ForUpdateArg(nowait=nowait, read=read, of=of)
+
+ @_generative
+ def apply_labels(self):
+ """return a new selectable with the 'use_labels' flag set to True.
+
+ This will result in column expressions being generated using labels
+ against their table name, such as "SELECT somecolumn AS
+ tablename_somecolumn". This allows selectables which contain multiple
+ FROM clauses to produce a unique set of column names regardless of
+ name conflicts among the individual FROM clauses.
+
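+ E.g., a sketch assuming tables ``user`` and ``address`` that each
+ have an ``id`` column (hypothetical names)::
+
+ stmt = select([user, address]).apply_labels()
+ # columns render as user.id AS user_id, address.id AS address_id, etc.
+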
+ """
+ self.use_labels = True
+
+ @_generative
+ def limit(self, limit):
+ """return a new selectable with the given LIMIT criterion
+ applied."""
+
+ self._limit = util.asint(limit)
+
+ @_generative
+ def offset(self, offset):
+ """return a new selectable with the given OFFSET criterion
+ applied."""
+
+ self._offset = util.asint(offset)
+
+ @_generative
+ def order_by(self, *clauses):
+ """return a new selectable with the given list of ORDER BY
+ criterion applied.
+
+ The criterion will be appended to any pre-existing ORDER BY
+ criterion.
+
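+ E.g., a sketch assuming a table ``mytable`` with a ``name``
+ column (hypothetical names)::
+
+ stmt = select([mytable]).order_by(mytable.c.name.desc())
+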
+ """
+
+ self.append_order_by(*clauses)
+
+ @_generative
+ def group_by(self, *clauses):
+ """return a new selectable with the given list of GROUP BY
+ criterion applied.
+
+ The criterion will be appended to any pre-existing GROUP BY
+ criterion.
+
+ """
+
+ self.append_group_by(*clauses)
+
+ def append_order_by(self, *clauses):
+ """Append the given ORDER BY criterion applied to this selectable.
+
+ The criterion will be appended to any pre-existing ORDER BY criterion.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.GenerativeSelect.order_by` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ if len(clauses) == 1 and clauses[0] is None:
+ self._order_by_clause = ClauseList()
+ else:
+ if getattr(self, '_order_by_clause', None) is not None:
+ clauses = list(self._order_by_clause) + list(clauses)
+ self._order_by_clause = ClauseList(*clauses)
+
+ def append_group_by(self, *clauses):
+ """Append the given GROUP BY criterion applied to this selectable.
+
+ The criterion will be appended to any pre-existing GROUP BY criterion.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.GenerativeSelect.group_by` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ if len(clauses) == 1 and clauses[0] is None:
+ self._group_by_clause = ClauseList()
+ else:
+ if getattr(self, '_group_by_clause', None) is not None:
+ clauses = list(self._group_by_clause) + list(clauses)
+ self._group_by_clause = ClauseList(*clauses)
+
+
+class CompoundSelect(GenerativeSelect):
+ """Forms the basis of ``UNION``, ``UNION ALL``, and other
+ SELECT-based set operations.
+
+
+ .. seealso::
+
+ :func:`.union`
+
+ :func:`.union_all`
+
+ :func:`.intersect`
+
+ :func:`.intersect_all`
+
+ :func:`.except_`
+
+ :func:`.except_all`
+
+ """
+
+ __visit_name__ = 'compound_select'
+
+ UNION = util.symbol('UNION')
+ UNION_ALL = util.symbol('UNION ALL')
+ EXCEPT = util.symbol('EXCEPT')
+ EXCEPT_ALL = util.symbol('EXCEPT ALL')
+ INTERSECT = util.symbol('INTERSECT')
+ INTERSECT_ALL = util.symbol('INTERSECT ALL')
+
+ def __init__(self, keyword, *selects, **kwargs):
+ self._auto_correlate = kwargs.pop('correlate', False)
+ self.keyword = keyword
+ self.selects = []
+
+ numcols = None
+
+ # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
+ for n, s in enumerate(selects):
+ s = _clause_element_as_expr(s)
+
+ if not numcols:
+ numcols = len(s.c)
+ elif len(s.c) != numcols:
+ raise exc.ArgumentError('All selectables passed to '
+ 'CompoundSelect must have identical numbers of '
+ 'columns; select #%d has %d columns, select '
+ '#%d has %d' % (1, len(self.selects[0].c), n
+ + 1, len(s.c)))
+
+ self.selects.append(s.self_group(self))
+
+ GenerativeSelect.__init__(self, **kwargs)
+
+ @classmethod
+ def _create_union(cls, *selects, **kwargs):
+ """Return a ``UNION`` of multiple selectables.
+
+ The returned object is an instance of
+ :class:`.CompoundSelect`.
+
+ A similar :func:`union()` method is available on all
+ :class:`.FromClause` subclasses.
+
+ \*selects
+ a list of :class:`.Select` instances.
+
+ \**kwargs
+ available keyword arguments are the same as those of
+ :func:`select`.
+
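+ E.g., a sketch assuming two :class:`.Select` objects ``s1`` and
+ ``s2`` with the same number of columns::
+
+ from sqlalchemy import union
+ u = union(s1, s2)
+ stmt = u.alias().select() # usable as a subquery as well
+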
+ """
+ return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
+
+ @classmethod
+ def _create_union_all(cls, *selects, **kwargs):
+ """Return a ``UNION ALL`` of multiple selectables.
+
+ The returned object is an instance of
+ :class:`.CompoundSelect`.
+
+ A similar :func:`union_all()` method is available on all
+ :class:`.FromClause` subclasses.
+
+ \*selects
+ a list of :class:`.Select` instances.
+
+ \**kwargs
+ available keyword arguments are the same as those of
+ :func:`select`.
+
+ """
+ return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
+
+
+ @classmethod
+ def _create_except(cls, *selects, **kwargs):
+ """Return an ``EXCEPT`` of multiple selectables.
+
+ The returned object is an instance of
+ :class:`.CompoundSelect`.
+
+ \*selects
+ a list of :class:`.Select` instances.
+
+ \**kwargs
+ available keyword arguments are the same as those of
+ :func:`select`.
+
+ """
+ return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
+
+
+ @classmethod
+ def _create_except_all(cls, *selects, **kwargs):
+ """Return an ``EXCEPT ALL`` of multiple selectables.
+
+ The returned object is an instance of
+ :class:`.CompoundSelect`.
+
+ \*selects
+ a list of :class:`.Select` instances.
+
+ \**kwargs
+ available keyword arguments are the same as those of
+ :func:`select`.
+
+ """
+ return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
+
+
+ @classmethod
+ def _create_intersect(cls, *selects, **kwargs):
+ """Return an ``INTERSECT`` of multiple selectables.
+
+ The returned object is an instance of
+ :class:`.CompoundSelect`.
+
+ \*selects
+ a list of :class:`.Select` instances.
+
+ \**kwargs
+ available keyword arguments are the same as those of
+ :func:`select`.
+
+ """
+ return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
+
+
+ @classmethod
+ def _create_intersect_all(cls, *selects, **kwargs):
+ """Return an ``INTERSECT ALL`` of multiple selectables.
+
+ The returned object is an instance of
+ :class:`.CompoundSelect`.
+
+ \*selects
+ a list of :class:`.Select` instances.
+
+ \**kwargs
+ available keyword arguments are the same as those of
+ :func:`select`.
+
+ """
+ return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
+
+
+ def _scalar_type(self):
+ return self.selects[0]._scalar_type()
+
+ def self_group(self, against=None):
+ return FromGrouping(self)
+
+ def is_derived_from(self, fromclause):
+ for s in self.selects:
+ if s.is_derived_from(fromclause):
+ return True
+ return False
+
+ def _populate_column_collection(self):
+ for cols in zip(*[s.c for s in self.selects]):
+
+ # this is a slightly hacky thing - the union exports a
+ # column that resembles just that of the *first* selectable.
+ # to get at a "composite" column, particularly foreign keys,
+ # you have to dig through the proxies collection which we
+ # generate below. We may want to improve upon this, such as
+ # perhaps _make_proxy can accept a list of other columns
+ # that are "shared" - schema.column can then copy all the
+ # ForeignKeys in. this would allow the union() to have all
+ # those fks too.
+
+ proxy = cols[0]._make_proxy(self,
+ name=cols[0]._label if self.use_labels else None,
+ key=cols[0]._key_label if self.use_labels else None)
+
+ # hand-construct the "_proxies" collection to include all
+ # derived columns place a 'weight' annotation corresponding
+ # to how low in the list of select()s the column occurs, so
+ # that the corresponding_column() operation can resolve
+ # conflicts
+
+ proxy._proxies = [c._annotate({'weight': i + 1}) for (i,
+ c) in enumerate(cols)]
+
+ def _refresh_for_new_column(self, column):
+ for s in self.selects:
+ s._refresh_for_new_column(column)
+
+ if not self._cols_populated:
+ return None
+
+ raise NotImplementedError("CompoundSelect constructs don't support "
+ "addition of columns to underlying selectables")
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self._reset_exported()
+ self.selects = [clone(s, **kw) for s in self.selects]
+ if hasattr(self, '_col_map'):
+ del self._col_map
+ for attr in ('_order_by_clause', '_group_by_clause', '_for_update_arg'):
+ if getattr(self, attr) is not None:
+ setattr(self, attr, clone(getattr(self, attr), **kw))
+
+ def get_children(self, column_collections=True, **kwargs):
+ return (column_collections and list(self.c) or []) \
+ + [self._order_by_clause, self._group_by_clause] \
+ + list(self.selects)
+
+ def bind(self):
+ if self._bind:
+ return self._bind
+ for s in self.selects:
+ e = s.bind
+ if e:
+ return e
+ else:
+ return None
+
+ def _set_bind(self, bind):
+ self._bind = bind
+ bind = property(bind, _set_bind)
+
+
+class HasPrefixes(object):
+ _prefixes = ()
+
+ @_generative
+ def prefix_with(self, *expr, **kw):
+ """Add one or more expressions following the statement keyword, i.e.
+ SELECT, INSERT, UPDATE, or DELETE. Generative.
+
+ This is used to support backend-specific prefix keywords such as those
+ provided by MySQL.
+
+ E.g.::
+
+ stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
+
+ Multiple prefixes can be specified by multiple calls
+ to :meth:`.prefix_with`.
+
+ :param \*expr: textual or :class:`.ClauseElement` construct which
+ will be rendered following the SELECT, INSERT, UPDATE, or DELETE
+ keyword.
+ :param \**kw: A single keyword 'dialect' is accepted. This is an
+ optional string dialect name which will
+ limit rendering of this prefix to only that dialect.
+
+ """
+ dialect = kw.pop('dialect', None)
+ if kw:
+ raise exc.ArgumentError("Unsupported argument(s): %s" %
+ ",".join(kw))
+ self._setup_prefixes(expr, dialect)
+
+ def _setup_prefixes(self, prefixes, dialect=None):
+ self._prefixes = self._prefixes + tuple(
+ [(_literal_as_text(p), dialect) for p in prefixes])
+
+
+
+class Select(HasPrefixes, GenerativeSelect):
+ """Represents a ``SELECT`` statement.
+
+ """
+
+ __visit_name__ = 'select'
+
+ _prefixes = ()
+ _hints = util.immutabledict()
+ _distinct = False
+ _from_cloned = None
+ _correlate = ()
+ _correlate_except = None
+ _memoized_property = SelectBase._memoized_property
+
+ def __init__(self,
+ columns=None,
+ whereclause=None,
+ from_obj=None,
+ distinct=False,
+ having=None,
+ correlate=True,
+ prefixes=None,
+ **kwargs):
+ """Construct a new :class:`.Select`.
+
+ Similar functionality is also available via the :meth:`.FromClause.select`
+ method on any :class:`.FromClause`.
+
+ All arguments which accept :class:`.ClauseElement` arguments also accept
+ string arguments, which will be converted as appropriate into
+ either :func:`text()` or :func:`literal_column()` constructs.
+
+ .. seealso::
+
+ :ref:`coretutorial_selecting` - Core Tutorial description of
+ :func:`.select`.
+
+ :param columns:
+ A list of :class:`.ClauseElement` objects, typically
+ :class:`.ColumnElement` objects or subclasses, which will form the
+ columns clause of the resulting statement. For all members which are
+ instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
+ members of the :class:`.Selectable` will be added individually to the
+ columns clause. For example, specifying a
+ :class:`~sqlalchemy.schema.Table` instance will result in all the
+ contained :class:`~sqlalchemy.schema.Column` objects within to be added
+ to the columns clause.
+
+ This argument is not present on the form of :func:`select()`
+ available on :class:`~sqlalchemy.schema.Table`.
+
+ :param whereclause:
+ A :class:`.ClauseElement` expression which will be used to form the
+ ``WHERE`` clause.
+
+ :param from_obj:
+ A list of :class:`.ClauseElement` objects which will be added to the
+ ``FROM`` clause of the resulting statement. Note that "from" objects are
+ automatically located within the columns and whereclause ClauseElements.
+ Use this parameter to explicitly specify "from" objects which are not
+ automatically locatable. This could include
+ :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
+ or :class:`.Join` objects whose presence will supersede that of the
+ :class:`~sqlalchemy.schema.Table` objects already located in the other
+ clauses.
+
+ :param autocommit:
+ Deprecated. Use .execution_options(autocommit=<True|False>)
+ to set the autocommit option.
+
+ :param bind=None:
+ an :class:`~.Engine` or :class:`~.Connection` instance
+ to which the
+ resulting :class:`.Select` object will be bound. The :class:`.Select`
+ object will otherwise automatically bind to whatever
+ :class:`~.base.Connectable` instances can be located within its contained
+ :class:`.ClauseElement` members.
+
+ :param correlate=True:
+ indicates that this :class:`.Select` object should have its
+ contained :class:`.FromClause` elements "correlated" to an enclosing
+ :class:`.Select` object. This means that any :class:`.ClauseElement`
+ instance within the "froms" collection of this :class:`.Select`
+ which is also present in the "froms" collection of an
+ enclosing select will not be rendered in the ``FROM`` clause
+ of this select statement.
+
+ :param distinct=False:
+ when ``True``, applies a ``DISTINCT`` qualifier to the columns
+ clause of the resulting statement.
+
+ The boolean argument may also be a column expression or list
+ of column expressions - this is a special calling form which
+ is understood by the Postgresql dialect to render the
+ ``DISTINCT ON (<columns>)`` syntax.
+
+ ``distinct`` is also available via the :meth:`~.Select.distinct`
+ generative method.
+
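+ E.g., a sketch of the Postgresql ``DISTINCT ON`` form, assuming a
+ table ``mytable`` with an ``id`` column (hypothetical names)::
+
+ stmt = select([mytable], distinct=[mytable.c.id])
+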
+ :param for_update=False:
+ when ``True``, applies ``FOR UPDATE`` to the end of the
+ resulting statement.
+
+ .. deprecated:: 0.9.0 - use :meth:`.GenerativeSelect.with_for_update`
+ to specify the structure of the ``FOR UPDATE`` clause.
+
+ ``for_update`` accepts various string values interpreted by
+ specific backends, including:
+
+ * ``"read"`` - on MySQL, translates to ``LOCK IN SHARE MODE``;
+ on Postgresql, translates to ``FOR SHARE``.
+ * ``"nowait"`` - on Postgresql and Oracle, translates to
+ ``FOR UPDATE NOWAIT``.
+ * ``"read_nowait"`` - on Postgresql, translates to
+ ``FOR SHARE NOWAIT``.
+
+ .. seealso::
+
+ :meth:`.GenerativeSelect.with_for_update` - improved API for
+ specifying the ``FOR UPDATE`` clause.
+
+ :param group_by:
+ a list of :class:`.ClauseElement` objects which will comprise the
+ ``GROUP BY`` clause of the resulting select.
+
+ :param having:
+ a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
+ of the resulting select when ``GROUP BY`` is used.
+
+ :param limit=None:
+ a numerical value which usually compiles to a ``LIMIT``
+ expression in the resulting select. Databases that don't
+ support ``LIMIT`` will attempt to provide similar
+ functionality.
+
+ :param offset=None:
+ a numeric value which usually compiles to an ``OFFSET``
+ expression in the resulting select. Databases that don't
+ support ``OFFSET`` will attempt to provide similar
+ functionality.
+
+ :param order_by:
+ a scalar or list of :class:`.ClauseElement` objects which will
+ comprise the ``ORDER BY`` clause of the resulting select.
+
+ :param use_labels=False:
+ when ``True``, the statement will be generated using labels
+ for each column in the columns clause, which qualify each
+ column with its parent table's (or alias's) name so that name
+ conflicts between columns in different tables don't occur.
+ The format of the label is <tablename>_<column>. The "c"
+ collection of the resulting :class:`.Select` object will use these
+ names as well for targeting column members.
+
+ use_labels is also available via the :meth:`~.GenerativeSelect.apply_labels`
+ generative method.
+
+ """
+ self._auto_correlate = correlate
+ if distinct is not False:
+ if distinct is True:
+ self._distinct = True
+ else:
+ self._distinct = [
+ _literal_as_text(e)
+ for e in util.to_list(distinct)
+ ]
+
+ if from_obj is not None:
+ self._from_obj = util.OrderedSet(
+ _interpret_as_from(f)
+ for f in util.to_list(from_obj))
+ else:
+ self._from_obj = util.OrderedSet()
+
+ try:
+ cols_present = bool(columns)
+ except TypeError:
+ raise exc.ArgumentError("columns argument to select() must "
+ "be a Python list or other iterable")
+
+ if cols_present:
+ self._raw_columns = []
+ for c in columns:
+ c = _interpret_as_column_or_from(c)
+ if isinstance(c, ScalarSelect):
+ c = c.self_group(against=operators.comma_op)
+ self._raw_columns.append(c)
+ else:
+ self._raw_columns = []
+
+ if whereclause is not None:
+ self._whereclause = _literal_as_text(whereclause)
+ else:
+ self._whereclause = None
+
+ if having is not None:
+ self._having = _literal_as_text(having)
+ else:
+ self._having = None
+
+ if prefixes:
+ self._setup_prefixes(prefixes)
+
+ GenerativeSelect.__init__(self, **kwargs)
+
+ @property
+ def _froms(self):
+ # would love to cache this,
+ # but there's just enough edge cases, particularly now that
+ # declarative encourages construction of SQL expressions
+ # without tables present, to just regen this each time.
+ froms = []
+ seen = set()
+ translate = self._from_cloned
+
+ def add(items):
+ for item in items:
+ if item is self:
+ raise exc.InvalidRequestError(
+ "select() construct refers to itself as a FROM")
+ if translate and item in translate:
+ item = translate[item]
+ if not seen.intersection(item._cloned_set):
+ froms.append(item)
+ seen.update(item._cloned_set)
+
+ add(_from_objects(*self._raw_columns))
+ if self._whereclause is not None:
+ add(_from_objects(self._whereclause))
+ add(self._from_obj)
+
+ return froms
+
+ def _get_display_froms(self, explicit_correlate_froms=None,
+ implicit_correlate_froms=None):
+ """Return the full list of 'from' clauses to be displayed.
+
+ Takes into account a set of existing froms which may be
+ rendered in the FROM clause of enclosing selects; this Select
+ may want to leave those absent if it is automatically
+ correlating.
+
+ """
+ froms = self._froms
+
+ toremove = set(itertools.chain(*[
+ _expand_cloned(f._hide_froms)
+ for f in froms]))
+ if toremove:
+ # if we're maintaining clones of froms,
+ # add the copies out to the toremove list. only include
+ # clones that are lexical equivalents.
+ if self._from_cloned:
+ toremove.update(
+ self._from_cloned[f] for f in
+ toremove.intersection(self._from_cloned)
+ if self._from_cloned[f]._is_lexical_equivalent(f)
+ )
+ # filter out to FROM clauses not in the list,
+ # using a list to maintain ordering
+ froms = [f for f in froms if f not in toremove]
+
+ if self._correlate:
+ to_correlate = self._correlate
+ if to_correlate:
+ froms = [
+ f for f in froms if f not in
+ _cloned_intersection(
+ _cloned_intersection(froms, explicit_correlate_froms or ()),
+ to_correlate
+ )
+ ]
+
+ if self._correlate_except is not None:
+
+ froms = [
+ f for f in froms if f not in
+ _cloned_difference(
+ _cloned_intersection(froms, explicit_correlate_froms or ()),
+ self._correlate_except
+ )
+ ]
+
+ if self._auto_correlate and \
+ implicit_correlate_froms and \
+ len(froms) > 1:
+
+ froms = [
+ f for f in froms if f not in
+ _cloned_intersection(froms, implicit_correlate_froms)
+ ]
+
+ if not len(froms):
+ raise exc.InvalidRequestError("Select statement '%s"
+ "' returned no FROM clauses due to "
+ "auto-correlation; specify "
+ "correlate(<tables>) to control "
+ "correlation manually." % self)
+
+ return froms
+
+ def _scalar_type(self):
+ elem = self._raw_columns[0]
+ cols = list(elem._select_iterable)
+ return cols[0].type
+
+ @property
+ def froms(self):
+ """Return the displayed list of FromClause elements."""
+
+ return self._get_display_froms()
+
+ @_generative
+ def with_hint(self, selectable, text, dialect_name='*'):
+ """Add an indexing hint for the given selectable to this
+ :class:`.Select`.
+
+ The text of the hint is rendered in the appropriate
+ location for the database backend in use, relative
+ to the given :class:`.Table` or :class:`.Alias` passed as the
+ ``selectable`` argument. The dialect implementation
+ typically uses Python string substitution syntax
+ with the token ``%(name)s`` to render the name of
+ the table or alias. E.g. when using Oracle, the
+ following::
+
+ select([mytable]).\\
+ with_hint(mytable, "+ index(%(name)s ix_mytable)")
+
+ Would render SQL as::
+
+ select /*+ index(mytable ix_mytable) */ ... from mytable
+
+ The ``dialect_name`` option will limit the rendering of a particular
+ hint to a particular backend. Such as, to add hints for both Oracle
+ and Sybase simultaneously::
+
+ select([mytable]).\\
+ with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
+ with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
+
+ """
+ self._hints = self._hints.union(
+ {(selectable, dialect_name): text})
+
+ @property
+ def type(self):
+ raise exc.InvalidRequestError("Select objects don't have a type. "
+ "Call as_scalar() on this Select object "
+ "to return a 'scalar' version of this Select.")
+
+ @_memoized_property.method
+ def locate_all_froms(self):
+        """return a collection of all FromClause elements referenced by this
+        Select.
+
+ This set is a superset of that returned by the ``froms`` property,
+ which is specifically for those FromClause elements that would
+ actually be rendered.
+
+ """
+ froms = self._froms
+ return froms + list(_from_objects(*froms))
+
+ @property
+ def inner_columns(self):
+ """an iterator of all ColumnElement expressions which would
+ be rendered into the columns clause of the resulting SELECT statement.
+
+ """
+ return _select_iterables(self._raw_columns)
+
+ def is_derived_from(self, fromclause):
+ if self in fromclause._cloned_set:
+ return True
+
+ for f in self.locate_all_froms():
+ if f.is_derived_from(fromclause):
+ return True
+ return False
+
+ def _copy_internals(self, clone=_clone, **kw):
+
+ # Select() object has been cloned and probably adapted by the
+ # given clone function. Apply the cloning function to internal
+ # objects
+
+ # 1. keep a dictionary of the froms we've cloned, and what
+ # they've become. This is consulted later when we derive
+ # additional froms from "whereclause" and the columns clause,
+ # which may still reference the uncloned parent table.
+ # as of 0.7.4 we also put the current version of _froms, which
+ # gets cleared on each generation. previously we were "baking"
+ # _froms into self._from_obj.
+ self._from_cloned = from_cloned = dict((f, clone(f, **kw))
+ for f in self._from_obj.union(self._froms))
+
+        # 2. update persistent _from_obj with the cloned versions.
+ self._from_obj = util.OrderedSet(from_cloned[f] for f in
+ self._from_obj)
+
+ # the _correlate collection is done separately, what can happen
+ # here is the same item is _correlate as in _from_obj but the
+ # _correlate version has an annotation on it - (specifically
+ # RelationshipProperty.Comparator._criterion_exists() does
+        # this). Also keep _correlate liberally open with its previous
+ # contents, as this set is used for matching, not rendering.
+ self._correlate = set(clone(f) for f in
+ self._correlate).union(self._correlate)
+
+        # 3. clone other things. The difficulty here is that Column
+ # objects are not actually cloned, and refer to their original
+ # .table, resulting in the wrong "from" parent after a clone
+        # operation. Hence _from_cloned and _from_obj supersede what is
+ # present here.
+ self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
+ for attr in '_whereclause', '_having', '_order_by_clause', \
+ '_group_by_clause', '_for_update_arg':
+ if getattr(self, attr) is not None:
+ setattr(self, attr, clone(getattr(self, attr), **kw))
+
+ # erase exported column list, _froms collection,
+ # etc.
+ self._reset_exported()
+
+ def get_children(self, column_collections=True, **kwargs):
+ """return child elements as per the ClauseElement specification."""
+
+ return (column_collections and list(self.columns) or []) + \
+ self._raw_columns + list(self._froms) + \
+ [x for x in
+ (self._whereclause, self._having,
+ self._order_by_clause, self._group_by_clause)
+ if x is not None]
+
+ @_generative
+ def column(self, column):
+ """return a new select() construct with the given column expression
+ added to its columns clause.
+
+ """
+ self.append_column(column)
+
+ @util.dependencies("sqlalchemy.sql.util")
+ def reduce_columns(self, sqlutil, only_synonyms=True):
+        """Return a new :func:`.select` construct with redundantly
+ named, equivalently-valued columns removed from the columns clause.
+
+ "Redundant" here means two columns where one refers to the
+ other either based on foreign key, or via a simple equality
+ comparison in the WHERE clause of the statement. The primary purpose
+ of this method is to automatically construct a select statement
+ with all uniquely-named columns, without the need to use
+ table-qualified labels as :meth:`.apply_labels` does.
+
+ When columns are omitted based on foreign key, the referred-to
+ column is the one that's kept. When columns are omitted based on
+        WHERE equivalence, the first column in the columns clause is the
+ one that's kept.
+
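+        For example, a sketch assuming hypothetical tables ``t1`` and
+        ``t2``, each with an ``id`` column::
+
+            stmt = select([t1, t2]).\\
+                where(t1.c.id == t2.c.id).\\
+                reduce_columns()
+
+        Here ``t2.c.id`` would be omitted from the columns clause, as the
+        WHERE clause establishes it as equivalent to ``t1.c.id``.
+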
+ :param only_synonyms: when True, limit the removal of columns
+ to those which have the same name as the equivalent. Otherwise,
+ all columns that are equivalent to another are removed.
+
+ .. versionadded:: 0.8
+
+ """
+ return self.with_only_columns(
+ sqlutil.reduce_columns(
+ self.inner_columns,
+ only_synonyms=only_synonyms,
+ *(self._whereclause, ) + tuple(self._from_obj)
+ )
+ )
+
+ @_generative
+ def with_only_columns(self, columns):
+ """Return a new :func:`.select` construct with its columns
+ clause replaced with the given columns.
+
+ .. versionchanged:: 0.7.3
+ Due to a bug fix, this method has a slight
+ behavioral change as of version 0.7.3.
+ Prior to version 0.7.3, the FROM clause of
+ a :func:`.select` was calculated upfront and as new columns
+ were added; in 0.7.3 and later it's calculated
+ at compile time, fixing an issue regarding late binding
+ of columns to parent tables. This changes the behavior of
+ :meth:`.Select.with_only_columns` in that FROM clauses no
+ longer represented in the new list are dropped,
+ but this behavior is more consistent in
+ that the FROM clauses are consistently derived from the
+ current columns clause. The original intent of this method
+ is to allow trimming of the existing columns list to be fewer
+ columns than originally present; the use case of replacing
+ the columns list with an entirely different one hadn't
+ been anticipated until 0.7.3 was released; the usage
+ guidelines below illustrate how this should be done.
+
+        This method is exactly equivalent to calling the original
+        :func:`.select` with the given columns
+        clause. I.e. a statement::
+
+ s = select([table1.c.a, table1.c.b])
+ s = s.with_only_columns([table1.c.b])
+
+ should be exactly equivalent to::
+
+ s = select([table1.c.b])
+
+ This means that FROM clauses which are only derived
+ from the column list will be discarded if the new column
+ list no longer contains that FROM::
+
+ >>> table1 = table('t1', column('a'), column('b'))
+ >>> table2 = table('t2', column('a'), column('b'))
+ >>> s1 = select([table1.c.a, table2.c.b])
+ >>> print s1
+ SELECT t1.a, t2.b FROM t1, t2
+ >>> s2 = s1.with_only_columns([table2.c.b])
+ >>> print s2
+ SELECT t2.b FROM t1
+
+ The preferred way to maintain a specific FROM clause
+ in the construct, assuming it won't be represented anywhere
+ else (i.e. not in the WHERE clause, etc.) is to set it using
+ :meth:`.Select.select_from`::
+
+ >>> s1 = select([table1.c.a, table2.c.b]).\\
+ ... select_from(table1.join(table2,
+ ... table1.c.a==table2.c.a))
+ >>> s2 = s1.with_only_columns([table2.c.b])
+ >>> print s2
+ SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a
+
+ Care should also be taken to use the correct
+ set of column objects passed to :meth:`.Select.with_only_columns`.
+ Since the method is essentially equivalent to calling the
+ :func:`.select` construct in the first place with the given
+ columns, the columns passed to :meth:`.Select.with_only_columns`
+ should usually be a subset of those which were passed
+ to the :func:`.select` construct, not those which are available
+ from the ``.c`` collection of that :func:`.select`. That
+ is::
+
+ s = select([table1.c.a, table1.c.b]).select_from(table1)
+ s = s.with_only_columns([table1.c.b])
+
+ and **not**::
+
+ # usually incorrect
+ s = s.with_only_columns([s.c.b])
+
+ The latter would produce the SQL::
+
+ SELECT b
+ FROM (SELECT t1.a AS a, t1.b AS b
+ FROM t1), t1
+
+        This is because the :func:`.select` construct is essentially
+        being asked to select both from ``table1`` as well as itself.
+
+ """
+ self._reset_exported()
+ rc = []
+ for c in columns:
+ c = _interpret_as_column_or_from(c)
+ if isinstance(c, ScalarSelect):
+ c = c.self_group(against=operators.comma_op)
+ rc.append(c)
+ self._raw_columns = rc
+
+ @_generative
+ def where(self, whereclause):
+ """return a new select() construct with the given expression added to
+ its WHERE clause, joined to the existing clause via AND, if any.
+
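+        E.g., a sketch assuming a hypothetical table ``mytable``::
+
+            select([mytable]).\\
+                where(mytable.c.id > 5).\\
+                where(mytable.c.id < 10)
+
+        The two criteria are joined via AND in the rendered WHERE clause.
+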
+ """
+
+ self.append_whereclause(whereclause)
+
+ @_generative
+ def having(self, having):
+ """return a new select() construct with the given expression added to
+ its HAVING clause, joined to the existing clause via AND, if any.
+
+ """
+ self.append_having(having)
+
+ @_generative
+ def distinct(self, *expr):
+ """Return a new select() construct which will apply DISTINCT to its
+ columns clause.
+
+ :param \*expr: optional column expressions. When present,
+          the Postgresql dialect will render a ``DISTINCT ON (<expressions>)``
+ construct.
+
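+        E.g., a sketch assuming a hypothetical table ``mytable``::
+
+            select([mytable]).distinct(mytable.c.x)
+
+        which on Postgresql might render as
+        ``SELECT DISTINCT ON (mytable.x) ...``.
+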
+ """
+ if expr:
+ expr = [_literal_as_text(e) for e in expr]
+ if isinstance(self._distinct, list):
+ self._distinct = self._distinct + expr
+ else:
+ self._distinct = expr
+ else:
+ self._distinct = True
+
+ @_generative
+ def select_from(self, fromclause):
+ """return a new :func:`.select` construct with the
+ given FROM expression
+ merged into its list of FROM objects.
+
+ E.g.::
+
+ table1 = table('t1', column('a'))
+ table2 = table('t2', column('b'))
+ s = select([table1.c.a]).\\
+ select_from(
+ table1.join(table2, table1.c.a==table2.c.b)
+ )
+
+ The "from" list is a unique set on the identity of each element,
+ so adding an already present :class:`.Table` or other selectable
+ will have no effect. Passing a :class:`.Join` that refers
+ to an already present :class:`.Table` or other selectable will have
+ the effect of concealing the presence of that selectable as
+ an individual element in the rendered FROM list, instead
+ rendering it into a JOIN clause.
+
+ While the typical purpose of :meth:`.Select.select_from` is to
+ replace the default, derived FROM clause with a join, it can
+ also be called with individual table elements, multiple times
+ if desired, in the case that the FROM clause cannot be fully
+ derived from the columns clause::
+
+ select([func.count('*')]).select_from(table1)
+
+ """
+ self.append_from(fromclause)
+
+ @_generative
+ def correlate(self, *fromclauses):
+ """return a new :class:`.Select` which will correlate the given FROM
+ clauses to that of an enclosing :class:`.Select`.
+
+ Calling this method turns off the :class:`.Select` object's
+ default behavior of "auto-correlation". Normally, FROM elements
+ which appear in a :class:`.Select` that encloses this one via
+ its :term:`WHERE clause`, ORDER BY, HAVING or
+ :term:`columns clause` will be omitted from this :class:`.Select`
+ object's :term:`FROM clause`.
+ Setting an explicit correlation collection using the
+ :meth:`.Select.correlate` method provides a fixed list of FROM objects
+ that can potentially take place in this process.
+
+ When :meth:`.Select.correlate` is used to apply specific FROM clauses
+ for correlation, the FROM elements become candidates for
+ correlation regardless of how deeply nested this :class:`.Select`
+ object is, relative to an enclosing :class:`.Select` which refers to
+ the same FROM object. This is in contrast to the behavior of
+ "auto-correlation" which only correlates to an immediate enclosing
+ :class:`.Select`. Multi-level correlation ensures that the link
+ between enclosed and enclosing :class:`.Select` is always via
+ at least one WHERE/ORDER BY/HAVING/columns clause in order for
+ correlation to take place.
+
+ If ``None`` is passed, the :class:`.Select` object will correlate
+ none of its FROM entries, and all will render unconditionally
+ in the local FROM clause.
+
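+        For example, a sketch assuming hypothetical tables ``stores`` and
+        ``orders``, along with the usual ``func`` construct::
+
+            subq = select([func.count('*')]).\\
+                where(orders.c.store_id == stores.c.store_id).\\
+                correlate(stores)
+
+        ``stores`` would then be omitted from the subquery's FROM clause
+        when the subquery is embedded in an enclosing select against
+        ``stores``.
+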
+ :param \*fromclauses: a list of one or more :class:`.FromClause`
+ constructs, or other compatible constructs (i.e. ORM-mapped
+ classes) to become part of the correlate collection.
+
+ .. versionchanged:: 0.8.0 ORM-mapped classes are accepted by
+ :meth:`.Select.correlate`.
+
+ .. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
+ longer unconditionally removes entries from the FROM clause; instead,
+ the candidate FROM entries must also be matched by a FROM entry
+ located in an enclosing :class:`.Select`, which ultimately encloses
+ this one as present in the WHERE clause, ORDER BY clause, HAVING
+ clause, or columns clause of an enclosing :meth:`.Select`.
+
+ .. versionchanged:: 0.8.2 explicit correlation takes place
+ via any level of nesting of :class:`.Select` objects; in previous
+ 0.8 versions, correlation would only occur relative to the immediate
+ enclosing :class:`.Select` construct.
+
+ .. seealso::
+
+ :meth:`.Select.correlate_except`
+
+ :ref:`correlated_subqueries`
+
+ """
+ self._auto_correlate = False
+ if fromclauses and fromclauses[0] is None:
+ self._correlate = ()
+ else:
+ self._correlate = set(self._correlate).union(
+ _interpret_as_from(f) for f in fromclauses)
+
+ @_generative
+ def correlate_except(self, *fromclauses):
+ """return a new :class:`.Select` which will omit the given FROM
+ clauses from the auto-correlation process.
+
+ Calling :meth:`.Select.correlate_except` turns off the
+ :class:`.Select` object's default behavior of
+ "auto-correlation" for the given FROM elements. An element
+ specified here will unconditionally appear in the FROM list, while
+ all other FROM elements remain subject to normal auto-correlation
+ behaviors.
+
+ .. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except`
+ method was improved to fully prevent FROM clauses specified here
+ from being omitted from the immediate FROM clause of this
+ :class:`.Select`.
+
+ If ``None`` is passed, the :class:`.Select` object will correlate
+ all of its FROM entries.
+
+ .. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will
+ correctly auto-correlate all FROM clauses.
+
+ :param \*fromclauses: a list of one or more :class:`.FromClause`
+ constructs, or other compatible constructs (i.e. ORM-mapped
+ classes) to become part of the correlate-exception collection.
+
+ .. seealso::
+
+ :meth:`.Select.correlate`
+
+ :ref:`correlated_subqueries`
+
+ """
+
+ self._auto_correlate = False
+ if fromclauses and fromclauses[0] is None:
+ self._correlate_except = ()
+ else:
+ self._correlate_except = set(self._correlate_except or ()).union(
+ _interpret_as_from(f) for f in fromclauses)
+
+ def append_correlation(self, fromclause):
+ """append the given correlation expression to this select()
+ construct.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.correlate` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+
+ self._auto_correlate = False
+ self._correlate = set(self._correlate).union(
+ _interpret_as_from(f) for f in fromclause)
+
+ def append_column(self, column):
+ """append the given column expression to the columns clause of this
+ select() construct.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.column` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ self._reset_exported()
+ column = _interpret_as_column_or_from(column)
+
+ if isinstance(column, ScalarSelect):
+ column = column.self_group(against=operators.comma_op)
+
+ self._raw_columns = self._raw_columns + [column]
+
+ def append_prefix(self, clause):
+ """append the given columns clause prefix expression to this select()
+ construct.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.prefix_with` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ clause = _literal_as_text(clause)
+ self._prefixes = self._prefixes + (clause,)
+
+ def append_whereclause(self, whereclause):
+ """append the given expression to this select() construct's WHERE
+ criterion.
+
+ The expression will be joined to existing WHERE criterion via AND.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.where` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+
+ self._reset_exported()
+ self._whereclause = and_(True_._ifnone(self._whereclause), whereclause)
+
+ def append_having(self, having):
+ """append the given expression to this select() construct's HAVING
+ criterion.
+
+ The expression will be joined to existing HAVING criterion via AND.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.having` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ self._reset_exported()
+ self._having = and_(True_._ifnone(self._having), having)
+
+ def append_from(self, fromclause):
+ """append the given FromClause expression to this select() construct's
+ FROM clause.
+
+ This is an **in-place** mutation method; the
+ :meth:`~.Select.select_from` method is preferred, as it provides standard
+ :term:`method chaining`.
+
+ """
+ self._reset_exported()
+ fromclause = _interpret_as_from(fromclause)
+ self._from_obj = self._from_obj.union([fromclause])
+
+
+ @_memoized_property
+ def _columns_plus_names(self):
+ if self.use_labels:
+ names = set()
+ def name_for_col(c):
+ if c._label is None:
+ return (None, c)
+ name = c._label
+ if name in names:
+ name = c.anon_label
+ else:
+ names.add(name)
+ return name, c
+
+ return [
+ name_for_col(c)
+ for c in util.unique_list(_select_iterables(self._raw_columns))
+ ]
+ else:
+ return [
+ (None, c)
+ for c in util.unique_list(_select_iterables(self._raw_columns))
+ ]
+
+ def _populate_column_collection(self):
+ for name, c in self._columns_plus_names:
+ if not hasattr(c, '_make_proxy'):
+ continue
+ if name is None:
+ key = None
+ elif self.use_labels:
+ key = c._key_label
+ if key is not None and key in self.c:
+ key = c.anon_label
+ else:
+ key = None
+
+ c._make_proxy(self, key=key,
+ name=name,
+ name_is_truncatable=True)
+
+ def _refresh_for_new_column(self, column):
+ for fromclause in self._froms:
+ col = fromclause._refresh_for_new_column(column)
+ if col is not None:
+ if col in self.inner_columns and self._cols_populated:
+ our_label = col._key_label if self.use_labels else col.key
+ if our_label not in self.c:
+ return col._make_proxy(self,
+ name=col._label if self.use_labels else None,
+ key=col._key_label if self.use_labels else None,
+ name_is_truncatable=True)
+ return None
+ return None
+
+ def self_group(self, against=None):
+ """return a 'grouping' construct as per the ClauseElement
+ specification.
+
+ This produces an element that can be embedded in an expression. Note
+ that this method is called automatically as needed when constructing
+ expressions and should not require explicit use.
+
+ """
+ if isinstance(against, CompoundSelect):
+ return self
+ return FromGrouping(self)
+
+ def union(self, other, **kwargs):
+ """return a SQL UNION of this select() construct against the given
+ selectable."""
+
+ return CompoundSelect._create_union(self, other, **kwargs)
+
+ def union_all(self, other, **kwargs):
+ """return a SQL UNION ALL of this select() construct against the given
+ selectable.
+
+ """
+ return CompoundSelect._create_union_all(self, other, **kwargs)
+
+ def except_(self, other, **kwargs):
+ """return a SQL EXCEPT of this select() construct against the given
+ selectable."""
+
+ return CompoundSelect._create_except(self, other, **kwargs)
+
+ def except_all(self, other, **kwargs):
+ """return a SQL EXCEPT ALL of this select() construct against the
+ given selectable.
+
+ """
+ return CompoundSelect._create_except_all(self, other, **kwargs)
+
+ def intersect(self, other, **kwargs):
+ """return a SQL INTERSECT of this select() construct against the given
+ selectable.
+
+ """
+ return CompoundSelect._create_intersect(self, other, **kwargs)
+
+ def intersect_all(self, other, **kwargs):
+ """return a SQL INTERSECT ALL of this select() construct against the
+ given selectable.
+
+ """
+ return CompoundSelect._create_intersect_all(self, other, **kwargs)
+
+ def bind(self):
+ if self._bind:
+ return self._bind
+ froms = self._froms
+ if not froms:
+ for c in self._raw_columns:
+ e = c.bind
+ if e:
+ self._bind = e
+ return e
+ else:
+ e = list(froms)[0].bind
+ if e:
+ self._bind = e
+ return e
+
+ return None
+
+ def _set_bind(self, bind):
+ self._bind = bind
+ bind = property(bind, _set_bind)
+
+
+class ScalarSelect(Generative, Grouping):
+ _from_objects = []
+
+ def __init__(self, element):
+ self.element = element
+ self.type = element._scalar_type()
+
+ @property
+ def columns(self):
+ raise exc.InvalidRequestError('Scalar Select expression has no '
+ 'columns; use this object directly within a '
+ 'column-level expression.')
+ c = columns
+
+ @_generative
+ def where(self, crit):
+ """Apply a WHERE clause to the SELECT statement referred to
+ by this :class:`.ScalarSelect`.
+
+ """
+ self.element = self.element.where(crit)
+
+ def self_group(self, **kwargs):
+ return self
+
+
+class Exists(UnaryExpression):
+ """Represent an ``EXISTS`` clause.
+
+ """
+ __visit_name__ = UnaryExpression.__visit_name__
+ _from_objects = []
+
+
+ def __init__(self, *args, **kwargs):
+ """Construct a new :class:`.Exists` against an existing
+ :class:`.Select` object.
+
+ Calling styles are of the following forms::
+
+ # use on an existing select()
+ s = select([table.c.col1]).where(table.c.col2==5)
+ s = exists(s)
+
+ # construct a select() at once
+ exists(['*'], **select_arguments).where(criterion)
+
+ # columns argument is optional, generates "EXISTS (SELECT *)"
+ # by default.
+ exists().where(table.c.col2==5)
+
+ """
+ if args and isinstance(args[0], (SelectBase, ScalarSelect)):
+ s = args[0]
+ else:
+ if not args:
+ args = ([literal_column('*')],)
+ s = Select(*args, **kwargs).as_scalar().self_group()
+
+ UnaryExpression.__init__(self, s, operator=operators.exists,
+ type_=type_api.BOOLEANTYPE)
+
+ def select(self, whereclause=None, **params):
+ return Select([self], whereclause, **params)
+
+ def correlate(self, *fromclause):
+ e = self._clone()
+ e.element = self.element.correlate(*fromclause).self_group()
+ return e
+
+ def correlate_except(self, *fromclause):
+ e = self._clone()
+ e.element = self.element.correlate_except(*fromclause).self_group()
+ return e
+
+ def select_from(self, clause):
+ """return a new :class:`.Exists` construct, applying the given
+ expression to the :meth:`.Select.select_from` method of the select
+ statement contained.
+
+ """
+ e = self._clone()
+ e.element = self.element.select_from(clause).self_group()
+ return e
+
+ def where(self, clause):
+ """return a new exists() construct with the given expression added to
+ its WHERE clause, joined to the existing clause via AND, if any.
+
+ """
+ e = self._clone()
+ e.element = self.element.where(clause).self_group()
+ return e
+
+
+class TextAsFrom(SelectBase):
+ """Wrap a :class:`.TextClause` construct within a :class:`.SelectBase`
+ interface.
+
+ This allows the :class:`.TextClause` object to gain a ``.c`` collection and
+ other FROM-like capabilities such as :meth:`.FromClause.alias`,
+ :meth:`.SelectBase.cte`, etc.
+
+ The :class:`.TextAsFrom` construct is produced via the
+ :meth:`.TextClause.columns` method - see that method for details.
+
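+    E.g., a sketch::
+
+        from sqlalchemy import text, column
+
+        stmt = text("SELECT id, name FROM user").columns(
+            column('id'), column('name'))
+        stmt = stmt.alias('st')
+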
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :func:`.text`
+
+ :meth:`.TextClause.columns`
+
+ """
+ __visit_name__ = "text_as_from"
+
+ def __init__(self, text, columns):
+ self.element = text
+ self.column_args = columns
+
+ @property
+ def _bind(self):
+ return self.element._bind
+
+ @_generative
+ def bindparams(self, *binds, **bind_as_values):
+ self.element = self.element.bindparams(*binds, **bind_as_values)
+
+ def _populate_column_collection(self):
+ for c in self.column_args:
+ c._make_proxy(self)
+
+ def _copy_internals(self, clone=_clone, **kw):
+ self._reset_exported()
+ self.element = clone(self.element, **kw)
+
+ def _scalar_type(self):
+ return self.column_args[0].type
+
+class AnnotatedFromClause(Annotated):
+ def __init__(self, element, values):
+ # force FromClause to generate their internal
+ # collections into __dict__
+ element.c
+ Annotated.__init__(self, element, values)
+
+
diff --git a/lib/sqlalchemy/sql/sqltypes.py b/lib/sqlalchemy/sql/sqltypes.py
new file mode 100644
index 000000000..d779caaea
--- /dev/null
+++ b/lib/sqlalchemy/sql/sqltypes.py
@@ -0,0 +1,1628 @@
+# sql/sqltypes.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""SQL specific types.
+
+"""
+
+import datetime as dt
+import codecs
+
+from .type_api import TypeEngine, TypeDecorator, to_instance
+from .elements import quoted_name, type_coerce
+from .default_comparator import _DefaultColumnComparator
+from .. import exc, util, processors
+from .base import _bind_or_error, SchemaEventTarget
+from . import operators
+from .. import event
+from ..util import pickle
+import decimal
+
+if util.jython:
+ import array
+
+class _DateAffinity(object):
+ """Mixin date/time specific expression adaptations.
+
+    Rules are implemented within Date, Time, Interval, DateTime, Numeric,
+    Integer. Based on
+    http://www.postgresql.org/docs/current/static/functions-datetime.html.
+
+ """
+
+ @property
+ def _expression_adaptations(self):
+ raise NotImplementedError()
+
+ class Comparator(TypeEngine.Comparator):
+ _blank_dict = util.immutabledict()
+
+ def _adapt_expression(self, op, other_comparator):
+ othertype = other_comparator.type._type_affinity
+ return op, \
+ to_instance(self.type._expression_adaptations.get(op, self._blank_dict).\
+ get(othertype, NULLTYPE))
+ comparator_factory = Comparator
+
+class Concatenable(object):
+ """A mixin that marks a type as supporting 'concatenation',
+ typically strings."""
+
+ class Comparator(TypeEngine.Comparator):
+ def _adapt_expression(self, op, other_comparator):
+ if op is operators.add and isinstance(other_comparator,
+ (Concatenable.Comparator, NullType.Comparator)):
+ return operators.concat_op, self.expr.type
+ else:
+ return op, self.expr.type
+
+ comparator_factory = Comparator
+
+
+class String(Concatenable, TypeEngine):
+ """The base for all string and character types.
+
+ In SQL, corresponds to VARCHAR. Can also take Python unicode objects
+ and encode to the database's encoding in bind params (and the reverse for
+ result sets.)
+
+ The `length` field is usually required when the `String` type is
+ used within a CREATE TABLE statement, as VARCHAR requires a length
+ on most databases.
+
+ """
+
+ __visit_name__ = 'string'
+
+ def __init__(self, length=None, collation=None,
+ convert_unicode=False,
+ unicode_error=None,
+ _warn_on_bytestring=False
+ ):
+ """
+ Create a string-holding type.
+
+ :param length: optional, a length for the column for use in
+ DDL and CAST expressions. May be safely omitted if no ``CREATE
+ TABLE`` will be issued. Certain databases may require a
+ ``length`` for use in DDL, and will raise an exception when
+ the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
+ with no length is included. Whether the value is
+ interpreted as bytes or characters is database specific.
+
+ :param collation: Optional, a column-level collation for
+ use in DDL and CAST expressions. Renders using the
+ COLLATE keyword supported by SQLite, MySQL, and Postgresql.
+ E.g.::
+
+ >>> from sqlalchemy import cast, select, String
+ >>> print select([cast('some string', String(collation='utf8'))])
+ SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
+
+ .. versionadded:: 0.8 Added support for COLLATE to all
+ string types.
+
+ :param convert_unicode: When set to ``True``, the
+ :class:`.String` type will assume that
+ input is to be passed as Python ``unicode`` objects,
+ and results returned as Python ``unicode`` objects.
+ If the DBAPI in use does not support Python unicode
+          (an increasingly rare case these days), SQLAlchemy
+ will encode/decode the value, using the
+ value of the ``encoding`` parameter passed to
+ :func:`.create_engine` as the encoding.
+
+ When using a DBAPI that natively supports Python
+ unicode objects, this flag generally does not
+ need to be set. For columns that are explicitly
+ intended to store non-ASCII data, the :class:`.Unicode`
+ or :class:`.UnicodeText`
+ types should be used regardless, which feature
+ the same behavior of ``convert_unicode`` but
+ also indicate an underlying column type that
+ directly supports unicode, such as ``NVARCHAR``.
+
+ For the extremely rare case that Python ``unicode``
+ is to be encoded/decoded by SQLAlchemy on a backend
+ that does natively support Python ``unicode``,
+ the value ``force`` can be passed here which will
+ cause SQLAlchemy's encode/decode services to be
+ used unconditionally.
+
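+          E.g., a sketch of such a column, assuming ``Column`` is
+          imported from ``sqlalchemy``::
+
+              Column('data', String(50, convert_unicode='force'))
+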
+ :param unicode_error: Optional, a method to use to handle Unicode
+ conversion errors. Behaves like the ``errors`` keyword argument to
+ the standard library's ``string.decode()`` functions. This flag
+ requires that ``convert_unicode`` is set to ``force`` - otherwise,
+ SQLAlchemy is not guaranteed to handle the task of unicode
+ conversion. Note that this flag adds significant performance
+ overhead to row-fetching operations for backends that already
+ return unicode objects natively (which most DBAPIs do). This
+ flag should only be used as a last resort for reading
+ strings from a column with varied or corrupted encodings.
+
+ """
+ if unicode_error is not None and convert_unicode != 'force':
+ raise exc.ArgumentError("convert_unicode must be 'force' "
+ "when unicode_error is set.")
+
+ self.length = length
+ self.collation = collation
+ self.convert_unicode = convert_unicode
+ self.unicode_error = unicode_error
+ self._warn_on_bytestring = _warn_on_bytestring
+
+ def literal_processor(self, dialect):
+ def process(value):
+ value = value.replace("'", "''")
+ return "'%s'" % value
+ return process
+
+ def bind_processor(self, dialect):
+ if self.convert_unicode or dialect.convert_unicode:
+ if dialect.supports_unicode_binds and \
+ self.convert_unicode != 'force':
+ if self._warn_on_bytestring:
+ def process(value):
+ if isinstance(value, util.binary_type):
+ util.warn("Unicode type received non-unicode bind "
+ "param value.")
+ return value
+ return process
+ else:
+ return None
+ else:
+ encoder = codecs.getencoder(dialect.encoding)
+ warn_on_bytestring = self._warn_on_bytestring
+
+ def process(value):
+ if isinstance(value, util.text_type):
+ return encoder(value, self.unicode_error)[0]
+ elif warn_on_bytestring and value is not None:
+ util.warn("Unicode type received non-unicode bind "
+ "param value")
+ return value
+ return process
+ else:
+ return None
+
+ def result_processor(self, dialect, coltype):
+ wants_unicode = self.convert_unicode or dialect.convert_unicode
+ needs_convert = wants_unicode and \
+ (dialect.returns_unicode_strings is not True or
+ self.convert_unicode in ('force', 'force_nocheck'))
+ needs_isinstance = (
+ needs_convert and
+ dialect.returns_unicode_strings and
+ self.convert_unicode != 'force_nocheck'
+ )
+
+ if needs_convert:
+ to_unicode = processors.to_unicode_processor_factory(
+ dialect.encoding, self.unicode_error)
+
+ if needs_isinstance:
+ return processors.to_conditional_unicode_processor_factory(
+ dialect.encoding, self.unicode_error)
+            else:
+                return to_unicode
+ else:
+ return None
+
+ @property
+ def python_type(self):
+ if self.convert_unicode:
+ return util.text_type
+ else:
+ return str
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.STRING
+
+
+class Text(String):
+ """A variably sized string type.
+
+ In SQL, usually corresponds to CLOB or TEXT. Can also take Python
+ unicode objects and encode to the database's encoding in bind
+ params (and the reverse for result sets.) In general, TEXT objects
+ do not have a length; while some databases will accept a length
+ argument here, it will be rejected by others.
+
+ """
+ __visit_name__ = 'text'
+
+
+class Unicode(String):
+ """A variable length Unicode string type.
+
+ The :class:`.Unicode` type is a :class:`.String` subclass
+ that assumes input and output as Python ``unicode`` data,
+ and in that regard is equivalent to the usage of the
+ ``convert_unicode`` flag with the :class:`.String` type.
+ However, unlike plain :class:`.String`, it also implies an
+    underlying column type that explicitly supports non-ASCII
+ data, such as ``NVARCHAR`` on Oracle and SQL Server.
+ This can impact the output of ``CREATE TABLE`` statements
+ and ``CAST`` functions at the dialect level, and can
+ also affect the handling of bound parameters in some
+ specific DBAPI scenarios.
+
+ The encoding used by the :class:`.Unicode` type is usually
+ determined by the DBAPI itself; most modern DBAPIs
+ feature support for Python ``unicode`` objects as bound
+ values and result set values, and the encoding should
+ be configured as detailed in the notes for the target
+ DBAPI in the :ref:`dialect_toplevel` section.
+
+ For those DBAPIs which do not support, or are not configured
+ to accommodate Python ``unicode`` objects
+ directly, SQLAlchemy does the encoding and decoding
+ outside of the DBAPI. The encoding in this scenario
+ is determined by the ``encoding`` flag passed to
+ :func:`.create_engine`.
+
+ When using the :class:`.Unicode` type, it is only appropriate
+ to pass Python ``unicode`` objects, and not plain ``str``.
+ If a plain ``str`` is passed under Python 2, a warning
+ is emitted. If you notice your application emitting these warnings but
+ you're not sure of the source of them, the Python
+ ``warnings`` filter, documented at
+ http://docs.python.org/library/warnings.html,
+ can be used to turn these warnings into exceptions
+ which will illustrate a stack trace::
+
+ import warnings
+ warnings.simplefilter('error')
+
+ For an application that wishes to pass plain bytestrings
+ and Python ``unicode`` objects to the ``Unicode`` type
+ equally, the bytestrings must first be decoded into
+ unicode. The recipe at :ref:`coerce_to_unicode` illustrates
+ how this is done.
+
+ See also:
+
+    :class:`.UnicodeText` - unbounded-length textual counterpart
+ to :class:`.Unicode`.
+
+ """
+
+ __visit_name__ = 'unicode'
+
+ def __init__(self, length=None, **kwargs):
+ """
+ Create a :class:`.Unicode` object.
+
+ Parameters are the same as that of :class:`.String`,
+ with the exception that ``convert_unicode``
+ defaults to ``True``.
+
+ """
+ kwargs.setdefault('convert_unicode', True)
+ kwargs.setdefault('_warn_on_bytestring', True)
+ super(Unicode, self).__init__(length=length, **kwargs)
+
+
+class UnicodeText(Text):
+ """An unbounded-length Unicode string type.
+
+ See :class:`.Unicode` for details on the unicode
+ behavior of this object.
+
+    Like :class:`.Unicode`, usage of the :class:`.UnicodeText` type implies a
+    unicode-capable type being used on the backend, such as
+    ``NCLOB`` or ``NTEXT``.
+
+ """
+
+ __visit_name__ = 'unicode_text'
+
+ def __init__(self, length=None, **kwargs):
+ """
+ Create a Unicode-converting Text type.
+
+ Parameters are the same as that of :class:`.Text`,
+ with the exception that ``convert_unicode``
+ defaults to ``True``.
+
+ """
+ kwargs.setdefault('convert_unicode', True)
+ kwargs.setdefault('_warn_on_bytestring', True)
+ super(UnicodeText, self).__init__(length=length, **kwargs)
+
+
+class Integer(_DateAffinity, TypeEngine):
+ """A type for ``int`` integers."""
+
+ __visit_name__ = 'integer'
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.NUMBER
+
+ @property
+ def python_type(self):
+ return int
+
+ def literal_processor(self, dialect):
+ def process(value):
+ return str(value)
+ return process
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ # TODO: need a dictionary object that will
+ # handle operators generically here, this is incomplete
+ return {
+ operators.add: {
+ Date: Date,
+ Integer: self.__class__,
+ Numeric: Numeric,
+ },
+ operators.mul: {
+ Interval: Interval,
+ Integer: self.__class__,
+ Numeric: Numeric,
+ },
+ operators.div: {
+ Integer: self.__class__,
+ Numeric: Numeric,
+ },
+ operators.truediv: {
+ Integer: self.__class__,
+ Numeric: Numeric,
+ },
+ operators.sub: {
+ Integer: self.__class__,
+ Numeric: Numeric,
+ },
+ }
+
+
+
+class SmallInteger(Integer):
+ """A type for smaller ``int`` integers.
+
+ Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
+ a normal :class:`.Integer` on the Python side.
+
+ """
+
+ __visit_name__ = 'small_integer'
+
+
+class BigInteger(Integer):
+ """A type for bigger ``int`` integers.
+
+ Typically generates a ``BIGINT`` in DDL, and otherwise acts like
+ a normal :class:`.Integer` on the Python side.
+
+ """
+
+ __visit_name__ = 'big_integer'
+
+
+
+class Numeric(_DateAffinity, TypeEngine):
+ """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``.
+
+ This type returns Python ``decimal.Decimal`` objects by default, unless the
+ :paramref:`.Numeric.asdecimal` flag is set to False, in which case they
+ are coerced to Python ``float`` objects.
+
+ .. note::
+
+ The :class:`.Numeric` type is designed to receive data from a database
+ type that is explicitly known to be a decimal type
+ (e.g. ``DECIMAL``, ``NUMERIC``, others) and not a floating point
+ type (e.g. ``FLOAT``, ``REAL``, others).
+        If the database column on the server is in fact a floating-point
+        type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float`
+ type or a subclass, otherwise numeric coercion between ``float``/``Decimal``
+ may or may not function as expected.
+
+ .. note::
+
+ The Python ``decimal.Decimal`` class is generally slow
+        performing; CPython 3.3 has now switched to use the `cdecimal
+ <http://pypi.python.org/pypi/cdecimal/>`_ library natively. For
+ older Python versions, the ``cdecimal`` library can be patched
+ into any application where it will replace the ``decimal``
+ library fully, however this needs to be applied globally and
+ before any other modules have been imported, as follows::
+
+ import sys
+ import cdecimal
+ sys.modules["decimal"] = cdecimal
+
+ Note that the ``cdecimal`` and ``decimal`` libraries are **not
+ compatible with each other**, so patching ``cdecimal`` at the
+ global level is the only way it can be used effectively with
+ various DBAPIs that hardcode to import the ``decimal`` library.
+
+ """
+
+ __visit_name__ = 'numeric'
+
+ _default_decimal_return_scale = 10
+
+ def __init__(self, precision=None, scale=None,
+ decimal_return_scale=None, asdecimal=True):
+ """
+ Construct a Numeric.
+
+ :param precision: the numeric precision for use in DDL ``CREATE
+ TABLE``.
+
+ :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
+
+        :param asdecimal: default True. Determines whether
+          values should be returned as Python ``Decimal`` objects, or
+ as floats. Different DBAPIs send one or the other based on
+ datatypes - the Numeric type will ensure that return values
+ are one or the other across DBAPIs consistently.
+
+ :param decimal_return_scale: Default scale to use when converting
+ from floats to Python decimals. Floating point values will typically
+ be much longer due to decimal inaccuracy, and most floating point
+ database types don't have a notion of "scale", so by default the
+ float type looks for the first ten decimal places when converting.
+          Specifying this value will override that length. Types which
+ do include an explicit ".scale" value, such as the base :class:`.Numeric`
+ as well as the MySQL float types, will use the value of ".scale"
+ as the default for decimal_return_scale, if not otherwise specified.
+
+ .. versionadded:: 0.9.0
+
+ When using the ``Numeric`` type, care should be taken to ensure
+        that the asdecimal setting is appropriate for the DBAPI in use -
+        when Numeric applies a conversion from Decimal->float or
+        float->Decimal, this conversion incurs an additional performance
+        overhead for all result columns received.
+
+ DBAPIs that return Decimal natively (e.g. psycopg2) will have
+ better accuracy and higher performance with a setting of ``True``,
+ as the native translation to Decimal reduces the amount of floating-
+ point issues at play, and the Numeric type itself doesn't need
+ to apply any further conversions. However, another DBAPI which
+ returns floats natively *will* incur an additional conversion
+ overhead, and is still subject to floating point data loss - in
+ which case ``asdecimal=False`` will at least remove the extra
+ conversion overhead.
+
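+        E.g., a sketch of the latter case, assuming ``Column`` is
+        imported from ``sqlalchemy``::
+
+            Column('score', Numeric(10, 4, asdecimal=False))
+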
+ """
+ self.precision = precision
+ self.scale = scale
+ self.decimal_return_scale = decimal_return_scale
+ self.asdecimal = asdecimal
+
+ @property
+ def _effective_decimal_return_scale(self):
+ if self.decimal_return_scale is not None:
+ return self.decimal_return_scale
+ elif getattr(self, "scale", None) is not None:
+ return self.scale
+ else:
+ return self._default_decimal_return_scale
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.NUMBER
+
+ def literal_processor(self, dialect):
+ def process(value):
+ return str(value)
+ return process
+
+ @property
+ def python_type(self):
+ if self.asdecimal:
+ return decimal.Decimal
+ else:
+ return float
+
+ def bind_processor(self, dialect):
+ if dialect.supports_native_decimal:
+ return None
+ else:
+ return processors.to_float
+
+ def result_processor(self, dialect, coltype):
+ if self.asdecimal:
+ if dialect.supports_native_decimal:
+ # we're a "numeric", DBAPI will give us Decimal directly
+ return None
+ else:
+ util.warn('Dialect %s+%s does *not* support Decimal '
+ 'objects natively, and SQLAlchemy must '
+ 'convert from floating point - rounding '
+ 'errors and other issues may occur. Please '
+ 'consider storing Decimal numbers as strings '
+ 'or integers on this platform for lossless '
+ 'storage.' % (dialect.name, dialect.driver))
+
+ # we're a "numeric", DBAPI returns floats, convert.
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self.scale if self.scale is not None
+ else self._default_decimal_return_scale)
+ else:
+ if dialect.supports_native_decimal:
+ return processors.to_float
+ else:
+ return None
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.mul: {
+ Interval: Interval,
+ Numeric: self.__class__,
+ Integer: self.__class__,
+ },
+ operators.div: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
+ },
+ operators.truediv: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
+ },
+ operators.add: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
+ },
+ operators.sub: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
+ }
+ }
+
+
+class Float(Numeric):
+ """Type representing floating point types, such as ``FLOAT`` or ``REAL``.
+
+ This type returns Python ``float`` objects by default, unless the
+ :paramref:`.Float.asdecimal` flag is set to True, in which case they
+ are coerced to ``decimal.Decimal`` objects.
+
+ .. note::
+
+ The :class:`.Float` type is designed to receive data from a database
+ type that is explicitly known to be a floating point type
+ (e.g. ``FLOAT``, ``REAL``, others)
+ and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others).
+ If the database column on the server is in fact a Numeric
+ type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric`
+ type or a subclass, otherwise numeric coercion between ``float``/``Decimal``
+ may or may not function as expected.
+
+ """
+
+ __visit_name__ = 'float'
+
+ scale = None
+
+ def __init__(self, precision=None, asdecimal=False,
+ decimal_return_scale=None, **kwargs):
+ """
+ Construct a Float.
+
+ :param precision: the numeric precision for use in DDL ``CREATE
+ TABLE``.
+
+ :param asdecimal: the same flag as that of :class:`.Numeric`, but
+ defaults to ``False``. Note that setting this flag to ``True``
+ results in floating point conversion.
+
+ :param decimal_return_scale: Default scale to use when converting
+ from floats to Python decimals. Floating point values will typically
+ be much longer due to decimal inaccuracy, and most floating point
+ database types don't have a notion of "scale", so by default the
+ float type looks for the first ten decimal places when converting.
+          Specifying this value will override that length. Note that the
+ MySQL float types, which do include "scale", will use "scale"
+ as the default for decimal_return_scale, if not otherwise specified.
+
+ .. versionadded:: 0.9.0
+
+ :param \**kwargs: deprecated. Additional arguments here are ignored
+ by the default :class:`.Float` type. For database specific
+ floats that support additional arguments, see that dialect's
+ documentation for details, such as
+ :class:`sqlalchemy.dialects.mysql.FLOAT`.
+
+ """
+ self.precision = precision
+ self.asdecimal = asdecimal
+ self.decimal_return_scale = decimal_return_scale
+ if kwargs:
+ util.warn_deprecated("Additional keyword arguments "
+ "passed to Float ignored.")
+
+ def result_processor(self, dialect, coltype):
+ if self.asdecimal:
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
+ else:
+ return None
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.mul: {
+ Interval: Interval,
+ Numeric: self.__class__,
+ },
+ operators.div: {
+ Numeric: self.__class__,
+ },
+ operators.truediv: {
+ Numeric: self.__class__,
+ },
+ operators.add: {
+ Numeric: self.__class__,
+ },
+ operators.sub: {
+ Numeric: self.__class__,
+ }
+ }
+
+
+class DateTime(_DateAffinity, TypeEngine):
+ """A type for ``datetime.datetime()`` objects.
+
+ Date and time types return objects from the Python ``datetime``
+ module. Most DBAPIs have built in support for the datetime
+ module, with the noted exception of SQLite. In the case of
+ SQLite, date and time types are stored as strings which are then
+ converted back to datetime objects when rows are returned.
+
+ """
+
+ __visit_name__ = 'datetime'
+
+ def __init__(self, timezone=False):
+ """Construct a new :class:`.DateTime`.
+
+ :param timezone: boolean. If True, and supported by the
+ backend, will produce 'TIMESTAMP WITH TIMEZONE'. For backends
+ that don't support timezone aware timestamps, has no
+ effect.
+
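+        E.g., a sketch, assuming ``Column`` is imported from
+        ``sqlalchemy``::
+
+            Column('created_at', DateTime(timezone=True))
+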
+ """
+ self.timezone = timezone
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.DATETIME
+
+ @property
+ def python_type(self):
+ return dt.datetime
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.add: {
+ Interval: self.__class__,
+ },
+ operators.sub: {
+ Interval: self.__class__,
+ DateTime: Interval,
+ },
+ }
+
+
+class Date(_DateAffinity, TypeEngine):
+ """A type for ``datetime.date()`` objects."""
+
+ __visit_name__ = 'date'
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.DATETIME
+
+ @property
+ def python_type(self):
+ return dt.date
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.add: {
+ Integer: self.__class__,
+ Interval: DateTime,
+ Time: DateTime,
+ },
+ operators.sub: {
+ # date - integer = date
+ Integer: self.__class__,
+
+ # date - date = integer.
+ Date: Integer,
+
+ Interval: DateTime,
+
+ # date - datetime = interval,
+ # this one is not in the PG docs
+ # but works
+ DateTime: Interval,
+ },
+ }
+
+
+class Time(_DateAffinity, TypeEngine):
+ """A type for ``datetime.time()`` objects."""
+
+ __visit_name__ = 'time'
+
+ def __init__(self, timezone=False):
+ self.timezone = timezone
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.DATETIME
+
+ @property
+ def python_type(self):
+ return dt.time
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.add: {
+ Date: DateTime,
+ Interval: self.__class__
+ },
+ operators.sub: {
+ Time: Interval,
+ Interval: self.__class__,
+ },
+ }
+
+
+class _Binary(TypeEngine):
+ """Define base behavior for binary types."""
+
+ def __init__(self, length=None):
+ self.length = length
+
+ def literal_processor(self, dialect):
+ def process(value):
+            value = value.decode(dialect.encoding).replace("'", "''")
+ return "'%s'" % value
+ return process
+
+ @property
+ def python_type(self):
+ return util.binary_type
+
+ # Python 3 - sqlite3 doesn't need the `Binary` conversion
+ # here, though pg8000 does to indicate "bytea"
+ def bind_processor(self, dialect):
+ DBAPIBinary = dialect.dbapi.Binary
+
+ def process(value):
+ if value is not None:
+ return DBAPIBinary(value)
+ else:
+ return None
+ return process
+
+ # Python 3 has native bytes() type
+ # both sqlite3 and pg8000 seem to return it,
+ # psycopg2 as of 2.5 returns 'memoryview'
+ if util.py2k:
+ def result_processor(self, dialect, coltype):
+ if util.jython:
+ def process(value):
+ if value is not None:
+ if isinstance(value, array.array):
+ return value.tostring()
+ return str(value)
+ else:
+ return None
+ else:
+ process = processors.to_str
+ return process
+ else:
+ def result_processor(self, dialect, coltype):
+ def process(value):
+ if value is not None:
+ value = bytes(value)
+ return value
+ return process
+
+ def coerce_compared_value(self, op, value):
+ """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+
+ if isinstance(value, util.string_types):
+ return self
+ else:
+ return super(_Binary, self).coerce_compared_value(op, value)
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.BINARY
+
+
+class LargeBinary(_Binary):
+ """A type for large binary byte data.
+
+ The Binary type generates BLOB or BYTEA when tables are created,
+ and also converts incoming values using the ``Binary`` callable
+ provided by each DB-API.
+
+ """
+
+ __visit_name__ = 'large_binary'
+
+ def __init__(self, length=None):
+ """
+ Construct a LargeBinary type.
+
+ :param length: optional, a length for the column for use in
+ DDL statements, for those BLOB types that accept a length
+ (i.e. MySQL). It does *not* produce a small BINARY/VARBINARY
+ type - use the BINARY/VARBINARY types specifically for those.
+ May be safely omitted if no ``CREATE
+ TABLE`` will be issued. Certain databases may require a
+ *length* for use in DDL, and will raise an exception when
+ the ``CREATE TABLE`` DDL is issued.
+
+ """
+ _Binary.__init__(self, length=length)
+
+
+class Binary(LargeBinary):
+ """Deprecated. Renamed to LargeBinary."""
+
+ def __init__(self, *arg, **kw):
+ util.warn_deprecated('The Binary type has been renamed to '
+ 'LargeBinary.')
+ LargeBinary.__init__(self, *arg, **kw)
+
+
+
+class SchemaType(SchemaEventTarget):
+ """Mark a type as possibly requiring schema-level DDL for usage.
+
+ Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
+    as well as types that are complemented by table or schema level
+ constraints, triggers, and other rules.
+
+ :class:`.SchemaType` classes can also be targets for the
+ :meth:`.DDLEvents.before_parent_attach` and
+ :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
+ surrounding the association of the type object with a parent
+ :class:`.Column`.
+
+ .. seealso::
+
+ :class:`.Enum`
+
+ :class:`.Boolean`
+
+
+ """
+
+ def __init__(self, name=None, schema=None, metadata=None,
+ inherit_schema=False, quote=None):
+ if name is not None:
+ self.name = quoted_name(name, quote)
+ else:
+ self.name = None
+ self.schema = schema
+ self.metadata = metadata
+ self.inherit_schema = inherit_schema
+ if self.metadata:
+ event.listen(
+ self.metadata,
+ "before_create",
+ util.portable_instancemethod(self._on_metadata_create)
+ )
+ event.listen(
+ self.metadata,
+ "after_drop",
+ util.portable_instancemethod(self._on_metadata_drop)
+ )
+
+ def _set_parent(self, column):
+ column._on_table_attach(util.portable_instancemethod(self._set_table))
+
+ def _set_table(self, column, table):
+ if self.inherit_schema:
+ self.schema = table.schema
+
+ event.listen(
+ table,
+ "before_create",
+ util.portable_instancemethod(
+ self._on_table_create)
+ )
+ event.listen(
+ table,
+ "after_drop",
+ util.portable_instancemethod(self._on_table_drop)
+ )
+ if self.metadata is None:
+ # TODO: what's the difference between self.metadata
+ # and table.metadata here ?
+ event.listen(
+ table.metadata,
+ "before_create",
+ util.portable_instancemethod(self._on_metadata_create)
+ )
+ event.listen(
+ table.metadata,
+ "after_drop",
+ util.portable_instancemethod(self._on_metadata_drop)
+ )
+
+ def copy(self, **kw):
+ return self.adapt(self.__class__)
+
+ def adapt(self, impltype, **kw):
+ schema = kw.pop('schema', self.schema)
+ metadata = kw.pop('metadata', self.metadata)
+ return impltype(name=self.name,
+ schema=schema,
+ metadata=metadata,
+ inherit_schema=self.inherit_schema,
+ **kw
+ )
+
+ @property
+ def bind(self):
+ return self.metadata and self.metadata.bind or None
+
+ def create(self, bind=None, checkfirst=False):
+ """Issue CREATE ddl for this type, if applicable."""
+
+ if bind is None:
+ bind = _bind_or_error(self)
+ t = self.dialect_impl(bind.dialect)
+ if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
+ t.create(bind=bind, checkfirst=checkfirst)
+
+ def drop(self, bind=None, checkfirst=False):
+ """Issue DROP ddl for this type, if applicable."""
+
+ if bind is None:
+ bind = _bind_or_error(self)
+ t = self.dialect_impl(bind.dialect)
+ if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
+ t.drop(bind=bind, checkfirst=checkfirst)
+
+ def _on_table_create(self, target, bind, **kw):
+ t = self.dialect_impl(bind.dialect)
+ if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
+ t._on_table_create(target, bind, **kw)
+
+ def _on_table_drop(self, target, bind, **kw):
+ t = self.dialect_impl(bind.dialect)
+ if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
+ t._on_table_drop(target, bind, **kw)
+
+ def _on_metadata_create(self, target, bind, **kw):
+ t = self.dialect_impl(bind.dialect)
+ if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
+ t._on_metadata_create(target, bind, **kw)
+
+ def _on_metadata_drop(self, target, bind, **kw):
+ t = self.dialect_impl(bind.dialect)
+ if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
+ t._on_metadata_drop(target, bind, **kw)
+
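+# A minimal sketch of the create()/drop() dispatch above, assuming a
+# pre-configured PostgreSQL Engine named "engine": the generic type resolves
+# to its dialect-specific implementation, which emits the actual DDL::
+#
+#     from sqlalchemy import Enum
+#     status = Enum('draft', 'published', name='status')
+#     status.create(bind=engine, checkfirst=True)   # CREATE TYPE status ...
+#     status.drop(bind=engine, checkfirst=True)     # DROP TYPE status
+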
+class Enum(String, SchemaType):
+ """Generic Enum Type.
+
+ The Enum type provides a set of possible string values to which
+ the column is constrained.
+
+ By default, uses the backend's native ENUM type if available,
+ else uses VARCHAR + a CHECK constraint.
+
+ .. seealso::
+
+ :class:`~.postgresql.ENUM` - PostgreSQL-specific type,
+ which has additional functionality.
+
+ """
+
+ __visit_name__ = 'enum'
+
+ def __init__(self, *enums, **kw):
+ """Construct an enum.
+
+ Keyword arguments which don't apply to a specific backend are ignored
+ by that backend.
+
+ :param \*enums: string or unicode enumeration labels. If unicode
+ labels are present, the ``convert_unicode`` flag is auto-enabled.
+
+ :param convert_unicode: Enable unicode-aware bind parameter and
+ result-set processing for this Enum's data. This is set
+ automatically based on the presence of unicode label strings.
+
+ :param metadata: Associate this type directly with a ``MetaData``
+ object. For types that exist on the target database as an
+ independent schema construct (Postgresql), this type will be
+ created and dropped within ``create_all()`` and ``drop_all()``
+ operations. If the type is not associated with any ``MetaData``
+ object, it will associate itself with each ``Table`` in which it is
+ used, and will be created when any of those individual tables are
+ created, after a check is performed for its existence. The type is
+ only dropped when ``drop_all()`` is called for that ``Table``
+ object's metadata, however.
+
+ :param name: The name of this type. This is required for Postgresql
+ and any future supported database which requires an explicitly
+ named type, or an explicitly named constraint in order to generate
+ the type and/or a table that uses it.
+
+ :param native_enum: Use the database's native ENUM type when
+ available. Defaults to True. When False, uses VARCHAR + check
+ constraint for all backends.
+
+ :param schema: Schema name of this type. For types that exist on the
+ target database as an independent schema construct (Postgresql),
+ this parameter specifies the named schema in which the type is
+ present.
+
+ .. note::
+
+ The ``schema`` of the :class:`.Enum` type does not
+ by default make use of the ``schema`` established on the
+ owning :class:`.Table`. If this behavior is desired,
+ set the ``inherit_schema`` flag to ``True``.
+
+ :param quote: Set explicit quoting preferences for the type's name.
+
+ :param inherit_schema: When ``True``, the "schema" from the owning
+ :class:`.Table` will be copied to the "schema" attribute of this
+ :class:`.Enum`, replacing whatever value was passed for the
+ ``schema`` attribute. This also takes effect when using the
+ :meth:`.Table.tometadata` operation.
+
+ .. versionadded:: 0.8
+
+ """
+ self.enums = enums
+ self.native_enum = kw.pop('native_enum', True)
+ convert_unicode = kw.pop('convert_unicode', None)
+ if convert_unicode is None:
+ for e in enums:
+ if isinstance(e, util.text_type):
+ convert_unicode = True
+ break
+ else:
+ convert_unicode = False
+
+ if self.enums:
+ length = max(len(x) for x in self.enums)
+ else:
+ length = 0
+ String.__init__(self,
+ length=length,
+ convert_unicode=convert_unicode,
+ )
+ SchemaType.__init__(self, **kw)
+
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[Enum, SchemaType],
+ )
+
+ def _should_create_constraint(self, compiler):
+ return not self.native_enum or \
+ not compiler.dialect.supports_native_enum
+
+ @util.dependencies("sqlalchemy.sql.schema")
+ def _set_table(self, schema, column, table):
+ if self.native_enum:
+ SchemaType._set_table(self, column, table)
+
+ e = schema.CheckConstraint(
+ type_coerce(column, self).in_(self.enums),
+ name=self.name,
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
+ )
+ table.append_constraint(e)
+
+ def adapt(self, impltype, **kw):
+ schema = kw.pop('schema', self.schema)
+ metadata = kw.pop('metadata', self.metadata)
+ if issubclass(impltype, Enum):
+ return impltype(name=self.name,
+ schema=schema,
+ metadata=metadata,
+ convert_unicode=self.convert_unicode,
+ native_enum=self.native_enum,
+ inherit_schema=self.inherit_schema,
+ *self.enums,
+ **kw
+ )
+ else:
+ return super(Enum, self).adapt(impltype, **kw)
+
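+# A short sketch of the native/non-native split above: on a backend without
+# a native ENUM type (e.g. SQLite), the same column renders as VARCHAR plus
+# the CHECK constraint generated by _set_table(); names here are arbitrary::
+#
+#     from sqlalchemy import Table, Column, MetaData, Enum
+#     t = Table('t', MetaData(),
+#               Column('value', Enum('a', 'bb', name='value_enum')))
+#     # SQLite DDL (roughly):  value VARCHAR(2), CHECK (value IN ('a', 'bb'))
+#     # PostgreSQL DDL:        value value_enum   (native ENUM type)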
+
+class PickleType(TypeDecorator):
+ """Holds Python objects, which are serialized using pickle.
+
+ PickleType builds upon the Binary type to apply Python's
+ ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
+ the way out, allowing any pickleable Python object to be stored as
+ a serialized binary field.
+
+ To allow ORM change events to propagate for elements associated
+ with :class:`.PickleType`, see :ref:`mutable_toplevel`.
+
+ """
+
+ impl = LargeBinary
+
+ def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
+ pickler=None, comparator=None):
+ """
+ Construct a PickleType.
+
+ :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
+
+ :param pickler: defaults to the ``cPickle`` module, or ``pickle`` if
+ cPickle is not available. May be any object with
+ pickle-compatible ``dumps`` and ``loads`` methods.
+
+ :param comparator: a 2-arg callable predicate used
+ to compare values of this type. If left as ``None``,
+ the Python "equals" operator is used to compare values.
+
+ """
+ self.protocol = protocol
+ self.pickler = pickler or pickle
+ self.comparator = comparator
+ super(PickleType, self).__init__()
+
+ def __reduce__(self):
+ return PickleType, (self.protocol,
+ None,
+ self.comparator)
+
+ def bind_processor(self, dialect):
+ impl_processor = self.impl.bind_processor(dialect)
+ dumps = self.pickler.dumps
+ protocol = self.protocol
+ if impl_processor:
+ def process(value):
+ if value is not None:
+ value = dumps(value, protocol)
+ return impl_processor(value)
+ else:
+ def process(value):
+ if value is not None:
+ value = dumps(value, protocol)
+ return value
+ return process
+
+ def result_processor(self, dialect, coltype):
+ impl_processor = self.impl.result_processor(dialect, coltype)
+ loads = self.pickler.loads
+ if impl_processor:
+ def process(value):
+ value = impl_processor(value)
+ if value is None:
+ return None
+ return loads(value)
+ else:
+ def process(value):
+ if value is None:
+ return None
+ return loads(value)
+ return process
+
+ def compare_values(self, x, y):
+ if self.comparator:
+ return self.comparator(x, y)
+ else:
+ return x == y
+
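+# A brief, illustrative use of the comparator hook above: compare_values()
+# is what the ORM consults to detect a net change in a pickled value, so a
+# custom 2-arg callable can narrow that comparison (names are arbitrary)::
+#
+#     from sqlalchemy import Column, PickleType
+#     data = Column('data', PickleType(
+#         comparator=lambda x, y: (x or {}).get('id') == (y or {}).get('id')))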
+
+class Boolean(TypeEngine, SchemaType):
+ """A bool datatype.
+
+ Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on
+ the Python side deals in ``True`` or ``False``.
+
+ """
+
+ __visit_name__ = 'boolean'
+
+ def __init__(self, create_constraint=True, name=None):
+ """Construct a Boolean.
+
+ :param create_constraint: defaults to True. If the boolean
+ is generated as an int/smallint, also create a CHECK constraint
+ on the table that ensures 1 or 0 as a value.
+
+ :param name: if a CHECK constraint is generated, specify
+ the name of the constraint.
+
+ """
+ self.create_constraint = create_constraint
+ self.name = name
+
+ def _should_create_constraint(self, compiler):
+ return not compiler.dialect.supports_native_boolean
+
+ @util.dependencies("sqlalchemy.sql.schema")
+ def _set_table(self, schema, column, table):
+ if not self.create_constraint:
+ return
+
+ e = schema.CheckConstraint(
+ type_coerce(column, self).in_([0, 1]),
+ name=self.name,
+ _create_rule=util.portable_instancemethod(
+ self._should_create_constraint)
+ )
+ table.append_constraint(e)
+
+ @property
+ def python_type(self):
+ return bool
+
+ def bind_processor(self, dialect):
+ if dialect.supports_native_boolean:
+ return None
+ else:
+ return processors.boolean_to_int
+
+ def result_processor(self, dialect, coltype):
+ if dialect.supports_native_boolean:
+ return None
+ else:
+ return processors.int_to_boolean
+
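+# Sketch of the non-native path above: where supports_native_boolean is
+# False, values pass through processors.boolean_to_int / int_to_boolean and
+# _set_table() attaches the CHECK constraint (column name is arbitrary)::
+#
+#     from sqlalchemy import Table, Column, MetaData, Boolean
+#     t = Table('t', MetaData(),
+#               Column('flag', Boolean(name='flag_bool')))
+#     # non-native backends render an integer-style type plus
+#     # CONSTRAINT flag_bool CHECK (flag IN (0, 1))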
+
+class Interval(_DateAffinity, TypeDecorator):
+ """A type for ``datetime.timedelta()`` objects.
+
+ The Interval type deals with ``datetime.timedelta`` objects. In
+ PostgreSQL, the native ``INTERVAL`` type is used; for others, the
+ value is stored as a datetime relative to the "epoch"
+ (Jan. 1, 1970).
+
+ Note that the ``Interval`` type does not currently provide date arithmetic
+ operations on platforms which do not support interval types natively. Such
+ operations usually require transformation of both sides of the expression
+ (such as, conversion of both sides into integer epoch values first) which
+ currently is a manual procedure (such as via
+ :attr:`~sqlalchemy.sql.expression.func`).
+
+ """
+
+ impl = DateTime
+ epoch = dt.datetime.utcfromtimestamp(0)
+
+ def __init__(self, native=True,
+ second_precision=None,
+ day_precision=None):
+ """Construct an Interval object.
+
+ :param native: when True, use the actual
+ INTERVAL type provided by the database, if
+ supported (currently Postgresql, Oracle).
+ Otherwise, represent the interval data as
+ an epoch value regardless.
+
+ :param second_precision: For native interval types
+ which support a "fractional seconds precision" parameter,
+ i.e. Oracle and Postgresql
+
+ :param day_precision: for native interval types which
+ support a "day precision" parameter, i.e. Oracle.
+
+ """
+ super(Interval, self).__init__()
+ self.native = native
+ self.second_precision = second_precision
+ self.day_precision = day_precision
+
+ def adapt(self, cls, **kw):
+ if self.native and hasattr(cls, '_adapt_from_generic_interval'):
+ return cls._adapt_from_generic_interval(self, **kw)
+ else:
+ return self.__class__(
+ native=self.native,
+ second_precision=self.second_precision,
+ day_precision=self.day_precision,
+ **kw)
+
+ @property
+ def python_type(self):
+ return dt.timedelta
+
+ def bind_processor(self, dialect):
+ impl_processor = self.impl.bind_processor(dialect)
+ epoch = self.epoch
+ if impl_processor:
+ def process(value):
+ if value is not None:
+ value = epoch + value
+ return impl_processor(value)
+ else:
+ def process(value):
+ if value is not None:
+ value = epoch + value
+ return value
+ return process
+
+ def result_processor(self, dialect, coltype):
+ impl_processor = self.impl.result_processor(dialect, coltype)
+ epoch = self.epoch
+ if impl_processor:
+ def process(value):
+ value = impl_processor(value)
+ if value is None:
+ return None
+ return value - epoch
+ else:
+ def process(value):
+ if value is None:
+ return None
+ return value - epoch
+ return process
+
+ @util.memoized_property
+ def _expression_adaptations(self):
+ return {
+ operators.add: {
+ Date: DateTime,
+ Interval: self.__class__,
+ DateTime: DateTime,
+ Time: Time,
+ },
+ operators.sub: {
+ Interval: self.__class__
+ },
+ operators.mul: {
+ Numeric: self.__class__
+ },
+ operators.truediv: {
+ Numeric: self.__class__
+ },
+ operators.div: {
+ Numeric: self.__class__
+ }
+ }
+
+ @property
+ def _type_affinity(self):
+ return Interval
+
+ def coerce_compared_value(self, op, value):
+ """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+
+ return self.impl.coerce_compared_value(op, value)
+
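+# The epoch round-trip above in miniature: on a non-native backend a
+# timedelta is stored as (epoch + value) through the DateTime impl and
+# recovered as (value - epoch); this snippet is plain, runnable Python::
+#
+#     import datetime as dt
+#     epoch = dt.datetime.utcfromtimestamp(0)
+#     td = dt.timedelta(days=2, hours=3)
+#     stored = epoch + td            # 1970-01-03 03:00:00, sent as DATETIME
+#     assert stored - epoch == td    # result processing restores the timedelta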
+
+class REAL(Float):
+ """The SQL REAL type."""
+
+ __visit_name__ = 'REAL'
+
+
+class FLOAT(Float):
+ """The SQL FLOAT type."""
+
+ __visit_name__ = 'FLOAT'
+
+
+class NUMERIC(Numeric):
+ """The SQL NUMERIC type."""
+
+ __visit_name__ = 'NUMERIC'
+
+
+class DECIMAL(Numeric):
+ """The SQL DECIMAL type."""
+
+ __visit_name__ = 'DECIMAL'
+
+
+class INTEGER(Integer):
+ """The SQL INT or INTEGER type."""
+
+ __visit_name__ = 'INTEGER'
+INT = INTEGER
+
+
+class SMALLINT(SmallInteger):
+ """The SQL SMALLINT type."""
+
+ __visit_name__ = 'SMALLINT'
+
+
+class BIGINT(BigInteger):
+ """The SQL BIGINT type."""
+
+ __visit_name__ = 'BIGINT'
+
+
+class TIMESTAMP(DateTime):
+ """The SQL TIMESTAMP type."""
+
+ __visit_name__ = 'TIMESTAMP'
+
+ def get_dbapi_type(self, dbapi):
+ return dbapi.TIMESTAMP
+
+
+class DATETIME(DateTime):
+ """The SQL DATETIME type."""
+
+ __visit_name__ = 'DATETIME'
+
+
+class DATE(Date):
+ """The SQL DATE type."""
+
+ __visit_name__ = 'DATE'
+
+
+class TIME(Time):
+ """The SQL TIME type."""
+
+ __visit_name__ = 'TIME'
+
+
+class TEXT(Text):
+ """The SQL TEXT type."""
+
+ __visit_name__ = 'TEXT'
+
+
+class CLOB(Text):
+ """The CLOB type.
+
+ This type is found in Oracle and Informix.
+ """
+
+ __visit_name__ = 'CLOB'
+
+
+class VARCHAR(String):
+ """The SQL VARCHAR type."""
+
+ __visit_name__ = 'VARCHAR'
+
+
+class NVARCHAR(Unicode):
+ """The SQL NVARCHAR type."""
+
+ __visit_name__ = 'NVARCHAR'
+
+
+class CHAR(String):
+ """The SQL CHAR type."""
+
+ __visit_name__ = 'CHAR'
+
+
+class NCHAR(Unicode):
+ """The SQL NCHAR type."""
+
+ __visit_name__ = 'NCHAR'
+
+
+class BLOB(LargeBinary):
+ """The SQL BLOB type."""
+
+ __visit_name__ = 'BLOB'
+
+
+class BINARY(_Binary):
+ """The SQL BINARY type."""
+
+ __visit_name__ = 'BINARY'
+
+
+class VARBINARY(_Binary):
+ """The SQL VARBINARY type."""
+
+ __visit_name__ = 'VARBINARY'
+
+
+class BOOLEAN(Boolean):
+ """The SQL BOOLEAN type."""
+
+ __visit_name__ = 'BOOLEAN'
+
+class NullType(TypeEngine):
+ """An unknown type.
+
+ :class:`.NullType` is used as a default type for those cases where
+ a type cannot be determined, including:
+
+ * During table reflection, when the type of a column is not recognized
+ by the :class:`.Dialect`
+ * When constructing SQL expressions using plain Python objects of
+ unknown types (e.g. ``somecolumn == my_special_object``)
+ * When a new :class:`.Column` is created, and the given type is passed
+ as ``None`` or is not passed at all.
+
+ The :class:`.NullType` can be used within SQL expression invocation
+ without issue; it just has no behavior either at the expression construction
+ level or at the bind-parameter/result processing level. :class:`.NullType`
+ will result in a :exc:`.CompileError` if the compiler is asked to render
+ the type itself, such as if it is used in a :func:`.cast` operation
+ or within a schema creation operation such as that invoked by
+ :meth:`.MetaData.create_all` or the :class:`.CreateTable` construct.
+
+ """
+ __visit_name__ = 'null'
+
+ _isnull = True
+
+ def literal_processor(self, dialect):
+ def process(value):
+ return "NULL"
+ return process
+
+ class Comparator(TypeEngine.Comparator):
+ def _adapt_expression(self, op, other_comparator):
+ if isinstance(other_comparator, NullType.Comparator) or \
+ not operators.is_commutative(op):
+ return op, self.expr.type
+ else:
+ return other_comparator._adapt_expression(op, self)
+ comparator_factory = Comparator
+
+
+NULLTYPE = NullType()
+BOOLEANTYPE = Boolean()
+STRINGTYPE = String()
+INTEGERTYPE = Integer()
+
+_type_map = {
+ int: Integer(),
+ float: Numeric(),
+ bool: BOOLEANTYPE,
+ decimal.Decimal: Numeric(),
+ dt.date: Date(),
+ dt.datetime: DateTime(),
+ dt.time: Time(),
+ dt.timedelta: Interval(),
+ util.NoneType: NULLTYPE
+}
+
+if util.py3k:
+ _type_map[bytes] = LargeBinary()
+ _type_map[str] = Unicode()
+else:
+ _type_map[unicode] = Unicode()
+ _type_map[str] = String()
+
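+# This mapping drives ad-hoc coercion of plain Python values; for example,
+# sqlalchemy.literal() consults it (via type_api) when no explicit type_ is
+# given, so the following holds on any backend::
+#
+#     import decimal
+#     from sqlalchemy import literal
+#     literal(5).type                        # Integer()
+#     literal(decimal.Decimal('1.2')).type   # Numeric()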
+
+# back-assign to type_api
+from . import type_api
+type_api.BOOLEANTYPE = BOOLEANTYPE
+type_api.STRINGTYPE = STRINGTYPE
+type_api.INTEGERTYPE = INTEGERTYPE
+type_api.NULLTYPE = NULLTYPE
+type_api._type_map = _type_map
+
+# this one, there's all kinds of ways to play it, but at the EOD
+# there's just a giant dependency cycle between the typing system and
+# the expression element system, as you might expect. We can use
+# importlaters or whatnot, but the typing system just necessarily has
+# to have some kind of connection like this. right now we're injecting the
+# _DefaultColumnComparator implementation into the TypeEngine.Comparator interface.
+# Alternatively TypeEngine.Comparator could have an "impl" injected, though
+# just injecting the base is simpler, error free, and more performant.
+class Comparator(_DefaultColumnComparator):
+ BOOLEANTYPE = BOOLEANTYPE
+
+TypeEngine.Comparator.__bases__ = (Comparator, ) + TypeEngine.Comparator.__bases__
+
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
new file mode 100644
index 000000000..c6aad92ba
--- /dev/null
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -0,0 +1,1053 @@
+# sql/type_api.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""Base types API.
+
+"""
+
+
+from .. import exc, util
+from . import operators
+from .visitors import Visitable
+
+# these are back-assigned by sqltypes.
+BOOLEANTYPE = None
+INTEGERTYPE = None
+NULLTYPE = None
+STRINGTYPE = None
+_type_map = None
+
+class TypeEngine(Visitable):
+ """Base for built-in types."""
+
+ _sqla_type = True
+ _isnull = False
+
+ class Comparator(operators.ColumnOperators):
+ """Base class for custom comparison operations defined at the
+ type level. See :attr:`.TypeEngine.comparator_factory`.
+
+
+ """
+
+ def __init__(self, expr):
+ self.expr = expr
+
+ def __reduce__(self):
+ return _reconstitute_comparator, (self.expr, )
+
+
+ hashable = True
+ """Flag, if False, means values from this type aren't hashable.
+
+ Used by the ORM when uniquing result lists.
+
+ """
+
+ comparator_factory = Comparator
+ """A :class:`.TypeEngine.Comparator` class which will apply
+ to operations performed by owning :class:`.ColumnElement` objects.
+
+ The :attr:`.comparator_factory` attribute is a hook consulted by
+ the core expression system when column and SQL expression operations
+ are performed. When a :class:`.TypeEngine.Comparator` class is
+ associated with this attribute, it allows custom re-definition of
+ all existing operators, as well as definition of new operators.
+ Existing operators include those provided by Python operator overloading
+ such as :meth:`.operators.ColumnOperators.__add__` and
+ :meth:`.operators.ColumnOperators.__eq__`,
+ as well as those provided as standard
+ attributes of :class:`.operators.ColumnOperators` such as
+ :meth:`.operators.ColumnOperators.like`
+ and :meth:`.operators.ColumnOperators.in_`.
+
+ Rudimentary usage of this hook is allowed through simple subclassing
+ of existing types, or alternatively by using :class:`.TypeDecorator`.
+ See the documentation section :ref:`types_operators` for examples.
+
+ .. versionadded:: 0.8 The expression system was enhanced to support
+ customization of operators on a per-type level.
+
+ """
+
+ def copy_value(self, value):
+ return value
+
+ def literal_processor(self, dialect):
+ """Return a conversion function for processing literal values that are
+ to be rendered directly without using binds.
+
+ This function is used when the compiler makes use of the
+ "literal_binds" flag, typically used in DDL generation as well
+ as in certain scenarios where backends don't accept bound parameters.
+
+ .. versionadded:: 0.9.0
+
+ """
+ return None
+
+ def bind_processor(self, dialect):
+ """Return a conversion function for processing bind values.
+
+ Returns a callable which will receive a bind parameter value
+ as the sole positional argument and will return a value to
+ send to the DB-API.
+
+ If processing is not necessary, the method should return ``None``.
+
+ :param dialect: Dialect instance in use.
+
+ """
+ return None
+
+ def result_processor(self, dialect, coltype):
+ """Return a conversion function for processing result row values.
+
+ Returns a callable which will receive a result row column
+ value as the sole positional argument and will return a value
+ to return to the user.
+
+ If processing is not necessary, the method should return ``None``.
+
+ :param dialect: Dialect instance in use.
+
+ :param coltype: DBAPI coltype argument received in cursor.description.
+
+ """
+ return None
+
+ def column_expression(self, colexpr):
+ """Given a SELECT column expression, return a wrapping SQL expression.
+
+ This is typically a SQL function that wraps a column expression
+ as rendered in the columns clause of a SELECT statement.
+ It is used for special data types that require
+ columns to be wrapped in some special database function in order
+ to coerce the value before being sent back to the application.
+ It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
+ method.
+
+ The method is evaluated at statement compile time, as opposed
+ to statement construction time.
+
+ See also:
+
+ :ref:`types_sql_value_processing`
+
+ """
+
+ return None
+
+ @util.memoized_property
+ def _has_column_expression(self):
+ """memoized boolean, check if column_expression is implemented.
+
+ Allows the method to be skipped for the vast majority of expression
+ types that don't use this feature.
+
+ """
+
+ return self.__class__.column_expression.__code__ \
+ is not TypeEngine.column_expression.__code__
+
+ def bind_expression(self, bindvalue):
+ """"Given a bind value (i.e. a :class:`.BindParameter` instance),
+ return a SQL expression in its place.
+
+ This is typically a SQL function that wraps the existing bound
+ parameter within the statement. It is used for special data types
+ that require literals being wrapped in some special database function
+ in order to coerce an application-level value into a database-specific
+ format. It is the SQL analogue of the
+ :meth:`.TypeEngine.bind_processor` method.
+
+ The method is evaluated at statement compile time, as opposed
+ to statement construction time.
+
+ Note that this method, when implemented, should always return
+ the exact same structure, without any conditional logic, as it
+ may be used in an executemany() call against an arbitrary number
+ of bound parameter sets.
+
+ See also:
+
+ :ref:`types_sql_value_processing`
+
+ """
+ return None
+
+ @util.memoized_property
+ def _has_bind_expression(self):
+ """memoized boolean, check if bind_expression is implemented.
+
+ Allows the method to be skipped for the vast majority of expression
+ types that don't use this feature.
+
+ """
+
+ return self.__class__.bind_expression.__code__ \
+ is not TypeEngine.bind_expression.__code__
+
+ def compare_values(self, x, y):
+ """Compare two values for equality."""
+
+ return x == y
+
+ def get_dbapi_type(self, dbapi):
+ """Return the corresponding type object from the underlying DB-API, if
+ any.
+
+ This can be useful for calling ``setinputsizes()``, for example.
+
+ """
+ return None
+
+ @property
+ def python_type(self):
+ """Return the Python type object expected to be returned
+ by instances of this type, if known.
+
+ Basically, for those types which enforce a return type,
+ or are known across the board to return a certain type for all
+ common DBAPIs (like ``int`` for example), this will return that type.
+
+ If a return type is not defined, raises
+ ``NotImplementedError``.
+
+ Note that any type also accommodates NULL in SQL which
+ means you can also get back ``None`` from any type
+ in practice.
+
+ """
+ raise NotImplementedError()
+
+ def with_variant(self, type_, dialect_name):
+ """Produce a new type object that will utilize the given
+ type when applied to the dialect of the given name.
+
+ e.g.::
+
+ from sqlalchemy.types import String
+ from sqlalchemy.dialects import mysql
+
+ s = String()
+
+ s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql')
+
+ The construction of :meth:`.TypeEngine.with_variant` is always
+ from the "fallback" type to that which is dialect specific.
+ The returned type is an instance of :class:`.Variant`, which
+ itself provides a :meth:`~sqlalchemy.types.Variant.with_variant`
+ that can be called repeatedly.
+
+ :param type_: a :class:`.TypeEngine` that will be selected
+ as a variant from the originating type, when a dialect
+ of the given name is in use.
+ :param dialect_name: base name of the dialect which uses
+ this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
+
+ .. versionadded:: 0.7.2
+
+ """
+ return Variant(self, {dialect_name: type_})
+
+
+ @util.memoized_property
+ def _type_affinity(self):
+ """Return a rudimental 'affinity' value expressing the general class
+ of type."""
+
+ typ = None
+ for t in self.__class__.__mro__:
+ if t in (TypeEngine, UserDefinedType):
+ return typ
+ elif issubclass(t, (TypeEngine, UserDefinedType)):
+ typ = t
+ else:
+ return self.__class__
+
+ def dialect_impl(self, dialect):
+ """Return a dialect-specific implementation for this
+ :class:`.TypeEngine`.
+
+ """
+ try:
+ return dialect._type_memos[self]['impl']
+ except KeyError:
+ return self._dialect_info(dialect)['impl']
+
+
+ def _cached_literal_processor(self, dialect):
+ """Return a dialect-specific literal processor for this type."""
+ try:
+ return dialect._type_memos[self]['literal']
+ except KeyError:
+ d = self._dialect_info(dialect)
+ d['literal'] = lp = d['impl'].literal_processor(dialect)
+ return lp
+
+ def _cached_bind_processor(self, dialect):
+ """Return a dialect-specific bind processor for this type."""
+
+ try:
+ return dialect._type_memos[self]['bind']
+ except KeyError:
+ d = self._dialect_info(dialect)
+ d['bind'] = bp = d['impl'].bind_processor(dialect)
+ return bp
+
+ def _cached_result_processor(self, dialect, coltype):
+ """Return a dialect-specific result processor for this type."""
+
+ try:
+ return dialect._type_memos[self][coltype]
+ except KeyError:
+ d = self._dialect_info(dialect)
+ # key assumption: DBAPI type codes are
+ # constants. Else this dictionary would
+ # grow unbounded.
+ d[coltype] = rp = d['impl'].result_processor(dialect, coltype)
+ return rp
+
+ def _dialect_info(self, dialect):
+ """Return a dialect-specific registry which
+ caches a dialect-specific implementation, bind processing
+ function, and one or more result processing functions."""
+
+ if self in dialect._type_memos:
+ return dialect._type_memos[self]
+ else:
+ impl = self._gen_dialect_impl(dialect)
+ if impl is self:
+ impl = self.adapt(type(self))
+ # this can't be self, else we create a cycle
+ assert impl is not self
+ dialect._type_memos[self] = d = {'impl': impl}
+ return d
+
+ def _gen_dialect_impl(self, dialect):
+ return dialect.type_descriptor(self)
+
+ def adapt(self, cls, **kw):
+ """Produce an "adapted" form of this type, given an "impl" class
+ to work with.
+
+ This method is used internally to associate generic
+ types with "implementation" types that are specific to a particular
+ dialect.
+ """
+ return util.constructor_copy(self, cls, **kw)
+
+
+ def coerce_compared_value(self, op, value):
+ """Suggest a type for a 'coerced' Python value in an expression.
+
+ Given an operator and value, gives the type a chance
+ to return a type which the value should be coerced into.
+
+ The default behavior here is conservative; if the right-hand
+ side is already coerced into a SQL type based on its
+ Python type, it is usually left alone.
+
+ End-user functionality extension here should generally be via
+ :class:`.TypeDecorator`, which provides more liberal behavior in that
+ it defaults to coercing the other side of the expression into this
+ type, thus applying special Python conversions above and beyond those
+ needed by the DBAPI to both sides. It also provides the public method
+ :meth:`.TypeDecorator.coerce_compared_value` which is intended for
+ end-user customization of this behavior.
+
+ """
+ _coerced_type = _type_map.get(type(value), NULLTYPE)
+ if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
+ is self._type_affinity:
+ return self
+ else:
+ return _coerced_type
+
+ def _compare_type_affinity(self, other):
+ return self._type_affinity is other._type_affinity
+
+ def compile(self, dialect=None):
+ """Produce a string-compiled form of this :class:`.TypeEngine`.
+
+ When called with no arguments, uses a "default" dialect
+ to produce a string result.
+
+ :param dialect: a :class:`.Dialect` instance.
+
+ """
+ # arg, return value is inconsistent with
+ # ClauseElement.compile()....this is a mistake.
+
+ if not dialect:
+ dialect = self._default_dialect()
+
+ return dialect.type_compiler.process(self)
+
+ @util.dependencies("sqlalchemy.engine.default")
+ def _default_dialect(self, default):
+ if self.__class__.__module__.startswith("sqlalchemy.dialects"):
+ tokens = self.__class__.__module__.split(".")[0:3]
+ mod = ".".join(tokens)
+ return getattr(__import__(mod).dialects, tokens[-1]).dialect()
+ else:
+ return default.DefaultDialect()
+
+ def __str__(self):
+ if util.py2k:
+ return unicode(self.compile()).\
+ encode('ascii', 'backslashreplace')
+ else:
+ return str(self.compile())
+
+ def __repr__(self):
+ return util.generic_repr(self)
+
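+# A small sketch of the memoization above: the first dialect_impl() call for
+# a given dialect populates dialect._type_memos[self] with the adapted impl,
+# which later bind/result processor lookups reuse::
+#
+#     from sqlalchemy import String
+#     from sqlalchemy.dialects import postgresql
+#     s = String(50)
+#     pg = postgresql.dialect()
+#     impl = s.dialect_impl(pg)          # adapts and caches
+#     assert s.dialect_impl(pg) is impl  # second call hits the memo
+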
+class UserDefinedType(TypeEngine):
+ """Base for user defined types.
+
+ This should be the base of new types. Note that
+ for most cases, :class:`.TypeDecorator` is probably
+ more appropriate::
+
+ import sqlalchemy.types as types
+
+ class MyType(types.UserDefinedType):
+ def __init__(self, precision = 8):
+ self.precision = precision
+
+ def get_col_spec(self):
+ return "MYTYPE(%s)" % self.precision
+
+ def bind_processor(self, dialect):
+ def process(value):
+ return value
+ return process
+
+ def result_processor(self, dialect, coltype):
+ def process(value):
+ return value
+ return process
+
+ Once the type is made, it's immediately usable::
+
+ table = Table('foo', meta,
+ Column('id', Integer, primary_key=True),
+ Column('data', MyType(16))
+ )
+
+ """
+ __visit_name__ = "user_defined"
+
+
+ class Comparator(TypeEngine.Comparator):
+ def _adapt_expression(self, op, other_comparator):
+ if hasattr(self.type, 'adapt_operator'):
+ util.warn_deprecated(
+ "UserDefinedType.adapt_operator is deprecated. Create "
+ "a UserDefinedType.Comparator subclass instead which "
+ "generates the desired expression constructs, given a "
+ "particular operator."
+ )
+ return self.type.adapt_operator(op), self.type
+ else:
+ return op, self.type
+
+ comparator_factory = Comparator
+
+ def coerce_compared_value(self, op, value):
+ """Suggest a type for a 'coerced' Python value in an expression.
+
+ Default behavior for :class:`.UserDefinedType` is the
+ same as that of :class:`.TypeDecorator`; by default it returns
+ ``self``, assuming the compared value should be coerced into
+ the same type as this one. See
+ :meth:`.TypeDecorator.coerce_compared_value` for more detail.
+
+ .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value`
+ now returns ``self`` by default, rather than falling onto the
+ more fundamental behavior of
+ :meth:`.TypeEngine.coerce_compared_value`.
+
+ """
+
+ return self
+
+
+class TypeDecorator(TypeEngine):
+ """Allows the creation of types which add additional functionality
+ to an existing type.
+
+ This approach is preferred to direct subclassing of SQLAlchemy's
+ built-in types as it ensures that all required functionality of
+ the underlying type is kept in place.
+
+ Typical usage::
+
+ import sqlalchemy.types as types
+
+ class MyType(types.TypeDecorator):
+ '''Prefixes Unicode values with "PREFIX:" on the way in and
+ strips it off on the way out.
+ '''
+
+ impl = types.Unicode
+
+ def process_bind_param(self, value, dialect):
+ return "PREFIX:" + value
+
+ def process_result_value(self, value, dialect):
+ return value[7:]
+
+ def copy(self):
+ return MyType(self.impl.length)
+
+ The class-level "impl" attribute is required, and can reference any
+ TypeEngine class. Alternatively, the load_dialect_impl() method
+ can be used to provide different type classes based on the dialect
+ given; in this case, the "impl" variable can reference
+ ``TypeEngine`` as a placeholder.
+
+ Types that receive a Python type that isn't similar to the ultimate type
+ used may want to define the :meth:`TypeDecorator.coerce_compared_value`
+ method. This is used to give the expression system a hint when coercing
+ Python objects into bind parameters within expressions. Consider this
+ expression::
+
+ mytable.c.somecol + datetime.date(2009, 5, 15)
+
+ Above, if "somecol" is an ``Integer`` variant, it makes sense that
+ we're doing date arithmetic, where above is usually interpreted
+ by databases as adding a number of days to the given date.
+ The expression system does the right thing by not attempting to
+ coerce the "date()" value into an integer-oriented bind parameter.
+
+ However, in the case of ``TypeDecorator``, we are usually changing an
+ incoming Python type to something new - ``TypeDecorator`` by default will
+ "coerce" the non-typed side to be the same type as itself. Such as below,
+ we define an "epoch" type that stores a date value as an integer::
+
+ class MyEpochType(types.TypeDecorator):
+ impl = types.Integer
+
+ epoch = datetime.date(1970, 1, 1)
+
+ def process_bind_param(self, value, dialect):
+ return (value - self.epoch).days
+
+ def process_result_value(self, value, dialect):
+ return self.epoch + timedelta(days=value)
+
+ Our expression of ``somecol + date`` with the above type will coerce the
+ "date" on the right side to also be treated as ``MyEpochType``.
+
+ This behavior can be overridden via the
+ :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
+ that should be used for the value of the expression. Below we set it such
+ that an integer value will be treated as an ``Integer``, and any other
+ value is assumed to be a date and will be treated as a ``MyEpochType``::
+
+ def coerce_compared_value(self, op, value):
+ if isinstance(value, int):
+ return Integer()
+ else:
+ return self
+
+ """
+
+ __visit_name__ = "type_decorator"
+
+ def __init__(self, *args, **kwargs):
+ """Construct a :class:`.TypeDecorator`.
+
+ Arguments sent here are passed to the constructor
+ of the class assigned to the ``impl`` class level attribute,
+ assuming the ``impl`` is a callable, and the resulting
+ object is assigned to the ``self.impl`` instance attribute
+ (thus overriding the class attribute of the same name).
+
+ If the class level ``impl`` is not a callable (the unusual case),
+ it will be assigned to the same instance attribute 'as-is',
+ ignoring those arguments passed to the constructor.
+
+ Subclasses can override this to customize the generation
+ of ``self.impl`` entirely.
+
+ """
+
+ if not hasattr(self.__class__, 'impl'):
+ raise AssertionError("TypeDecorator implementations "
+ "require a class-level variable "
+ "'impl' which refers to the class of "
+ "type being decorated")
+ self.impl = to_instance(self.__class__.impl, *args, **kwargs)
+
+ coerce_to_is_types = (util.NoneType, )
+ """Specify those Python types which should be coerced at the expression
+ level to "IS <constant>" when compared using ``==`` (and same for
+ ``IS NOT`` in conjunction with ``!=``).
+
+ For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``.
+
+ :class:`.TypeDecorator` modifies this list to only include ``NoneType``,
+ as typedecorator implementations that deal with boolean types are common.
+
+ Custom :class:`.TypeDecorator` classes can override this attribute to
+ return an empty tuple, in which case no values will be coerced to
+ constants.
+
+ .. versionadded:: 0.8.2
+ Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
+ control of ``__eq__()`` ``__ne__()`` operations.
+
+ """
+
+ class Comparator(TypeEngine.Comparator):
+
+ def operate(self, op, *other, **kwargs):
+ kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
+ return super(TypeDecorator.Comparator, self).operate(
+ op, *other, **kwargs)
+
+ def reverse_operate(self, op, other, **kwargs):
+ kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
+ return super(TypeDecorator.Comparator, self).reverse_operate(
+ op, other, **kwargs)
+
+ @property
+ def comparator_factory(self):
+ return type("TDComparator",
+ (TypeDecorator.Comparator, self.impl.comparator_factory),
+ {})
+
+ def _gen_dialect_impl(self, dialect):
+ """
+ #todo
+ """
+ adapted = dialect.type_descriptor(self)
+ if adapted is not self:
+ return adapted
+
+ # otherwise adapt the impl type, link
+ # to a copy of this TypeDecorator and return
+ # that.
+ typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
+ tt = self.copy()
+ if not isinstance(tt, self.__class__):
+ raise AssertionError('Type object %s does not properly '
+ 'implement the copy() method, it must '
+ 'return an object of type %s' % (self,
+ self.__class__))
+ tt.impl = typedesc
+ return tt
+
+ @property
+ def _type_affinity(self):
+ """
+ #todo
+ """
+ return self.impl._type_affinity
+
+ def type_engine(self, dialect):
+ """Return a dialect-specific :class:`.TypeEngine` instance
+ for this :class:`.TypeDecorator`.
+
+ In most cases this returns a dialect-adapted form of
+ the :class:`.TypeEngine` type represented by ``self.impl``.
+ Makes usage of :meth:`dialect_impl` but also traverses
+ into wrapped :class:`.TypeDecorator` instances.
+ Behavior can be customized here by overriding
+ :meth:`load_dialect_impl`.
+
+ """
+ adapted = dialect.type_descriptor(self)
+ if type(adapted) is not type(self):
+ return adapted
+ elif isinstance(self.impl, TypeDecorator):
+ return self.impl.type_engine(dialect)
+ else:
+ return self.load_dialect_impl(dialect)
+
+ def load_dialect_impl(self, dialect):
+ """Return a :class:`.TypeEngine` object corresponding to a dialect.
+
+ This is an end-user override hook that can be used to provide
+ differing types depending on the given dialect. It is used
+ by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
+ to help determine what type should ultimately be returned
+ for a given :class:`.TypeDecorator`.
+
+ By default returns ``self.impl``.
+
+ """
+ return self.impl
+
+ def __getattr__(self, key):
+ """Proxy all other undefined accessors to the underlying
+ implementation."""
+ return getattr(self.impl, key)
+
+ def process_literal_param(self, value, dialect):
+ """Receive a literal parameter value to be rendered inline within
+ a statement.
+
+ This method is used when the compiler renders a
+ literal value without using binds, typically within DDL
+ such as in the "server default" of a column or an expression
+ within a CHECK constraint.
+
+ The returned string will be rendered into the output string.
+
+ .. versionadded:: 0.9.0
+
+ """
+ raise NotImplementedError()
+
+ def process_bind_param(self, value, dialect):
+ """Receive a bound parameter value to be converted.
+
+ Subclasses override this method to return the
+ value that should be passed along to the underlying
+ :class:`.TypeEngine` object, and from there to the
+ DBAPI ``execute()`` method.
+
+ The operation could be anything desired to perform custom
+ behavior, such as transforming or serializing data.
+ This could also be used as a hook for validating logic.
+
+ This operation should be designed with the reverse operation
+ in mind, which would be the process_result_value method of
+ this class.
+
+ :param value: Data to operate upon, of any type expected by
+ this method in the subclass. Can be ``None``.
+ :param dialect: the :class:`.Dialect` in use.
+
+ """
+
+ raise NotImplementedError()
+
+ def process_result_value(self, value, dialect):
+ """Receive a result-row column value to be converted.
+
+ Subclasses should implement this method to operate on data
+ fetched from the database.
+
+ Subclasses override this method to return the
+ value that should be passed back to the application,
+ given a value that is already processed by
+ the underlying :class:`.TypeEngine` object, originally
+ from the DBAPI cursor method ``fetchone()`` or similar.
+
+ The operation could be anything desired to perform custom
+ behavior, such as transforming or serializing data.
+ This could also be used as a hook for validating logic.
+
+ :param value: Data to operate upon, of any type expected by
+ this method in the subclass. Can be ``None``.
+ :param dialect: the :class:`.Dialect` in use.
+
+ This operation should be designed to be reversible by
+ the "process_bind_param" method of this class.
+
+ """
+
+ raise NotImplementedError()
+
+ @util.memoized_property
+ def _has_bind_processor(self):
+ """memoized boolean, check if process_bind_param is implemented.
+
+ Allows the base process_bind_param to raise
+ NotImplementedError without needing to test an expensive
+ exception throw.
+
+ """
+
+ return self.__class__.process_bind_param.__code__ \
+ is not TypeDecorator.process_bind_param.__code__
+
+ @util.memoized_property
+ def _has_literal_processor(self):
+ """memoized boolean, check if process_literal_param is implemented.
+
+
+ """
+
+ return self.__class__.process_literal_param.__code__ \
+ is not TypeDecorator.process_literal_param.__code__
+
+ def literal_processor(self, dialect):
+ """Provide a literal processing function for the given
+ :class:`.Dialect`.
+
+ Subclasses here will typically override :meth:`.TypeDecorator.process_literal_param`
+ instead of this method directly.
+
+ By default, this method makes use of :meth:`.TypeDecorator.process_bind_param`
+ if that method is implemented, where :meth:`.TypeDecorator.process_literal_param`
+ is not. The rationale here is that :class:`.TypeDecorator` typically deals
+ with Python conversions of data that are above the layer of database
+ presentation. With the value converted by :meth:`.TypeDecorator.process_bind_param`,
+ the underlying type will then handle whether it needs to be presented to the
+ DBAPI as a bound parameter or to the database as an inline SQL value.
+
+ .. versionadded:: 0.9.0
+
+ """
+ if self._has_literal_processor:
+ process_param = self.process_literal_param
+ elif self._has_bind_processor:
+ # the bind processor should normally be OK
+ # for TypeDecorator since it isn't doing DB-level
+ # handling; the handling here won't differ for bound vs.
+ # literal values.
+ process_param = self.process_bind_param
+ else:
+ process_param = None
+
+ if process_param:
+ impl_processor = self.impl.literal_processor(dialect)
+ if impl_processor:
+ def process(value):
+ return impl_processor(process_param(value, dialect))
+ else:
+ def process(value):
+ return process_param(value, dialect)
+
+ return process
+ else:
+ return self.impl.literal_processor(dialect)
+
+ def bind_processor(self, dialect):
+ """Provide a bound value processing function for the
+ given :class:`.Dialect`.
+
+ This is the method that fulfills the :class:`.TypeEngine`
+ contract for bound value conversion. :class:`.TypeDecorator`
+ will wrap a user-defined implementation of
+ :meth:`process_bind_param` here.
+
+ User-defined code can override this method directly,
+ though it's likely best to use :meth:`process_bind_param` so that
+ the processing provided by ``self.impl`` is maintained.
+
+ :param dialect: Dialect instance in use.
+
+ This method is the reverse counterpart to the
+ :meth:`result_processor` method of this class.
+
+ """
+ if self._has_bind_processor:
+ process_param = self.process_bind_param
+ impl_processor = self.impl.bind_processor(dialect)
+ if impl_processor:
+ def process(value):
+ return impl_processor(process_param(value, dialect))
+
+ else:
+ def process(value):
+ return process_param(value, dialect)
+
+ return process
+ else:
+ return self.impl.bind_processor(dialect)
+
+ @util.memoized_property
+ def _has_result_processor(self):
+ """memoized boolean, check if process_result_value is implemented.
+
+ Allows the base process_result_value to raise
+ NotImplementedError without needing to test an expensive
+ exception throw.
+
+ """
+ return self.__class__.process_result_value.__code__ \
+ is not TypeDecorator.process_result_value.__code__
+
+ def result_processor(self, dialect, coltype):
+ """Provide a result value processing function for the given
+ :class:`.Dialect`.
+
+ This is the method that fulfills the :class:`.TypeEngine`
+ contract for result value conversion. :class:`.TypeDecorator`
+ will wrap a user-defined implementation of
+ :meth:`process_result_value` here.
+
+ User-defined code can override this method directly,
+ though it's likely best to use :meth:`process_result_value` so that
+ the processing provided by ``self.impl`` is maintained.
+
+ :param dialect: Dialect instance in use.
+ :param coltype: DBAPI coltype argument received in cursor.description.
+
+ This method is the reverse counterpart to the
+ :meth:`bind_processor` method of this class.
+
+ """
+ if self._has_result_processor:
+ process_value = self.process_result_value
+ impl_processor = self.impl.result_processor(dialect,
+ coltype)
+ if impl_processor:
+ def process(value):
+ return process_value(impl_processor(value), dialect)
+
+ else:
+ def process(value):
+ return process_value(value, dialect)
+
+ return process
+ else:
+ return self.impl.result_processor(dialect, coltype)
+
+ def coerce_compared_value(self, op, value):
+ """Suggest a type for a 'coerced' Python value in an expression.
+
+ By default, returns self. This method is called by
+ the expression system when an object using this type is
+ on the left or right side of an expression against a plain Python
+ object which does not yet have a SQLAlchemy type assigned::
+
+ expr = table.c.somecolumn + 35
+
+ Where above, if ``somecolumn`` uses this type, this method will
+ be called with the value ``operator.add``
+ and ``35``. The return value is whatever SQLAlchemy type should
+ be used for ``35`` for this particular operation.
+
+ """
+ return self
+
+ def copy(self):
+ """Produce a copy of this :class:`.TypeDecorator` instance.
+
+ This is a shallow copy and is provided to fulfill part of
+ the :class:`.TypeEngine` contract. It usually does not
+ need to be overridden unless the user-defined :class:`.TypeDecorator`
+ has local state that should be deep-copied.
+
+ """
+
+ instance = self.__class__.__new__(self.__class__)
+ instance.__dict__.update(self.__dict__)
+ return instance
+
+ def get_dbapi_type(self, dbapi):
+ """Return the DBAPI type object represented by this
+ :class:`.TypeDecorator`.
+
+ By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
+ underlying "impl".
+ """
+ return self.impl.get_dbapi_type(dbapi)
+
+ def compare_values(self, x, y):
+ """Given two values, compare them for equality.
+
+ By default this calls upon :meth:`.TypeEngine.compare_values`
+ of the underlying "impl", which in turn usually
+ uses the Python equals operator ``==``.
+
+ This function is used by the ORM to compare
+ an original-loaded value with an intercepted
+ "changed" value, to determine if a net change
+ has occurred.
+
+ """
+ return self.impl.compare_values(x, y)
+
+ def __repr__(self):
+ return util.generic_repr(self, to_inspect=self.impl)
+
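+# A small sketch of the literal_processor() fallback above: a TypeDecorator
+# implementing only process_bind_param() still renders inline literals,
+# because the bind-level conversion is reused on the "literal_binds" path::
+#
+#     import sqlalchemy.types as types
+#
+#     class Prefixed(types.TypeDecorator):
+#         impl = types.String
+#
+#         def process_bind_param(self, value, dialect):
+#             return "PREFIX:" + value
+#
+#     # compiling an expression against Prefixed with literal_binds
+#     # renders the value inline as 'PREFIX:...'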
+
+class Variant(TypeDecorator):
+ """A wrapping type that selects among a variety of
+ implementations based on dialect in use.
+
+ The :class:`.Variant` type is typically constructed
+ using the :meth:`.TypeEngine.with_variant` method.
+
+ .. versionadded:: 0.7.2
+
+ .. seealso:: :meth:`.TypeEngine.with_variant` for an example of use.
+
+ """
+
+ def __init__(self, base, mapping):
+ """Construct a new :class:`.Variant`.
+
+ :param base: the base 'fallback' type
+ :param mapping: dictionary of string dialect names to
+ :class:`.TypeEngine` instances.
+
+ """
+ self.impl = base
+ self.mapping = mapping
+
+ def load_dialect_impl(self, dialect):
+ if dialect.name in self.mapping:
+ return self.mapping[dialect.name]
+ else:
+ return self.impl
+
+ def with_variant(self, type_, dialect_name):
+ """Return a new :class:`.Variant` which adds the given
+ type + dialect name to the mapping, in addition to the
+ mapping present in this :class:`.Variant`.
+
+ :param type_: a :class:`.TypeEngine` that will be selected
+ as a variant from the originating type, when a dialect
+ of the given name is in use.
+ :param dialect_name: base name of the dialect which uses
+ this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
+
+ """
+
+ if dialect_name in self.mapping:
+ raise exc.ArgumentError(
+ "Dialect '%s' is already present in "
+ "the mapping for this Variant" % dialect_name)
+ mapping = self.mapping.copy()
+ mapping[dialect_name] = type_
+ return Variant(self.impl, mapping)
+
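+# Chained variants, per with_variant() above: each call returns a new
+# Variant with a widened dialect-name -> type mapping, and a repeated
+# dialect name raises exc.ArgumentError::
+#
+#     from sqlalchemy import String
+#     from sqlalchemy.dialects import mysql, postgresql
+#     s = String(30).with_variant(mysql.VARCHAR(30), 'mysql')
+#     s = s.with_variant(postgresql.VARCHAR(40), 'postgresql')
+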
+def _reconstitute_comparator(expression):
+ return expression.comparator
+
+
+def to_instance(typeobj, *arg, **kw):
+ if typeobj is None:
+ return NULLTYPE
+
+ if util.callable(typeobj):
+ return typeobj(*arg, **kw)
+ else:
+ return typeobj
+
+
+def adapt_type(typeobj, colspecs):
+ if isinstance(typeobj, type):
+ typeobj = typeobj()
+ for t in typeobj.__class__.__mro__[0:-1]:
+ try:
+ impltype = colspecs[t]
+ break
+ except KeyError:
+ pass
+ else:
+ # couldn't adapt - so just return the type itself
+ # (it may be a user-defined type)
+ return typeobj
+ # if we adapted the given generic type to a database-specific type,
+ # but it turns out the originally given "generic" type
+ # is actually a subclass of our resulting type, then we were already
+ # given a more specific type than that required; so use that.
+ if issubclass(typeobj.__class__, impltype):
+ return typeobj
+ return typeobj.adapt(impltype)
+
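+# adapt_type() in miniature, against a hypothetical per-dialect colspecs
+# mapping (the names below are illustrative, not real dialect classes)::
+#
+#     # colspecs = {Numeric: SomeDialectNumeric, ...}
+#     # adapt_type(Numeric(10, 2), colspecs)          -> SomeDialectNumeric(...)
+#     # adapt_type(SomeDialectNumeric(...), colspecs) -> returned unchanged,
+#     #     since the given type is already a subclass of the adapted result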
+
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index 6796d7edb..50ce30aaf 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -1,48 +1,32 @@
# sql/util.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-from .. import exc, schema, util, sql
-from ..util import topological
-from . import expression, operators, visitors
+"""High level utilities which build upon other modules here.
+
+"""
+
+from .. import exc, util
+from .base import _from_objects, ColumnSet
+from . import operators, visitors
from itertools import chain
from collections import deque
-"""Utility functions that build upon SQL and Schema constructs."""
+from .elements import BindParameter, ColumnClause, ColumnElement, \
+ Null, UnaryExpression, literal_column, Label
+from .selectable import ScalarSelect, Join, FromClause, FromGrouping
+from .schema import Column
+join_condition = util.langhelpers.public_factory(
+ Join._join_condition,
+ ".sql.util.join_condition")
-def sort_tables(tables, skip_fn=None, extra_dependencies=None):
- """sort a collection of Table objects in order of
- their foreign-key dependency."""
-
- tables = list(tables)
- tuples = []
- if extra_dependencies is not None:
- tuples.extend(extra_dependencies)
-
- def visit_foreign_key(fkey):
- if fkey.use_alter:
- return
- elif skip_fn and skip_fn(fkey):
- return
- parent_table = fkey.column.table
- if parent_table in tables:
- child_table = fkey.parent.table
- if parent_table is not child_table:
- tuples.append((parent_table, child_table))
-
- for table in tables:
- visitors.traverse(table,
- {'schema_visitor': True},
- {'foreign_key': visit_foreign_key})
-
- tuples.extend(
- [parent, table] for parent in table._extra_dependencies
- )
-
- return list(topological.sort(tuples, tables))
+# names that are still being imported from the outside
+from .annotation import _shallow_annotate, _deep_annotate, _deep_deannotate
+from .elements import _find_columns
+from .ddl import sort_tables
def find_join_source(clauses, join_to):
@@ -62,7 +46,7 @@ def find_join_source(clauses, join_to):
"""
- selectables = list(expression._from_objects(join_to))
+ selectables = list(_from_objects(join_to))
for i, f in enumerate(clauses):
for s in selectables:
if f.is_derived_from(s):
@@ -109,7 +93,7 @@ def visit_binary_product(fn, expr):
stack = []
def visit(element):
- if isinstance(element, (expression.ScalarSelect)):
+ if isinstance(element, ScalarSelect):
# we dont want to dig into correlated subqueries,
# those are just column elements by themselves
yield element
@@ -123,7 +107,7 @@ def visit_binary_product(fn, expr):
for elem in element.get_children():
visit(elem)
else:
- if isinstance(element, expression.ColumnClause):
+ if isinstance(element, ColumnClause):
yield element
for elem in element.get_children():
for e in visit(elem):
@@ -163,13 +147,6 @@ def find_tables(clause, check_columns=False,
return tables
-def find_columns(clause):
- """locate Column objects within the given expression."""
-
- cols = util.column_set()
- visitors.traverse(clause, {}, {'column': cols.add})
- return cols
-
def unwrap_order_by(clause):
"""Break up an 'order by' expression into individual column-expressions,
@@ -179,9 +156,9 @@ def unwrap_order_by(clause):
stack = deque([clause])
while stack:
t = stack.popleft()
- if isinstance(t, expression.ColumnElement) and \
+ if isinstance(t, ColumnElement) and \
(
- not isinstance(t, expression.UnaryExpression) or \
+ not isinstance(t, UnaryExpression) or \
not operators.is_ordering_modifier(t.modifier)
):
cols.add(t)
@@ -211,9 +188,9 @@ def surface_selectables(clause):
while stack:
elem = stack.pop()
yield elem
- if isinstance(elem, expression.Join):
+ if isinstance(elem, Join):
stack.extend((elem.left, elem.right))
- elif isinstance(elem, expression.FromGrouping):
+ elif isinstance(elem, FromGrouping):
stack.append(elem.element)
def selectables_overlap(left, right):
@@ -277,27 +254,6 @@ class _repr_params(object):
return repr(self.params)
-def expression_as_ddl(clause):
- """Given a SQL expression, convert for usage in DDL, such as
- CREATE INDEX and CHECK CONSTRAINT.
-
- Converts bind params into quoted literals, column identifiers
- into detached column constructs so that the parent table
- identifier is not included.
-
- """
- def repl(element):
- if isinstance(element, expression.BindParameter):
- return expression.literal_column(_quote_ddl_expr(element.value))
- elif isinstance(element, expression.ColumnClause) and \
- element.table is not None:
- col = expression.column(element.name)
- col.quote = element.quote
- return col
- else:
- return None
-
- return visitors.replacement_traverse(clause, {}, repl)
def adapt_criterion_to_null(crit, nulls):
@@ -307,308 +263,22 @@ def adapt_criterion_to_null(crit, nulls):
"""
def visit_binary(binary):
- if isinstance(binary.left, expression.BindParameter) \
+ if isinstance(binary.left, BindParameter) \
and binary.left._identifying_key in nulls:
# reverse order if the NULL is on the left side
binary.left = binary.right
- binary.right = expression.null()
+ binary.right = Null()
binary.operator = operators.is_
binary.negate = operators.isnot
- elif isinstance(binary.right, expression.BindParameter) \
+ elif isinstance(binary.right, BindParameter) \
and binary.right._identifying_key in nulls:
- binary.right = expression.null()
+ binary.right = Null()
binary.operator = operators.is_
binary.negate = operators.isnot
return visitors.cloned_traverse(crit, {}, {'binary': visit_binary})
-def join_condition(a, b, ignore_nonexistent_tables=False,
- a_subset=None,
- consider_as_foreign_keys=None):
- """create a join condition between two tables or selectables.
-
- e.g.::
-
- join_condition(tablea, tableb)
-
- would produce an expression along the lines of::
-
- tablea.c.id==tableb.c.tablea_id
-
- The join is determined based on the foreign key relationships
- between the two selectables. If there are multiple ways
- to join, or no way to join, an error is raised.
-
- :param ignore_nonexistent_tables: Deprecated - this
- flag is no longer used. Only resolution errors regarding
- the two given tables are propagated.
-
- :param a_subset: An optional expression that is a sub-component
- of ``a``. An attempt will be made to join to just this sub-component
- first before looking at the full ``a`` construct, and if found
- will be successful even if there are other ways to join to ``a``.
- This allows the "right side" of a join to be passed thereby
- providing a "natural join".
-
- """
- crit = []
- constraints = set()
-
- for left in (a_subset, a):
- if left is None:
- continue
- for fk in sorted(
- b.foreign_keys,
- key=lambda fk: fk.parent._creation_order):
- if consider_as_foreign_keys is not None and \
- fk.parent not in consider_as_foreign_keys:
- continue
- try:
- col = fk.get_referent(left)
- except exc.NoReferenceError as nrte:
- if nrte.table_name == left.name:
- raise
- else:
- continue
-
- if col is not None:
- crit.append(col == fk.parent)
- constraints.add(fk.constraint)
- if left is not b:
- for fk in sorted(
- left.foreign_keys,
- key=lambda fk: fk.parent._creation_order):
- if consider_as_foreign_keys is not None and \
- fk.parent not in consider_as_foreign_keys:
- continue
- try:
- col = fk.get_referent(b)
- except exc.NoReferenceError as nrte:
- if nrte.table_name == b.name:
- raise
- else:
- # this is totally covered. can't get
- # coverage to mark it.
- continue
-
- if col is not None:
- crit.append(col == fk.parent)
- constraints.add(fk.constraint)
- if crit:
- break
-
- if len(crit) == 0:
- if isinstance(b, expression.FromGrouping):
- hint = " Perhaps you meant to convert the right side to a "\
- "subquery using alias()?"
- else:
- hint = ""
- raise exc.NoForeignKeysError(
- "Can't find any foreign key relationships "
- "between '%s' and '%s'.%s" % (a.description, b.description, hint))
- elif len(constraints) > 1:
- raise exc.AmbiguousForeignKeysError(
- "Can't determine join between '%s' and '%s'; "
- "tables have more than one foreign key "
- "constraint relationship between them. "
- "Please specify the 'onclause' of this "
- "join explicitly." % (a.description, b.description))
- elif len(crit) == 1:
- return (crit[0])
- else:
- return sql.and_(*crit)
-
-
-class Annotated(object):
- """clones a ClauseElement and applies an 'annotations' dictionary.
-
- Unlike regular clones, this clone also mimics __hash__() and
- __cmp__() of the original element so that it takes its place
- in hashed collections.
-
- A reference to the original element is maintained, for the important
- reason of keeping its hash value current. When GC'ed, the
- hash value may be reused, causing conflicts.
-
- """
-
- def __new__(cls, *args):
- if not args:
- # clone constructor
- return object.__new__(cls)
- else:
- element, values = args
- # pull appropriate subclass from registry of annotated
- # classes
- try:
- cls = annotated_classes[element.__class__]
- except KeyError:
- cls = annotated_classes[element.__class__] = type.__new__(type,
- "Annotated%s" % element.__class__.__name__,
- (cls, element.__class__), {})
- return object.__new__(cls)
-
- def __init__(self, element, values):
- # force FromClause to generate their internal
- # collections into __dict__
- if isinstance(element, expression.FromClause):
- element.c
-
- self.__dict__ = element.__dict__.copy()
- expression.ColumnElement.comparator._reset(self)
- self.__element = element
- self._annotations = values
-
- def _annotate(self, values):
- _values = self._annotations.copy()
- _values.update(values)
- return self._with_annotations(_values)
-
- def _with_annotations(self, values):
- clone = self.__class__.__new__(self.__class__)
- clone.__dict__ = self.__dict__.copy()
- expression.ColumnElement.comparator._reset(clone)
- clone._annotations = values
- return clone
-
- def _deannotate(self, values=None, clone=True):
- if values is None:
- return self.__element
- else:
- _values = self._annotations.copy()
- for v in values:
- _values.pop(v, None)
- return self._with_annotations(_values)
-
- def _compiler_dispatch(self, visitor, **kw):
- return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
-
- @property
- def _constructor(self):
- return self.__element._constructor
-
- def _clone(self):
- clone = self.__element._clone()
- if clone is self.__element:
- # detect immutable, don't change anything
- return self
- else:
- # update the clone with any changes that have occurred
- # to this object's __dict__.
- clone.__dict__.update(self.__dict__)
- return self.__class__(clone, self._annotations)
-
- def __hash__(self):
- return hash(self.__element)
-
- def __eq__(self, other):
- if isinstance(self.__element, expression.ColumnOperators):
- return self.__element.__class__.__eq__(self, other)
- else:
- return hash(other) == hash(self)
-
-
-class AnnotatedColumnElement(Annotated):
- def __init__(self, element, values):
- Annotated.__init__(self, element, values)
- for attr in ('name', 'key'):
- if self.__dict__.get(attr, False) is None:
- self.__dict__.pop(attr)
-
- @util.memoized_property
- def name(self):
- """pull 'name' from parent, if not present"""
- return self._Annotated__element.name
-
- @util.memoized_property
- def key(self):
- """pull 'key' from parent, if not present"""
- return self._Annotated__element.key
-
- @util.memoized_property
- def info(self):
- return self._Annotated__element.info
-
-# hard-generate Annotated subclasses. this technique
-# is used instead of on-the-fly types (i.e. type.__new__())
-# so that the resulting objects are pickleable.
-annotated_classes = {}
-
-for cls in list(expression.__dict__.values()) + [schema.Column, schema.Table]:
- if isinstance(cls, type) and issubclass(cls, expression.ClauseElement):
- if issubclass(cls, expression.ColumnElement):
- annotation_cls = "AnnotatedColumnElement"
- else:
- annotation_cls = "Annotated"
- exec("class Annotated%s(%s, cls):\n" \
- " pass" % (cls.__name__, annotation_cls), locals())
- exec("annotated_classes[cls] = Annotated%s" % (cls.__name__,))
-
-
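
This hunk removes the annotation machinery from this module; assuming the
behavior is preserved wherever it now lives, the hashing contract described
in the ``Annotated`` docstring above can be illustrated roughly as follows
(the ``'role'`` key is arbitrary)::

    from sqlalchemy import Column, Integer, MetaData, Table

    t = Table('t', MetaData(), Column('x', Integer))
    a = t.c.x._annotate({'role': 'demo'})

    assert a is not t.c.x
    assert hash(a) == hash(t.c.x)   # the clone takes the original's place
    assert a._annotations['role'] == 'demo'
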
-def _deep_annotate(element, annotations, exclude=None):
- """Deep copy the given ClauseElement, annotating each element
- with the given annotations dictionary.
-
- Elements within the exclude collection will be cloned but not annotated.
-
- """
- def clone(elem):
- if exclude and \
- hasattr(elem, 'proxy_set') and \
- elem.proxy_set.intersection(exclude):
- newelem = elem._clone()
- elif annotations != elem._annotations:
- newelem = elem._annotate(annotations)
- else:
- newelem = elem
- newelem._copy_internals(clone=clone)
- return newelem
-
- if element is not None:
- element = clone(element)
- return element
-
-
-def _deep_deannotate(element, values=None):
- """Deep copy the given element, removing annotations."""
-
- cloned = util.column_dict()
-
- def clone(elem):
- # if a values dict is given,
- # the elem must be cloned each time it appears,
- # as there may be different annotations in source
- # elements that are remaining. if totally
- # removing all annotations, can assume the same
- # slate...
- if values or elem not in cloned:
- newelem = elem._deannotate(values=values, clone=True)
- newelem._copy_internals(clone=clone)
- if not values:
- cloned[elem] = newelem
- return newelem
- else:
- return cloned[elem]
-
- if element is not None:
- element = clone(element)
- return element
-
-
-def _shallow_annotate(element, annotations):
- """Annotate the given ClauseElement and copy its internals so that
- internal objects refer to the new annotated object.
-
- Basically used to apply a "don't traverse" annotation to a
- selectable, without digging throughout the whole
- structure wasting time.
- """
- element = element._annotate(annotations)
- element._copy_internals()
- return element
-
-
def splice_joins(left, right, stop_on=None):
if left is None:
return right
@@ -619,7 +289,7 @@ def splice_joins(left, right, stop_on=None):
ret = None
while stack:
(right, prevright) = stack.pop()
- if isinstance(right, expression.Join) and right is not stop_on:
+ if isinstance(right, Join) and right is not stop_on:
right = right._clone()
right._reset_exported()
right.onclause = adapter.traverse(right.onclause)
@@ -703,7 +373,7 @@ def reduce_columns(columns, *clauses, **kw):
if clause is not None:
visitors.traverse(clause, {}, {'binary': visit_binary})
- return expression.ColumnSet(columns.difference(omit))
+ return ColumnSet(columns.difference(omit))
def criterion_as_pairs(expression, consider_as_foreign_keys=None,
@@ -722,8 +392,8 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
def visit_binary(binary):
if not any_operator and binary.operator is not operators.eq:
return
- if not isinstance(binary.left, sql.ColumnElement) or \
- not isinstance(binary.right, sql.ColumnElement):
+ if not isinstance(binary.left, ColumnElement) or \
+ not isinstance(binary.right, ColumnElement):
return
if consider_as_foreign_keys:
@@ -745,8 +415,8 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
binary.left not in consider_as_referenced_keys):
pairs.append((binary.right, binary.left))
else:
- if isinstance(binary.left, schema.Column) and \
- isinstance(binary.right, schema.Column):
+ if isinstance(binary.left, Column) and \
+ isinstance(binary.right, Column):
if binary.left.references(binary.right):
pairs.append((binary.right, binary.left))
elif binary.right.references(binary.left):
@@ -756,6 +426,7 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None,
return pairs
+
class AliasedRow(object):
"""Wrap a RowProxy with a translation map.
@@ -848,10 +519,10 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor):
magic_flag = False
def replace(self, col):
- if not self.magic_flag and isinstance(col, expression.FromClause) and \
+ if not self.magic_flag and isinstance(col, FromClause) and \
self.selectable.is_derived_from(col):
return self.selectable
- elif not isinstance(col, expression.ColumnElement):
+ elif not isinstance(col, ColumnElement):
return None
elif self.include_fn and not self.include_fn(col):
return None
@@ -903,7 +574,7 @@ class ColumnAdapter(ClauseAdapter):
c = self.adapt_clause(col)
# anonymize labels in case they have a hardcoded name
- if isinstance(c, expression.Label):
+ if isinstance(c, Label):
c = c.label(None)
# adapt_required used by eager loading to indicate that
@@ -927,3 +598,4 @@ class ColumnAdapter(ClauseAdapter):
def __setstate__(self, state):
self.__dict__.update(state)
self.columns = util.PopulateDict(self._locate_col)
+
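
As used throughout this module, ``ClauseAdapter`` rewrites an expression
against a different selectable; a minimal sketch (output approximate)::

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.sql.util import ClauseAdapter

    t = Table('t', MetaData(), Column('id', Integer))
    alias = t.alias()

    print(t.c.id == 7)                                 # t.id = :id_1
    print(ClauseAdapter(alias).traverse(t.c.id == 7))  # t_1.id = :id_1
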
diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py
index 7b729bf7f..d9ad04fc0 100644
--- a/lib/sqlalchemy/sql/visitors.py
+++ b/lib/sqlalchemy/sql/visitors.py
@@ -1,5 +1,5 @@
# sql/visitors.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py
index a87829499..8ad856e2b 100644
--- a/lib/sqlalchemy/testing/__init__.py
+++ b/lib/sqlalchemy/testing/__init__.py
@@ -1,3 +1,8 @@
+# testing/__init__.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .warnings import testing_warn, assert_warnings, resetwarnings
@@ -11,7 +16,7 @@ from .exclusions import db_spec, _is_excluded, fails_if, skip_if, future,\
from .assertions import emits_warning, emits_warning_on, uses_deprecated, \
eq_, ne_, is_, is_not_, startswith_, assert_raises, \
assert_raises_message, AssertsCompiledSQL, ComparesTables, \
- AssertsExecutionResults
+ AssertsExecutionResults, expect_deprecated
from .util import run_as_contextmanager, rowset, fail, provide_metadata, adict
diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py
index 96a8bc023..61649e5e3 100644
--- a/lib/sqlalchemy/testing/assertions.py
+++ b/lib/sqlalchemy/testing/assertions.py
@@ -1,8 +1,14 @@
+# testing/assertions.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from __future__ import absolute_import
from . import util as testutil
from sqlalchemy import pool, orm, util
-from sqlalchemy.engine import default, create_engine
+from sqlalchemy.engine import default, create_engine, url
from sqlalchemy import exc as sa_exc
from sqlalchemy.util import decorator
from sqlalchemy import types as sqltypes, schema
@@ -92,30 +98,36 @@ def uses_deprecated(*messages):
@decorator
def decorate(fn, *args, **kw):
- # todo: should probably be strict about this, too
- filters = [dict(action='ignore',
- category=sa_exc.SAPendingDeprecationWarning)]
- if not messages:
- filters.append(dict(action='ignore',
- category=sa_exc.SADeprecationWarning))
- else:
- filters.extend(
- [dict(action='ignore',
- message=message,
- category=sa_exc.SADeprecationWarning)
- for message in
- [(m.startswith('//') and
- ('Call to deprecated function ' + m[2:]) or m)
- for m in messages]])
-
- for f in filters:
- warnings.filterwarnings(**f)
- try:
+ with expect_deprecated(*messages):
return fn(*args, **kw)
- finally:
- resetwarnings()
return decorate
+@contextlib.contextmanager
+def expect_deprecated(*messages):
+ # todo: should probably be strict about this, too
+ filters = [dict(action='ignore',
+ category=sa_exc.SAPendingDeprecationWarning)]
+ if not messages:
+ filters.append(dict(action='ignore',
+ category=sa_exc.SADeprecationWarning))
+ else:
+ filters.extend(
+ [dict(action='ignore',
+ message=message,
+ category=sa_exc.SADeprecationWarning)
+ for message in
+ [(m.startswith('//') and
+ ('Call to deprecated function ' + m[2:]) or m)
+ for m in messages]])
+
+ for f in filters:
+ warnings.filterwarnings(**f)
+ try:
+ yield
+ finally:
+ resetwarnings()
+
+
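
The extracted context manager can now be used inline in a test body; a
self-contained sketch, where ``some_old_call`` is a stand-in for a
deprecated API::

    import warnings
    from sqlalchemy import exc as sa_exc
    from sqlalchemy.testing import expect_deprecated

    def some_old_call():
        warnings.warn("Call to deprecated function some_old_call",
                      sa_exc.SADeprecationWarning)

    with expect_deprecated("Call to deprecated function some_old_call"):
        some_old_call()   # matching deprecation warnings are filtered
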
def global_cleanup_assertions():
"""Check things that have to be finalized at the end of a test suite.
@@ -181,7 +193,8 @@ class AssertsCompiledSQL(object):
checkparams=None, dialect=None,
checkpositional=None,
use_default_dialect=False,
- allow_dialect_select=False):
+ allow_dialect_select=False,
+ literal_binds=False):
if use_default_dialect:
dialect = default.DefaultDialect()
elif allow_dialect_select:
@@ -195,26 +208,36 @@ class AssertsCompiledSQL(object):
elif dialect == 'default':
dialect = default.DefaultDialect()
elif isinstance(dialect, util.string_types):
- dialect = create_engine("%s://" % dialect).dialect
+ dialect = url.URL(dialect).get_dialect()()
kw = {}
+ compile_kwargs = {}
+
if params is not None:
kw['column_keys'] = list(params)
+ if literal_binds:
+ compile_kwargs['literal_binds'] = True
+
if isinstance(clause, orm.Query):
context = clause._compile_context()
context.statement.use_labels = True
clause = context.statement
+ if compile_kwargs:
+ kw['compile_kwargs'] = compile_kwargs
+
c = clause.compile(dialect=dialect, **kw)
param_str = repr(getattr(c, 'params', {}))
if util.py3k:
param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
+ print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8'))
+ else:
+ print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str)
- print("\nSQL String:\n" + util.text_type(c) + param_str)
cc = re.sub(r'[\n\t]', '', util.text_type(c))
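
The new ``literal_binds`` flag lets compiled-SQL fixtures assert on
inline-rendered parameters; a sketch of intended usage (class and test
names are illustrative)::

    from sqlalchemy import literal, select
    from sqlalchemy.testing import fixtures, AssertsCompiledSQL

    class LiteralCompileTest(fixtures.TestBase, AssertsCompiledSQL):
        __dialect__ = 'default'

        def test_render_literal(self):
            self.assert_compile(
                select([literal("some text")]),
                "SELECT 'some text' AS anon_1",
                literal_binds=True
            )
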
diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py
index a6b63b2c3..3e0d4c9d3 100644
--- a/lib/sqlalchemy/testing/assertsql.py
+++ b/lib/sqlalchemy/testing/assertsql.py
@@ -1,3 +1,8 @@
+# testing/assertsql.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
from ..engine.default import DefaultDialect
from .. import util
diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py
index ae4f585e1..64f578dab 100644
--- a/lib/sqlalchemy/testing/config.py
+++ b/lib/sqlalchemy/testing/config.py
@@ -1,2 +1,8 @@
+# testing/config.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
requirements = None
db = None
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 29c8b6a03..d85771f8a 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -1,3 +1,9 @@
+# testing/engines.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from __future__ import absolute_import
import types
@@ -26,6 +32,9 @@ class ConnectionKiller(object):
def checkout(self, dbapi_con, con_record, con_proxy):
self.proxy_refs[con_proxy] = True
+ def invalidate(self, dbapi_con, con_record, exception):
+ self.conns.discard((dbapi_con, con_record))
+
def _safe(self, fn):
try:
fn()
@@ -43,7 +52,7 @@ class ConnectionKiller(object):
def close_all(self):
for rec in list(self.proxy_refs):
- if rec is not None:
+ if rec is not None and rec.is_valid:
self._safe(rec._close)
def _after_test_ctx(self):
@@ -52,7 +61,7 @@ class ConnectionKiller(object):
# is collecting in finalize_fairy, deadlock.
# not sure if this should be if pypy/jython only.
# note that firebird/fdb definitely needs this though
- for conn, rec in self.conns:
+ for conn, rec in list(self.conns):
self._safe(conn.rollback)
def _stop_test_ctx(self):
@@ -72,10 +81,10 @@ class ConnectionKiller(object):
def _stop_test_ctx_aggressive(self):
self.close_all()
- for conn, rec in self.conns:
+ for conn, rec in list(self.conns):
self._safe(conn.close)
rec.connection = None
-
+
self.conns = set()
for rec in list(self.testing_engines):
rec.dispose()
@@ -220,6 +229,7 @@ def testing_engine(url=None, options=None):
if use_reaper:
event.listen(engine.pool, 'connect', testing_reaper.connect)
event.listen(engine.pool, 'checkout', testing_reaper.checkout)
+ event.listen(engine.pool, 'invalidate', testing_reaper.invalidate)
testing_reaper.add_engine(engine)
return engine
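
The reaper now also tracks pool invalidations, so invalidated connections
are not rolled back or closed a second time; the handler signature mirrors
the pool ``'invalidate'`` event wired up above::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    def on_invalidate(dbapi_con, con_record, exception):
        # fires when a pooled connection is marked invalid
        print("invalidated:", exception)

    event.listen(engine.pool, 'invalidate', on_invalidate)
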
diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py
index c0dd58650..0553e0e22 100644
--- a/lib/sqlalchemy/testing/entities.py
+++ b/lib/sqlalchemy/testing/entities.py
@@ -1,3 +1,9 @@
+# testing/entities.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
import sqlalchemy as sa
from sqlalchemy import exc as sa_exc
diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py
index f580f3fde..f868f6396 100644
--- a/lib/sqlalchemy/testing/exclusions.py
+++ b/lib/sqlalchemy/testing/exclusions.py
@@ -1,3 +1,8 @@
+# testing/exclusions.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
import operator
@@ -19,6 +24,11 @@ class skip_if(object):
def enabled(self):
return not self.predicate()
+ def __add__(self, other):
+ def decorate(fn):
+ return other(self(fn))
+ return decorate
+
@contextlib.contextmanager
def fail_if(self, name='block'):
try:
@@ -93,7 +103,14 @@ class Predicate(object):
elif isinstance(predicate, tuple):
return SpecPredicate(*predicate)
elif isinstance(predicate, util.string_types):
- return SpecPredicate(predicate, None, None)
+ tokens = predicate.split(" ", 2)
+ op = spec = None
+ db = tokens.pop(0)
+ if tokens:
+ op = tokens.pop(0)
+ if tokens:
+ spec = tuple(int(d) for d in tokens.pop(0).split("."))
+ return SpecPredicate(db, op, spec)
elif util.callable(predicate):
return LambdaPredicate(predicate)
else:
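
With this change a bare string predicate can carry an operator and a dotted
version, parsed into a ``SpecPredicate``; roughly::

    from sqlalchemy.testing import fails_if, skip_if

    @fails_if("mysql < 5.0")    # parsed as SpecPredicate("mysql", "<", (5, 0))
    def test_something():
        pass

    # skip_if instances additionally compose via the new "+" support:
    combined = skip_if("sqlite") + skip_if("postgresql >= 9.3")
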
diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py
index daa779ae3..464a723d2 100644
--- a/lib/sqlalchemy/testing/fixtures.py
+++ b/lib/sqlalchemy/testing/fixtures.py
@@ -1,3 +1,9 @@
+# testing/fixtures.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from . import config
from . import assertions, schema
from .util import adict
diff --git a/lib/sqlalchemy/testing/mock.py b/lib/sqlalchemy/testing/mock.py
index 650962384..18ba053ea 100644
--- a/lib/sqlalchemy/testing/mock.py
+++ b/lib/sqlalchemy/testing/mock.py
@@ -1,13 +1,19 @@
+# testing/mock.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Import stub for mock library.
"""
from __future__ import absolute_import
from ..util import py33
if py33:
- from unittest.mock import MagicMock, Mock, call
+ from unittest.mock import MagicMock, Mock, call, patch
else:
try:
- from mock import MagicMock, Mock, call
+ from mock import MagicMock, Mock, call, patch
except ImportError:
raise ImportError(
"SQLAlchemy's test suite requires the "
diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py
index 09d51b5fa..9a41034bf 100644
--- a/lib/sqlalchemy/testing/pickleable.py
+++ b/lib/sqlalchemy/testing/pickleable.py
@@ -1,3 +1,9 @@
+# testing/pickleable.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Classes used in pickling tests, need to be at the module level for
unpickling.
"""
diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py
index b3cd3a4e3..27a028cd4 100644
--- a/lib/sqlalchemy/testing/plugin/noseplugin.py
+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py
@@ -1,3 +1,9 @@
+# plugin/noseplugin.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Enhance nose with extra options and behaviors for running SQLAlchemy tests.
When running ./sqla_nose.py, this module is imported relative to the
@@ -351,6 +357,8 @@ class NoseSQLAlchemy(Plugin):
return ""
def wantFunction(self, fn):
+ if fn.__module__ is None:
+ return False
if fn.__module__.startswith('sqlalchemy.testing'):
return False
@@ -385,8 +393,9 @@ class NoseSQLAlchemy(Plugin):
check.reason if check.reason
else
(
- "'%s' unsupported on DB implementation '%s'" % (
- cls.__name__, config.db.name
+ "'%s' unsupported on DB implementation '%s' == %s" % (
+ cls.__name__, config.db.name,
+ config.db.dialect.server_version_info
)
)
)
@@ -395,16 +404,18 @@ class NoseSQLAlchemy(Plugin):
spec = exclusions.db_spec(*cls.__unsupported_on__)
if spec(config.db):
raise SkipTest(
- "'%s' unsupported on DB implementation '%s'" % (
- cls.__name__, config.db.name)
+ "'%s' unsupported on DB implementation '%s' == %s" % (
+ cls.__name__, config.db.name,
+ config.db.dialect.server_version_info)
)
if getattr(cls, '__only_on__', None):
spec = exclusions.db_spec(*util.to_list(cls.__only_on__))
if not spec(config.db):
raise SkipTest(
- "'%s' unsupported on DB implementation '%s'" % (
- cls.__name__, config.db.name)
+ "'%s' unsupported on DB implementation '%s' == %s" % (
+ cls.__name__, config.db.name,
+ config.db.dialect.server_version_info)
)
if getattr(cls, '__skip_if__', False):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index bda44d80c..fa2490649 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -1,3 +1,9 @@
+# testing/profiling.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Profiling support for unit and performance tests.
These are special purpose profiling methods which operate
@@ -45,12 +51,10 @@ def profiled(target=None, **target_opts):
if target is None:
target = 'anonymous_target'
- filename = "%s.prof" % target
-
@decorator
def decorate(fn, *args, **kw):
elapsed, load_stats, result = _profile(
- filename, fn, *args, **kw)
+ fn, *args, **kw)
graphic = target_opts.get('graphic', profile_config['graphic'])
if graphic:
@@ -60,8 +64,8 @@ def profiled(target=None, **target_opts):
if report:
sort_ = target_opts.get('sort', profile_config['sort'])
limit = target_opts.get('limit', profile_config['limit'])
- print(("Profile report for target '%s' (%s)" % (
- target, filename)
+ print(("Profile report for target '%s'" % (
+ target, )
))
stats = load_stats()
@@ -81,7 +85,6 @@ def profiled(target=None, **target_opts):
if print_callees:
stats.print_callees()
- os.unlink(filename)
return result
return decorate
@@ -162,6 +165,15 @@ class ProfileStatsFile(object):
per_platform['current_count'] += 1
return result
+ def replace(self, callcount):
+ test_key = _current_test
+ per_fn = self.data[test_key]
+ per_platform = per_fn[self.platform_key]
+ counts = per_platform['counts']
+ counts[-1] = callcount
+ if self.write:
+ self._write()
+
def _header(self):
return \
"# %s\n"\
@@ -263,16 +275,19 @@ def function_call_count(variance=0.05):
if expected_count:
deviance = int(callcount * variance)
- if abs(callcount - expected_count) > deviance:
- raise AssertionError(
- "Adjusted function call count %s not within %s%% "
- "of expected %s. (Delete line %d of file %s to "
- "regenerate this callcount, when tests are run "
- "with --write-profiles.)"
- % (
- callcount, (variance * 100),
- expected_count, line_no,
- _profile_stats.fname))
+ failed = abs(callcount - expected_count) > deviance
+
+ if failed:
+ if _profile_stats.write:
+ _profile_stats.replace(callcount)
+ else:
+ raise AssertionError(
+ "Adjusted function call count %s not within %s%% "
+ "of expected %s. Rerun with --write-profiles to "
+ "regenerate this callcount."
+ % (
+ callcount, (variance * 100),
+ expected_count))
return fn_result
return update_wrapper(wrap, fn)
return decorate
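
With the per-test ``.prof`` files gone, regeneration now happens through the
stats file itself. A profiled test is still declared the same way; a sketch,
which relies on the suite's profiling setup and the ``--write-profiles``
option mentioned above::

    from sqlalchemy.testing import profiling

    @profiling.function_call_count(variance=0.05)
    def test_profiled():
        # the call count of this body is compared to the stored baseline;
        # with --write-profiles, an out-of-variance count is written back
        list(range(100))
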
diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py
index d301dc69f..706d6d060 100644
--- a/lib/sqlalchemy/testing/requirements.py
+++ b/lib/sqlalchemy/testing/requirements.py
@@ -1,3 +1,9 @@
+# testing/requirements.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
"""Global database feature support policy.
Provides decorators to mark tests requiring specific feature support from the
@@ -133,6 +139,20 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def fetch_rows_post_commit(self):
+ """target platform will allow cursor.fetchone() to proceed after a
+ COMMIT.
+
+ Typically this refers to an INSERT statement with RETURNING which
+ is invoked within "autocommit". If the row can be returned
+ after the autocommit, then this rule can be open.
+
+ """
+
+ return exclusions.open()
+
+
+ @property
def empty_inserts(self):
"""target platform supports INSERT with no values, i.e.
INSERT DEFAULT VALUES or equivalent."""
@@ -296,6 +316,15 @@ class SuiteRequirements(Requirements):
return exclusions.closed()
@property
+ def datetime_literals(self):
+ """target dialect supports rendering of a date, time, or datetime as a
+ literal string, e.g. via the TypeEngine.literal_processor() method.
+
+ """
+
+ return exclusions.closed()
+
+ @property
def datetime(self):
"""target dialect supports representation of Python
datetime.datetime() objects."""
@@ -379,6 +408,14 @@ class SuiteRequirements(Requirements):
return exclusions.closed()
@property
+ def precision_generic_float_type(self):
+ """target backend will return native floating point numbers with at
+ least seven decimal places when using the generic Float type.
+
+ """
+ return exclusions.open()
+
+ @property
def floats_to_four_decimals(self):
"""target backend can return a floating-point number with four
significant digits (such as 15.7563) accurately
@@ -388,6 +425,16 @@ class SuiteRequirements(Requirements):
return exclusions.open()
@property
+ def fetch_null_from_numeric(self):
+ """target backend doesn't crash when you try to select a NUMERIC
+ value that has a value of NULL.
+
+ Added to support Pyodbc bug #351.
+ """
+
+ return exclusions.open()
+
+ @property
def text_type(self):
"""Target database must support an unbounded Text() "
"type such as TEXT or CLOB"""
diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py
index 2bdbaebd1..19aba53df 100644
--- a/lib/sqlalchemy/testing/runner.py
+++ b/lib/sqlalchemy/testing/runner.py
@@ -1,4 +1,9 @@
#!/usr/bin/env python
+# testing/runner.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Nose test runner module.
diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py
index 025bbaabe..ec0085219 100644
--- a/lib/sqlalchemy/testing/schema.py
+++ b/lib/sqlalchemy/testing/schema.py
@@ -1,3 +1,8 @@
+# testing/schema.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
from . import exclusions
from .. import schema, event
diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py
index e671eeb7a..5732e37ec 100644
--- a/lib/sqlalchemy/testing/suite/test_insert.py
+++ b/lib/sqlalchemy/testing/suite/test_insert.py
@@ -56,8 +56,9 @@ class LastrowidTest(fixtures.TablesTest):
[pk]
)
- @exclusions.fails_if(lambda: util.pypy, "lastrowid not maintained after "
- "connection close")
+ # failed on pypy1.9 but seems to be OK on pypy 2.1
+ #@exclusions.fails_if(lambda: util.pypy, "lastrowid not maintained after "
+ # "connection close")
@requirements.dbapi_lastrowid
def test_native_lastrowid_autoinc(self):
r = config.db.execute(
@@ -81,6 +82,10 @@ class InsertBehaviorTest(fixtures.TablesTest):
test_needs_autoincrement=True),
Column('data', String(50))
)
+ Table('manual_pk', metadata,
+ Column('id', Integer, primary_key=True, autoincrement=False),
+ Column('data', String(50))
+ )
def test_autoclose_on_insert(self):
if requirements.returning.enabled:
@@ -123,13 +128,13 @@ class InsertBehaviorTest(fixtures.TablesTest):
@requirements.insert_from_select
def test_insert_from_select(self):
- table = self.tables.autoinc_pk
+ table = self.tables.manual_pk
config.db.execute(
table.insert(),
[
- dict(data="data1"),
- dict(data="data2"),
- dict(data="data3"),
+ dict(id=1, data="data1"),
+ dict(id=2, data="data2"),
+ dict(id=3, data="data3"),
]
)
@@ -171,7 +176,8 @@ class ReturningTest(fixtures.TablesTest):
Column('data', String(50))
)
- def test_explicit_returning_pk(self):
+ @requirements.fetch_rows_post_commit
+ def test_explicit_returning_pk_autocommit(self):
engine = config.db
table = self.tables.autoinc_pk
r = engine.execute(
@@ -183,6 +189,19 @@ class ReturningTest(fixtures.TablesTest):
fetched_pk = config.db.scalar(select([table.c.id]))
eq_(fetched_pk, pk)
+ def test_explicit_returning_pk_no_autocommit(self):
+ engine = config.db
+ table = self.tables.autoinc_pk
+ with engine.begin() as conn:
+ r = conn.execute(
+ table.insert().returning(
+ table.c.id),
+ data="some data"
+ )
+ pk = r.first()[0]
+ fetched_pk = config.db.scalar(select([table.c.id]))
+ eq_(fetched_pk, pk)
+
def test_autoincrement_on_insert_implcit_returning(self):
config.db.execute(
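
The test split captures whether RETURNING rows survive a COMMIT;
schematically (the engine URL is hypothetical and the table is assumed to
exist)::

    from sqlalchemy import Column, Integer, MetaData, String, Table, \
        create_engine

    engine = create_engine("postgresql://user:pw@localhost/test")
    meta = MetaData()
    table = Table('autoinc_pk', meta,
                  Column('id', Integer, primary_key=True),
                  Column('data', String(50)))
    stmt = table.insert().returning(table.c.id)

    engine.execute(stmt, data="d1")       # autocommit: row fetched after COMMIT
    with engine.begin() as conn:
        conn.execute(stmt, data="d2")     # explicit block: fetched before COMMIT
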
diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py
index 5a8a54c46..9f737bc64 100644
--- a/lib/sqlalchemy/testing/suite/test_reflection.py
+++ b/lib/sqlalchemy/testing/suite/test_reflection.py
@@ -147,6 +147,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
table_names = insp.get_view_names(schema)
table_names.sort()
answer = ['email_addresses_v', 'users_v']
+ eq_(sorted(table_names), answer)
else:
table_names = insp.get_table_names(schema,
order_by=order_by)
@@ -180,6 +181,12 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_view_names_with_schema(self):
self._test_get_table_names('test_schema', table_type='view')
+ @testing.requires.table_reflection
+ @testing.requires.view_reflection
+ def test_get_tables_and_views(self):
+ self._test_get_table_names()
+ self._test_get_table_names(table_type='view')
+
def _test_get_columns(self, schema=None, table_type='table'):
meta = MetaData(testing.db)
users, addresses, dingalings = self.tables.users, \
@@ -448,6 +455,7 @@ class ComponentReflectionTest(fixtures.TablesTest):
def test_get_table_oid_with_schema(self):
self._test_get_table_oid('users', schema='test_schema')
+ @testing.requires.table_reflection
@testing.provide_metadata
def test_autoincrement_col(self):
"""test that 'autoincrement' is reflected according to sqla's policy.
diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py
index 0de462eb7..a6e937e8e 100644
--- a/lib/sqlalchemy/testing/suite/test_types.py
+++ b/lib/sqlalchemy/testing/suite/test_types.py
@@ -5,7 +5,7 @@ from ..assertions import eq_
from ..config import requirements
from sqlalchemy import Integer, Unicode, UnicodeText, select
from sqlalchemy import Date, DateTime, Time, MetaData, String, \
- Text, Numeric, Float
+ Text, Numeric, Float, literal
from ..schema import Table, Column
from ... import testing
import decimal
@@ -13,7 +13,34 @@ import datetime
from ...util import u
from ... import util
-class _UnicodeFixture(object):
+
+class _LiteralRoundTripFixture(object):
+ @testing.provide_metadata
+ def _literal_round_trip(self, type_, input_, output, filter_=None):
+ """test literal rendering """
+
+ # for literal, we test the literal render in an INSERT
+ # into a typed column. we can then SELECT it back as it's
+ # official type; ideally we'd be able to use CAST here
+ # but MySQL in particular can't CAST fully
+ t = Table('t', self.metadata, Column('x', type_))
+ t.create()
+
+ for value in input_:
+ ins = t.insert().values(x=literal(value)).compile(
+ dialect=testing.db.dialect,
+ compile_kwargs=dict(literal_binds=True)
+ )
+ testing.db.execute(ins)
+
+ for row in t.select().execute():
+ value = row[0]
+ if filter_ is not None:
+ value = filter_(value)
+ assert value in output
+
+
+class _UnicodeFixture(_LiteralRoundTripFixture):
__requires__ = 'unicode_data',
data = u("Alors vous imaginez ma surprise, au lever du jour, "\
@@ -87,6 +114,9 @@ class _UnicodeFixture(object):
).first()
eq_(row, (u(''),))
+ def test_literal(self):
+ self._literal_round_trip(self.datatype, [self.data], [self.data])
+
class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
__requires__ = 'unicode_data',
@@ -107,7 +137,7 @@ class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest):
def test_empty_strings_text(self):
self._test_empty_strings()
-class TextTest(fixtures.TablesTest):
+class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table('text_table', metadata,
@@ -140,8 +170,18 @@ class TextTest(fixtures.TablesTest):
).first()
eq_(row, ('',))
+ def test_literal(self):
+ self._literal_round_trip(Text, ["some text"], ["some text"])
-class StringTest(fixtures.TestBase):
+ def test_literal_quoting(self):
+ data = '''some 'text' hey "hi there" that's text'''
+ self._literal_round_trip(Text, [data], [data])
+
+ def test_literal_backslashes(self):
+ data = r'backslash one \ backslash two \\ end'
+ self._literal_round_trip(Text, [data], [data])
+
+class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
@requirements.unbounded_varchar
def test_nolength_string(self):
metadata = MetaData()
@@ -152,8 +192,19 @@ class StringTest(fixtures.TestBase):
foo.create(config.db)
foo.drop(config.db)
+ def test_literal(self):
+ self._literal_round_trip(String(40), ["some text"], ["some text"])
+
+ def test_literal_quoting(self):
+ data = '''some 'text' hey "hi there" that's text'''
+ self._literal_round_trip(String(40), [data], [data])
+
+ def test_literal_backslashes(self):
+ data = r'backslash one \ backslash two \\ end'
+        self._literal_round_trip(String(40), [data], [data])
-class _DateFixture(object):
+
+class _DateFixture(_LiteralRoundTripFixture):
compare = None
@classmethod
@@ -198,6 +249,12 @@ class _DateFixture(object):
).first()
eq_(row, (None,))
+ @testing.requires.datetime_literals
+ def test_literal(self):
+ compare = self.compare or self.data
+ self._literal_round_trip(self.datatype, [self.data], [compare])
+
+
class DateTimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime',
@@ -247,7 +304,12 @@ class DateHistoricTest(_DateFixture, fixtures.TablesTest):
datatype = Date
data = datetime.date(1727, 4, 1)
-class NumericTest(fixtures.TestBase):
+
+class IntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
+ def test_literal(self):
+ self._literal_round_trip(Integer, [5], [5])
+
+class NumericTest(_LiteralRoundTripFixture, fixtures.TestBase):
@testing.emits_warning(r".*does \*not\* support Decimal objects natively")
@testing.provide_metadata
@@ -269,18 +331,69 @@ class NumericTest(fixtures.TestBase):
[str(x) for x in output],
)
+
+ @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
+ def test_render_literal_numeric(self):
+ self._literal_round_trip(
+ Numeric(precision=8, scale=4),
+ [15.7563, decimal.Decimal("15.7563")],
+ [decimal.Decimal("15.7563")],
+ )
+
+ @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
+ def test_render_literal_numeric_asfloat(self):
+ self._literal_round_trip(
+ Numeric(precision=8, scale=4, asdecimal=False),
+ [15.7563, decimal.Decimal("15.7563")],
+ [15.7563],
+ )
+
+ def test_render_literal_float(self):
+ self._literal_round_trip(
+ Float(4),
+ [15.7563, decimal.Decimal("15.7563")],
+ [15.7563,],
+ filter_=lambda n: n is not None and round(n, 5) or None
+ )
+
+
+ @testing.requires.precision_generic_float_type
+ def test_float_custom_scale(self):
+ self._do_test(
+ Float(None, decimal_return_scale=7, asdecimal=True),
+ [15.7563827, decimal.Decimal("15.7563827")],
+ [decimal.Decimal("15.7563827"),],
+ check_scale=True
+ )
+
def test_numeric_as_decimal(self):
self._do_test(
Numeric(precision=8, scale=4),
- [15.7563, decimal.Decimal("15.7563"), None],
- [decimal.Decimal("15.7563"), None],
+ [15.7563, decimal.Decimal("15.7563")],
+ [decimal.Decimal("15.7563")],
)
def test_numeric_as_float(self):
self._do_test(
Numeric(precision=8, scale=4, asdecimal=False),
- [15.7563, decimal.Decimal("15.7563"), None],
- [15.7563, None],
+ [15.7563, decimal.Decimal("15.7563")],
+ [15.7563],
+ )
+
+ @testing.requires.fetch_null_from_numeric
+ def test_numeric_null_as_decimal(self):
+ self._do_test(
+ Numeric(precision=8, scale=4),
+ [None],
+ [None],
+ )
+
+ @testing.requires.fetch_null_from_numeric
+ def test_numeric_null_as_float(self):
+ self._do_test(
+ Numeric(precision=8, scale=4, asdecimal=False),
+ [None],
+ [None],
)
@testing.requires.floats_to_four_decimals
@@ -291,6 +404,7 @@ class NumericTest(fixtures.TestBase):
[decimal.Decimal("15.7563"), None],
)
+
def test_float_as_float(self):
self._do_test(
Float(precision=8),
@@ -299,6 +413,7 @@ class NumericTest(fixtures.TestBase):
filter_=lambda n: n is not None and round(n, 5) or None
)
+
@testing.requires.precision_numerics_general
def test_precision_decimal(self):
numbers = set([
@@ -313,6 +428,7 @@ class NumericTest(fixtures.TestBase):
numbers,
)
+
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal(self):
"""test exceedingly small decimals.
@@ -342,6 +458,7 @@ class NumericTest(fixtures.TestBase):
numbers
)
+
@testing.requires.precision_numerics_enotation_large
def test_enotation_decimal_large(self):
"""test exceedingly large decimals.
@@ -389,7 +506,7 @@ class NumericTest(fixtures.TestBase):
__all__ = ('UnicodeVarcharTest', 'UnicodeTextTest',
'DateTest', 'DateTimeTest', 'TextTest',
- 'NumericTest',
+ 'NumericTest', 'IntegerTest',
'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest',
'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest',
'DateHistoricTest', 'StringTest')
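
Underneath, these literal round-trip tests exercise the type's
``literal_processor()`` hook named in the requirements change above;
roughly::

    from sqlalchemy import String
    from sqlalchemy.dialects import sqlite

    proc = String().literal_processor(sqlite.dialect())
    print(proc("it's"))    # approximately: 'it''s'  (embedded quote doubled)
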
diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py
index 1288902f2..bde11a356 100644
--- a/lib/sqlalchemy/testing/util.py
+++ b/lib/sqlalchemy/testing/util.py
@@ -1,3 +1,9 @@
+# testing/util.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from ..util import jython, pypy, defaultdict, decorator, py2k
import decimal
import gc
diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py
index 6193acd88..74a8933a6 100644
--- a/lib/sqlalchemy/testing/warnings.py
+++ b/lib/sqlalchemy/testing/warnings.py
@@ -1,3 +1,9 @@
+# testing/warnings.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
from __future__ import absolute_import
import warnings
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index 017c8dd04..3994bd4a8 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -1,17 +1,14 @@
-# sqlalchemy/types.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# types.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""defines genericized SQL types, each represented by a subclass of
-:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses
-of these types.
-
-For more information see the SQLAlchemy documentation on types.
+"""Compatiblity namespace for sqlalchemy.sql.types.
"""
-__all__ = ['TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
+
+__all__ = ['TypeEngine', 'TypeDecorator', 'UserDefinedType',
'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
@@ -20,2478 +17,61 @@ __all__ = ['TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
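
Public import paths are unchanged by the move into the compatibility
namespace; e.g.::

    from sqlalchemy import types
    from sqlalchemy.types import Integer, TypeDecorator

    assert types.Integer is Integer
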
-import datetime as dt
-import codecs
-
-from . import exc, schema, util, processors, events, event
-from .sql import operators
-from .sql.expression import _DefaultColumnComparator
-from .util import pickle
-from .sql.visitors import Visitable
-import decimal
-default = util.importlater("sqlalchemy.engine", "default")
-
-NoneType = type(None)
-if util.jython:
- import array
-
-
-class AbstractType(Visitable):
- """Base for all types - not needed except for backwards
- compatibility."""
-
-
-class TypeEngine(AbstractType):
- """Base for built-in types."""
-
- class Comparator(_DefaultColumnComparator):
- """Base class for custom comparison operations defined at the
- type level. See :attr:`.TypeEngine.comparator_factory`.
-
- The public base class for :class:`.TypeEngine.Comparator`
- is :class:`.ColumnOperators`.
-
- """
-
- def __init__(self, expr):
- self.expr = expr
-
- def __reduce__(self):
- return _reconstitute_comparator, (self.expr, )
-
- hashable = True
- """Flag, if False, means values from this type aren't hashable.
-
- Used by the ORM when uniquing result lists.
-
- """
-
- comparator_factory = Comparator
- """A :class:`.TypeEngine.Comparator` class which will apply
- to operations performed by owning :class:`.ColumnElement` objects.
-
- The :attr:`.comparator_factory` attribute is a hook consulted by
- the core expression system when column and SQL expression operations
- are performed. When a :class:`.TypeEngine.Comparator` class is
- associated with this attribute, it allows custom re-definition of
- all existing operators, as well as definition of new operators.
- Existing operators include those provided by Python operator overloading
- such as :meth:`.operators.ColumnOperators.__add__` and
- :meth:`.operators.ColumnOperators.__eq__`,
- those provided as standard
- attributes of :class:`.operators.ColumnOperators` such as
- :meth:`.operators.ColumnOperators.like`
- and :meth:`.operators.ColumnOperators.in_`.
-
- Rudimentary usage of this hook is allowed through simple subclassing
- of existing types, or alternatively by using :class:`.TypeDecorator`.
- See the documentation section :ref:`types_operators` for examples.
-
- .. versionadded:: 0.8 The expression system was enhanced to support
- customization of operators on a per-type level.
-
- """
-
- def copy_value(self, value):
- return value
-
- def bind_processor(self, dialect):
- """Return a conversion function for processing bind values.
-
- Returns a callable which will receive a bind parameter value
- as the sole positional argument and will return a value to
- send to the DB-API.
-
- If processing is not necessary, the method should return ``None``.
-
- :param dialect: Dialect instance in use.
-
- """
- return None
-
- def result_processor(self, dialect, coltype):
- """Return a conversion function for processing result row values.
-
- Returns a callable which will receive a result row column
- value as the sole positional argument and will return a value
- to return to the user.
-
- If processing is not necessary, the method should return ``None``.
-
- :param dialect: Dialect instance in use.
-
- :param coltype: DBAPI coltype argument received in cursor.description.
-
- """
- return None
-
- def column_expression(self, colexpr):
- """Given a SELECT column expression, return a wrapping SQL expression.
-
- This is typically a SQL function that wraps a column expression
- as rendered in the columns clause of a SELECT statement.
- It is used for special data types that require
- columns to be wrapped in some special database function in order
- to coerce the value before being sent back to the application.
- It is the SQL analogue of the :meth:`.TypeEngine.result_processor`
- method.
-
- The method is evaluated at statement compile time, as opposed
- to statement construction time.
-
- See also:
-
- :ref:`types_sql_value_processing`
-
- """
-
- return None
-
- @util.memoized_property
- def _has_column_expression(self):
- """memoized boolean, check if column_expression is implemented.
-
- Allows the method to be skipped for the vast majority of expression
- types that don't use this feature.
-
- """
-
- return self.__class__.column_expression.__code__ \
- is not TypeEngine.column_expression.__code__
-
- def bind_expression(self, bindvalue):
- """"Given a bind value (i.e. a :class:`.BindParameter` instance),
- return a SQL expression in its place.
-
- This is typically a SQL function that wraps the existing bound
- parameter within the statement. It is used for special data types
- that require literals being wrapped in some special database function
- in order to coerce an application-level value into a database-specific
- format. It is the SQL analogue of the
- :meth:`.TypeEngine.bind_processor` method.
-
- The method is evaluated at statement compile time, as opposed
- to statement construction time.
-
- Note that this method, when implemented, should always return
- the exact same structure, without any conditional logic, as it
- may be used in an executemany() call against an arbitrary number
- of bound parameter sets.
-
- See also:
-
- :ref:`types_sql_value_processing`
-
- """
- return None
-
- @util.memoized_property
- def _has_bind_expression(self):
- """memoized boolean, check if bind_expression is implemented.
-
- Allows the method to be skipped for the vast majority of expression
- types that don't use this feature.
-
- """
-
- return self.__class__.bind_expression.__code__ \
- is not TypeEngine.bind_expression.__code__
-
- def compare_values(self, x, y):
- """Compare two values for equality."""
-
- return x == y
-
- def get_dbapi_type(self, dbapi):
- """Return the corresponding type object from the underlying DB-API, if
- any.
-
- This can be useful for calling ``setinputsizes()``, for example.
-
- """
- return None
-
- @property
- def python_type(self):
- """Return the Python type object expected to be returned
- by instances of this type, if known.
-
- Basically, for those types which enforce a return type,
- or are known across the board to do so for all common
- DBAPIs (like ``int`` for example), this will return that type.
-
- If a return type is not defined, raises
- ``NotImplementedError``.
-
- Note that any type also accommodates NULL in SQL which
- means you can also get back ``None`` from any type
- in practice.
-
- """
- raise NotImplementedError()
-
- def with_variant(self, type_, dialect_name):
- """Produce a new type object that will utilize the given
- type when applied to the dialect of the given name.
-
- e.g.::
-
- from sqlalchemy.types import String
- from sqlalchemy.dialects import mysql
-
- s = String()
-
- s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql')
-
- The construction of :meth:`.TypeEngine.with_variant` is always
- from the "fallback" type to that which is dialect specific.
- The returned type is an instance of :class:`.Variant`, which
- itself provides a :meth:`~sqlalchemy.types.Variant.with_variant`
- that can be called repeatedly.
-
- :param type_: a :class:`.TypeEngine` that will be selected
- as a variant from the originating type, when a dialect
- of the given name is in use.
- :param dialect_name: base name of the dialect which uses
- this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
-
- .. versionadded:: 0.7.2
-
- """
- return Variant(self, {dialect_name: type_})
-
- @util.memoized_property
- def _type_affinity(self):
- """Return a rudimental 'affinity' value expressing the general class
- of type."""
-
- typ = None
- for t in self.__class__.__mro__:
- if t is TypeEngine or t is UserDefinedType:
- return typ
- elif issubclass(t, TypeEngine):
- typ = t
- else:
- return self.__class__
-
- def dialect_impl(self, dialect):
- """Return a dialect-specific implementation for this
- :class:`.TypeEngine`.
-
- """
- try:
- return dialect._type_memos[self]['impl']
- except KeyError:
- return self._dialect_info(dialect)['impl']
-
- def _cached_bind_processor(self, dialect):
- """Return a dialect-specific bind processor for this type."""
-
- try:
- return dialect._type_memos[self]['bind']
- except KeyError:
- d = self._dialect_info(dialect)
- d['bind'] = bp = d['impl'].bind_processor(dialect)
- return bp
-
- def _cached_result_processor(self, dialect, coltype):
- """Return a dialect-specific result processor for this type."""
-
- try:
- return dialect._type_memos[self][coltype]
- except KeyError:
- d = self._dialect_info(dialect)
- # key assumption: DBAPI type codes are
- # constants. Else this dictionary would
- # grow unbounded.
- d[coltype] = rp = d['impl'].result_processor(dialect, coltype)
- return rp
-
- def _dialect_info(self, dialect):
- """Return a dialect-specific registry which
- caches a dialect-specific implementation, bind processing
- function, and one or more result processing functions."""
-
- if self in dialect._type_memos:
- return dialect._type_memos[self]
- else:
- impl = self._gen_dialect_impl(dialect)
- if impl is self:
- impl = self.adapt(type(self))
- # this can't be self, else we create a cycle
- assert impl is not self
- dialect._type_memos[self] = d = {'impl': impl}
- return d
-
- def _gen_dialect_impl(self, dialect):
- return dialect.type_descriptor(self)
-
- def adapt(self, cls, **kw):
- """Produce an "adapted" form of this type, given an "impl" class
- to work with.
-
- This method is used internally to associate generic
- types with "implementation" types that are specific to a particular
- dialect.
- """
- return util.constructor_copy(self, cls, **kw)
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- Given an operator and value, gives the type a chance
- to return a type which the value should be coerced into.
-
- The default behavior here is conservative; if the right-hand
- side is already coerced into a SQL type based on its
- Python type, it is usually left alone.
-
- End-user functionality extension here should generally be via
- :class:`.TypeDecorator`, which provides more liberal behavior in that
- it defaults to coercing the other side of the expression into this
- type, thus applying special Python conversions above and beyond those
- needed by the DBAPI to both sides. It also provides the public method
- :meth:`.TypeDecorator.coerce_compared_value` which is intended for
- end-user customization of this behavior.
-
- """
- _coerced_type = _type_map.get(type(value), NULLTYPE)
- if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
- is self._type_affinity:
- return self
- else:
- return _coerced_type
-
- def _compare_type_affinity(self, other):
- return self._type_affinity is other._type_affinity
-
- def compile(self, dialect=None):
- """Produce a string-compiled form of this :class:`.TypeEngine`.
-
- When called with no arguments, uses a "default" dialect
- to produce a string result.
-
- :param dialect: a :class:`.Dialect` instance.
-
- """
- # arg, return value is inconsistent with
- # ClauseElement.compile()....this is a mistake.
-
- if not dialect:
- dialect = self._default_dialect
-
- return dialect.type_compiler.process(self)
-
- @property
- def _default_dialect(self):
- if self.__class__.__module__.startswith("sqlalchemy.dialects"):
- tokens = self.__class__.__module__.split(".")[0:3]
- mod = ".".join(tokens)
- return getattr(__import__(mod).dialects, tokens[-1]).dialect()
- else:
- return default.DefaultDialect()
-
- def __str__(self):
- if util.py2k:
- return unicode(self.compile()).\
- encode('ascii', 'backslashreplace')
- else:
- return str(self.compile())
-
- def __init__(self, *args, **kwargs):
- """Support implementations that were passing arguments"""
- if args or kwargs:
- util.warn_deprecated("Passing arguments to type object "
- "constructor %s is deprecated" % self.__class__)
-
- def __repr__(self):
- return util.generic_repr(self)
-
-
-def _reconstitute_comparator(expression):
- return expression.comparator
-
-
-class UserDefinedType(TypeEngine):
- """Base for user defined types.
-
- This should be the base of new types. Note that
- for most cases, :class:`.TypeDecorator` is probably
- more appropriate::
-
- import sqlalchemy.types as types
-
- class MyType(types.UserDefinedType):
- def __init__(self, precision = 8):
- self.precision = precision
-
- def get_col_spec(self):
- return "MYTYPE(%s)" % self.precision
-
- def bind_processor(self, dialect):
- def process(value):
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- return value
- return process
-
- Once the type is made, it's immediately usable::
-
- table = Table('foo', meta,
- Column('id', Integer, primary_key=True),
- Column('data', MyType(16))
- )
-
- """
- __visit_name__ = "user_defined"
-
- class Comparator(TypeEngine.Comparator):
- def _adapt_expression(self, op, other_comparator):
- if hasattr(self.type, 'adapt_operator'):
- util.warn_deprecated(
- "UserDefinedType.adapt_operator is deprecated. Create "
- "a UserDefinedType.Comparator subclass instead which "
- "generates the desired expression constructs, given a "
- "particular operator."
- )
- return self.type.adapt_operator(op), self.type
- else:
- return op, self.type
-
- comparator_factory = Comparator
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- Default behavior for :class:`.UserDefinedType` is the
- same as that of :class:`.TypeDecorator`; by default it returns
- ``self``, assuming the compared value should be coerced into
- the same type as this one. See
- :meth:`.TypeDecorator.coerce_compared_value` for more detail.
-
- .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value`
- now returns ``self`` by default, rather than falling onto the
- more fundamental behavior of
- :meth:`.TypeEngine.coerce_compared_value`.
-
- """
-
- return self
-
-
-class TypeDecorator(TypeEngine):
- """Allows the creation of types which add additional functionality
- to an existing type.
-
- This method is preferred to direct subclassing of SQLAlchemy's
- built-in types as it ensures that all required functionality of
- the underlying type is kept in place.
-
- Typical usage::
-
- import sqlalchemy.types as types
-
- class MyType(types.TypeDecorator):
- '''Prefixes Unicode values with "PREFIX:" on the way in and
- strips it off on the way out.
- '''
-
- impl = types.Unicode
-
- def process_bind_param(self, value, dialect):
- return "PREFIX:" + value
-
- def process_result_value(self, value, dialect):
- return value[7:]
-
- def copy(self):
- return MyType(self.impl.length)
-
- The class-level "impl" attribute is required, and can reference any
- TypeEngine class. Alternatively, the load_dialect_impl() method
- can be used to provide different type classes based on the dialect
- given; in this case, the "impl" variable can reference
- ``TypeEngine`` as a placeholder.
-
- Types that receive a Python type that isn't similar to the ultimate type
- used may want to define the :meth:`TypeDecorator.coerce_compared_value`
- method. This is used to give the expression system a hint when coercing
- Python objects into bind parameters within expressions. Consider this
- expression::
-
- mytable.c.somecol + datetime.date(2009, 5, 15)
-
- Above, if "somecol" is an ``Integer`` variant, it makes sense that
- we're doing date arithmetic, where above is usually interpreted
- by databases as adding a number of days to the given date.
- The expression system does the right thing by not attempting to
- coerce the "date()" value into an integer-oriented bind parameter.
-
- However, in the case of ``TypeDecorator``, we are usually changing an
- incoming Python type to something new - ``TypeDecorator`` by default will
- "coerce" the non-typed side to be the same type as itself. For example,
- below we define an "epoch" type that stores a date value as an integer::
-
- class MyEpochType(types.TypeDecorator):
- impl = types.Integer
-
- epoch = datetime.date(1970, 1, 1)
-
- def process_bind_param(self, value, dialect):
- return (value - self.epoch).days
-
- def process_result_value(self, value, dialect):
- return self.epoch + timedelta(days=value)
-
- Our expression of ``somecol + date`` with the above type will coerce the
- "date" on the right side to also be treated as ``MyEpochType``.
-
- This behavior can be overridden via the
- :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
- that should be used for the value of the expression. Below we set it such
- that an integer value will be treated as an ``Integer``, and any other
- value is assumed to be a date and will be treated as a ``MyEpochType``::
-
- def coerce_compared_value(self, op, value):
- if isinstance(value, int):
- return Integer()
- else:
- return self
-
- """
-
- __visit_name__ = "type_decorator"
-
- def __init__(self, *args, **kwargs):
- """Construct a :class:`.TypeDecorator`.
-
- Arguments sent here are passed to the constructor
- of the class assigned to the ``impl`` class level attribute,
- assuming the ``impl`` is a callable, and the resulting
- object is assigned to the ``self.impl`` instance attribute
- (thus overriding the class attribute of the same name).
-
- If the class level ``impl`` is not a callable (the unusual case),
- it will be assigned to the same instance attribute 'as-is',
- ignoring those arguments passed to the constructor.
-
- Subclasses can override this to customize the generation
- of ``self.impl`` entirely.
-
- """
-
- if not hasattr(self.__class__, 'impl'):
- raise AssertionError("TypeDecorator implementations "
- "require a class-level variable "
- "'impl' which refers to the class of "
- "type being decorated")
- self.impl = to_instance(self.__class__.impl, *args, **kwargs)
-
- coerce_to_is_types = (util.NoneType, )
- """Specify those Python types which should be coerced at the expression
- level to "IS <constant>" when compared using ``==`` (and same for
- ``IS NOT`` in conjunction with ``!=``).
-
- For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``.
-
- :class:`.TypeDecorator` modifies this list to only include ``NoneType``,
- as typedecorator implementations that deal with boolean types are common.
-
- Custom :class:`.TypeDecorator` classes can override this attribute to
- return an empty tuple, in which case no values will be coerced to
- constants.
-
- .. versionadded:: 0.8.2
- Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier
- control of ``__eq__()`` and ``__ne__()`` operations.
-
- """
-
- class Comparator(TypeEngine.Comparator):
- def operate(self, op, *other, **kwargs):
- kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
- return super(TypeDecorator.Comparator, self).operate(
- op, *other, **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types
- return super(TypeDecorator.Comparator, self).reverse_operate(
- op, other, **kwargs)
-
- @property
- def comparator_factory(self):
- return type("TDComparator",
- (TypeDecorator.Comparator, self.impl.comparator_factory),
- {})
-
- def _gen_dialect_impl(self, dialect):
- """
- #todo
- """
- adapted = dialect.type_descriptor(self)
- if adapted is not self:
- return adapted
-
- # otherwise adapt the impl type, link
- # to a copy of this TypeDecorator and return
- # that.
- typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
- tt = self.copy()
- if not isinstance(tt, self.__class__):
- raise AssertionError('Type object %s does not properly '
- 'implement the copy() method, it must '
- 'return an object of type %s' % (self,
- self.__class__))
- tt.impl = typedesc
- return tt
-
- @property
- def _type_affinity(self):
- """
- #todo
- """
- return self.impl._type_affinity
-
- def type_engine(self, dialect):
- """Return a dialect-specific :class:`.TypeEngine` instance
- for this :class:`.TypeDecorator`.
-
- In most cases this returns a dialect-adapted form of
- the :class:`.TypeEngine` type represented by ``self.impl``.
- Makes usage of :meth:`dialect_impl` but also traverses
- into wrapped :class:`.TypeDecorator` instances.
- Behavior can be customized here by overriding
- :meth:`load_dialect_impl`.
-
- """
- adapted = dialect.type_descriptor(self)
- if type(adapted) is not type(self):
- return adapted
- elif isinstance(self.impl, TypeDecorator):
- return self.impl.type_engine(dialect)
- else:
- return self.load_dialect_impl(dialect)
-
- def load_dialect_impl(self, dialect):
- """Return a :class:`.TypeEngine` object corresponding to a dialect.
-
- This is an end-user override hook that can be used to provide
- differing types depending on the given dialect. It is used
- by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
- to help determine what type should ultimately be returned
- for a given :class:`.TypeDecorator`.
-
- By default returns ``self.impl``.
-
- """
- return self.impl
-
- def __getattr__(self, key):
- """Proxy all other undefined accessors to the underlying
- implementation."""
- return getattr(self.impl, key)
-
- def process_bind_param(self, value, dialect):
- """Receive a bound parameter value to be converted.
-
- Subclasses override this method to return the
- value that should be passed along to the underlying
- :class:`.TypeEngine` object, and from there to the
- DBAPI ``execute()`` method.
-
- The operation could be anything desired to perform custom
- behavior, such as transforming or serializing data.
- This could also be used as a hook for validating logic.
-
- This operation should be designed with the reverse operation
- in mind, which would be the process_result_value method of
- this class.
-
- :param value: Data to operate upon, of any type expected by
- this method in the subclass. Can be ``None``.
- :param dialect: the :class:`.Dialect` in use.
-
- """
-
- raise NotImplementedError()
-
- def process_result_value(self, value, dialect):
- """Receive a result-row column value to be converted.
-
- Subclasses should implement this method to operate on data
- fetched from the database.
-
- Subclasses override this method to return the
- value that should be passed back to the application,
- given a value that is already processed by
- the underlying :class:`.TypeEngine` object, originally
- from the DBAPI cursor method ``fetchone()`` or similar.
-
- The operation could be anything desired to perform custom
- behavior, such as transforming or serializing data.
- This could also be used as a hook for validating logic.
-
- :param value: Data to operate upon, of any type expected by
- this method in the subclass. Can be ``None``.
- :param dialect: the :class:`.Dialect` in use.
-
- This operation should be designed to be reversible by
- the "process_bind_param" method of this class.
-
- """
-
- raise NotImplementedError()
-
- @util.memoized_property
- def _has_bind_processor(self):
- """memoized boolean, check if process_bind_param is implemented.
-
- Allows the base process_bind_param to raise
- NotImplementedError without needing to test an expensive
- exception throw.
-
- """
-
- return self.__class__.process_bind_param.__code__ \
- is not TypeDecorator.process_bind_param.__code__
-
- def bind_processor(self, dialect):
- """Provide a bound value processing function for the
- given :class:`.Dialect`.
-
- This is the method that fulfills the :class:`.TypeEngine`
- contract for bound value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
- :meth:`process_bind_param` here.
-
- User-defined code can override this method directly,
- though it's likely best to use :meth:`process_bind_param` so that
- the processing provided by ``self.impl`` is maintained.
-
- :param dialect: Dialect instance in use.
-
- This method is the reverse counterpart to the
- :meth:`result_processor` method of this class.
-
- """
- if self._has_bind_processor:
- process_param = self.process_bind_param
- impl_processor = self.impl.bind_processor(dialect)
- if impl_processor:
- def process(value):
- return impl_processor(process_param(value, dialect))
-
- else:
- def process(value):
- return process_param(value, dialect)
-
- return process
- else:
- return self.impl.bind_processor(dialect)
-
- @util.memoized_property
- def _has_result_processor(self):
- """memoized boolean, check if process_result_value is implemented.
-
- Allows the base process_result_value to raise
- NotImplementedError without needing to test an expensive
- exception throw.
-
- """
- return self.__class__.process_result_value.__code__ \
- is not TypeDecorator.process_result_value.__code__
-
- def result_processor(self, dialect, coltype):
- """Provide a result value processing function for the given
- :class:`.Dialect`.
-
- This is the method that fulfills the :class:`.TypeEngine`
- contract for result value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
- :meth:`process_result_value` here.
-
- User-defined code can override this method directly,
- though it's likely best to use :meth:`process_result_value` so that
- the processing provided by ``self.impl`` is maintained.
-
- :param dialect: Dialect instance in use.
- :param coltype: An SQLAlchemy data type
-
- This method is the reverse counterpart to the
- :meth:`bind_processor` method of this class.
-
- """
- if self._has_result_processor:
- process_value = self.process_result_value
- impl_processor = self.impl.result_processor(dialect,
- coltype)
- if impl_processor:
- def process(value):
- return process_value(impl_processor(value), dialect)
-
- else:
- def process(value):
- return process_value(value, dialect)
-
- return process
- else:
- return self.impl.result_processor(dialect, coltype)
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- By default, returns self. This method is called by
- the expression system when an object using this type is
- on the left or right side of an expression against a plain Python
- object which does not yet have a SQLAlchemy type assigned::
-
- expr = table.c.somecolumn + 35
-
- Where above, if ``somecolumn`` uses this type, this method will
- be called with the value ``operator.add``
- and ``35``. The return value is whatever SQLAlchemy type should
- be used for ``35`` for this particular operation.
-
- """
- return self
-
- def copy(self):
- """Produce a copy of this :class:`.TypeDecorator` instance.
-
- This is a shallow copy and is provided to fulfill part of
- the :class:`.TypeEngine` contract. It usually does not
- need to be overridden unless the user-defined :class:`.TypeDecorator`
- has local state that should be deep-copied.
-
- """
-
- instance = self.__class__.__new__(self.__class__)
- instance.__dict__.update(self.__dict__)
- return instance
-
- def get_dbapi_type(self, dbapi):
- """Return the DBAPI type object represented by this
- :class:`.TypeDecorator`.
-
- By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
- underlying "impl".
- """
- return self.impl.get_dbapi_type(dbapi)
-
- def compare_values(self, x, y):
- """Given two values, compare them for equality.
-
- By default this calls upon :meth:`.TypeEngine.compare_values`
- of the underlying "impl", which in turn usually
- uses the Python equals operator ``==``.
-
- This function is used by the ORM to compare
- an original-loaded value with an intercepted
- "changed" value, to determine if a net change
- has occurred.
-
- """
- return self.impl.compare_values(x, y)
-
- def __repr__(self):
- return util.generic_repr(self, to_inspect=self.impl)
-
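- # Illustrative sketch, not part of the original module: a complete
- # TypeDecorator wiring together the bind/result hooks described above.
- # "JSONEncodedDict" is an example name only.
- import json
- from sqlalchemy.types import TypeDecorator, VARCHAR
-
- class JSONEncodedDict(TypeDecorator):
-     """Stores a dict as a JSON-encoded VARCHAR."""
-
-     impl = VARCHAR
-
-     def process_bind_param(self, value, dialect):
-         # Python dict -> JSON string on the way in
-         return json.dumps(value) if value is not None else None
-
-     def process_result_value(self, value, dialect):
-         # JSON string -> Python dict on the way out
-         return json.loads(value) if value is not None else None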
-
-class Variant(TypeDecorator):
- """A wrapping type that selects among a variety of
- implementations based on dialect in use.
-
- The :class:`.Variant` type is typically constructed
- using the :meth:`.TypeEngine.with_variant` method.
-
- .. versionadded:: 0.7.2
-
- .. seealso:: :meth:`.TypeEngine.with_variant` for an example of use.
-
- """
-
- def __init__(self, base, mapping):
- """Construct a new :class:`.Variant`.
-
- :param base: the base 'fallback' type
- :param mapping: dictionary of string dialect names to
- :class:`.TypeEngine` instances.
-
- """
- self.impl = base
- self.mapping = mapping
-
- def load_dialect_impl(self, dialect):
- if dialect.name in self.mapping:
- return self.mapping[dialect.name]
- else:
- return self.impl
-
- def with_variant(self, type_, dialect_name):
- """Return a new :class:`.Variant` which adds the given
- type + dialect name to the mapping, in addition to the
- mapping present in this :class:`.Variant`.
-
- :param type_: a :class:`.TypeEngine` that will be selected
- as a variant from the originating type, when a dialect
- of the given name is in use.
- :param dialect_name: base name of the dialect which uses
- this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.)
-
- """
-
- if dialect_name in self.mapping:
- raise exc.ArgumentError(
- "Dialect '%s' is already present in "
- "the mapping for this Variant" % dialect_name)
- mapping = self.mapping.copy()
- mapping[dialect_name] = type_
- return Variant(self.impl, mapping)
-
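- # Illustrative usage, not part of the original module: Variant objects
- # are normally created via TypeEngine.with_variant(), e.g. a BigInteger
- # that renders as an unsigned BIGINT only on MySQL.
- from sqlalchemy.types import BigInteger
- from sqlalchemy.dialects import mysql
-
- id_type = BigInteger().with_variant(mysql.BIGINT(unsigned=True), 'mysql')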
-
-def to_instance(typeobj, *arg, **kw):
- if typeobj is None:
- return NULLTYPE
-
- if util.callable(typeobj):
- return typeobj(*arg, **kw)
- else:
- return typeobj
-
-
-def adapt_type(typeobj, colspecs):
- if isinstance(typeobj, type):
- typeobj = typeobj()
- for t in typeobj.__class__.__mro__[0:-1]:
- try:
- impltype = colspecs[t]
- break
- except KeyError:
- pass
- else:
- # couldn't adapt - so just return the type itself
- # (it may be a user-defined type)
- return typeobj
- # if we adapted the given generic type to a database-specific type,
- # but it turns out the originally given "generic" type
- # is actually a subclass of our resulting type, then we were already
- # given a more specific type than that required; so use that.
- if (issubclass(typeobj.__class__, impltype)):
- return typeobj
- return typeobj.adapt(impltype)
-
-
-class NullType(TypeEngine):
- """An unknown type.
-
- :class:`.NullType` is used as a default type for those cases where
- a type cannot be determined, including:
-
- * During table reflection, when the type of a column is not recognized
- by the :class:`.Dialect`
- * When constructing SQL expressions using plain Python objects of
- unknown types (e.g. ``somecolumn == my_special_object``)
- * When a new :class:`.Column` is created, and the given type is passed
- as ``None`` or is not passed at all.
-
- The :class:`.NullType` can be used within SQL expression invocation
- without issue; it simply has no behavior either at the expression
- construction level or at the bind-parameter/result processing level.
- :class:`.NullType` will result in a :class:`.CompileError` if the
- compiler is asked to render the type itself, such as if it is used in
- a :func:`.cast` operation or within a schema creation operation such
- as that invoked by :meth:`.MetaData.create_all` or the
- :class:`.CreateTable` construct.
-
- """
- __visit_name__ = 'null'
-
- class Comparator(TypeEngine.Comparator):
- def _adapt_expression(self, op, other_comparator):
- if isinstance(other_comparator, NullType.Comparator) or \
- not operators.is_commutative(op):
- return op, self.expr.type
- else:
- return other_comparator._adapt_expression(op, self)
- comparator_factory = Comparator
-
-NullTypeEngine = NullType
-
-
-class Concatenable(object):
- """A mixin that marks a type as supporting 'concatenation',
- typically strings."""
-
- class Comparator(TypeEngine.Comparator):
- def _adapt_expression(self, op, other_comparator):
- if op is operators.add and isinstance(other_comparator,
- (Concatenable.Comparator, NullType.Comparator)):
- return operators.concat_op, self.expr.type
- else:
- return op, self.expr.type
-
- comparator_factory = Comparator
-
-
-class _DateAffinity(object):
- """Mixin date/time specific expression adaptations.
-
- Rules are implemented within Date, Time, Interval, DateTime,
- Numeric and Integer. Based on
- http://www.postgresql.org/docs/current/static/functions-datetime.html.
-
- """
-
- @property
- def _expression_adaptations(self):
- raise NotImplementedError()
-
- class Comparator(TypeEngine.Comparator):
- _blank_dict = util.immutabledict()
-
- def _adapt_expression(self, op, other_comparator):
- othertype = other_comparator.type._type_affinity
- return op, \
- self.type._expression_adaptations.get(op, self._blank_dict).\
- get(othertype, NULLTYPE)
- comparator_factory = Comparator
-
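- # Illustrative sketch, not part of the original module: the adaptation
- # tables supplied by _DateAffinity subclasses below determine the type
- # of date arithmetic expressions, e.g. Date + Integer -> Date.
- from sqlalchemy import Table, Column, MetaData
- from sqlalchemy.types import Date
-
- t = Table('t', MetaData(), Column('created', Date))
- expr = t.c.created + 5
- assert isinstance(expr.type, Date)  # Integer adapted via operators.add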
-
-class String(Concatenable, TypeEngine):
- """The base for all string and character types.
-
- In SQL, corresponds to VARCHAR. Can also take Python unicode objects
- and encode to the database's encoding in bind params (and the reverse for
- result sets).
-
- The `length` field is usually required when the `String` type is
- used within a CREATE TABLE statement, as VARCHAR requires a length
- on most databases.
-
- """
-
- __visit_name__ = 'string'
-
- def __init__(self, length=None, collation=None,
- convert_unicode=False,
- unicode_error=None,
- _warn_on_bytestring=False
- ):
- """
- Create a string-holding type.
-
- :param length: optional, a length for the column for use in
- DDL and CAST expressions. May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- ``length`` for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
- with no length is included. Whether the value is
- interpreted as bytes or characters is database specific.
-
- :param collation: Optional, a column-level collation for
- use in DDL and CAST expressions. Renders using the
- COLLATE keyword supported by SQLite, MySQL, and Postgresql.
- E.g.::
-
- >>> from sqlalchemy import cast, select, String
- >>> print select([cast('some string', String(collation='utf8'))])
- SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
-
- .. versionadded:: 0.8 Added support for COLLATE to all
- string types.
-
- :param convert_unicode: When set to ``True``, the
- :class:`.String` type will assume that
- input is to be passed as Python ``unicode`` objects,
- and results returned as Python ``unicode`` objects.
- If the DBAPI in use does not support Python unicode
- (which is fewer and fewer these days), SQLAlchemy
- will encode/decode the value, using the
- value of the ``encoding`` parameter passed to
- :func:`.create_engine` as the encoding.
-
- When using a DBAPI that natively supports Python
- unicode objects, this flag generally does not
- need to be set. For columns that are explicitly
- intended to store non-ASCII data, the :class:`.Unicode`
- or :class:`.UnicodeText`
- types should be used regardless, which feature
- the same behavior of ``convert_unicode`` but
- also indicate an underlying column type that
- directly supports unicode, such as ``NVARCHAR``.
-
- For the extremely rare case that Python ``unicode``
- is to be encoded/decoded by SQLAlchemy on a backend
- that does natively support Python ``unicode``,
- the value ``force`` can be passed here which will
- cause SQLAlchemy's encode/decode services to be
- used unconditionally.
-
- :param unicode_error: Optional, a method to use to handle Unicode
- conversion errors. Behaves like the ``errors`` keyword argument to
- the standard library's ``string.decode()`` functions. This flag
- requires that ``convert_unicode`` is set to ``force`` - otherwise,
- SQLAlchemy is not guaranteed to handle the task of unicode
- conversion. Note that this flag adds significant performance
- overhead to row-fetching operations for backends that already
- return unicode objects natively (which most DBAPIs do). This
- flag should only be used as a last resort for reading
- strings from a column with varied or corrupted encodings.
-
- """
- if unicode_error is not None and convert_unicode != 'force':
- raise exc.ArgumentError("convert_unicode must be 'force' "
- "when unicode_error is set.")
-
- self.length = length
- self.collation = collation
- self.convert_unicode = convert_unicode
- self.unicode_error = unicode_error
- self._warn_on_bytestring = _warn_on_bytestring
-
- def bind_processor(self, dialect):
- if self.convert_unicode or dialect.convert_unicode:
- if dialect.supports_unicode_binds and \
- self.convert_unicode != 'force':
- if self._warn_on_bytestring:
- def process(value):
- if isinstance(value, util.binary_type):
- util.warn("Unicode type received non-unicode bind "
- "param value.")
- return value
- return process
- else:
- return None
- else:
- encoder = codecs.getencoder(dialect.encoding)
- warn_on_bytestring = self._warn_on_bytestring
-
- def process(value):
- if isinstance(value, util.text_type):
- return encoder(value, self.unicode_error)[0]
- elif warn_on_bytestring and value is not None:
- util.warn("Unicode type received non-unicode bind "
- "param value")
- return value
- return process
- else:
- return None
-
- def result_processor(self, dialect, coltype):
- wants_unicode = self.convert_unicode or dialect.convert_unicode
- needs_convert = wants_unicode and \
- (dialect.returns_unicode_strings is not True or
- self.convert_unicode == 'force')
-
- if needs_convert:
- to_unicode = processors.to_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
-
- if dialect.returns_unicode_strings:
- # we wouldn't be here unless convert_unicode='force'
- # was specified, or the driver has erratic unicode-returning
- # habits. since we will be getting back unicode
- # in most cases, we check for it (decode will fail).
- def process(value):
- if isinstance(value, util.text_type):
- return value
- else:
- return to_unicode(value)
- return process
- else:
- # here, we assume that the object is not unicode,
- # avoiding expensive isinstance() check.
- return to_unicode
- else:
- return None
-
- @property
- def python_type(self):
- if self.convert_unicode:
- return util.text_type
- else:
- return str
-
- def get_dbapi_type(self, dbapi):
- return dbapi.STRING
-
-
-class Text(String):
- """A variably sized string type.
-
- In SQL, usually corresponds to CLOB or TEXT. Can also take Python
- unicode objects and encode to the database's encoding in bind
- params (and the reverse for result sets). In general, TEXT objects
- do not have a length; while some databases will accept a length
- argument here, it will be rejected by others.
-
- """
- __visit_name__ = 'text'
-
-
-class Unicode(String):
- """A variable length Unicode string type.
-
- The :class:`.Unicode` type is a :class:`.String` subclass
- that assumes input and output as Python ``unicode`` data,
- and in that regard is equivalent to the usage of the
- ``convert_unicode`` flag with the :class:`.String` type.
- However, unlike plain :class:`.String`, it also implies an
- underlying column type that explicitly supports non-ASCII
- data, such as ``NVARCHAR`` on Oracle and SQL Server.
- This can impact the output of ``CREATE TABLE`` statements
- and ``CAST`` functions at the dialect level, and can
- also affect the handling of bound parameters in some
- specific DBAPI scenarios.
-
- The encoding used by the :class:`.Unicode` type is usually
- determined by the DBAPI itself; most modern DBAPIs
- feature support for Python ``unicode`` objects as bound
- values and result set values, and the encoding should
- be configured as detailed in the notes for the target
- DBAPI in the :ref:`dialect_toplevel` section.
-
- For those DBAPIs which do not support, or are not configured
- to accommodate Python ``unicode`` objects
- directly, SQLAlchemy does the encoding and decoding
- outside of the DBAPI. The encoding in this scenario
- is determined by the ``encoding`` flag passed to
- :func:`.create_engine`.
-
- When using the :class:`.Unicode` type, it is only appropriate
- to pass Python ``unicode`` objects, and not plain ``str``.
- If a plain ``str`` is passed under Python 2, a warning
- is emitted. If you notice your application emitting these warnings but
- you're not sure of the source of them, the Python
- ``warnings`` filter, documented at
- http://docs.python.org/library/warnings.html,
- can be used to turn these warnings into exceptions
- which will illustrate a stack trace::
-
- import warnings
- warnings.simplefilter('error')
-
- For an application that wishes to pass plain bytestrings
- and Python ``unicode`` objects to the ``Unicode`` type
- equally, the bytestrings must first be decoded into
- unicode. The recipe at :ref:`coerce_to_unicode` illustrates
- how this is done.
-
- See also:
-
- :class:`.UnicodeText` - unbounded-length textual counterpart
- to :class:`.Unicode`.
-
- """
-
- __visit_name__ = 'unicode'
-
- def __init__(self, length=None, **kwargs):
- """
- Create a :class:`.Unicode` object.
-
- Parameters are the same as that of :class:`.String`,
- with the exception that ``convert_unicode``
- defaults to ``True``.
-
- """
- kwargs.setdefault('convert_unicode', True)
- kwargs.setdefault('_warn_on_bytestring', True)
- super(Unicode, self).__init__(length=length, **kwargs)
-
-
-class UnicodeText(Text):
- """An unbounded-length Unicode string type.
-
- See :class:`.Unicode` for details on the unicode
- behavior of this object.
-
- Like :class:`.Unicode`, usage of the :class:`.UnicodeText` type implies
- a unicode-capable type being used on the backend, such as
- ``NCLOB`` or ``NTEXT``.
-
- """
-
- __visit_name__ = 'unicode_text'
-
- def __init__(self, length=None, **kwargs):
- """
- Create a Unicode-converting Text type.
-
- Parameters are the same as that of :class:`.Text`,
- with the exception that ``convert_unicode``
- defaults to ``True``.
-
- """
- kwargs.setdefault('convert_unicode', True)
- kwargs.setdefault('_warn_on_bytestring', True)
- super(UnicodeText, self).__init__(length=length, **kwargs)
-
-
-class Integer(_DateAffinity, TypeEngine):
- """A type for ``int`` integers."""
-
- __visit_name__ = 'integer'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
- @property
- def python_type(self):
- return int
-
- @util.memoized_property
- def _expression_adaptations(self):
- # TODO: need a dictionary object that will
- # handle operators generically here, this is incomplete
- return {
- operators.add: {
- Date: Date,
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.mul: {
- Interval: Interval,
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.div: {
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.truediv: {
- Integer: self.__class__,
- Numeric: Numeric,
- },
- operators.sub: {
- Integer: self.__class__,
- Numeric: Numeric,
- },
- }
-
-
-class SmallInteger(Integer):
- """A type for smaller ``int`` integers.
-
- Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
- a normal :class:`.Integer` on the Python side.
-
- """
-
- __visit_name__ = 'small_integer'
-
-
-class BigInteger(Integer):
- """A type for bigger ``int`` integers.
-
- Typically generates a ``BIGINT`` in DDL, and otherwise acts like
- a normal :class:`.Integer` on the Python side.
-
- """
-
- __visit_name__ = 'big_integer'
-
-
-class Numeric(_DateAffinity, TypeEngine):
- """A type for fixed precision numbers.
-
- Typically generates DECIMAL or NUMERIC. Returns
- ``decimal.Decimal`` objects by default, applying
- conversion as needed.
-
- .. note::
-
- The `cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library
- is a high-performing alternative to Python's built-in
- ``decimal.Decimal`` type, which performs very poorly in high-volume
- situations. SQLAlchemy 0.7 is tested against ``cdecimal`` and supports
- it fully. The type is not necessarily supported by DBAPI
- implementations however, most of which contain an import for plain
- ``decimal`` in their source code, even though some such as psycopg2
- provide hooks for alternate adapters. SQLAlchemy imports ``decimal``
- globally as well. The most straightforward and
- foolproof way to use "cdecimal" given current DBAPI and Python support
- is to patch it directly into sys.modules before anything else is
- imported::
-
- import sys
- import cdecimal
- sys.modules["decimal"] = cdecimal
-
- While the global patch is a little ugly, it's particularly
- important to use just one decimal library at a time since
- Python Decimal and cdecimal Decimal objects
- are not currently compatible *with each other*::
-
- >>> import cdecimal
- >>> import decimal
- >>> decimal.Decimal("10") == cdecimal.Decimal("10")
- False
-
- SQLAlchemy will provide more natural support of
- cdecimal if and when it becomes a standard part of Python
- installations and is supported by all DBAPIs.
-
- """
-
- __visit_name__ = 'numeric'
-
- def __init__(self, precision=None, scale=None, asdecimal=True):
- """
- Construct a Numeric.
-
- :param precision: the numeric precision for use in DDL ``CREATE
- TABLE``.
-
- :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
-
- :param asdecimal: default True. Determines whether result
- values should be returned as Python ``Decimal`` objects, or
- as floats. Different DBAPIs send one or the other based on
- datatypes - the Numeric type will ensure that return values
- are one or the other across DBAPIs consistently.
-
- When using the ``Numeric`` type, care should be taken to ensure
- that the asdecimal setting is appropriate for the DBAPI in use -
- when Numeric applies a conversion from Decimal->float or float->
- Decimal, this conversion incurs an additional performance overhead
- for all result columns received.
-
- DBAPIs that return Decimal natively (e.g. psycopg2) will have
- better accuracy and higher performance with a setting of ``True``,
- as the native translation to Decimal reduces the amount of floating-
- point issues at play, and the Numeric type itself doesn't need
- to apply any further conversions. However, another DBAPI which
- returns floats natively *will* incur an additional conversion
- overhead, and is still subject to floating point data loss - in
- which case ``asdecimal=False`` will at least remove the extra
- conversion overhead.
-
- """
- self.precision = precision
- self.scale = scale
- self.asdecimal = asdecimal
-
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
- @property
- def python_type(self):
- if self.asdecimal:
- return decimal.Decimal
- else:
- return float
-
- def bind_processor(self, dialect):
- if dialect.supports_native_decimal:
- return None
- else:
- return processors.to_float
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- if dialect.supports_native_decimal:
- # we're a "numeric", DBAPI will give us Decimal directly
- return None
- else:
- util.warn('Dialect %s+%s does *not* support Decimal '
- 'objects natively, and SQLAlchemy must '
- 'convert from floating point - rounding '
- 'errors and other issues may occur. Please '
- 'consider storing Decimal numbers as strings '
- 'or integers on this platform for lossless '
- 'storage.' % (dialect.name, dialect.driver))
-
- # we're a "numeric", DBAPI returns floats, convert.
- if self.scale is not None:
- return processors.to_decimal_processor_factory(
- decimal.Decimal, self.scale)
- else:
- return processors.to_decimal_processor_factory(
- decimal.Decimal)
- else:
- if dialect.supports_native_decimal:
- return processors.to_float
- else:
- return None
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.mul: {
- Interval: Interval,
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.div: {
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.truediv: {
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.add: {
- Numeric: self.__class__,
- Integer: self.__class__,
- },
- operators.sub: {
- Numeric: self.__class__,
- Integer: self.__class__,
- }
- }
-
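- # Illustrative usage, not part of the original module: asdecimal
- # controls the Python type of result values.  Names are examples only.
- from sqlalchemy import Table, Column, MetaData
- from sqlalchemy.types import Numeric
-
- accounts = Table('accounts', MetaData(),
-     Column('balance', Numeric(12, 2)),          # -> decimal.Decimal
-     Column('ratio', Numeric(asdecimal=False)),  # -> float
- )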
-
-class Float(Numeric):
- """A type for ``float`` numbers.
-
- Returns Python ``float`` objects by default, applying
- conversion as needed.
-
- """
-
- __visit_name__ = 'float'
-
- scale = None
-
- def __init__(self, precision=None, asdecimal=False, **kwargs):
- """
- Construct a Float.
-
- :param precision: the numeric precision for use in DDL ``CREATE
- TABLE``.
-
- :param asdecimal: the same flag as that of :class:`.Numeric`, but
- defaults to ``False``. Note that setting this flag to ``True``
- results in floating point conversion.
-
- :param \**kwargs: deprecated. Additional arguments here are ignored
- by the default :class:`.Float` type. For database specific
- floats that support additional arguments, see that dialect's
- documentation for details, such as
- :class:`sqlalchemy.dialects.mysql.FLOAT`.
-
- """
- self.precision = precision
- self.asdecimal = asdecimal
- if kwargs:
- util.warn_deprecated("Additional keyword arguments "
- "passed to Float ignored.")
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- return processors.to_decimal_processor_factory(decimal.Decimal)
- else:
- return None
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.mul: {
- Interval: Interval,
- Numeric: self.__class__,
- },
- operators.div: {
- Numeric: self.__class__,
- },
- operators.truediv: {
- Numeric: self.__class__,
- },
- operators.add: {
- Numeric: self.__class__,
- },
- operators.sub: {
- Numeric: self.__class__,
- }
- }
-
-
-class DateTime(_DateAffinity, TypeEngine):
- """A type for ``datetime.datetime()`` objects.
-
- Date and time types return objects from the Python ``datetime``
- module. Most DBAPIs have built in support for the datetime
- module, with the noted exception of SQLite. In the case of
- SQLite, date and time types are stored as strings which are then
- converted back to datetime objects when rows are returned.
-
- """
-
- __visit_name__ = 'datetime'
-
- def __init__(self, timezone=False):
- """Construct a new :class:`.DateTime`.
-
- :param timezone: boolean. If True, and supported by the
- backend, will produce 'TIMESTAMP WITH TIME ZONE'. For backends
- that don't support timezone-aware timestamps, this has no
- effect.
-
- """
- self.timezone = timezone
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @property
- def python_type(self):
- return dt.datetime
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Interval: self.__class__,
- },
- operators.sub: {
- Interval: self.__class__,
- DateTime: Interval,
- },
- }
-
-
-class Date(_DateAffinity, TypeEngine):
- """A type for ``datetime.date()`` objects."""
-
- __visit_name__ = 'date'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @property
- def python_type(self):
- return dt.date
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Integer: self.__class__,
- Interval: DateTime,
- Time: DateTime,
- },
- operators.sub: {
- # date - integer = date
- Integer: self.__class__,
-
- # date - date = integer.
- Date: Integer,
-
- Interval: DateTime,
-
- # date - datetime = interval,
- # this one is not in the PG docs
- # but works
- DateTime: Interval,
- },
- }
-
-
-class Time(_DateAffinity, TypeEngine):
- """A type for ``datetime.time()`` objects."""
-
- __visit_name__ = 'time'
-
- def __init__(self, timezone=False):
- self.timezone = timezone
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @property
- def python_type(self):
- return dt.time
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Date: DateTime,
- Interval: self.__class__
- },
- operators.sub: {
- Time: Interval,
- Interval: self.__class__,
- },
- }
-
-
-class _Binary(TypeEngine):
- """Define base behavior for binary types."""
-
- def __init__(self, length=None):
- self.length = length
-
- @property
- def python_type(self):
- return util.binary_type
-
- # Python 3 - sqlite3 doesn't need the `Binary` conversion
- # here, though pg8000 does to indicate "bytea"
- def bind_processor(self, dialect):
- DBAPIBinary = dialect.dbapi.Binary
-
- def process(value):
- if value is not None:
- return DBAPIBinary(value)
- else:
- return None
- return process
-
- # Python 3 has native bytes() type
- # both sqlite3 and pg8000 seem to return it,
- # psycopg2 as of 2.5 returns 'memoryview'
- if util.py2k:
- def result_processor(self, dialect, coltype):
- if util.jython:
- def process(value):
- if value is not None:
- if isinstance(value, array.array):
- return value.tostring()
- return str(value)
- else:
- return None
- else:
- process = processors.to_str
- return process
- else:
- def result_processor(self, dialect, coltype):
- def process(value):
- if value is not None:
- value = bytes(value)
- return value
- return process
-
- def coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
-
- if isinstance(value, util.string_types):
- return self
- else:
- return super(_Binary, self).coerce_compared_value(op, value)
-
- def get_dbapi_type(self, dbapi):
- return dbapi.BINARY
-
-
-class LargeBinary(_Binary):
- """A type for large binary byte data.
-
- The :class:`.LargeBinary` type generates BLOB or BYTEA when tables are created,
- and also converts incoming values using the ``Binary`` callable
- provided by each DB-API.
-
- """
-
- __visit_name__ = 'large_binary'
-
- def __init__(self, length=None):
- """
- Construct a LargeBinary type.
-
- :param length: optional, a length for the column for use in
- DDL statements, for those BLOB types that accept a length
- (i.e. MySQL). It does *not* produce a small BINARY/VARBINARY
- type - use the BINARY/VARBINARY types specifically for those.
- May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued.
-
- """
- _Binary.__init__(self, length=length)
-
-
-class Binary(LargeBinary):
- """Deprecated. Renamed to LargeBinary."""
-
- def __init__(self, *arg, **kw):
- util.warn_deprecated('The Binary type has been renamed to '
- 'LargeBinary.')
- LargeBinary.__init__(self, *arg, **kw)
-
-
-class SchemaType(events.SchemaEventTarget):
- """Mark a type as possibly requiring schema-level DDL for usage.
-
- Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
- as well as types that are complimented by table or schema level
- constraints, triggers, and other rules.
-
- :class:`.SchemaType` classes can also be targets for the
- :meth:`.DDLEvents.before_parent_attach` and
- :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
- surrounding the association of the type object with a parent
- :class:`.Column`.
-
- .. seealso::
-
- :class:`.Enum`
-
- :class:`.Boolean`
-
-
- """
-
- def __init__(self, **kw):
- self.name = kw.pop('name', None)
- self.quote = kw.pop('quote', None)
- self.schema = kw.pop('schema', None)
- self.metadata = kw.pop('metadata', None)
- self.inherit_schema = kw.pop('inherit_schema', False)
- if self.metadata:
- event.listen(
- self.metadata,
- "before_create",
- util.portable_instancemethod(self._on_metadata_create)
- )
- event.listen(
- self.metadata,
- "after_drop",
- util.portable_instancemethod(self._on_metadata_drop)
- )
-
- def _set_parent(self, column):
- column._on_table_attach(util.portable_instancemethod(self._set_table))
-
- def _set_table(self, column, table):
- if self.inherit_schema:
- self.schema = table.schema
-
- event.listen(
- table,
- "before_create",
- util.portable_instancemethod(
- self._on_table_create)
- )
- event.listen(
- table,
- "after_drop",
- util.portable_instancemethod(self._on_table_drop)
- )
- if self.metadata is None:
- # TODO: what's the difference between self.metadata
- # and table.metadata here ?
- event.listen(
- table.metadata,
- "before_create",
- util.portable_instancemethod(self._on_metadata_create)
- )
- event.listen(
- table.metadata,
- "after_drop",
- util.portable_instancemethod(self._on_metadata_drop)
- )
-
- def copy(self, **kw):
- return self.adapt(self.__class__)
-
- def adapt(self, impltype, **kw):
- schema = kw.pop('schema', self.schema)
- metadata = kw.pop('metadata', self.metadata)
- return impltype(name=self.name,
- quote=self.quote,
- schema=schema,
- metadata=metadata,
- inherit_schema=self.inherit_schema,
- **kw
- )
-
- @property
- def bind(self):
- return self.metadata and self.metadata.bind or None
-
- def create(self, bind=None, checkfirst=False):
- """Issue CREATE ddl for this type, if applicable."""
-
- if bind is None:
- bind = schema._bind_or_error(self)
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t.create(bind=bind, checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=False):
- """Issue DROP ddl for this type, if applicable."""
-
- if bind is None:
- bind = schema._bind_or_error(self)
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t.drop(bind=bind, checkfirst=checkfirst)
-
- def _on_table_create(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_table_create(target, bind, **kw)
-
- def _on_table_drop(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_table_drop(target, bind, **kw)
-
- def _on_metadata_create(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_metadata_create(target, bind, **kw)
-
- def _on_metadata_drop(self, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_metadata_drop(target, bind, **kw)
-
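- # Illustrative sketch, not part of the original module: a SchemaType
- # subclass such as Enum can emit its DDL explicitly; "engine" and
- # "metadata" are assumed to exist, with a Postgresql engine in mind.
- status_type = Enum('draft', 'published', name='status', metadata=metadata)
- status_type.create(bind=engine, checkfirst=True)  # CREATE TYPE
- status_type.drop(bind=engine, checkfirst=True)    # DROP TYPE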
-
-class Enum(String, SchemaType):
- """Generic Enum Type.
-
- The Enum type provides a set of possible string values which the
- column is constrained towards.
-
- By default, uses the backend's native ENUM type if available,
- else uses VARCHAR + a CHECK constraint.
-
- .. seealso::
-
- :class:`~.postgresql.ENUM` - PostgreSQL-specific type,
- which has additional functionality.
-
- """
-
- __visit_name__ = 'enum'
-
- def __init__(self, *enums, **kw):
- """Construct an enum.
-
- Keyword arguments which don't apply to a specific backend are ignored
- by that backend.
-
- :param \*enums: string or unicode enumeration labels. If unicode
- labels are present, the `convert_unicode` flag is auto-enabled.
-
- :param convert_unicode: Enable unicode-aware bind parameter and
- result-set processing for this Enum's data. This is set
- automatically based on the presence of unicode label strings.
-
- :param metadata: Associate this type directly with a ``MetaData``
- object. For types that exist on the target database as an
- independent schema construct (Postgresql), this type will be
- created and dropped within ``create_all()`` and ``drop_all()``
- operations. If the type is not associated with any ``MetaData``
- object, it will associate itself with each ``Table`` in which it is
- used, and will be created when any of those individual tables are
- created, after a check is performed for its existence. The type is
- only dropped when ``drop_all()`` is called for that ``Table``
- object's metadata, however.
-
- :param name: The name of this type. This is required for Postgresql
- and any future supported database which requires an explicitly
- named type, or an explicitly named constraint in order to generate
- the type and/or a table that uses it.
-
- :param native_enum: Use the database's native ENUM type when
- available. Defaults to True. When False, uses VARCHAR + check
- constraint for all backends.
-
- :param schema: Schema name of this type. For types that exist on the
- target database as an independent schema construct (Postgresql),
- this parameter specifies the named schema in which the type is
- present.
-
- .. note::
-
- The ``schema`` of the :class:`.Enum` type does not
- by default make use of the ``schema`` established on the
- owning :class:`.Table`. If this behavior is desired,
- set the ``inherit_schema`` flag to ``True``.
-
- :param quote: Force quoting to be on or off on the type's name. If
- left as the default of `None`, the usual schema-level "case
- sensitive"/"reserved name" rules are used to determine if this
- type's name should be quoted.
-
- :param inherit_schema: When ``True``, the "schema" from the owning
- :class:`.Table` will be copied to the "schema" attribute of this
- :class:`.Enum`, replacing whatever value was passed for the
- ``schema`` attribute. This also takes effect when using the
- :meth:`.Table.tometadata` operation.
-
- .. versionadded:: 0.8
-
- """
- self.enums = enums
- self.native_enum = kw.pop('native_enum', True)
- convert_unicode = kw.pop('convert_unicode', None)
- if convert_unicode is None:
- for e in enums:
- if isinstance(e, util.text_type):
- convert_unicode = True
- break
- else:
- convert_unicode = False
-
- if self.enums:
- length = max(len(x) for x in self.enums)
- else:
- length = 0
- String.__init__(self,
- length=length,
- convert_unicode=convert_unicode,
- )
- SchemaType.__init__(self, **kw)
-
- def __repr__(self):
- return util.generic_repr(self, [
- ("native_enum", True),
- ("name", None)
- ])
-
- def _should_create_constraint(self, compiler):
- return not self.native_enum or \
- not compiler.dialect.supports_native_enum
-
- def _set_table(self, column, table):
- if self.native_enum:
- SchemaType._set_table(self, column, table)
-
- e = schema.CheckConstraint(
- column.in_(self.enums),
- name=self.name,
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
- table.append_constraint(e)
-
- def adapt(self, impltype, **kw):
- schema = kw.pop('schema', self.schema)
- metadata = kw.pop('metadata', self.metadata)
- if issubclass(impltype, Enum):
- return impltype(name=self.name,
- quote=self.quote,
- schema=schema,
- metadata=metadata,
- convert_unicode=self.convert_unicode,
- native_enum=self.native_enum,
- inherit_schema=self.inherit_schema,
- *self.enums,
- **kw
- )
- else:
- return super(Enum, self).adapt(impltype, **kw)
-
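- # Illustrative usage, not part of the original module: on a backend
- # without a native ENUM type, this renders VARCHAR(9) (the longest
- # label) plus a CHECK constraint restricting the values.
- from sqlalchemy import Table, Column, MetaData
- from sqlalchemy.types import Enum
-
- posts = Table('posts', MetaData(),
-     Column('status', Enum('draft', 'published', name='post_status')),
- )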
-
-class PickleType(TypeDecorator):
- """Holds Python objects, which are serialized using pickle.
-
- PickleType builds upon the Binary type to apply Python's
- ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
- the way out, allowing any pickleable Python object to be stored as
- a serialized binary field.
-
- To allow ORM change events to propagate for elements associated
- with :class:`.PickleType`, see :ref:`mutable_toplevel`.
-
- """
-
- impl = LargeBinary
-
- def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
- pickler=None, comparator=None):
- """
- Construct a PickleType.
-
- :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
-
- :param pickler: defaults to the ``cPickle`` module, or ``pickle`` if
- ``cPickle`` is not available. May be any object with
- pickle-compatible ``dumps`` and ``loads`` methods.
-
- :param comparator: a 2-arg callable predicate used
- to compare values of this type. If left as ``None``,
- the Python "equals" operator is used to compare values.
-
- """
- self.protocol = protocol
- self.pickler = pickler or pickle
- self.comparator = comparator
- super(PickleType, self).__init__()
-
- def __reduce__(self):
- return PickleType, (self.protocol,
- None,
- self.comparator)
-
- def bind_processor(self, dialect):
- impl_processor = self.impl.bind_processor(dialect)
- dumps = self.pickler.dumps
- protocol = self.protocol
- if impl_processor:
- def process(value):
- if value is not None:
- value = dumps(value, protocol)
- return impl_processor(value)
- else:
- def process(value):
- if value is not None:
- value = dumps(value, protocol)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- impl_processor = self.impl.result_processor(dialect, coltype)
- loads = self.pickler.loads
- if impl_processor:
- def process(value):
- value = impl_processor(value)
- if value is None:
- return None
- return loads(value)
- else:
- def process(value):
- if value is None:
- return None
- return loads(value)
- return process
-
- def compare_values(self, x, y):
- if self.comparator:
- return self.comparator(x, y)
- else:
- return x == y
-
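- # Illustrative usage, not part of the original module: any pickleable
- # object round-trips through an underlying LargeBinary column.
- from sqlalchemy import Table, Column, MetaData
- from sqlalchemy.types import PickleType
-
- jobs = Table('jobs', MetaData(),
-     Column('payload', PickleType()),  # stored via pickler.dumps()
- )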
-
-class Boolean(TypeEngine, SchemaType):
- """A bool datatype.
-
- Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on
- the Python side deals in ``True`` or ``False``.
-
- """
-
- __visit_name__ = 'boolean'
-
- def __init__(self, create_constraint=True, name=None):
- """Construct a Boolean.
-
- :param create_constraint: defaults to True. If the boolean
- is generated as an int/smallint, also create a CHECK constraint
- on the table that ensures 1 or 0 as a value.
-
- :param name: if a CHECK constraint is generated, specify
- the name of the constraint.
-
- """
- self.create_constraint = create_constraint
- self.name = name
-
- def _should_create_constraint(self, compiler):
- return not compiler.dialect.supports_native_boolean
-
- def _set_table(self, column, table):
- if not self.create_constraint:
- return
-
- e = schema.CheckConstraint(
- column.in_([0, 1]),
- name=self.name,
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
- table.append_constraint(e)
-
- @property
- def python_type(self):
- return bool
-
- def bind_processor(self, dialect):
- if dialect.supports_native_boolean:
- return None
- else:
- return processors.boolean_to_int
-
- def result_processor(self, dialect, coltype):
- if dialect.supports_native_boolean:
- return None
- else:
- return processors.int_to_boolean
-
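- # Illustrative sketch, not part of the original module: on a backend
- # without native booleans, this renders an integer column plus, by
- # default, CHECK (active IN (0, 1)); the constraint name is an example.
- from sqlalchemy import Table, Column, MetaData
- from sqlalchemy.types import Boolean
-
- flags = Table('flags', MetaData(),
-     Column('active', Boolean(name='ck_flags_active')),
- )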
-
-class Interval(_DateAffinity, TypeDecorator):
- """A type for ``datetime.timedelta()`` objects.
-
- The Interval type deals with ``datetime.timedelta`` objects. In
- PostgreSQL, the native ``INTERVAL`` type is used; for others, the
- value is stored as a date which is relative to the "epoch"
- (Jan. 1, 1970).
-
- Note that the ``Interval`` type does not currently provide date arithmetic
- operations on platforms which do not support interval types natively. Such
- operations usually require transformation of both sides of the expression
- (such as, conversion of both sides into integer epoch values first) which
- currently is a manual procedure (such as via
- :attr:`~sqlalchemy.sql.expression.func`).
-
- """
-
- impl = DateTime
- epoch = dt.datetime.utcfromtimestamp(0)
-
- def __init__(self, native=True,
- second_precision=None,
- day_precision=None):
- """Construct an Interval object.
-
- :param native: when True, use the actual
- INTERVAL type provided by the database, if
- supported (currently Postgresql, Oracle).
- Otherwise, represent the interval data as
- an epoch value regardless.
-
- :param second_precision: For native interval types
- which support a "fractional seconds precision" parameter,
- i.e. Oracle and Postgresql.
-
- :param day_precision: for native interval types which
- support a "day precision" parameter, i.e. Oracle.
-
- """
- super(Interval, self).__init__()
- self.native = native
- self.second_precision = second_precision
- self.day_precision = day_precision
-
- def adapt(self, cls, **kw):
- if self.native and hasattr(cls, '_adapt_from_generic_interval'):
- return cls._adapt_from_generic_interval(self, **kw)
- else:
- return self.__class__(
- native=self.native,
- second_precision=self.second_precision,
- day_precision=self.day_precision,
- **kw)
-
- @property
- def python_type(self):
- return dt.timedelta
-
- def bind_processor(self, dialect):
- impl_processor = self.impl.bind_processor(dialect)
- epoch = self.epoch
- if impl_processor:
- def process(value):
- if value is not None:
- value = epoch + value
- return impl_processor(value)
- else:
- def process(value):
- if value is not None:
- value = epoch + value
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- impl_processor = self.impl.result_processor(dialect, coltype)
- epoch = self.epoch
- if impl_processor:
- def process(value):
- value = impl_processor(value)
- if value is None:
- return None
- return value - epoch
- else:
- def process(value):
- if value is None:
- return None
- return value - epoch
- return process
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Date: DateTime,
- Interval: self.__class__,
- DateTime: DateTime,
- Time: Time,
- },
- operators.sub: {
- Interval: self.__class__
- },
- operators.mul: {
- Numeric: self.__class__
- },
- operators.truediv: {
- Numeric: self.__class__
- },
- operators.div: {
- Numeric: self.__class__
- }
- }
-
- @property
- def _type_affinity(self):
- return Interval
-
- def coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
-
- return self.impl.coerce_compared_value(op, value)
-
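- # Illustrative sketch, not part of the original module: with
- # native=False, a timedelta is shifted by the epoch above and stored
- # as a DATETIME; the result processor reverses the shift.
- import datetime
- from sqlalchemy.types import Interval
-
- delta = datetime.timedelta(days=2, hours=3)
- stored = Interval.epoch + delta           # what the bind processor emits
- assert stored - Interval.epoch == delta   # what results convert back to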
-
-class REAL(Float):
- """The SQL REAL type."""
-
- __visit_name__ = 'REAL'
-
-
-class FLOAT(Float):
- """The SQL FLOAT type."""
-
- __visit_name__ = 'FLOAT'
-
-
-class NUMERIC(Numeric):
- """The SQL NUMERIC type."""
-
- __visit_name__ = 'NUMERIC'
-
-
-class DECIMAL(Numeric):
- """The SQL DECIMAL type."""
-
- __visit_name__ = 'DECIMAL'
-
-
-class INTEGER(Integer):
- """The SQL INT or INTEGER type."""
-
- __visit_name__ = 'INTEGER'
-INT = INTEGER
-
-
-class SMALLINT(SmallInteger):
- """The SQL SMALLINT type."""
-
- __visit_name__ = 'SMALLINT'
-
-
-class BIGINT(BigInteger):
- """The SQL BIGINT type."""
-
- __visit_name__ = 'BIGINT'
-
-
-class TIMESTAMP(DateTime):
- """The SQL TIMESTAMP type."""
-
- __visit_name__ = 'TIMESTAMP'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.TIMESTAMP
-
-
-class DATETIME(DateTime):
- """The SQL DATETIME type."""
-
- __visit_name__ = 'DATETIME'
-
-
-class DATE(Date):
- """The SQL DATE type."""
-
- __visit_name__ = 'DATE'
-
-
-class TIME(Time):
- """The SQL TIME type."""
-
- __visit_name__ = 'TIME'
-
-
-class TEXT(Text):
- """The SQL TEXT type."""
-
- __visit_name__ = 'TEXT'
-
-
-class CLOB(Text):
- """The CLOB type.
-
- This type is found in Oracle and Informix.
- """
-
- __visit_name__ = 'CLOB'
-
-
-class VARCHAR(String):
- """The SQL VARCHAR type."""
-
- __visit_name__ = 'VARCHAR'
-
-
-class NVARCHAR(Unicode):
- """The SQL NVARCHAR type."""
-
- __visit_name__ = 'NVARCHAR'
-
-
-class CHAR(String):
- """The SQL CHAR type."""
-
- __visit_name__ = 'CHAR'
-
-
-class NCHAR(Unicode):
- """The SQL NCHAR type."""
-
- __visit_name__ = 'NCHAR'
-
-
-class BLOB(LargeBinary):
- """The SQL BLOB type."""
-
- __visit_name__ = 'BLOB'
-
-
-class BINARY(_Binary):
- """The SQL BINARY type."""
-
- __visit_name__ = 'BINARY'
-
-
-class VARBINARY(_Binary):
- """The SQL VARBINARY type."""
-
- __visit_name__ = 'VARBINARY'
-
-
-class BOOLEAN(Boolean):
- """The SQL BOOLEAN type."""
-
- __visit_name__ = 'BOOLEAN'
-
-NULLTYPE = NullType()
-BOOLEANTYPE = Boolean()
-STRINGTYPE = String()
-
-_type_map = {
- int: Integer(),
- float: Numeric(),
- bool: BOOLEANTYPE,
- decimal.Decimal: Numeric(),
- dt.date: Date(),
- dt.datetime: DateTime(),
- dt.time: Time(),
- dt.timedelta: Interval(),
- NoneType: NULLTYPE
-}
-
-if util.py3k:
- _type_map[bytes] = LargeBinary()
- _type_map[str] = Unicode()
-else:
- _type_map[unicode] = Unicode()
- _type_map[str] = String()
-
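- # Hypothetical helper, not part of the original module: sketches how a
- # plain Python literal is matched against _type_map when the expression
- # system assigns a default type, falling back to NULLTYPE.
- def _resolve_literal_type(value):
-     return _type_map.get(type(value), NULLTYPE)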
+from .sql.type_api import (
+ adapt_type,
+ TypeEngine,
+ TypeDecorator,
+ Variant,
+ to_instance,
+ UserDefinedType
+)
+from .sql.sqltypes import (
+ BIGINT,
+ BINARY,
+ BLOB,
+ BOOLEAN,
+ BigInteger,
+ Binary,
+ _Binary,
+ Boolean,
+ CHAR,
+ CLOB,
+ Concatenable,
+ DATE,
+ DATETIME,
+ DECIMAL,
+ Date,
+ DateTime,
+ Enum,
+ FLOAT,
+ Float,
+ INT,
+ INTEGER,
+ Integer,
+ Interval,
+ LargeBinary,
+ NCHAR,
+ NVARCHAR,
+ NullType,
+ NULLTYPE,
+ NUMERIC,
+ Numeric,
+ PickleType,
+ REAL,
+ SchemaType,
+ SMALLINT,
+ SmallInteger,
+ String,
+ STRINGTYPE,
+ TEXT,
+ TIME,
+ TIMESTAMP,
+ Text,
+ Time,
+ Unicode,
+ UnicodeText,
+ VARBINARY,
+ VARCHAR,
+ _type_map
+ )
diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py
index 104566215..eba64ed15 100644
--- a/lib/sqlalchemy/util/__init__.py
+++ b/lib/sqlalchemy/util/__init__.py
@@ -1,16 +1,16 @@
# util/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .compat import callable, cmp, reduce, \
threading, py3k, py33, py2k, jython, pypy, cpython, win32, \
- pickle, dottedgetter, parse_qsl, namedtuple, next, WeakSet, reraise, \
+ pickle, dottedgetter, parse_qsl, namedtuple, next, reraise, \
raise_from_cause, text_type, string_types, int_types, binary_type, \
quote_plus, with_metaclass, print_, itertools_filterfalse, u, ue, b,\
- unquote_plus, b64decode, b64encode, byte_buffer, itertools_filter,\
- StringIO, inspect_getargspec
+ unquote_plus, unquote, b64decode, b64encode, byte_buffer, itertools_filter,\
+ iterbytes, StringIO, inspect_getargspec
from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
@@ -18,14 +18,15 @@ from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \
column_dict, ordered_column_set, populate_column_dict, unique_list, \
UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
to_column_set, update_copy, flatten_iterator, \
- LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence
+ LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \
+ coerce_generator_arg
from .langhelpers import iterate_attributes, class_hierarchy, \
portable_instancemethod, unbound_method_to_callable, \
getargspec_init, format_argspec_init, format_argspec_plus, \
get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
memoized_property, memoized_instancemethod, md5_hex, \
- group_expirable_memoized_property, importlater, decode_slice, \
+ group_expirable_memoized_property, dependencies, decode_slice, \
monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
classproperty, set_creation_order, warn_exception, warn, NoneType,\
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index 86a90828a..c0a24ba4f 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -1,16 +1,17 @@
# util/_collections.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Collection classes and helpers."""
-import itertools
+from __future__ import absolute_import
import weakref
import operator
from .compat import threading, itertools_filterfalse
from . import py2k
+import types
EMPTY_SET = frozenset()
@@ -650,19 +651,31 @@ class IdentitySet(object):
class WeakSequence(object):
- def __init__(self, elements):
- self._storage = weakref.WeakValueDictionary(
- (idx, element) for idx, element in enumerate(elements)
- )
+ def __init__(self, __elements=()):
+ self._storage = [
+ weakref.ref(element, self._remove) for element in __elements
+ ]
+
+ def append(self, item):
+ self._storage.append(weakref.ref(item, self._remove))
+
+ def _remove(self, ref):
+ self._storage.remove(ref)
+
+ def __len__(self):
+ return len(self._storage)
def __iter__(self):
- return iter(self._storage.values())
+ return (obj for obj in
+ (ref() for ref in self._storage) if obj is not None)
def __getitem__(self, index):
try:
- return self._storage[index]
+ obj = self._storage[index]
except KeyError:
raise IndexError("Index %s out of range" % index)
+ else:
+ return obj()
class OrderedIdentitySet(IdentitySet):
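
The rewritten ``WeakSequence`` keeps an ordered list of ``weakref.ref`` objects and prunes dead entries through the ``_remove`` callback, where the old index-keyed ``WeakValueDictionary`` would silently leave gaps. A small behavioral sketch, using a throwaway class since built-in ints cannot be weakly referenced::

    import gc

    class Widget(object):   # hypothetical stand-in for a weakref-able object
        pass

    w1, w2 = Widget(), Widget()
    seq = WeakSequence([w1, w2])
    assert len(seq) == 2
    del w2
    gc.collect()            # _remove drops the dead ref from storage
    assert list(seq) == [w1] and len(seq) == 1
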
@@ -743,6 +756,11 @@ class UniqueAppender(object):
def __iter__(self):
return iter(self.data)
+def coerce_generator_arg(arg):
+ if len(arg) == 1 and isinstance(arg[0], types.GeneratorType):
+ return list(arg[0])
+ else:
+ return arg
def to_list(x, default=None):
if x is None:
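
``coerce_generator_arg`` lets a ``*args``-style API accept either discrete positional elements or a single generator. A sketch, assuming it is applied to the raw ``*args`` tuple of a hypothetical caller::

    def accepts_columns(*cols):
        cols = coerce_generator_arg(cols)     # unwrap a lone generator
        return list(cols)

    accepts_columns("a", "b")                 # ['a', 'b']
    accepts_columns(c for c in ("a", "b"))    # also ['a', 'b']
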
diff --git a/lib/sqlalchemy/util/compat.py b/lib/sqlalchemy/util/compat.py
index a89762b4e..f1346406e 100644
--- a/lib/sqlalchemy/util/compat.py
+++ b/lib/sqlalchemy/util/compat.py
@@ -1,5 +1,5 @@
# util/compat.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -40,7 +40,7 @@ if py3k:
import builtins
from inspect import getfullargspec as inspect_getfullargspec
- from urllib.parse import quote_plus, unquote_plus, parse_qsl
+ from urllib.parse import quote_plus, unquote_plus, parse_qsl, quote, unquote
import configparser
from io import StringIO
@@ -95,7 +95,7 @@ if py3k:
else:
from inspect import getargspec as inspect_getfullargspec
inspect_getargspec = inspect_getfullargspec
- from urllib import quote_plus, unquote_plus
+ from urllib import quote_plus, unquote_plus, quote, unquote
from urlparse import parse_qsl
import ConfigParser as configparser
from StringIO import StringIO
@@ -149,29 +149,6 @@ else:
itertools_imap = itertools.imap
-
-try:
- from weakref import WeakSet
-except:
- import weakref
-
- class WeakSet(object):
- """Implement the small subset of set() which SQLAlchemy needs
- here. """
- def __init__(self, values=None):
- self._storage = weakref.WeakKeyDictionary()
- if values is not None:
- self._storage.update((value, None) for value in values)
-
- def __iter__(self):
- return iter(self._storage)
-
- def union(self, other):
- return WeakSet(set(self).union(other))
-
- def add(self, other):
- self._storage[other] = True
-
import time
if win32 or jython:
time_func = time.clock
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index c315d2da6..c8854dc32 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -1,5 +1,5 @@
# util/deprecations.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py
index ef1ff881d..82e37ce99 100644
--- a/lib/sqlalchemy/util/langhelpers.py
+++ b/lib/sqlalchemy/util/langhelpers.py
@@ -1,5 +1,5 @@
# util/langhelpers.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -19,6 +19,7 @@ from functools import update_wrapper
from .. import exc
import hashlib
from . import compat
+from . import _collections
def md5_hex(x):
if compat.py3k:
@@ -98,14 +99,60 @@ def decorator(target):
metadata = dict(target=targ_name, fn=fn_name)
metadata.update(format_argspec_plus(spec, grouped=False))
-
- code = 'lambda %(args)s: %(target)s(%(fn)s, %(apply_kw)s)' % (
- metadata)
- decorated = eval(code, {targ_name: target, fn_name: fn})
+ metadata['name'] = fn.__name__
+ code = """\
+def %(name)s(%(args)s):
+ return %(target)s(%(fn)s, %(apply_kw)s)
+""" % metadata
+ decorated = _exec_code_in_env(code,
+ {targ_name: target, fn_name: fn},
+ fn.__name__)
decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
return update_wrapper(decorated, fn)
return update_wrapper(decorate, target)
+def _exec_code_in_env(code, env, fn_name):
+ exec(code, env)
+ return env[fn_name]
+
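
Generating a real ``def`` carrying the wrapped function's own name, rather than an anonymous ``lambda``, makes the synthesized frame legible in tracebacks and profiler output. For a decorated ``my_method(self, x=1)`` the generated source now looks roughly like this, with ``target_1`` and ``fn_1`` standing in for whatever names ``targ_name`` and ``fn_name`` hold::

    def my_method(self, x=1):
        return target_1(fn_1, self, x=x)
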
+def public_factory(target, location):
+ """Produce a wrapping function for the given cls or classmethod.
+
+    The rationale is that the __init__ method of the class
+    can then serve as documentation for the function.
+
+ """
+ if isinstance(target, type):
+ fn = target.__init__
+ callable_ = target
+ doc = "Construct a new :class:`.%s` object. \n\n"\
+ "This constructor is mirrored as a public API function; see :func:`~%s` "\
+ "for a full usage and argument description." % (
+ target.__name__, location, )
+ else:
+ fn = callable_ = target
+ doc = "This function is mirrored; see :func:`~%s` "\
+ "for a description of arguments." % location
+
+ location_name = location.split(".")[-1]
+ spec = compat.inspect_getfullargspec(fn)
+ del spec[0][0]
+ metadata = format_argspec_plus(spec, grouped=False)
+ metadata['name'] = location_name
+ code = """\
+def %(name)s(%(args)s):
+ return cls(%(apply_kw)s)
+""" % metadata
+ env = {'cls': callable_, 'symbol': symbol}
+ exec(code, env)
+ decorated = env[location_name]
+ decorated.__doc__ = fn.__doc__
+ if compat.py2k or hasattr(fn, '__func__'):
+ fn.__func__.__doc__ = doc
+ else:
+ fn.__doc__ = doc
+ return decorated
+
class PluginLoader(object):
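
``public_factory`` targets the pattern where a class constructor is also exposed as a module-level function, so that one docstring serves both. A hypothetical sketch; the class, location string, and names are illustrative only::

    class Thing(object):
        def __init__(self, name, size=0):
            """Construct a Thing."""
            self.name, self.size = name, size

    # a module-level constructor mirroring Thing.__init__'s signature
    thing = public_factory(Thing, ".sql.expression.thing")
    assert isinstance(thing("widget", size=3), Thing)
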
@@ -134,8 +181,7 @@ class PluginLoader(object):
self.impls[name] = impl.load
return impl.load()
- from sqlalchemy import exc
- raise exc.ArgumentError(
+ raise exc.NoSuchModuleError(
"Can't load plugin: %s:%s" %
(self.group, name))
@@ -187,6 +233,7 @@ def get_cls_kwargs(cls, _set=None):
try:
+ # TODO: who doesn't have this constant?
from inspect import CO_VARKEYWORDS
def inspect_func_args(fn):
@@ -221,7 +268,9 @@ def get_callable_argspec(fn, no_self=False):
return compat.ArgSpec(spec.args[1:], spec.varargs, spec.keywords, spec.defaults)
elif hasattr(fn, '__func__'):
return compat.inspect_getargspec(fn.__func__)
- elif hasattr(fn, '__call__'):
+ elif hasattr(fn, '__call__') and \
+ not hasattr(fn.__call__, '__call__'): # functools.partial does this;
+ # not much we can do
return get_callable_argspec(fn.__call__)
else:
raise ValueError("Can't inspect function: %s" % fn)
@@ -360,44 +409,66 @@ def generic_repr(obj, additional_kw=(), to_inspect=None):
"""
if to_inspect is None:
- to_inspect = obj
+ to_inspect = [obj]
+ else:
+ to_inspect = _collections.to_list(to_inspect)
missing = object()
- def genargs():
+ pos_args = []
+ kw_args = _collections.OrderedDict()
+ vargs = None
+ for i, insp in enumerate(to_inspect):
try:
- (args, vargs, vkw, defaults) = \
- inspect.getargspec(to_inspect.__init__)
+ (_args, _vargs, vkw, defaults) = \
+ inspect.getargspec(insp.__init__)
except TypeError:
- return
+ continue
+ else:
+ default_len = defaults and len(defaults) or 0
+ if i == 0:
+ if _vargs:
+ vargs = _vargs
+ if default_len:
+ pos_args.extend(_args[1:-default_len])
+ else:
+ pos_args.extend(_args[1:])
+ else:
+                kw_args.update([
+                    (arg, missing) for arg in
+                    (_args[1:-default_len] if default_len else _args[1:])
+                ])
+
+ if default_len:
+ kw_args.update([
+ (arg, default)
+ for arg, default
+ in zip(_args[-default_len:], defaults)
+ ])
+ output = []
+
+ output.extend(repr(getattr(obj, arg, None)) for arg in pos_args)
+
+ if vargs is not None and hasattr(obj, vargs):
+ output.extend([repr(val) for val in getattr(obj, vargs)])
+
+ for arg, defval in kw_args.items():
+ try:
+ val = getattr(obj, arg, missing)
+ if val is not missing and val != defval:
+ output.append('%s=%r' % (arg, val))
+ except:
+ pass
- default_len = defaults and len(defaults) or 0
+ if additional_kw:
+ for arg, defval in additional_kw:
+ try:
+ val = getattr(obj, arg, missing)
+ if val is not missing and val != defval:
+ output.append('%s=%r' % (arg, val))
+ except:
+ pass
- if not default_len:
- for arg in args[1:]:
- yield repr(getattr(obj, arg, None))
- if vargs is not None and hasattr(obj, vargs):
- yield ', '.join(repr(val) for val in getattr(obj, vargs))
- else:
- for arg in args[1:-default_len]:
- yield repr(getattr(obj, arg, None))
- for (arg, defval) in zip(args[-default_len:], defaults):
- try:
- val = getattr(obj, arg, missing)
- if val is not missing and val != defval:
- yield '%s=%r' % (arg, val)
- except:
- pass
- if additional_kw:
- for arg, defval in additional_kw:
- try:
- val = getattr(obj, arg, missing)
- if val is not missing and val != defval:
- yield '%s=%r' % (arg, val)
- except:
- pass
-
- return "%s(%s)" % (obj.__class__.__name__, ", ".join(genargs()))
+ return "%s(%s)" % (obj.__class__.__name__, ", ".join(output))
class portable_instancemethod(object):
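
With ``to_inspect`` generalized to a list, the repr can merge positional arguments from the object's own ``__init__`` with keyword defaults gathered from parent signatures; the single-class case behaves as before. A sketch with a hypothetical class::

    class Point(object):
        def __init__(self, x, y, label=None):
            self.x, self.y, self.label = x, y, label

        def __repr__(self):
            return generic_repr(self)

    repr(Point(1, 2))               # 'Point(1, 2)'  -- default kw omitted
    repr(Point(1, 2, label='a'))    # "Point(1, 2, label='a')"
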
@@ -619,7 +690,11 @@ class memoized_property(object):
return result
def _reset(self, obj):
- obj.__dict__.pop(self.__name__, None)
+ memoized_property.reset(obj, self.__name__)
+
+ @classmethod
+ def reset(cls, obj, name):
+ obj.__dict__.pop(name, None)
class memoized_instancemethod(object):
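
The new ``memoized_property.reset()`` classmethod clears a cached value by attribute name alone, with no reference to the descriptor object required. A sketch::

    class Config(object):   # hypothetical
        calls = 0

        @memoized_property
        def settings(self):
            Config.calls += 1
            return {"debug": True}

    cfg = Config()
    cfg.settings; cfg.settings
    assert Config.calls == 1                    # computed once, then cached
    memoized_property.reset(cfg, 'settings')    # evict by name
    cfg.settings
    assert Config.calls == 2
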
@@ -675,84 +750,135 @@ class group_expirable_memoized_property(object):
return memoized_instancemethod(fn)
-class importlater(object):
- """Deferred import object.
- e.g.::
-
- somesubmod = importlater("mypackage.somemodule", "somesubmod")
+def dependency_for(modulename):
+ def decorate(obj):
+ # TODO: would be nice to improve on this import silliness,
+ # unfortunately importlib doesn't work that great either
+ tokens = modulename.split(".")
+ mod = compat.import_(".".join(tokens[0:-1]), globals(), locals(), tokens[-1])
+ mod = getattr(mod, tokens[-1])
+ setattr(mod, obj.__name__, obj)
+ return obj
+ return decorate
- is equivalent to::
+class dependencies(object):
+ """Apply imported dependencies as arguments to a function.
- from mypackage.somemodule import somesubmod
+ E.g.::
- except evaluted upon attribute access to "somesubmod".
+ @util.dependencies(
+ "sqlalchemy.sql.widget",
+ "sqlalchemy.engine.default"
+        )
+ def some_func(self, widget, default, arg1, arg2, **kw):
+ # ...
- importlater() currently requires that resolve_all() be
- called, typically at the bottom of a package's __init__.py.
- This is so that __import__ still called only at
- module import time, and not potentially within
- a non-main thread later on.
+    The rationale is that the impact of a dependency cycle can be
+    associated directly with the few functions that cause the cycle,
+    rather than polluting the module-level namespace.
"""
- _unresolved = set()
+ def __init__(self, *deps):
+ self.import_deps = []
+ for dep in deps:
+ tokens = dep.split(".")
+ self.import_deps.append(
+ dependencies._importlater(
+ ".".join(tokens[0:-1]),
+ tokens[-1]
+ )
+ )
+
+ def __call__(self, fn):
+ import_deps = self.import_deps
+ spec = compat.inspect_getfullargspec(fn)
+
+ spec_zero = list(spec[0])
+ hasself = spec_zero[0] in ('self', 'cls')
+
+ for i in range(len(import_deps)):
+ spec[0][i + (1 if hasself else 0)] = "import_deps[%r]" % i
+
+ inner_spec = format_argspec_plus(spec, grouped=False)
+
+ for impname in import_deps:
+ del spec_zero[1 if hasself else 0]
+ spec[0][:] = spec_zero
+
+ outer_spec = format_argspec_plus(spec, grouped=False)
- def __init__(self, path, addtl=None):
- self._il_path = path
- self._il_addtl = addtl
- importlater._unresolved.add(self)
+ code = 'lambda %(args)s: fn(%(apply_kw)s)' % {
+ "args": outer_spec['args'],
+ "apply_kw": inner_spec['apply_kw']
+ }
+
+ decorated = eval(code, locals())
+ decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
+ return update_wrapper(decorated, fn)
@classmethod
- def resolve_all(cls):
- for m in list(importlater._unresolved):
- m._resolve()
+ def resolve_all(cls, path):
+ for m in list(dependencies._unresolved):
+ if m._full_path.startswith(path):
+ m._resolve()
+
+ _unresolved = set()
+ _by_key = {}
+
+ class _importlater(object):
+ _unresolved = set()
- @property
- def _full_path(self):
- if self._il_addtl:
+ _by_key = {}
+
+ def __new__(cls, path, addtl):
+ key = path + "." + addtl
+ if key in dependencies._by_key:
+ return dependencies._by_key[key]
+ else:
+ dependencies._by_key[key] = imp = object.__new__(cls)
+ return imp
+
+ def __init__(self, path, addtl):
+ self._il_path = path
+ self._il_addtl = addtl
+ dependencies._unresolved.add(self)
+
+
+ @property
+ def _full_path(self):
return self._il_path + "." + self._il_addtl
- else:
- return self._il_path
-
- @memoized_property
- def module(self):
- if self in importlater._unresolved:
- raise ImportError(
- "importlater.resolve_all() hasn't "
- "been called (this is %s %s)"
- % (self._il_path, self._il_addtl))
-
- m = self._initial_import
- if self._il_addtl:
- m = getattr(m, self._il_addtl)
- else:
- for token in self._il_path.split(".")[1:]:
- m = getattr(m, token)
- return m
- def _resolve(self):
- importlater._unresolved.discard(self)
- if self._il_addtl:
+ @memoized_property
+ def module(self):
+ if self in dependencies._unresolved:
+ raise ImportError(
+                "dependencies.resolve_all() hasn't "
+ "been called (this is %s %s)"
+ % (self._il_path, self._il_addtl))
+
+ return getattr(self._initial_import, self._il_addtl)
+
+ def _resolve(self):
+ dependencies._unresolved.discard(self)
self._initial_import = compat.import_(
self._il_path, globals(), locals(),
[self._il_addtl])
- else:
- self._initial_import = compat.import_(self._il_path)
- def __getattr__(self, key):
- if key == 'module':
- raise ImportError("Could not resolve module %s"
- % self._full_path)
- try:
- attr = getattr(self.module, key)
- except AttributeError:
- raise AttributeError(
- "Module %s has no attribute '%s'" %
- (self._full_path, key)
- )
- self.__dict__[key] = attr
- return attr
+ def __getattr__(self, key):
+ if key == 'module':
+ raise ImportError("Could not resolve module %s"
+ % self._full_path)
+ try:
+ attr = getattr(self.module, key)
+ except AttributeError:
+ raise AttributeError(
+ "Module %s has no attribute '%s'" %
+ (self._full_path, key)
+ )
+ self.__dict__[key] = attr
+ return attr
# from paste.deploy.converters
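
Taken together, the intended flow is: decorate the cycle-prone function so its imports are injected as leading arguments, then call ``resolve_all()`` once near the bottom of the package ``__init__``. A hedged sketch; ``sqlalchemy.engine.url`` and its ``make_url()`` are real, the surrounding function is hypothetical::

    @util.dependencies("sqlalchemy.engine.url")
    def normalize(url_module, s):
        # ``url_module`` is injected; callers pass only ``s``
        return str(url_module.make_url(s))

    # e.g. at the bottom of lib/sqlalchemy/__init__.py:
    util.dependencies.resolve_all("sqlalchemy")
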
@@ -956,7 +1082,7 @@ class _symbol(int):
return repr(self)
def __repr__(self):
- return "<symbol '%s>" % self.name
+ return "symbol(%r)" % self.name
_symbol.__name__ = 'symbol'
diff --git a/lib/sqlalchemy/util/queue.py b/lib/sqlalchemy/util/queue.py
index 537526bef..82ff55a5d 100644
--- a/lib/sqlalchemy/util/queue.py
+++ b/lib/sqlalchemy/util/queue.py
@@ -1,5 +1,5 @@
# util/queue.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -25,14 +25,7 @@ within QueuePool.
from collections import deque
from time import time as _time
from .compat import threading
-import sys
-if sys.version_info < (2, 6):
- def notify_all(condition):
- condition.notify()
-else:
- def notify_all(condition):
- condition.notify_all()
__all__ = ['Empty', 'Full', 'Queue', 'SAAbort']
@@ -158,7 +151,6 @@ class Queue:
return an item if one is immediately available, else raise the
``Empty`` exception (`timeout` is ignored in that case).
"""
-
self.not_empty.acquire()
try:
if not block:
@@ -166,7 +158,11 @@ class Queue:
raise Empty
elif timeout is None:
while self._empty():
- self.not_empty.wait()
+ # wait for only half a second, then
+ # loop around, so that we can see a change in
+ # _sqla_abort_context in case we missed the notify_all()
+ # called by abort()
+ self.not_empty.wait(.5)
if self._sqla_abort_context:
raise SAAbort(self._sqla_abort_context)
else:
@@ -195,7 +191,10 @@ class Queue:
if not self.not_full.acquire(False):
return
try:
- notify_all(self.not_empty)
+            # note that this notify is now optional, as the
+            # waiters in get() loop around periodically to
+            # check the _sqla_abort_context flag
+ self.not_empty.notify_all()
finally:
self.not_full.release()
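
The half-second wait bounds the damage of a missed ``notify_all()``: a consumer blocked in ``get()`` re-examines the abort flag at least every 0.5 seconds regardless. The same polling pattern in isolation, with hypothetical names::

    import threading

    cond = threading.Condition()
    items, abort_context = [], []    # stand-ins for queue storage / flag

    def consumer():
        with cond:
            while not items:
                cond.wait(0.5)              # bounded wait, then re-check
                if abort_context:           # seen even if the notify raced
                    raise RuntimeError("aborted: %r" % abort_context[0])
            return items.pop(0)
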
diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py
index de3dfd0ae..fe7e76896 100644
--- a/lib/sqlalchemy/util/topological.py
+++ b/lib/sqlalchemy/util/topological.py
@@ -1,5 +1,5 @@
# util/topological.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php