author     Mike Bayer <mike_mp@zzzcomputing.com>   2020-09-25 22:31:16 -0400
committer  Mike Bayer <mike_mp@zzzcomputing.com>   2020-10-31 13:44:53 -0400
commit     654b462d668a2ced4e87077b9babb2590acbf983 (patch)
tree       8b6023480423e990c9bbca7c280cb1cb58e012fc
parent     841eb216644202567ebddfc0badc51a3a35e98c3 (diff)
download   sqlalchemy-review/mike_bayer/tutorial20.tar.gz
Add SelectBase.exists() method, as it seems strange this is not available
already. The Exists construct itself does not provide full SELECT-building
capabilities, so it makes sense this should be used more like a
scalar_subquery.

Make sure stream_results is getting set up when yield_per is used, for 2.0
style statements as well. This was hardcoded inside of Query.yield_per() and
is now moved to take place within QueryContext.

Change-Id: Icafcd4fd9b708772343d56edf40995c9e8f835d6
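A rough usage sketch of the changes described above, assuming the 1.4 API as
described in the message (the user_table, User and session names below are
placeholders, not part of the patch):

    from sqlalchemy import select

    # SelectBase.exists(): wrap a full SELECT up as an EXISTS expression,
    # used here like a scalar subquery inside an enclosing statement
    stmt = select(user_table).where(
        select(user_table.c.id)
        .where(user_table.c.name == "spongebob")
        .exists()
    )

    # yield_per as an execution option on a 2.0 style statement is expected
    # to also enable stream_results, so rows arrive in batches of 100
    for user_obj in session.execute(
        select(User).execution_options(yield_per=100)
    ).scalars():
        ...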
-rw-r--r--doc/build/changelog/migration_14.rst2
-rw-r--r--doc/build/changelog/migration_20.rst2
-rw-r--r--doc/build/conf.py2
-rw-r--r--doc/build/contents.rst1
-rw-r--r--doc/build/core/expression_api.rst1
-rw-r--r--doc/build/core/operators.rst662
-rw-r--r--doc/build/core/selectable.rst3
-rw-r--r--doc/build/core/tutorial.rst34
-rw-r--r--doc/build/dialects/postgresql.rst2
-rw-r--r--doc/build/errors.rst2
-rw-r--r--doc/build/faq/sessions.rst5
-rw-r--r--doc/build/glossary.rst116
-rw-r--r--doc/build/index.rst17
-rw-r--r--doc/build/orm/extensions/declarative/api.rst11
-rw-r--r--doc/build/orm/extensions/declarative/index.rst13
-rw-r--r--doc/build/orm/extensions/declarative/inheritance.rst3
-rw-r--r--doc/build/orm/loading_columns.rst13
-rw-r--r--doc/build/orm/loading_objects.rst22
-rw-r--r--doc/build/orm/loading_relationships.rst7
-rw-r--r--doc/build/orm/mapping_styles.rst2
-rw-r--r--doc/build/orm/query.rst4
-rw-r--r--doc/build/orm/queryguide.rst882
-rw-r--r--doc/build/orm/session_basics.rst12
-rw-r--r--doc/build/orm/tutorial.rst43
-rw-r--r--doc/build/queryguide.rst7
-rw-r--r--doc/build/tutorial.rst6
-rw-r--r--doc/build/tutorial/data.rst1509
-rw-r--r--doc/build/tutorial/dbapi_transactions.rst518
-rw-r--r--doc/build/tutorial/engine.rst67
-rw-r--r--doc/build/tutorial/further_reading.rst44
-rw-r--r--doc/build/tutorial/index.rst165
-rw-r--r--doc/build/tutorial/metadata.rst526
-rw-r--r--doc/build/tutorial/orm_data_manipulation.rst577
-rw-r--r--doc/build/tutorial/orm_related_objects.rst896
-rw-r--r--doc/build/tutorial/tutorial_nav_include.rst14
-rw-r--r--lib/sqlalchemy/engine/interfaces.py3
-rw-r--r--lib/sqlalchemy/engine/result.py4
-rw-r--r--lib/sqlalchemy/engine/row.py12
-rw-r--r--lib/sqlalchemy/orm/context.py10
-rw-r--r--lib/sqlalchemy/orm/interfaces.py2
-rw-r--r--lib/sqlalchemy/orm/query.py87
-rw-r--r--lib/sqlalchemy/orm/session.py21
-rw-r--r--lib/sqlalchemy/sql/dml.py81
-rw-r--r--lib/sqlalchemy/sql/elements.py1
-rw-r--r--lib/sqlalchemy/sql/schema.py55
-rw-r--r--lib/sqlalchemy/sql/selectable.py251
-rw-r--r--lib/sqlalchemy/sql/type_api.py22
-rw-r--r--lib/sqlalchemy/testing/suite/test_unicode_ddl.py4
-rw-r--r--lib/sqlalchemy/util/deprecations.py24
-rw-r--r--test/base/test_tutorials.py51
-rw-r--r--test/orm/test_query.py48
-rw-r--r--test/sql/test_compiler.py40
-rw-r--r--test/sql/test_metadata.py6
53 files changed, 6624 insertions, 288 deletions
diff --git a/doc/build/changelog/migration_14.rst b/doc/build/changelog/migration_14.rst
index 1949b91d5..7d0ecf4f6 100644
--- a/doc/build/changelog/migration_14.rst
+++ b/doc/build/changelog/migration_14.rst
@@ -1,3 +1,5 @@
+.. _migration_14_toplevel:
+
=============================
What's New in SQLAlchemy 1.4?
=============================
diff --git a/doc/build/changelog/migration_20.rst b/doc/build/changelog/migration_20.rst
index a1b20c670..39ca2099c 100644
--- a/doc/build/changelog/migration_20.rst
+++ b/doc/build/changelog/migration_20.rst
@@ -1221,7 +1221,7 @@ necessary when using joined eager loading, the :meth:`_engine.Result.unique`
modifier must be called first.
Documentation for all new features of :func:`_sql.select` including execution
-options, etc. are at :doc:`/queryguide`.
+options, etc. are at :doc:`/orm/queryguide`.
Below are some examples of how to migrate to :func:`_sql.select`::
diff --git a/doc/build/conf.py b/doc/build/conf.py
index 99308b435..5f81c532a 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -88,7 +88,7 @@ changelog_render_pullreq = {
changelog_render_changeset = "http://www.sqlalchemy.org/trac/changeset/%s"
-exclude_patterns = ["build", "**/unreleased*/*"]
+exclude_patterns = ["build", "**/unreleased*/*", "*_include.rst"]
# zzzeeksphinx makes these conversions when it is rendering the
# docstrings classes, methods, and functions within the scope of
diff --git a/doc/build/contents.rst b/doc/build/contents.rst
index 15dfe6ec9..d44240390 100644
--- a/doc/build/contents.rst
+++ b/doc/build/contents.rst
@@ -11,6 +11,7 @@ documentation, see :ref:`index_toplevel`.
:includehidden:
intro
+ tutorial/index
orm/index
core/index
dialects/index
diff --git a/doc/build/core/expression_api.rst b/doc/build/core/expression_api.rst
index 944222fbd..7d455d200 100644
--- a/doc/build/core/expression_api.rst
+++ b/doc/build/core/expression_api.rst
@@ -13,6 +13,7 @@ see :ref:`sqlexpression_toplevel`.
:maxdepth: 3
sqlelement
+ operators
selectable
dml
functions
diff --git a/doc/build/core/operators.rst b/doc/build/core/operators.rst
new file mode 100644
index 000000000..5972b53d5
--- /dev/null
+++ b/doc/build/core/operators.rst
@@ -0,0 +1,662 @@
+Operator Reference
+===============================
+
+.. Setup code, not for display
+
+ >>> from sqlalchemy import column, select
+ >>> from sqlalchemy import create_engine
+ >>> engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
+ >>> from sqlalchemy import MetaData, Table, Column, Integer, String
+ >>> metadata = MetaData()
+ >>> user_table = Table(
+ ... "user_account",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('name', String(30)),
+ ... Column('fullname', String)
+ ... )
+ >>> from sqlalchemy import ForeignKey
+ >>> address_table = Table(
+ ... "address",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('user_id', None, ForeignKey('user_account.id')),
+ ... Column('email_address', String, nullable=False)
+ ... )
+ >>> metadata.create_all(engine)
+ BEGIN (implicit)
+ ...
+ >>> from sqlalchemy.orm import declarative_base
+ >>> Base = declarative_base()
+ >>> from sqlalchemy.orm import relationship
+ >>> class User(Base):
+ ... __tablename__ = 'user_account'
+ ...
+ ... id = Column(Integer, primary_key=True)
+ ... name = Column(String(30))
+ ... fullname = Column(String)
+ ...
+ ... addresses = relationship("Address", back_populates="user")
+ ...
+ ... def __repr__(self):
+ ... return f"User(id={self.id!r}, name={self.name!r}, fullname={self.fullname!r})"
+
+ >>> class Address(Base):
+ ... __tablename__ = 'address'
+ ...
+ ... id = Column(Integer, primary_key=True)
+ ... email_address = Column(String, nullable=False)
+ ... user_id = Column(Integer, ForeignKey('user_account.id'))
+ ...
+ ... user = relationship("User", back_populates="addresses")
+ ...
+ ... def __repr__(self):
+ ... return f"Address(id={self.id!r}, email_address={self.email_address!r})"
+ >>> conn = engine.connect()
+ >>> from sqlalchemy.orm import Session
+ >>> session = Session(conn)
+ >>> session.add_all([
+ ... User(name="spongebob", fullname="Spongebob Squarepants", addresses=[
+ ... Address(email_address="spongebob@sqlalchemy.org")
+ ... ]),
+ ... User(name="sandy", fullname="Sandy Cheeks", addresses=[
+ ... Address(email_address="sandy@sqlalchemy.org"),
+ ... Address(email_address="squirrel@squirrelpower.org")
+ ... ]),
+ ... User(name="patrick", fullname="Patrick Star", addresses=[
+ ... Address(email_address="pat999@aol.com")
+ ... ]),
+ ... User(name="squidward", fullname="Squidward Tentacles", addresses=[
+ ... Address(email_address="stentcl@sqlalchemy.org")
+ ... ]),
+ ... User(name="ehkrabs", fullname="Eugene H. Krabs"),
+ ... ])
+ >>> session.commit()
+ BEGIN ...
+ >>> conn.begin()
+ BEGIN ...
+
+
+This section details usage of the operators that are available
+to construct SQL expressions.
+
+These methods are presented in terms of the :class:`_sql.Operators`
+and :class:`_sql.ColumnOperators` base classes. The methods are then
+available on descendants of these classes, including:
+
+* :class:`_schema.Column` objects
+
+* :class:`_sql.ColumnElement` objects more generally, which are the root
+ of all Core SQL Expression language column-level expressions
+
+* :class:`_orm.InstrumentedAttribute` objects, which are ORM level mapped
+ attributes.
+
+The operators are first introduced in the tutorial sections, including:
+
+* :doc:`/tutorial/index` - unified tutorial in :term:`2.0 style`
+
+* :doc:`/orm/tutorial` - ORM tutorial in :term:`1.x style`
+
+* :doc:`/core/tutorial` - Core tutorial in :term:`1.x style`
+
+Comparison Operators
+^^^^^^^^^^^^^^^^^^^^
+
+Basic comparisons which apply to many datatypes, including numerics,
+strings, dates, and many others:
+
+* :meth:`_sql.ColumnOperators.__eq__` (Python "``==``" operator)::
+
+ >>> print(column('x') == 5)
+ x = :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.__ne__` (Python "``!=``" operator)::
+
+ >>> print(column('x') != 5)
+ x != :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.__gt__` (Python "``>``" operator)::
+
+ >>> print(column('x') > 5)
+ x > :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.__lt__` (Python "``<``" operator)::
+
+ >>> print(column('x') < 5)
+ x < :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.__ge__` (Python "``>=``" operator)::
+
+ >>> print(column('x') >= 5)
+ x >= :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.__le__` (Python "``<=``" operator)::
+
+ >>> print(column('x') <= 5)
+ x <= :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.between`::
+
+ >>> print(column('x').between(5, 10))
+ x BETWEEN :x_1 AND :x_2
+
+ ..
+
+IN Comparisons
+^^^^^^^^^^^^^^
+The SQL IN operator is a subject all its own in SQLAlchemy. As the IN
+operator is usually used against a list of fixed values, SQLAlchemy's
+feature of bound parameter coercion makes use of a special form of SQL
+compilation that renders an interim SQL string, which is then formed
+into the final list of bound parameters in a second step. In other words,
+"it just works".
+
+IN against a list of values
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+IN is available most typically by passing a list of
+values to the :meth:`_sql.ColumnOperators.in_` method::
+
+
+ >>> print(column('x').in_([1, 2, 3]))
+ x IN ([POSTCOMPILE_x_1])
+
+The special bound form ``POSTCOMPILE`` is rendered into individual parameters
+at execution time, illustrated below:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select(User.id).where(User.id.in_([1, 2, 3]))
+ >>> result = conn.execute(stmt)
+ {opensql}SELECT user_account.id
+ FROM user_account
+ WHERE user_account.id IN (?, ?, ?)
+ [...] (1, 2, 3){stop}
+
+Empty IN Expressions
+~~~~~~~~~~~~~~~~~~~~
+
+SQLAlchemy produces a mathematically valid result for an empty IN expression
+by rendering a backend-specific subquery that returns no rows. In other
+words, once again, "it just works":
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select(User.id).where(User.id.in_([]))
+ >>> result = conn.execute(stmt)
+ {opensql}SELECT user_account.id
+ FROM user_account
+ WHERE user_account.id IN (SELECT 1 FROM (SELECT 1) WHERE 1!=1)
+ [...] ()
+
+The "empty set" subquery above generalizes correctly and is also rendered
+in terms of the IN operator which remains in place.
+
+
+NOT IN
+~~~~~~~
+
+"NOT IN" is available via the :meth:`_sql.ColumnOperators.not_in` operator::
+
+ >>> print(column('x').not_in([1, 2, 3]))
+ x NOT IN ([POSTCOMPILE_x_1])
+
+This is typically more easily available by negating with the ``~`` operator::
+
+ >>> print(~column('x').in_([1, 2, 3]))
+ x NOT IN ([POSTCOMPILE_x_1])
+
+Tuple IN Expressions
+~~~~~~~~~~~~~~~~~~~~
+
+Comparison of tuples to tuples is common with IN, as among other use cases
+it accommodates the case of matching rows to a set of potential composite
+primary key values. The :func:`_sql.tuple_` construct provides the basic
+building block for tuple comparisons. The :meth:`_sql.Tuple.in_` operator
+then receives a list of tuples::
+
+ >>> from sqlalchemy import tuple_
+ >>> tup = tuple_(column('x', Integer), column('y', Integer))
+ >>> expr = tup.in_([(1, 2), (3, 4)])
+ >>> print(expr)
+ (x, y) IN ([POSTCOMPILE_param_1])
+
+To illustrate the parameters rendered:
+
+.. sourcecode:: pycon+sql
+
+
+ >>> tup = tuple_(User.id, Address.id)
+ >>> stmt = select(User.name).join(Address).where(tup.in_([(1, 1), (2, 2)]))
+ >>> conn.execute(stmt).all()
+ {opensql}SELECT user_account.name
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ WHERE (user_account.id, address.id) IN (VALUES (?, ?), (?, ?))
+ [...] (1, 1, 2, 2){stop}
+ [('spongebob',), ('sandy',)]
+
+Subquery IN
+~~~~~~~~~~~
+
+Finally, the :meth:`_sql.ColumnOperators.in_` and :meth:`_sql.ColumnOperators.not_in`
+operators work with subqueries. The form provides that a :class:`_sql.Select`
+construct is passed in directly, without any explicit conversion to a named
+subquery::
+
+ >>> print(column('x').in_(select(user_table.c.id)))
+ x IN (SELECT user_account.id
+ FROM user_account)
+
+Tuples work as expected::
+
+ >>> print(
+ ... tuple_(column('x'), column('y')).in_(
+ ... select(user_table.c.id, address_table.c.id).join(address_table)
+ ... )
+ ... )
+ (x, y) IN (SELECT user_account.id, address.id
+ FROM user_account JOIN address ON user_account.id = address.user_id)
+
+Identity Comparisons
+^^^^^^^^^^^^^^^^^^^^
+
+These operators involve testing for special SQL values such as
+``NULL``, as well as boolean constants such as ``true`` or ``false``, which some
+databases support:
+
+* :meth:`_sql.ColumnOperators.is_`:
+
+ This operator will provide exactly the SQL for "x IS y", most often seen
+ as "<expr> IS NULL". The ``NULL`` constant is most easily acquired
+ using regular Python ``None``::
+
+ >>> print(column('x').is_(None))
+ x IS NULL
+
+ SQL NULL is also explicitly available, if needed, using the
+ :func:`_sql.null` construct::
+
+ >>> from sqlalchemy import null
+ >>> print(column('x').is_(null()))
+ x IS NULL
+
+ The :meth:`_sql.ColumnOperators.is_` operator is automatically invoked when
+ using the :meth:`_sql.ColumnOperators.__eq__` overloaded operator, i.e.
+ ``==``, in conjunction with the ``None`` or :func:`_sql.null` value. In this
+ way, there's typically not a need to use :meth:`_sql.ColumnOperators.is_`
+ explicitly, particularly when used with a dynamic value::
+
+ >>> a = None
+ >>> print(column('x') == a)
+ x IS NULL
+
+ Note that the Python ``is`` operator is **not overloaded**. Even though
+ Python provides hooks to overload operators such as ``==`` and ``!=``,
+ it does **not** provide any way to redefine ``is``.
+
+* :meth:`_sql.ColumnOperators.is_not`:
+
+ Similar to :meth:`_sql.ColumnOperators.is_`, produces "IS NOT"::
+
+ >>> print(column('x').is_not(None))
+ x IS NOT NULL
+
+ It is similarly equivalent to ``!= None``::
+
+ >>> print(column('x') != None)
+ x IS NOT NULL
+
+* :meth:`_sql.ColumnOperators.is_distinct_from`:
+
+ Produces SQL IS DISTINCT FROM::
+
+ >>> print(column('x').is_distinct_from('some value'))
+ x IS DISTINCT FROM :x_1
+
+* :meth:`_sql.ColumnOperators.isnot_distinct_from`:
+
+ Produces SQL IS NOT DISTINCT FROM::
+
+ >>> print(column('x').isnot_distinct_from('some value'))
+ x IS NOT DISTINCT FROM :x_1
+
+String Comparisons
+^^^^^^^^^^^^^^^^^^
+
+* :meth:`_sql.ColumnOperators.like`::
+
+ >>> print(column('x').like('word'))
+ x LIKE :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.ilike`:
+
+ Case insensitive LIKE makes use of the SQL ``lower()`` function on a
+ generic backend. On the PostgreSQL backend it will use ``ILIKE``::
+
+ >>> print(column('x').ilike('word'))
+ lower(x) LIKE lower(:x_1)
+
+ ..
+
+* :meth:`_sql.ColumnOperators.notlike`::
+
+ >>> print(column('x').notlike('word'))
+ x NOT LIKE :x_1
+
+ ..
+
+
+* :meth:`_sql.ColumnOperators.notilike`::
+
+ >>> print(column('x').notilike('word'))
+ lower(x) NOT LIKE lower(:x_1)
+
+ ..
+
+String Containment
+^^^^^^^^^^^^^^^^^^^
+
+String containment operators are basically built as a combination of
+LIKE and the string concatenation operator, which is ``||`` on most
+backends or sometimes a function like ``concat()``:
+
+* :meth:`_sql.ColumnOperators.startswith`::
+
+ >>> print(column('x').startswith('word'))
+ x LIKE :x_1 || '%'
+
+ ..
+
+* :meth:`_sql.ColumnOperators.endswith`::
+
+ >>> print(column('x').endswith('word'))
+ x LIKE '%' || :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.contains`::
+
+ >>> print(column('x').contains('word'))
+ x LIKE '%' || :x_1 || '%'
+
+ ..
+
+String matching
+^^^^^^^^^^^^^^^^
+
+Matching operators are always backend-specific and may provide different
+behaviors and results on different databases:
+
+* :meth:`_sql.ColumnOperators.match`:
+
+ This is a dialect-specific operator that makes use of the MATCH
+ feature of the underlying database, if available::
+
+ >>> print(column('x').match('word'))
+ x MATCH :x_1
+
+ ..
+
+* :meth:`_sql.ColumnOperators.regexp_match`:
+
+ This operator is dialect specific. We can illustrate it using,
+ for example, the PostgreSQL dialect::
+
+ >>> from sqlalchemy.dialects import postgresql
+ >>> print(column('x').regexp_match('word').compile(dialect=postgresql.dialect()))
+ x ~ %(x_1)s
+
+ Or MySQL::
+
+ >>> from sqlalchemy.dialects import mysql
+ >>> print(column('x').regexp_match('word').compile(dialect=mysql.dialect()))
+ x REGEXP %s
+
+ ..
+
+
+.. _queryguide_operators_concat_op:
+
+String Alteration
+^^^^^^^^^^^^^^^^^
+
+* :meth:`_sql.ColumnOperators.concat`:
+
+ String concatenation::
+
+ >>> print(column('x').concat("some string"))
+ x || :x_1
+
+ This operator is available via :meth:`_sql.ColumnOperators.__add__`, that
+ is, the Python ``+`` operator, when working with a column expression that
+ derives from :class:`_types.String`::
+
+ >>> print(column('x', String) + "some string")
+ x || :x_1
+
+ The operator will produce the appropriate database-specific construct;
+ on MySQL, for example, it has historically been the ``concat()`` SQL function::
+
+ >>> print((column('x', String) + "some string").compile(dialect=mysql.dialect()))
+ concat(x, %s)
+
+ ..
+
+* :meth:`_sql.ColumnOperators.regexp_replace`:
+
+ Complementary to :meth:`_sql.ColumnOperators.regexp_match`, this produces the
+ REGEXP REPLACE equivalent for the backends which support it::
+
+ >>> print(column('x').regexp_replace('foo', 'bar').compile(dialect=postgresql.dialect()))
+ REGEXP_REPLACE(x, %(x_1)s, %(x_2)s)
+
+ ..
+
+* :meth:`_sql.ColumnOperators.collate`:
+
+ Produces the COLLATE SQL operator which provides for specific collations
+ at expression time::
+
+ >>> print((column('x').collate('latin1_german2_ci') == 'Müller').compile(dialect=mysql.dialect()))
+ (x COLLATE latin1_german2_ci) = %s
+
+
+ To use COLLATE against a literal value, use the :func:`_sql.literal` construct::
+
+
+ >>> from sqlalchemy import literal
+ >>> print((literal('Müller').collate('latin1_german2_ci') == column('x')).compile(dialect=mysql.dialect()))
+ (%s COLLATE latin1_german2_ci) = x
+
+ ..
+
+Arithmetic Operators
+^^^^^^^^^^^^^^^^^^^^
+
+* :meth:`_sql.ColumnOperators.__add__`, :meth:`_sql.ColumnOperators.__radd__` (Python "``+``" operator)::
+
+ >>> print(column('x') + 5)
+ x + :x_1
+
+ >>> print(5 + column('x'))
+ :x_1 + x
+
+ ..
+
+
+ Note that when the datatype of the expression is :class:`_types.String`
+ or similar, the :meth:`_sql.ColumnOperators.__add__` operator instead produces
+ :ref:`string concatenation <queryguide_operators_concat_op>`.
+
+
+* :meth:`_sql.ColumnOperators.__sub__`, :meth:`_sql.ColumnOperators.__rsub__` (Python "``-``" operator)::
+
+ >>> print(column('x') - 5)
+ x - :x_1
+
+ >>> print(5 - column('x'))
+ :x_1 - x
+
+ ..
+
+
+* :meth:`_sql.ColumnOperators.__mul__`, :meth:`_sql.ColumnOperators.__rmul__` (Python "``*``" operator)::
+
+ >>> print(column('x') * 5)
+ x * :x_1
+
+ >>> print(5 * column('x'))
+ :x_1 * x
+
+ ..
+
+* :meth:`_sql.ColumnOperators.__div__`, :meth:`_sql.ColumnOperators.__rdiv__` (Python "``/``" operator)::
+
+ >>> print(column('x') / 5)
+ x / :x_1
+ >>> print(5 / column('x'))
+ :x_1 / x
+
+ ..
+
+
+* :meth:`_sql.ColumnOperators.__mod__`, :meth:`_sql.ColumnOperators.__rmod__` (Python "``%``" operator)::
+
+ >>> print(column('x') % 5)
+ x % :x_1
+ >>> print(5 % column('x'))
+ :x_1 % x
+
+ ..
+
+
+Using Conjunctions and Negations
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The most common conjunction, "AND", is automatically applied if we make repeated use of the :meth:`_sql.Select.where` method, as well as similar methods such as
+:meth:`_sql.Update.where` and :meth:`_sql.Delete.where`::
+
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... where(user_table.c.name == 'squidward').
+ ... where(address_table.c.user_id == user_table.c.id)
+ ... )
+ SELECT address.email_address
+ FROM address, user_account
+ WHERE user_account.name = :name_1 AND address.user_id = user_account.id
+
+:meth:`_sql.Select.where`, :meth:`_sql.Update.where` and :meth:`_sql.Delete.where` also accept multiple expressions with the same effect::
+
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... where(
+ ... user_table.c.name == 'squidward',
+ ... address_table.c.user_id == user_table.c.id
+ ... )
+ ... )
+ SELECT address.email_address
+ FROM address, user_account
+ WHERE user_account.name = :name_1 AND address.user_id = user_account.id
+
+The "AND" conjunction, as well as its partner "OR", are both available directly using the :func:`_sql.and_` and :func:`_sql.or_` functions::
+
+
+ >>> from sqlalchemy import and_, or_
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... where(
+ ... and_(
+ ... or_(user_table.c.name == 'squidward', user_table.c.name == 'sandy'),
+ ... address_table.c.user_id == user_table.c.id
+ ... )
+ ... )
+ ... )
+ SELECT address.email_address
+ FROM address, user_account
+ WHERE (user_account.name = :name_1 OR user_account.name = :name_2)
+ AND address.user_id = user_account.id
+
+A negation is available using the :func:`_sql.not_` function. This will
+typically invert the operator in a boolean expression::
+
+ >>> from sqlalchemy import not_
+ >>> print(not_(column('x') == 5))
+ x != :x_1
+
+It may also apply the ``NOT`` keyword when appropriate::
+
+ >>> from sqlalchemy import Boolean
+ >>> print(not_(column('x', Boolean)))
+ NOT x
+
+
+Conjunction Operators
+^^^^^^^^^^^^^^^^^^^^^^
+
+The above conjunction functions :func:`_sql.and_`, :func:`_sql.or_`,
+:func:`_sql.not_` are also available as overloaded Python operators:
+
+.. note:: The Python ``&``, ``|`` and ``~`` operators bind more tightly than
+ comparison operators such as ``==``; as a result, parentheses must usually
+ be applied to operands that themselves contain expressions, as indicated in the
+ examples below.
+
+* :meth:`_sql.Operators.__and__` (Python "``&``" operator):
+
+ The Python binary ``&`` operator is overloaded to behave the same
+ as :func:`_sql.and_` (note the parentheses around the two operands)::
+
+ >>> print((column('x') == 5) & (column('y') == 10))
+ x = :x_1 AND y = :y_1
+
+ ..
+
+
+* :meth:`_sql.Operators.__or__` (Python "``|``" operator):
+
+ The Python binary ``|`` operator is overloaded to behave the same
+ as :func:`_sql.or_` (note the parentheses around the two operands)::
+
+ >>> print((column('x') == 5) | (column('y') == 10))
+ x = :x_1 OR y = :y_1
+
+ ..
+
+
+* :meth:`_sql.Operators.__invert__` (Python "``~``" operator):
+
+ The Python unary ``~`` operator is overloaded to behave the same
+ as :func:`_sql.not_`, either inverting the existing operator, or
+ applying the ``NOT`` keyword to the expression as a whole::
+
+ >>> print(~(column('x') == 5))
+ x != :x_1
+
+ >>> from sqlalchemy import Boolean
+ >>> print(~column('x', Boolean))
+ NOT x
+
+ ..
+
+
+
+Operator Customization
+^^^^^^^^^^^^^^^^^^^^^^
+
+
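+As a brief illustrative sketch, SQL operators not provided by the methods
+above can be generated on an ad-hoc basis using the :meth:`_sql.Operators.op`
+method, which is given the operator string to render::
+
+ >>> print(column('x').op('>>>')(5))
+ x >>> :x_1
+
+When the custom operator returns a boolean result, the similar
+:meth:`_sql.Operators.bool_op` method may be used instead so that the
+resulting expression behaves as a boolean in conjunctions.
+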
diff --git a/doc/build/core/selectable.rst b/doc/build/core/selectable.rst
index f7a6b5969..147e372be 100644
--- a/doc/build/core/selectable.rst
+++ b/doc/build/core/selectable.rst
@@ -65,7 +65,7 @@ Selectable Class Documentation
--------------------------------
The classes here are generated using the constructors listed at
-:ref:`fromclause_foundational_constructors` and
+:ref:`selectable_foundational_constructors` and
:ref:`fromclause_modifier_constructors`.
.. autoclass:: Alias
@@ -141,6 +141,7 @@ The classes here are generated using the constructors listed at
.. autoclass:: TextualSelect
:members:
+ :inherited-members:
.. autoclass:: Values
:members:
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 491236066..1248e00a3 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -4,6 +4,35 @@
SQL Expression Language Tutorial (1.x API)
==========================================
+.. admonition:: About this document
+
+ This tutorial covers the well known SQLAlchemy Core API
+ that has been in use for many years. As of SQLAlchemy 1.4, there are two
+ distinct styles of Core use known as :term:`1.x style` and :term:`2.0
+ style`, the latter of which makes some adjustments mostly in the area
+ of how transactions are controlled as well as narrows down the patterns
+ for how SQL statement constructs are executed.
+
+ The plan is that in SQLAlchemy 2.0, those elements of 1.x style
+ Core use will be removed, after a deprecation phase that continues
+ throughout the 1.4 series. For ORM use, some elements of 1.x style
+ will still be available; see the :ref:`migration_20_toplevel` document
+ for a complete overview.
+
+ The tutorial here is applicable to users who want to learn how SQLAlchemy
+ Core has been used for many years, particularly those users working with
+ existing applications or related learning material that is in 1.x style.
+
+ For an introduction to SQLAlchemy Core from the new 1.4/2.0 perspective,
+ see :ref:`unified_tutorial`.
+
+ .. seealso::
+
+ :ref:`migration_20_toplevel`
+
+ :ref:`unified_tutorial`
+
+
The SQLAlchemy Expression Language presents a system of representing
relational database structures and expressions using Python constructs. These
constructs are modeled to resemble those of the underlying database as closely
@@ -157,10 +186,7 @@ each table first before creating, so it's safe to call multiple times:
fullname VARCHAR,
PRIMARY KEY (id)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
- <BLANKLINE>
CREATE TABLE addresses (
id INTEGER NOT NULL,
user_id INTEGER,
@@ -168,8 +194,6 @@ each table first before creating, so it's safe to call multiple times:
PRIMARY KEY (id),
FOREIGN KEY(user_id) REFERENCES users (id)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
COMMIT
diff --git a/doc/build/dialects/postgresql.rst b/doc/build/dialects/postgresql.rst
index 6c36e5814..2b062f581 100644
--- a/doc/build/dialects/postgresql.rst
+++ b/doc/build/dialects/postgresql.rst
@@ -186,6 +186,8 @@ PostgreSQL DML Constructs
.. autoclass:: sqlalchemy.dialects.postgresql.Insert
:members:
+.. _postgresql_psycopg2:
+
psycopg2
--------
diff --git a/doc/build/errors.rst b/doc/build/errors.rst
index 69664ae1d..42c0db977 100644
--- a/doc/build/errors.rst
+++ b/doc/build/errors.rst
@@ -168,7 +168,7 @@ engine::
session.add(MyClass())
session.commit()
-In SQLAlchemy 1.4, this :term:`2.x style` behavior is enabled when the
+In SQLAlchemy 1.4, this :term:`2.0 style` behavior is enabled when the
:paramref:`_orm.Session.future` flag is set on :class:`_orm.sessionmaker`
or :class:`_orm.Session`.
diff --git a/doc/build/faq/sessions.rst b/doc/build/faq/sessions.rst
index d9972cb86..2ab471dd1 100644
--- a/doc/build/faq/sessions.rst
+++ b/doc/build/faq/sessions.rst
@@ -67,8 +67,9 @@ Three ways, from most common to least:
:class:`.Session.refresh`. See :ref:`session_expire` for detail on this.
3. We can run whole queries while setting them to definitely overwrite
- already-loaded objects as they read rows by using
- :meth:`_query.Query.populate_existing`.
+ already-loaded objects as they read rows by using "populate existing".
+ This is an execution option described at
+ :ref:`orm_queryguide_populate_existing`.
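+ For example, a brief sketch in :term:`2.0 style` (assuming a mapped
+ ``User`` class and the :func:`_sql.select` construct)::
+
+     session.execute(
+         select(User).execution_options(populate_existing=True)
+     ).scalars().all()
+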
But remember, **the ORM cannot see changes in rows if our isolation
level is repeatable read or higher, unless we start a new transaction**.
diff --git a/doc/build/glossary.rst b/doc/build/glossary.rst
index d51d696e3..d47f0397a 100644
--- a/doc/build/glossary.rst
+++ b/doc/build/glossary.rst
@@ -63,14 +63,26 @@ Glossary
from sqlalchemy import select
+ # a Core select statement with ORM entities is
+ # now ORM-enabled at the compiler level
stmt = select(User).join(User.addresses)
- session = Session() # make sure future=True is used for 1.4
+ session = Session(engine)
result = session.execute(stmt)
+ # Session returns a Result that has ORM entities
list_of_users = result.scalars().all()
+ facade
+
+ An object that serves as a front-facing interface masking more complex
+ underlying or structural code.
+
+ .. seealso::
+
+ `Facade pattern (via Wikipedia) <https://en.wikipedia.org/wiki/Facade_pattern>`_
+
relational
relational algebra
@@ -81,6 +93,25 @@ Glossary
`Relational Algebra (via Wikipedia) <https://en.wikipedia.org/wiki/Relational_algebra>`_
+ cartesian product
+
+ Given two sets A and B, the cartesian product is the set of all ordered pairs (a, b)
+ where a is in A and b is in B.
+
+ In terms of SQL databases, a cartesian product occurs when we select from two
+ or more tables (or other subqueries) without establishing any kind of criteria
+ between the rows of one table to another (directly or indirectly). If we
+ SELECT from table A and table B at the same time, we get every row of A matched
+ to the first row of B, then every row of A matched to the second row of B, and
+ so on until every row from A has been paired with every row of B.
+
+ Cartesian products cause enormous result sets to be generated and can easily
+ crash a client application if not prevented.
+
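+ As a brief illustrative sketch using the Core expression language, where
+ ``a`` and ``b`` are ad-hoc :func:`_sql.table` constructs::
+
+     from sqlalchemy import column, select, table
+
+     a = table("a", column("id"))
+     b = table("b", column("id"))
+
+     # no criteria links "a" to "b"; this renders approximately
+     # "SELECT a.id, b.id FROM a, b" - every row of "a" paired with
+     # every row of "b"
+     stmt = select(a.c.id, b.c.id)
+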
+ .. seealso::
+
+ `Cartesian Product (via Wikipedia) <https://en.wikipedia.org/wiki/Cartesian_product>`_
+
cyclomatic complexity
A measure of code complexity based on the number of possible paths
through a program's source code.
@@ -89,6 +120,34 @@ Glossary
`Cyclomatic Complexity <https://en.wikipedia.org/wiki/Cyclomatic_complexity>`_
+ bound parameter
+ bound parameters
+ bind parameter
+ bind parameters
+
+ Bound parameters are the primary means by which data is passed to the
+ :term:`DBAPI` database driver. While the operation to be invoked is
+ based on the SQL statement string, the data values themselves are
+ passed separately, where the driver contains logic that will safely
+ process these strings and pass them to the backend database server,
+ which may either involve formatting the parameters into the SQL string
+ itself, or passing them to the database using separate protocols.
+
+ The specific system by which the database driver does this should not
+ matter to the caller; the point is that on the outside, data should
+ **always** be passed separately and not as part of the SQL string
+ itself. This is integral both to having adequate security against
+ SQL injections as well as allowing the driver to have the best
+ performance.
+
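+ As a brief sketch at the DBAPI level, where ``cursor`` is assumed to be a
+ DBAPI cursor using the ``qmark`` parameter style of the ``sqlite3``
+ driver, the statement and its data travel separately::
+
+     cursor.execute(
+         "SELECT * FROM user_account WHERE name = ?",
+         ("spongebob",),  # data passed separately as bound parameters
+     )
+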
+ .. seealso::
+
+ `Prepared Statement <https://en.wikipedia.org/wiki/Prepared_statement>`_ - at Wikipedia
+
+ `bind parameters <https://use-the-index-luke.com/sql/where-clause/bind-parameters>`_ - at Use The Index, Luke!
+
+
+
selectable
A term used in SQLAlchemy to describe a SQL construct that represents
a collection of rows. It's largely similar to the concept of a
@@ -216,6 +275,7 @@ Glossary
:term:`DML`
+ :term:`DQL`
DML
An acronym for **Data Manipulation Language**. DML is the subset of
@@ -230,7 +290,24 @@ Glossary
:term:`DDL`
+ :term:`DQL`
+
+ DQL
+ An acronym for **Data Query Language**. DQL is the subset of
+ SQL that relational databases use to *read* the data in tables.
+ DQL almost exclusively refers to the SQL SELECT construct as the
+ top level SQL statement in use.
+
+ .. seealso::
+
+ `DQL (via Wikipedia) <https://en.wikipedia.org/wiki/Data_query_language>`_
+
+ :term:`DML`
+
+ :term:`DDL`
+
metadata
+ database metadata
table metadata
The term "metadata" generally refers to "data that describes data";
data that itself represents the format and/or structure of some other
@@ -394,14 +471,19 @@ Glossary
mapping
mapped
+ mapped class
We say a class is "mapped" when it has been passed through the
:func:`_orm.mapper` function. This process associates the
class with a database table or other :term:`selectable`
construct, so that instances of it can be persisted
- using a :class:`.Session` as well as loaded using a
- :class:`.query.Query`.
+ and loaded using a :class:`.Session`.
+
+ .. seealso::
+
+ :ref:`orm_mapping_classes_toplevel`
N plus one problem
+ N plus one
The N plus one problem is a common side effect of the
:term:`lazy load` pattern, whereby an application wishes
to iterate through a related attribute or collection on
@@ -419,6 +501,8 @@ Glossary
.. seealso::
+ :ref:`tutorial_orm_loader_strategies`
+
:doc:`orm/loading_relationships`
polymorphic
@@ -627,6 +711,7 @@ Glossary
subquery
+ scalar subquery
Refers to a ``SELECT`` statement that is embedded within an enclosing
``SELECT``.
@@ -1135,6 +1220,19 @@ Glossary
:ref:`relationship_config_toplevel`
+ cursor
+ A control structure that enables traversal over the records in a database.
+ In the Python DBAPI, the cursor object is in fact the starting point
+ for statement execution as well as the interface used for fetching
+ results.
+
+ .. seealso::
+
+ `Cursor Objects (in pep-249) <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_
+
+ `Cursor (via Wikipedia) <https://en.wikipedia.org/wiki/Cursor_(databases)>`_
+
+
association relationship
A two-tiered :term:`relationship` which links two tables
together using an association table in the middle. The
@@ -1260,8 +1358,20 @@ Glossary
.. seealso::
+ :term:`composite primary key`
+
`Primary key (via Wikipedia) <http://en.wikipedia.org/wiki/Primary_Key>`_
+ composite primary key
+
+ A :term:`primary key` that has more than one column. A particular
+ database row is unique based on two or more columns rather than just
+ a single value.
+
+ .. seealso::
+
+ :term:`primary key`
+
foreign key constraint
A referential constraint between two tables. A foreign key is a field or set of fields in a
relational table that matches a :term:`candidate key` of another table.
diff --git a/doc/build/index.rst b/doc/build/index.rst
index b5b5e9351..e7f19c1f1 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -43,9 +43,7 @@ SQLAlchemy Documentation
Core and ORM working styles more closely than ever. The new tutorial introduces
both concepts in parallel. New users and those starting new projects should start here!
- * **SQLAlchemy 2.0 (Coming Soon)** - SQLAlchemy 2.0's main tutorial
-
- * **SQLAlchemy Querying Guide (Coming Soon)** - reference documentation for writing queries
+ * :doc:`/tutorial/index` - SQLAlchemy 2.0's main tutorial
* :doc:`Migrating to SQLAlchemy 2.0 <changelog/migration_20>` - Complete background on migrating from 1.3 or 1.4 to 2.0
@@ -79,18 +77,17 @@ SQLAlchemy Documentation
:doc:`Mapper Configuration <orm/mapper_config>` |
:doc:`Relationship Configuration <orm/relationships>`
+ * **ORM Usage:**
+ :doc:`Session Usage and Guidelines <orm/session>` |
+ :doc:`Querying Data, Loading Objects <orm/loading_objects>` |
+ :doc:`AsyncIO Support <orm/extensions/asyncio>`
+
* **Configuration Extensions:**
- :doc:`Declarative Extension <orm/extensions/declarative/index>` |
:doc:`Association Proxy <orm/extensions/associationproxy>` |
:doc:`Hybrid Attributes <orm/extensions/hybrid>` |
:doc:`Automap <orm/extensions/automap>` |
:doc:`Mutable Scalars <orm/extensions/mutable>` |
- :doc:`Indexable <orm/extensions/indexable>`
-
- * **ORM Usage:**
- :doc:`Session Usage and Guidelines <orm/session>` |
- :doc:`Loading Objects <orm/loading_objects>` |
- :doc:`AsyncIO Support <orm/extensions/asyncio>`
+ :doc:`All extensions <orm/extensions/index>`
* **Extending the ORM:**
:doc:`ORM Events and Internals <orm/extending>`
diff --git a/doc/build/orm/extensions/declarative/api.rst b/doc/build/orm/extensions/declarative/api.rst
index 6e413a07e..e41e735d3 100644
--- a/doc/build/orm/extensions/declarative/api.rst
+++ b/doc/build/orm/extensions/declarative/api.rst
@@ -21,13 +21,6 @@ API Reference
* :meth:`_orm.as_declarative`
-.. autofunction:: instrument_declarative
-
-.. autoclass:: AbstractConcreteBase
-
-.. autoclass:: ConcreteBase
-
-.. autoclass:: DeferredReflection
- :members:
-
+See :ref:`declarative_toplevel` for the remaining Declarative extension
+classes.
diff --git a/doc/build/orm/extensions/declarative/index.rst b/doc/build/orm/extensions/declarative/index.rst
index 36700f812..7ef2551c6 100644
--- a/doc/build/orm/extensions/declarative/index.rst
+++ b/doc/build/orm/extensions/declarative/index.rst
@@ -21,3 +21,16 @@ mapping API.
.. autoclass:: DeferredReflection
:members:
+
+.. these pages have all been integrated into the main ORM documentation
+ however are still here as placeholder docs with links to where they've moved
+
+.. toctree::
+ :hidden:
+
+ api
+ basic_use
+ inheritance
+ mixins
+ relationships
+ table_config \ No newline at end of file
diff --git a/doc/build/orm/extensions/declarative/inheritance.rst b/doc/build/orm/extensions/declarative/inheritance.rst
index 70148986b..d65cafd35 100644
--- a/doc/build/orm/extensions/declarative/inheritance.rst
+++ b/doc/build/orm/extensions/declarative/inheritance.rst
@@ -1,3 +1,6 @@
.. _declarative_inheritance:
+Declarative Inheritance
+=======================
+
See :ref:`inheritance_toplevel` for this section.
diff --git a/doc/build/orm/loading_columns.rst b/doc/build/orm/loading_columns.rst
index a0759e768..9566d0efc 100644
--- a/doc/build/orm/loading_columns.rst
+++ b/doc/build/orm/loading_columns.rst
@@ -1,3 +1,5 @@
+.. _loading_columns:
+
.. currentmodule:: sqlalchemy.orm
===============
@@ -277,11 +279,9 @@ Column Deferral API
Column Bundles
==============
-The :class:`.Bundle` may be used to query for groups of columns under one
+The :class:`_orm.Bundle` may be used to query for groups of columns under one
namespace.
-.. versionadded:: 0.9.0
-
The bundle allows columns to be grouped together::
from sqlalchemy.orm import Bundle
@@ -292,7 +292,7 @@ The bundle allows columns to be grouped together::
The bundle can be subclassed to provide custom behaviors when results
are fetched. The method :meth:`.Bundle.create_row_processor` is given
-the :class:`_query.Query` and a set of "row processor" functions at query execution
+the statement object and a set of "row processor" functions at query execution
time; these processor functions when given a result row will return the
individual attribute value, which can then be adapted into any kind of
return data structure. Below illustrates replacing the usual :class:`.Row`
@@ -309,6 +309,11 @@ return structure with a straight Python dictionary::
)
return proc
+.. note::
+
+ The :class:`_orm.Bundle` construct only applies to column expressions.
+ It does not apply to ORM attributes mapped using :func:`_orm.relationship`.
+
.. versionchanged:: 1.0
The ``proc()`` callable passed to the ``create_row_processor()``
diff --git a/doc/build/orm/loading_objects.rst b/doc/build/orm/loading_objects.rst
index b26b32087..3075ea9f4 100644
--- a/doc/build/orm/loading_objects.rst
+++ b/doc/build/orm/loading_objects.rst
@@ -1,14 +1,26 @@
-===============
-Loading Objects
-===============
+===============================
+Querying Data, Loading Objects
+===============================
-Notes and features regarding the general loading of mapped objects.
+The following sections refer to techniques for emitting SELECT statements within
+an ORM context. This involves primarily statements that return instances of
+ORM mapped objects, but also involves calling forms that deliver individual
+columns or groups of columns as well.
-For an in-depth introduction to querying with the SQLAlchemy ORM, please see the :ref:`ormtutorial_toplevel`.
+For an introduction to querying with the SQLAlchemy ORM, one of the
+following tutorials should be consulted:
+
+* :doc:`/tutorial/index` - for :term:`2.0 style` usage
+
+* :doc:`/orm/tutorial` - for :term:`1.x style` usage.
+
+As SQLAlchemy 1.4 represents a transition from 1.x to 2.0 style, the below
+sections are currently mixed as far as which style they are using.
.. toctree::
:maxdepth: 3
+ queryguide
loading_columns
loading_relationships
inheritance_loading
diff --git a/doc/build/orm/loading_relationships.rst b/doc/build/orm/loading_relationships.rst
index 5a0902945..4b436f9c2 100644
--- a/doc/build/orm/loading_relationships.rst
+++ b/doc/build/orm/loading_relationships.rst
@@ -66,6 +66,7 @@ The primary forms of relationship loading are:
uncommon loader option.
+.. _relationship_lazy_option:
Configuring Loader Strategies at Mapping Time
---------------------------------------------
@@ -776,7 +777,7 @@ order to load related associations:
.. sourcecode:: python+sql
>>> jack = session.query(User).\
- ... options(selectinload('addresses')).\
+ ... options(selectinload(User.addresses)).\
... filter(or_(User.name == 'jack', User.name == 'ed')).all()
{opensql}SELECT
users.id AS users_id,
@@ -818,7 +819,7 @@ loaded on the parent objects and would otherwise need to be loaded:
.. sourcecode:: python+sql
>>> session.query(Address).\
- ... options(selectinload('user')).all()
+ ... options(selectinload(Address.user)).all()
{opensql}SELECT
addresses.id AS addresses_id,
addresses.email_address AS addresses_email_address,
@@ -1204,7 +1205,7 @@ to a string SQL statement::
Creating Custom Load Rules
--------------------------
-.. warning:: This is an advanced technique! Great care and testing
+.. deepalchemy:: This is an advanced technique! Great care and testing
should be applied.
The ORM has various edge cases where the value of an attribute is locally
diff --git a/doc/build/orm/mapping_styles.rst b/doc/build/orm/mapping_styles.rst
index 5e3c5154a..b88934094 100644
--- a/doc/build/orm/mapping_styles.rst
+++ b/doc/build/orm/mapping_styles.rst
@@ -588,6 +588,8 @@ Mapped Class Behavior
Across all styles of mapping using the :class:`_orm.registry` object,
the following behaviors are common:
+.. _mapped_class_default_constructor:
+
Default Constructor
-------------------
diff --git a/doc/build/orm/query.rst b/doc/build/orm/query.rst
index 592004e86..d7711671c 100644
--- a/doc/build/orm/query.rst
+++ b/doc/build/orm/query.rst
@@ -38,10 +38,10 @@ ORM-Specific Query Constructs
.. autoclass:: sqlalchemy.orm.util.AliasedInsp
-.. autoclass:: sqlalchemy.orm.util.Bundle
+.. autoclass:: sqlalchemy.orm.Bundle
:members:
-.. autoclass:: sqlalchemy.orm.strategy_options.Load
+.. autoclass:: sqlalchemy.orm.Load
:members:
.. autofunction:: sqlalchemy.orm.with_loader_criteria
diff --git a/doc/build/orm/queryguide.rst b/doc/build/orm/queryguide.rst
new file mode 100644
index 000000000..5576882a6
--- /dev/null
+++ b/doc/build/orm/queryguide.rst
@@ -0,0 +1,882 @@
+.. highlight:: pycon+sql
+
+.. _queryguide_toplevel:
+
+==================
+ORM Querying Guide
+==================
+
+This section provides an overview of emitting queries with the
+SQLAlchemy ORM using :term:`2.0 style` usage.
+
+Readers of this section should be familiar with the SQLAlchemy overview
+at :ref:`unified_tutorial`, and in particular most of the content here expands
+upon the content at :ref:`tutorial_selecting_data`.
+
+
+.. Setup code, not for display
+
+ >>> from sqlalchemy import create_engine
+ >>> engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
+ >>> from sqlalchemy import MetaData, Table, Column, Integer, String
+ >>> metadata = MetaData()
+ >>> user_table = Table(
+ ... "user_account",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('name', String(30)),
+ ... Column('fullname', String)
+ ... )
+ >>> from sqlalchemy import ForeignKey
+ >>> address_table = Table(
+ ... "address",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('user_id', None, ForeignKey('user_account.id')),
+ ... Column('email_address', String, nullable=False)
+ ... )
+ >>> orders_table = Table(
+ ... "user_order",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('user_id', None, ForeignKey('user_account.id')),
+ ... Column('email_address', String, nullable=False)
+ ... )
+ >>> order_items_table = Table(
+ ... "order_items",
+ ... metadata,
+ ... Column("order_id", ForeignKey("user_order.id"), primary_key=True),
+ ... Column("item_id", ForeignKey("item.id"), primary_key=True)
+ ... )
+ >>> items_table = Table(
+ ... "item",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('name', String),
+ ... Column('description', String)
+ ... )
+ >>> metadata.create_all(engine)
+ BEGIN (implicit)
+ ...
+ >>> from sqlalchemy.orm import declarative_base
+ >>> Base = declarative_base()
+ >>> from sqlalchemy.orm import relationship
+ >>> class User(Base):
+ ... __table__ = user_table
+ ...
+ ... addresses = relationship("Address", back_populates="user")
+ ... orders = relationship("Order")
+ ...
+ ... def __repr__(self):
+ ... return f"User(id={self.id!r}, name={self.name!r}, fullname={self.fullname!r})"
+
+ >>> class Address(Base):
+ ... __table__ = address_table
+ ...
+ ... user = relationship("User", back_populates="addresses")
+ ...
+ ... def __repr__(self):
+ ... return f"Address(id={self.id!r}, email_address={self.email_address!r})"
+
+ >>> class Order(Base):
+ ... __table__ = orders_table
+ ... items = relationship("Item", secondary=order_items_table)
+
+ >>> class Item(Base):
+ ... __table__ = items_table
+
+ >>> conn = engine.connect()
+ >>> from sqlalchemy.orm import Session
+ >>> session = Session(conn)
+ >>> session.add_all([
+ ... User(name="spongebob", fullname="Spongebob Squarepants", addresses=[
+ ... Address(email_address="spongebob@sqlalchemy.org")
+ ... ]),
+ ... User(name="sandy", fullname="Sandy Cheeks", addresses=[
+ ... Address(email_address="sandy@sqlalchemy.org"),
+ ... Address(email_address="squirrel@squirrelpower.org")
+ ... ]),
+ ... User(name="patrick", fullname="Patrick Star", addresses=[
+ ... Address(email_address="pat999@aol.com")
+ ... ]),
+ ... User(name="squidward", fullname="Squidward Tentacles", addresses=[
+ ... Address(email_address="stentcl@sqlalchemy.org")
+ ... ]),
+ ... User(name="ehkrabs", fullname="Eugene H. Krabs"),
+ ... ])
+ >>> session.commit()
+ BEGIN ...
+ >>> conn.begin()
+ BEGIN ...
+
+
+SELECT statements
+=================
+
+SELECT statements are produced by the :func:`_sql.select` function which
+returns a :class:`_sql.Select` object::
+
+ >>> from sqlalchemy import select
+ >>> stmt = select(User).where(User.name == 'spongebob')
+
+To invoke a :class:`_sql.Select` with the ORM, it is passed to
+:meth:`_orm.Session.execute`::
+
+ {sql}>>> result = session.execute(stmt)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = ?
+ [...] ('spongebob',){stop}
+ >>> for user_obj in result.scalars():
+ ... print(f"{user_obj.name} {user_obj.fullname}")
+ spongebob Spongebob Squarepants
+
+
+
+.. _orm_queryguide_select_columns:
+
+Selecting ORM Entities and Attributes
+--------------------------------------
+
+The :func:`_sql.select` construct accepts ORM entities, including mapped
+classes as well as class-level attributes representing mapped columns, which
+are converted into ORM-annotated :class:`_sql.FromClause` and
+:class:`_sql.ColumnElement` elements at construction time.
+
+A :class:`_sql.Select` object that contains ORM-annotated entities is normally
+executed using a :class:`_orm.Session` object, and not a :class:`_future.Connection`
+object, so that ORM-related features may take effect.
+
+Below we select from the ``User`` entity, producing a :class:`_sql.Select`
+that selects from the mapped :class:`_schema.Table` to which ``User`` is mapped::
+
+ {sql}>>> result = session.execute(select(User).order_by(User.id))
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account ORDER BY user_account.id
+ [...] (){stop}
+
+When selecting from ORM entities, the entity itself is returned in the result
+as a single column value; for example above, the :class:`_engine.Result`
+returns :class:`_engine.Row` objects that have just a single column, that column
+holding onto a ``User`` object::
+
+ >>> result.fetchone()
+ (User(id=1, name='spongebob', fullname='Spongebob Squarepants'),)
+
+When selecting a list of single-column ORM entities, it is typical to skip
+the generation of :class:`_engine.Row` objects and instead receive
+ORM entities directly, which is achieved using the :meth:`_engine.Result.scalars`
+method::
+
+ >>> result.scalars().all()
+ [User(id=2, name='sandy', fullname='Sandy Cheeks'),
+ User(id=3, name='patrick', fullname='Patrick Star'),
+ User(id=4, name='squidward', fullname='Squidward Tentacles'),
+ User(id=5, name='ehkrabs', fullname='Eugene H. Krabs')]
+
+ORM Entities are named in the result row based on their class name,
+such as below where we SELECT from both ``User`` and ``Address`` at the
+same time::
+
+ >>> stmt = select(User, Address).join(User.addresses).order_by(User.id, Address.id)
+
+ {sql}>>> for row in session.execute(stmt):
+ ... print(f"{row.User.name} {row.Address.email_address}")
+ SELECT user_account.id, user_account.name, user_account.fullname,
+ address.id AS id_1, address.user_id, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ ORDER BY user_account.id, address.id
+ [...] (){stop}
+ spongebob spongebob@sqlalchemy.org
+ sandy sandy@sqlalchemy.org
+ sandy squirrel@squirrelpower.org
+ patrick pat999@aol.com
+ squidward stentcl@sqlalchemy.org
+
+
+Selecting Individual Attributes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The attributes on a mapped class, such as ``User.name`` and ``Address.email_address``,
+have behavior similar to that of the entity class itself, such as ``User``,
+in that they are automatically converted into ORM-annotated Core objects
+when passed to :func:`_sql.select`. They may be used in the same way
+as table columns are used::
+
+ {sql}>>> result = session.execute(
+ ... select(User.name, Address.email_address).
+ ... join(User.addresses).
+ ... order_by(User.id, Address.id)
+ ... )
+ SELECT user_account.name, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ ORDER BY user_account.id, address.id
+ [...] (){stop}
+
+ORM attributes, themselves known as :class:`_orm.InstrumentedAttribute`
+objects, can be used in the same way as any :class:`_sql.ColumnElement`,
+and are delivered in result rows just the same way, such as below
+where we refer to their values by column name within each row::
+
+ >>> for row in result:
+ ... print(f"{row.name} {row.email_address}")
+ spongebob spongebob@sqlalchemy.org
+ sandy sandy@sqlalchemy.org
+ sandy squirrel@squirrelpower.org
+ patrick pat999@aol.com
+ squidward stentcl@sqlalchemy.org
+
+Grouping Selected Attributes with Bundles
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :class:`_orm.Bundle` construct is an extensible ORM-only construct that
+allows sets of column expressions to be grouped in result rows::
+
+ >>> from sqlalchemy.orm import Bundle
+ >>> stmt = select(
+ ... Bundle("user", User.name, User.fullname),
+ ... Bundle("email", Address.email_address)
+ ... ).join_from(User, Address)
+ {sql}>>> for row in session.execute(stmt):
+ ... print(f"{row.user.name} {row.email.email_address}")
+ SELECT user_account.name, user_account.fullname, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ [...] (){stop}
+ spongebob spongebob@sqlalchemy.org
+ sandy sandy@sqlalchemy.org
+ sandy squirrel@squirrelpower.org
+ patrick pat999@aol.com
+ squidward stentcl@sqlalchemy.org
+
+
+The :class:`_orm.Bundle` is potentially useful for creating lightweight
+views as well as custom column groupings such as mappings.
+
+.. seealso::
+
+ :ref:`bundles` - in the ORM loading documentation.
+
+
+Selecting ORM Aliases
+^^^^^^^^^^^^^^^^^^^^^
+
+As discussed in the tutorial at :ref:`tutorial_using_aliases`, creating a
+SQL alias of an ORM entity is achieved using the :func:`_orm.aliased`
+construct against a mapped class::
+
+ >>> from sqlalchemy.orm import aliased
+ >>> u1 = aliased(User)
+ >>> print(select(u1).order_by(u1.id))
+ {opensql}SELECT user_account_1.id, user_account_1.name, user_account_1.fullname
+ FROM user_account AS user_account_1 ORDER BY user_account_1.id
+
+As is the case when using :meth:`_schema.Table.alias`, the SQL alias
+is anonymously named. For the case of selecting the entity from a row
+with an explicit name, the :paramref:`_orm.aliased.name` parameter may be
+passed as well::
+
+ >>> from sqlalchemy.orm import aliased
+ >>> u1 = aliased(User, name="u1")
+ >>> stmt = select(u1).order_by(u1.id)
+ {sql}>>> row = session.execute(stmt).first()
+ SELECT u1.id, u1.name, u1.fullname
+ FROM user_account AS u1 ORDER BY u1.id
+ [...] (){stop}
+ >>> print(f"{row.u1.name}")
+ spongebob
+
+The :class:`_orm.aliased` construct is also central to making use of subqueries
+with the ORM; the section :ref:`orm_queryguide_subqueries` discusses this further.
+
+.. _orm_queryguide_selecting_text:
+
+Getting ORM Results from Textual and Core Statements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ORM supports loading of entities from SELECT statements that come from other
+sources. The typical use case is that of a textual SELECT statement, which
+in SQLAlchemy is represented using the :func:`_sql.text` construct. The
+:func:`_sql.text` construct, once constructed, can be augmented with
+information
+about the ORM-mapped columns that the statement would load; this can then be
+associated with the ORM entity itself so that ORM objects can be loaded based
+on this statement.
+
+Given a textual SQL statement we'd like to load from::
+
+ >>> from sqlalchemy import text
+ >>> textual_sql = text("SELECT id, name, fullname FROM user_account ORDER BY id")
+
+We can add column information to the statement by using the
+:meth:`_sql.TextClause.columns` method; when this method is invoked, the
+:class:`_sql.TextClause` object is converted into a :class:`_sql.TextualSelect`
+object, which takes on a role that is comparable to the :class:`_sql.Select`
+construct. The :meth:`_sql.TextClause.columns` method
+is typically passed :class:`_schema.Column` objects or equivalent, and in this
+case we can make use of the ORM-mapped attributes on the ``User`` class
+directly::
+
+ >>> textual_sql = textual_sql.columns(User.id, User.name, User.fullname)
+
+We now have an ORM-configured SQL construct that, as given, can load the "id",
+"name" and "fullname" columns separately. To use this SELECT statement as a
+source of complete ``User`` entities instead, we can link these columns to a
+regular ORM-enabled
+:class:`_sql.Select` construct using the :meth:`_sql.Select.from_statement`
+method::
+
+ >>> # using from_statement()
+ >>> orm_sql = select(User).from_statement(textual_sql)
+ >>> for user_obj in session.execute(orm_sql).scalars():
+ ... print(user_obj)
+ {opensql}SELECT id, name, fullname FROM user_account ORDER BY id
+ [...] (){stop}
+ User(id=1, name='spongebob', fullname='Spongebob Squarepants')
+ User(id=2, name='sandy', fullname='Sandy Cheeks')
+ User(id=3, name='patrick', fullname='Patrick Star')
+ User(id=4, name='squidward', fullname='Squidward Tentacles')
+ User(id=5, name='ehkrabs', fullname='Eugene H. Krabs')
+
+The same :class:`_sql.TextualSelect` object can also be converted into
+a subquery using the :meth:`_sql.TextualSelect.subquery` method,
+and linked to the ``User`` entity using the :func:`_orm.aliased`
+construct, in a similar manner as discussed below in :ref:`orm_queryguide_subqueries`::
+
+ >>> # using aliased() to select from a subquery
+ >>> orm_subquery = aliased(User, textual_sql.subquery())
+ >>> stmt = select(orm_subquery)
+ >>> for user_obj in session.execute(stmt).scalars():
+ ... print(user_obj)
+ {opensql}SELECT anon_1.id, anon_1.name, anon_1.fullname
+ FROM (SELECT id, name, fullname FROM user_account ORDER BY id) AS anon_1
+ [...] (){stop}
+ User(id=1, name='spongebob', fullname='Spongebob Squarepants')
+ User(id=2, name='sandy', fullname='Sandy Cheeks')
+ User(id=3, name='patrick', fullname='Patrick Star')
+ User(id=4, name='squidward', fullname='Squidward Tentacles')
+ User(id=5, name='ehkrabs', fullname='Eugene H. Krabs')
+
+The difference between using the :class:`_sql.TextualSelect` directly with
+:meth:`_sql.Select.from_statement` versus making use of :func:`_sql.aliased`
+is that in the former case, no subquery is produced in the resulting SQL.
+This can in some scenarios be advantageous from a performance or complexity
+perspective.
+
+.. _orm_queryguide_joins:
+
+Joins
+-----
+
+The :meth:`_sql.Select.join` and :meth:`_sql.Select.join_from` methods
+are used to construct SQL JOINs against a SELECT statement.
+
+This section will detail ORM use cases for these methods. For a general
+overview of their use from a Core perspective, see :ref:`tutorial_select_join`
+in the :ref:`unified_tutorial`.
+
+The usage of :meth:`_sql.Select.join` in an ORM context for :term:`2.0 style`
+queries is mostly equivalent, minus legacy use cases, to the usage of the
+:meth:`_orm.Query.join` method in :term:`1.x style` queries.
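+
+As a brief illustration of this equivalence (a minimal sketch, assuming the
+``session`` and mappings used throughout this guide), the two styles may be
+compared side by side::
+
+    >>> # 2.0 style
+    >>> stmt = select(User).join(User.addresses)
+
+    >>> # 1.x style, using Query
+    >>> legacy_query = session.query(User).join(User.addresses)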
+
+Simple Relationship Joins
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Consider a mapping between two classes ``User`` and ``Address``,
+with a relationship ``User.addresses`` representing a collection
+of ``Address`` objects associated with each ``User``. The most
+common usage of :meth:`_sql.Select.join`
+is to create a JOIN along this
+relationship, using the ``User.addresses`` attribute as an indicator
+for how this should occur::
+
+ >>> stmt = select(User).join(User.addresses)
+
+Where above, the call to :meth:`_sql.Select.join` along
+``User.addresses`` will result in SQL approximately equivalent to::
+
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+In the above example we refer to ``User.addresses`` as passed to
+:meth:`_sql.Select.join` as the "on clause", that is, it indicates
+how the "ON" portion of the JOIN should be constructed.
+
+Chaining Multiple Joins
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+To construct a chain of joins, multiple :meth:`_sql.Select.join` calls may be
+used. The relationship-bound attribute implies both the left and right side of
+the join at once. Consider additional entities ``Order`` and ``Item``, where
+the ``User.orders`` relationship refers to the ``Order`` entity, and the
+``Order.items`` relationship refers to the ``Item`` entity, via an association
+table ``order_items``. Two :meth:`_sql.Select.join` calls will result in
+a JOIN first from ``User`` to ``Order``, and a second from ``Order`` to
+``Item``. However, since ``Order.items`` is a :ref:`many to many <relationships_many_to_many>`
+relationship, it results in two separate JOIN elements, for a total of three
+JOIN elements in the resulting SQL::
+
+ >>> stmt = (
+ ... select(User).
+ ... join(User.orders).
+ ... join(Order.items)
+ ... )
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN user_order ON user_account.id = user_order.user_id
+ JOIN order_items AS order_items_1 ON user_order.id = order_items_1.order_id
+ JOIN item ON item.id = order_items_1.item_id
+
+.. tip::
+
+    As seen in the above example, **the order in which each call to the join()
+    method occurs is important**. The statement would not, for example, know
+    how to join correctly if we were to specify ``User``, then ``Item``, then
+    ``Order``, in our chain of joins; in such a case, depending on the
+    arguments passed, it may raise an error that it doesn't know how to join,
+    or it may produce invalid SQL, in which case the database will raise an
+    error. In correct practice, the :meth:`_sql.Select.join` method is invoked
+    in such a way that it lines up with how we would want the JOIN clauses in
+    SQL to be rendered, and each call should represent a clear link from what
+    precedes it.
+
+Joins to a Target Entity or Selectable
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A second form of :meth:`_sql.Select.join` allows any mapped entity or core
+selectable construct as a target. In this usage, :meth:`_sql.Select.join`
+will attempt to **infer** the ON clause for the JOIN, using the natural foreign
+key relationship between two entities::
+
+ >>> stmt = select(User).join(Address)
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+In the above calling form, :meth:`_sql.Select.join` is called upon to infer
+the "on clause" automatically. This calling form will ultimately raise
+an error either if there is no :class:`_schema.ForeignKeyConstraint` set up
+between the two mapped :class:`_schema.Table` constructs, or if there are multiple
+:class:`_schema.ForeignKeyConstraint` linkages between them such that the
+appropriate constraint to use is ambiguous.
+
+.. note:: When making use of :meth:`_sql.Select.join` or :meth:`_sql.Select.join_from`
+ without indicating an ON clause, ORM
+ configured :func:`_orm.relationship` constructs are **not taken into account**.
+ Only the configured :class:`_schema.ForeignKeyConstraint` relationships between
+ the entities at the level of the mapped :class:`_schema.Table` objects are consulted
+ when an attempt is made to infer an ON clause for the JOIN.
+
+Joins to a Target with an ON Clause
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The third calling form allows both the target entity as well
+as the ON clause to be passed explicitly. An example that includes
+a SQL expression as the ON clause is as follows::
+
+ >>> stmt = select(User).join(Address, User.id==Address.user_id)
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+The expression-based ON clause may also be the relationship-bound
+attribute; this form in fact states the target ``Address`` twice, however
+this is accepted::
+
+ >>> stmt = select(User).join(Address, User.addresses)
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+The above syntax has more functionality if we use it in terms of aliased
+entities. The default target for ``User.addresses`` is the ``Address``
+class, however if we pass aliased forms using :func:`_orm.aliased`, the
+:func:`_orm.aliased` form will be used as the target, as in the example
+below::
+
+ >>> a1 = aliased(Address)
+ >>> a2 = aliased(Address)
+ >>> stmt = (
+ ... select(User).
+ ... join(a1, User.addresses).
+ ... join(a2, User.addresses).
+ ... where(a1.email_address == 'ed@foo.com').
+ ... where(a2.email_address == 'ed@bar.com')
+ ... )
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN address AS address_1 ON user_account.id = address_1.user_id
+ JOIN address AS address_2 ON user_account.id = address_2.user_id
+ WHERE address_1.email_address = :email_address_1
+ AND address_2.email_address = :email_address_2
+
+When using relationship-bound attributes, the target entity can also be
+substituted with an aliased entity by using the
+:meth:`_orm.PropComparator.of_type` method. The same example using
+this method would be::
+
+ >>> stmt = (
+ ... select(User).
+ ... join(User.addresses.of_type(a1)).
+ ... join(User.addresses.of_type(a2)).
+ ... where(a1.email_address == 'ed@foo.com').
+ ... where(a2.email_address == 'ed@bar.com')
+ ... )
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN address AS address_1 ON user_account.id = address_1.user_id
+ JOIN address AS address_2 ON user_account.id = address_2.user_id
+ WHERE address_1.email_address = :email_address_1
+ AND address_2.email_address = :email_address_2
+
+.. _orm_queryguide_join_on_augmented:
+
+Augmenting Built-in ON Clauses
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As a substitute for providing a full custom ON condition for an
+existing relationship, the :meth:`_orm.PropComparator.and_` function
+may be applied to a relationship attribute to augment additional
+criteria into the ON clause; the additional criteria will be combined
+with the default criteria using AND. Below, the ON criteria between
+``user_account`` and ``address`` contains two separate elements joined
+by ``AND``, the first one being the natural join along the foreign key,
+and the second being a custom limiting criteria::
+
+ >>> stmt = (
+ ... select(User).
+ ... join(User.addresses.and_(Address.email_address != 'foo@bar.com'))
+ ... )
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN address ON user_account.id = address.user_id
+ AND address.email_address != :email_address_1
+
+.. seealso::
+
+ The :meth:`_orm.PropComparator.and_` method also works with loader
+ strategies. See the section :ref:`loader_option_criteria` for an example.
+
+.. _orm_queryguide_subqueries:
+
+Joining to Subqueries
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The target of a join may be any "selectable" entity, which usefully includes
+subqueries. When using the ORM, it is typical
+that these targets are stated in terms of an
+:func:`_orm.aliased` construct, but this is not strictly required, particularly
+if the joined entity is not being returned in the results. For example, to join from the
+``User`` entity to the ``Address`` entity, where the ``Address`` entity
+is represented as a row limited subquery, we first construct a :class:`_sql.Subquery`
+object using :meth:`_sql.Select.subquery`, which may then be used as the
+target of the :meth:`_sql.Select.join` method::
+
+ >>> subq = (
+ ... select(Address).
+ ... where(Address.email_address == 'pat999@aol.com').
+ ... subquery()
+ ... )
+ >>> stmt = select(User).join(subq, User.id == subq.c.user_id)
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN (SELECT address.id AS id,
+ address.user_id AS user_id, address.email_address AS email_address
+ FROM address
+ WHERE address.email_address = :email_address_1) AS anon_1
+ ON user_account.id = anon_1.user_id{stop}
+
+The above SELECT statement when invoked via :meth:`_orm.Session.execute`
+will return rows that contain ``User`` entities, but not ``Address`` entities.
+In order to add ``Address`` entities to the set of entities that would be
+returned in result sets, we construct an :func:`_orm.aliased` object against
+the ``Address`` entity and the custom subquery. Note we also apply a name
+``"address"`` to the :func:`_orm.aliased` construct so that we may
+refer to it by name in the result row::
+
+
+ >>> address_subq = aliased(Address, subq, name="address")
+ >>> stmt = select(User, address_subq).join(address_subq)
+ >>> for row in session.execute(stmt):
+ ... print(f"{row.User} {row.address}")
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname,
+ anon_1.id AS id_1, anon_1.user_id, anon_1.email_address
+ FROM user_account
+ JOIN (SELECT address.id AS id,
+ address.user_id AS user_id, address.email_address AS email_address
+ FROM address
+ WHERE address.email_address = ?) AS anon_1 ON user_account.id = anon_1.user_id
+ [...] ('pat999@aol.com',){stop}
+ User(id=3, name='patrick', fullname='Patrick Star') Address(id=4, email_address='pat999@aol.com')
+
+The same subquery may be referred to by multiple entities as well,
+in the case of a subquery that represents more than one entity. The subquery itself
+will remain unique within the statement, while the entities that are linked
+to it using :func:`_orm.aliased` refer to distinct sets of columns::
+
+ >>> user_address_subq = (
+ ... select(User.id, User.name, Address.id, Address.email_address).
+ ... join_from(User, Address).
+ ... where(Address.email_address.in_(['pat999@aol.com', 'squirrel@squirrelpower.org'])).
+ ... subquery()
+ ... )
+ >>> user_alias = aliased(User, user_address_subq, name="user")
+ >>> address_alias = aliased(Address, user_address_subq, name="address")
+ >>> stmt = select(user_alias, address_alias).where(user_alias.name == 'sandy')
+ >>> for row in session.execute(stmt):
+ ... print(f"{row.user} {row.address}")
+ {opensql}SELECT anon_1.id, anon_1.name, anon_1.id_1, anon_1.email_address
+ FROM (SELECT user_account.id AS id, user_account.name AS name, address.id AS id_1, address.email_address AS email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ WHERE address.email_address IN (?, ?)) AS anon_1
+ WHERE anon_1.name = ?
+ [...] ('pat999@aol.com', 'squirrel@squirrelpower.org', 'sandy'){stop}
+ User(id=2, name='sandy', fullname='Sandy Cheeks') Address(id=3, email_address='squirrel@squirrelpower.org')
+
+
+
+Controlling what to Join From
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In cases where the left side of the current state of
+:class:`_sql.Select` is not in line with what we want to join from,
+the :meth:`_sql.Select.join_from` method may be used::
+
+ >>> stmt = select(Address).join_from(User, User.addresses).where(User.name == 'sandy')
+ >>> print(stmt)
+ SELECT address.id, address.user_id, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ WHERE user_account.name = :name_1
+
+The :meth:`_sql.Select.join_from` method accepts two or three arguments, either
+in the form ``<join from>, <onclause>``, or ``<join from>, <join to>,
+[<onclause>]``::
+
+ >>> stmt = select(Address).join_from(User, Address).where(User.name == 'sandy')
+ >>> print(stmt)
+ SELECT address.id, address.user_id, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ WHERE user_account.name = :name_1
+
+To set up the initial FROM clause for a SELECT such that :meth:`_sql.Select.join`
+can be used subsequently, the :meth:`_sql.Select.select_from` method may also
+be used::
+
+
+ >>> stmt = select(Address).select_from(User).join(User.addresses).where(User.name == 'sandy')
+ >>> print(stmt)
+ SELECT address.id, address.user_id, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+ WHERE user_account.name = :name_1
+
+
+
+
+Special Relationship Operators
+------------------------------
+
+As detailed in the :ref:`unified_tutorial` at
+:ref:`tutorial_select_relationships`, ORM attributes mapped by
+:func:`_orm.relationship` may be used in a variety of ways as SQL construction
+helpers. In addition to the above documentation on
+:ref:`orm_queryguide_joins`, relationships may produce criteria to be used in
+the WHERE clause as well. See the linked sections below.
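+
+As a brief preview (a minimal sketch, assuming the same ``User`` / ``Address``
+mapping used throughout this guide), the :meth:`_orm.PropComparator.any`
+helper generates a correlated EXISTS subquery in the WHERE clause::
+
+    >>> # renders WHERE EXISTS (SELECT 1 FROM address WHERE ...)
+    >>> stmt = select(User).where(
+    ...     User.addresses.any(Address.email_address == 'squirrel@squirrelpower.org')
+    ... )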
+
+.. seealso::
+
+ Sections in the :ref:`tutorial_orm_related_objects` section of the
+ :ref:`unified_tutorial`:
+
+ * :ref:`tutorial_relationship_exists` - helpers to generate EXISTS clauses
+ using :func:`_orm.relationship`
+
+
+ * :ref:`tutorial_relationship_operators` - helpers to create comparisons in
+ terms of a :func:`_orm.relationship` in reference to a specific object
+ instance
+
+
+ORM Loader Options
+-------------------
+
+Loader options are objects that are passed to the :meth:`_sql.Select.options`
+method which affect the loading of both column and relationship-oriented
+attributes. The majority of loader options descend from the :class:`_orm.Load`
+hierarchy. For a complete overview of using loader options, see the linked
+sections below.
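+
+As a brief illustration (a minimal sketch only; the individual strategies are
+covered in the sections linked below), a relationship loader option such as
+:func:`_orm.selectinload` is applied via :meth:`_sql.Select.options`::
+
+    >>> from sqlalchemy.orm import selectinload
+    >>> stmt = select(User).options(selectinload(User.addresses))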
+
+.. seealso::
+
+ * :ref:`loading_columns` - details mapper and loading options that affect
+ how column and SQL-expression mapped attributes are loaded
+
+ * :ref:`loading_toplevel` - details relationship and loading options that
+ affect how :func:`_orm.relationship` mapped attributes are loaded
+
+
+ORM Execution Options
+---------------------
+
+Execution options are keyword arguments that are passed to an
+"execution_options" method and take effect at the level of statement
+execution. The primary "execution option" method is in Core at
+:meth:`_engine.Connection.execution_options`. In the ORM, execution options may
+also be passed to :meth:`_orm.Session.execute` using the
+:paramref:`_orm.Session.execute.execution_options` parameter. More
+succinctly, most execution options, including those specific to the ORM, can be
+assigned to a statement directly using the
+:meth:`_sql.Executable.execution_options` method, so that the options are
+associated with the statement itself rather than being configured separately.
+The examples below will use this form.
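+
+For reference, the two equivalent spellings look like the following (a minimal
+sketch, using the ``populate_existing`` option detailed in the next section)::
+
+    >>> # option associated with the statement itself
+    >>> stmt = select(User).execution_options(populate_existing=True)
+
+    >>> # option passed at execution time instead
+    >>> result = session.execute(
+    ...     select(User), execution_options={"populate_existing": True}
+    ... )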
+
+.. _orm_queryguide_populate_existing:
+
+Populate Existing
+^^^^^^^^^^^^^^^^^^
+
+The ``populate_existing`` execution option ensures that for all rows
+loaded, the corresponding instances in the :class:`_orm.Session` will
+be fully refreshed, erasing any existing data within the objects
+(including pending changes) and replacing with the data loaded from the
+result.
+
+Example use looks like::
+
+ >>> stmt = select(User).execution_options(populate_existing=True)
+ {sql}>>> result = session.execute(stmt)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ ...
+
+Normally, ORM objects are only loaded once, and if they are matched up
+to the primary key in a subsequent result row, the row is not applied to the
+object. This is both to preserve pending, unflushed changes on the object
+as well as to avoid the overhead and complexity of refreshing data which
+is already there. The :class:`_orm.Session` assumes a default working
+model of a highly isolated transaction, and to the degree that data is
+expected to change within the transaction outside of the local changes being
+made, those use cases are handled using explicit steps such as this option.
+
+Another use case for ``populate_existing`` is in support of various
+attribute loading features that can change how an attribute is loaded on
+a per-query basis; a brief sketch combining one such option with
+``populate_existing`` follows the list. Options to which this applies include:
+
+* The :func:`_orm.with_expression` option
+
+* The :meth:`_orm.PropComparator.and_` method that can modify what a loader
+ strategy loads
+
+* The :func:`_orm.contains_eager` option
+
+* The :func:`_orm.with_loader_criteria` option
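+
+A minimal sketch of the kind of combination referred to above, using
+:func:`_orm.selectinload` together with :meth:`_orm.PropComparator.and_`
+(see :ref:`loader_option_criteria` for the loader criteria feature itself)::
+
+    >>> from sqlalchemy.orm import selectinload
+    >>> stmt = (
+    ...     select(User).
+    ...     options(selectinload(User.addresses.and_(Address.email_address != 'foo@bar.com'))).
+    ...     execution_options(populate_existing=True)
+    ... )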
+
+The ``populate_existing`` execution option is equivalent to the
+:meth:`_orm.Query.populate_existing` method in :term:`1.x style` ORM queries.
+
+.. seealso::
+
+ :ref:`faq_session_identity` - in :doc:`/faq/index`
+
+ :ref:`session_expire` - in the ORM :class:`_orm.Session`
+ documentation
+
+.. _orm_queryguide_autoflush:
+
+Autoflush
+^^^^^^^^^^
+
+This option, when passed as ``False``, will cause the :class:`_orm.Session`
+to not invoke the "autoflush" step. It's equivalent to using the
+:attr:`_orm.Session.no_autoflush` context manager to disable autoflush::
+
+ >>> stmt = select(User).execution_options(autoflush=False)
+ {sql}>>> session.execute(stmt)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ ...
+
+This option will also work on ORM-enabled :class:`_sql.Update` and
+:class:`_sql.Delete` queries.
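+
+For comparison, a minimal sketch of the :attr:`_orm.Session.no_autoflush`
+context manager mentioned above, which disables autoflush for a block of code
+rather than for an individual statement::
+
+    >>> with session.no_autoflush:
+    ...     result = session.execute(select(User))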
+
+The ``autoflush`` execution option is equivalent to the
+:meth:`_orm.Query.autoflush` method in :term:`1.x style` ORM queries.
+
+.. seealso::
+
+ :ref:`session_flushing`
+
+.. _orm_queryguide_yield_per:
+
+Yield Per
+^^^^^^^^^^
+
+The ``yield_per`` execution option is an integer value which will cause the
+:class:`_engine.Result` to yield only a fixed count of rows at a time. It is
+often useful to use with a result partitioning method such as
+:meth:`_engine.Result.partitions`, e.g.::
+
+ >>> stmt = select(User).execution_options(yield_per=10)
+ {sql}>>> for partition in session.execute(stmt).partitions(10):
+ ... for row in partition:
+ ... print(row)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ [...] (){stop}
+ (User(id=1, name='spongebob', fullname='Spongebob Squarepants'),)
+ ...
+
+The purpose of this option, when fetching very large result sets
+(> 10K rows), is to batch results in sub-collections and yield them
+out partially, so that the Python interpreter doesn't need to allocate
+very large areas of memory, which is both time consuming and leads
+to excessive memory use. The performance when fetching hundreds of
+thousands of rows can often double when a suitable yield-per setting
+(e.g. approximately 1000) is used, even with DBAPIs that buffer
+rows (which most do).
+
+When ``yield_per`` is used, the
+:paramref:`_engine.Connection.execution_options.stream_results` option is also
+set for the Core execution, so that a streaming / server side cursor will be
+used if the backend supports it (currently known are
+:mod:`~sqlalchemy.dialects.postgresql.psycopg2`,
+:mod:`~sqlalchemy.dialects.mysql.mysqldb` and
+:mod:`~sqlalchemy.dialects.mysql.pymysql`). Other backends will pre-buffer all
+rows. The memory use of raw database rows is much less than that of an
+ORM-mapped object, but should still be taken into consideration when
+benchmarking.
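+
+The Core-level ``stream_results`` option referred to above may also be set
+directly on a :class:`_engine.Connection`, as in this minimal sketch (see
+:ref:`engine_stream_results` for details)::
+
+    >>> with engine.connect() as conn:
+    ...     conn = conn.execution_options(stream_results=True)
+    ...     result = conn.execute(select(User))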
+
+
+The ``yield_per`` execution option **is not compatible with subqueryload eager
+loading, or with joinedload eager loading when using collections**. It is
+potentially compatible with "select in" eager loading, **provided the database
+driver supports multiple, independent cursors** (pysqlite and psycopg2 are
+known to work, MySQL and SQL Server ODBC drivers do not).
+
+The ``yield_per`` execution option is equivalent to the
+:meth:`_orm.Query.yield_per` method in :term:`1.x style` ORM queries.
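+
+For comparison, a minimal sketch of the legacy form, which batches rows in the
+same way::
+
+    >>> for user_obj in session.query(User).yield_per(100):
+    ...     print(user_obj)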
+
+.. seealso::
+
+ :ref:`engine_stream_results`
+
+
+
+
+ORM Update / Delete with Arbitrary WHERE clause
+================================================
+
+The :meth:`_orm.Session.execute` method, in addition to handling ORM-enabled
+:class:`_sql.Select` objects, can also accommodate ORM-enabled
+:class:`_sql.Update` and :class:`_sql.Delete` objects, which UPDATE or DELETE
+any number of database rows while also being able to synchronize the state of
+matching objects locally present in the :class:`_orm.Session`. See the section
+:ref:`orm_expression_update_delete` for background on this feature.
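+
+As a brief, hedged sketch of the pattern (the linked section above covers the
+details, including the available ``synchronize_session`` strategies)::
+
+    >>> from sqlalchemy import update
+    >>> stmt = (
+    ...     update(User).
+    ...     where(User.name == 'squidward').
+    ...     values(name='spongebob').
+    ...     execution_options(synchronize_session="fetch")
+    ... )
+    >>> result = session.execute(stmt)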
+
+
+
+
diff --git a/doc/build/orm/session_basics.rst b/doc/build/orm/session_basics.rst
index 79af8f27f..8cec8a18e 100644
--- a/doc/build/orm/session_basics.rst
+++ b/doc/build/orm/session_basics.rst
@@ -492,6 +492,7 @@ ways to refresh its contents with new data from the current transaction:
session.expire(u1)
u1.some_attribute # <-- lazy loads from the transaction
+
..
* **the refresh() method** - closely related is the :meth:`_orm.Session.refresh`
@@ -539,7 +540,7 @@ time refresh locally present objects which match those rows.
To emit an ORM-enabled UPDATE in :term:`1.x style`, the :meth:`_query.Query.update` method
may be used::
- session.query(User).filter(User.nane == "squidward").\
+ session.query(User).filter(User.name == "squidward").\
update({"name": "spongebob"}, synchronize_session="fetch")
Above, an UPDATE will be emitted against all rows that match the name
@@ -551,10 +552,7 @@ objects locally present in memory will be updated in memory based on these
primary key identities.
For ORM-enabled UPDATEs in :term:`2.0 style`, :meth:`_orm.Session.execute` is used with the
-Core :class:`_sql.Update` construct. The :meth:`_orm.Session` must
-be configured with :paramref:`_orm.Session.future` set to ``True``::
-
- session = Session(future=True)
+Core :class:`_sql.Update` construct::
from sqlalchemy import update
@@ -575,10 +573,8 @@ ORM-enabled delete, :term:`1.x style`::
session.query(User).filter(User.nane == "squidward").\
delete(synchronize_session="fetch")
-ORM-enabled delete, :term:`2.0 style`. The :meth:`_orm.Session` must
-be configured with :paramref:`_orm.Session.future` set to ``True``::
+ORM-enabled delete, :term:`2.0 style`::
- session = Session(future=True)
from sqlalchemy import delete
stmt = delete(User).where(User.nane == "squidward").execution_options(synchronize_session="fetch")
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 2025268c9..ae62d256a 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -4,6 +4,35 @@
Object Relational Tutorial (1.x API)
====================================
+.. admonition:: About this document
+
+ This tutorial covers the well known SQLAlchemy ORM API
+ that has been in use for many years. As of SQLAlchemy 1.4, there are two
+ distinct styles of ORM use known as :term:`1.x style` and :term:`2.0
+ style`, the latter of which makes a wide range of changes most prominently
+ around how ORM queries are constructed and executed.
+
+ The plan is that in SQLAlchemy 2.0, the 1.x style of ORM use will be
+ considered legacy and no longer featured in documentation and many
+ aspects of it will be removed. However, the most central element of
+ :term:`1.x style` ORM use, the :class:`_orm.Query` object, will still
+ remain available for long-term legacy use cases.
+
+ This tutorial is applicable to users who want to learn how SQLAlchemy has
+ been used for many years, particularly those users working with existing
+ applications or related learning material that is in 1.x style.
+
+ For an introduction to SQLAlchemy from the new 1.4/2.0 perspective,
+ see :ref:`unified_tutorial`.
+
+ .. seealso::
+
+ :ref:`change_5159`
+
+ :ref:`migration_20_toplevel`
+
+ :ref:`unified_tutorial`
+
The SQLAlchemy Object Relational Mapper presents a method of associating
user-defined Python classes with database tables, and instances of those
classes (objects) with rows in their corresponding tables. It includes a
@@ -168,7 +197,7 @@ this information for a specific table is called the :class:`_schema.Table` objec
one for us. We can see this object by inspecting the ``__table__`` attribute::
>>> User.__table__ # doctest: +NORMALIZE_WHITESPACE
- Table('users', MetaData(bind=None),
+ Table('users', MetaData(),
Column('id', Integer(), table=<users>, primary_key=True, nullable=False),
Column('name', String(), table=<users>),
Column('fullname', String(), table=<users>),
@@ -217,8 +246,6 @@ the actual ``CREATE TABLE`` statement:
nickname VARCHAR,
PRIMARY KEY (id)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
COMMIT
@@ -1223,8 +1250,6 @@ already been created:
PRIMARY KEY (id),
FOREIGN KEY(user_id) REFERENCES users (id)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
COMMIT
@@ -2089,10 +2114,7 @@ Create new tables:
PRIMARY KEY (id),
UNIQUE (keyword)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
- <BLANKLINE>
CREATE TABLE posts (
id INTEGER NOT NULL,
user_id INTEGER,
@@ -2101,10 +2123,7 @@ Create new tables:
PRIMARY KEY (id),
FOREIGN KEY(user_id) REFERENCES users (id)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
- <BLANKLINE>
CREATE TABLE post_keywords (
post_id INTEGER NOT NULL,
keyword_id INTEGER NOT NULL,
@@ -2112,8 +2131,6 @@ Create new tables:
FOREIGN KEY(post_id) REFERENCES posts (id),
FOREIGN KEY(keyword_id) REFERENCES keywords (id)
)
- <BLANKLINE>
- <BLANKLINE>
[...] ()
COMMIT
diff --git a/doc/build/queryguide.rst b/doc/build/queryguide.rst
deleted file mode 100644
index 0fabdcb9f..000000000
--- a/doc/build/queryguide.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-===========================
-SQLAlchemy Querying Guide
-===========================
-
-
-Coming Soon!
-
diff --git a/doc/build/tutorial.rst b/doc/build/tutorial.rst
deleted file mode 100644
index 5db372063..000000000
--- a/doc/build/tutorial.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-=============================
-SQLAlchemy 1.4 / 2.0 Tutorial
-=============================
-
-
-Coming soon!
diff --git a/doc/build/tutorial/data.rst b/doc/build/tutorial/data.rst
new file mode 100644
index 000000000..6238e5e1f
--- /dev/null
+++ b/doc/build/tutorial/data.rst
@@ -0,0 +1,1509 @@
+.. highlight:: pycon+sql
+
+.. |prev| replace:: :doc:`metadata`
+.. |next| replace:: :doc:`orm_data_manipulation`
+
+.. include:: tutorial_nav_include.rst
+
+.. _tutorial_working_with_data:
+
+Working with Data
+==================
+
+In :ref:`tutorial_working_with_transactions`, we learned the basics of how to
+interact with the Python DBAPI and its transactional state. Then, in
+:ref:`tutorial_working_with_metadata`, we learned how to represent database
+tables, columns, and constraints within SQLAlchemy using the
+:class:`_schema.MetaData` and related objects. In this section we will combine
+both concepts above to create, select and manipulate data within a relational
+database. Our interaction with the database is **always** in terms
+of a transaction, even if we've set our database driver to use :ref:`autocommit
+<dbapi_autocommit>` behind the scenes.
+
+The components of this section are as follows:
+
+* :ref:`tutorial_core_insert` - to get some data into the database, we introduce
+ and demonstrate the Core :class:`_sql.Insert` construct. INSERTs from an
+ ORM perspective are described later, at :ref:`tutorial_orm_data_manipulation`.
+
+* :ref:`tutorial_selecting_data` - this section will describe in detail
+ the :class:`_sql.Select` construct, which is the most commonly used object
+ in SQLAlchemy. The :class:`_sql.Select` construct emits SELECT statements
+ for both Core and ORM centric applications and both use cases will be
+  described here. Additional ORM use cases are also noted in the later
+ section :ref:`tutorial_select_relationships` as well as the
+ :ref:`queryguide_toplevel`.
+
+* :ref:`tutorial_core_update_delete` - Rounding out the INSERT and SELECTion
+ of data, this section will describe from a Core perspective the use of the
+ :class:`_sql.Update` and :class:`_sql.Delete` constructs. ORM-specific
+ UPDATE and DELETE is similarly described in the
+ :ref:`tutorial_orm_data_manipulation` section.
+
+.. rst-class:: core-header
+
+.. _tutorial_core_insert:
+
+Core Insert
+-----------
+
+When using Core, a SQL INSERT statement is generated using the
+:func:`_sql.insert` function - this function generates a new instance of
+:class:`_sql.Insert`, which represents an INSERT statement in SQL that adds
+new data into a table.
+
+.. container:: orm-header
+
+    **ORM Readers** - The way that rows are INSERTed into the database from an ORM
+    perspective makes use of object-centric APIs on the :class:`_orm.Session` object known as the
+    :term:`unit of work` process,
+    and is fairly different from the Core-only approach described here.
+    The more ORM-focused sections later in this tutorial, starting at
+    :ref:`tutorial_inserting_orm` subsequent to the Expression Language sections,
+    introduce this subject.
+
+The insert() SQL Expression Construct
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A simple example of :class:`_sql.Insert` illustrates the target table
+and the VALUES clause at once::
+
+ >>> from sqlalchemy import insert
+ >>> stmt = insert(user_table).values(name='spongebob', fullname="Spongebob Squarepants")
+
+The above ``stmt`` variable is an instance of :class:`_sql.Insert`. Most
+SQL expressions can be stringified in place as a means to see the general
+form of what's being produced::
+
+ >>> print(stmt)
+ {opensql}INSERT INTO user_account (name, fullname) VALUES (:name, :fullname)
+
+The stringified form is created by producing a :class:`_engine.Compiled` form
+of the object which includes a database-specific string SQL representation of
+the statement; we can acquire this object directly using the
+:meth:`_sql.ClauseElement.compile` method::
+
+ >>> compiled = stmt.compile()
+
+Our :class:`_sql.Insert` construct is an example of a "parameterized"
+construct, illustrated previously at :ref:`tutorial_sending_parameters`; to
+view the ``name`` and ``fullname`` :term:`bound parameters`, these are
+available from the :class:`_engine.Compiled` construct as well::
+
+ >>> compiled.params
+ {'name': 'spongebob', 'fullname': 'Spongebob Squarepants'}
+
+Executing the Statement
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Invoking the statement we can INSERT a row into ``user_table``.
+The INSERT SQL as well as the bundled parameters can be seen in the
+SQL logging:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(stmt)
+ ... conn.commit()
+ {opensql}BEGIN (implicit)
+ INSERT INTO user_account (name, fullname) VALUES (?, ?)
+ [...] ('spongebob', 'Spongebob Squarepants')
+ COMMIT
+
+In its simple form above, the INSERT statement does not return any rows, and if
+only a single row is inserted, it will usually include the ability to return
+information about column-level default values that were generated during the
+INSERT of that row, most commonly an integer primary key value. In the above
+case the first row in a SQLite database will normally return ``1`` for the
+first integer primary key value, which we can acquire using the
+:attr:`_engine.CursorResult.inserted_primary_key` accessor:
+
+.. sourcecode:: pycon+sql
+
+ >>> result.inserted_primary_key
+ (1,)
+
+.. tip:: :attr:`_engine.CursorResult.inserted_primary_key` returns a tuple
+ because a primary key may contain multiple columns. This is known as
+ a :term:`composite primary key`. The :attr:`_engine.CursorResult.inserted_primary_key`
+ is intended to always contain the complete primary key of the record just
+ inserted, not just a "cursor.lastrowid" kind of value, and is also intended
+ to be populated regardless of whether or not "autoincrement" were used, hence
+ to express a complete primary key it's a tuple.
+
+.. _tutorial_core_insert_values_clause:
+
+INSERT usually generates the "values" clause automatically
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The example above made use of the :meth:`_sql.Insert.values` method to
+explicitly create the VALUES clause of the SQL INSERT statement. This method
+in fact has some variants that allow for special forms such as multiple rows in
+one statement and insertion of SQL expressions. However the usual way that
+:class:`_sql.Insert` is used is such that the VALUES clause is generated
+automatically from the parameters passed to the
+:meth:`_future.Connection.execute` method; below we INSERT two more rows to
+illustrate this:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... insert(user_table),
+ ... [
+ ... {"name": "sandy", "fullname": "Sandy Cheeks"},
+ ... {"name": "patrick", "fullname": "Patrick Star"}
+ ... ]
+ ... )
+ ... conn.commit()
+ {opensql}BEGIN (implicit)
+ INSERT INTO user_account (name, fullname) VALUES (?, ?)
+ [...] (('sandy', 'Sandy Cheeks'), ('patrick', 'Patrick Star'))
+ COMMIT{stop}
+
+The execution above features the "executemany" form first illustrated at
+:ref:`tutorial_multiple_parameters`, however unlike when using the
+:func:`_sql.text` construct, we didn't have to spell out any SQL.
+By passing a dictionary or list of dictionaries to the :meth:`_future.Connection.execute`
+method in conjunction with the :class:`_sql.Insert` construct, the
+:class:`_future.Connection` ensures that the column names which are passed
+will be expressed in the VALUES clause of the :class:`_sql.Insert`
+construct automatically.
+
+.. deepalchemy::
+
+ Hi, welcome to the first edition of **Deep Alchemy**. The person on the
+ left is known as **The Alchemist**, and you'll note they are **not** a wizard,
+ as the pointy hat is not sticking upwards. The Alchemist comes around to
+ describe things that are generally **more advanced and/or tricky** and
+ additionally **not usually needed**, but for whatever reason they feel you
+ should know about this thing that SQLAlchemy can do.
+
+ In this edition, towards the goal of having some interesting data in the
+ ``address_table`` as well, below is a more advanced example illustrating
+ how the :meth:`_sql.Insert.values` method may be used explicitly while at
+ the same time including for additional VALUES generated from the
+ parameters. A :term:`scalar subquery` is constructed, making use of the
+ :func:`_sql.select` construct introduced in the next section, and the
+ parameters used in the subquery are set up using an explicit bound
+ parameter name, established using the :func:`_sql.bindparam` construct.
+
+ This is some slightly **deeper** alchemy just so that we can add related
+ rows without fetching the primary key identifiers from the ``user_table``
+ operation into the application. Most Alchemists will simply use the ORM
+ which takes care of things like this for us.
+
+ .. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy import select, bindparam
+ >>> scalar_subquery = (
+ ... select(user_table.c.id).
+ ... where(user_table.c.name==bindparam('username')).
+ ... scalar_subquery()
+ ... )
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... insert(address_table).values(user_id=scalar_subquery),
+ ... [
+ ... {"username": 'spongebob', "email_address": "spongebob@sqlalchemy.org"},
+ ... {"username": 'sandy', "email_address": "sandy@sqlalchemy.org"},
+ ... {"username": 'sandy', "email_address": "sandy@squirrelpower.org"},
+ ... ]
+ ... )
+ ... conn.commit()
+ {opensql}BEGIN (implicit)
+ INSERT INTO address (user_id, email_address) VALUES ((SELECT user_account.id
+ FROM user_account
+ WHERE user_account.name = ?), ?)
+ [...] (('spongebob', 'spongebob@sqlalchemy.org'), ('sandy', 'sandy@sqlalchemy.org'),
+ ('sandy', 'sandy@squirrelpower.org'))
+ COMMIT{stop}
+
+Other INSERT Options
+^^^^^^^^^^^^^^^^^^^^^
+
+A quick overview of some other patterns that are available with :func:`_sql.insert`:
+
+* **INSERT..FROM SELECT** - the :class:`_sql.Insert` construct can compose
+ an INSERT that gets rows directly from a SELECT using the :meth:`_sql.Insert.from_select`
+ method::
+
+ >>> select_stmt = select(user_table.c.id, user_table.c.name + "@aol.com")
+ >>> insert_stmt = insert(address_table).from_select(
+ ... ["user_id", "email_address"], select_stmt
+ ... )
+ >>> print(insert_stmt)
+ {opensql}INSERT INTO address (user_id, email_address)
+ SELECT user_account.id, user_account.name || :name_1 AS anon_1
+ FROM user_account
+
+ ..
+
+* **RETURNING clause** - the RETURNING clause for supported backends is used
+ automatically in order to retrieve the last inserted primary key value
+ as well as the values for server defaults. However the RETURNING clause
+ may also be specified explicitly using the :meth:`_sql.Insert.returning`
+ method; in this case, the :class:`_engine.Result`
+ object that's returned when the statement is executed has rows which
+ can be fetched. It is only supported for single-statement
+ forms, and for some backends may only support single-row INSERT statements
+ overall. It can also be combined with :meth:`_sql.Insert.from_select`,
+ as in the example below that builds upon the previous example::
+
+ >>> print(insert_stmt.returning(address_table.c.id, address_table.c.email_address))
+ {opensql}INSERT INTO address (user_id, email_address)
+ SELECT user_account.id, user_account.name || :name_1 AS anon_1
+ FROM user_account RETURNING address.id, address.email_address
+
+ ..
+
+.. seealso::
+
+ :class:`_sql.Insert` - in the SQL Expression API documentation
+
+
+.. _tutorial_selecting_data:
+
+.. rst-class:: core-header, orm-dependency
+
+Selecting Data
+--------------
+
+For both Core and ORM, the :func:`_sql.select` function generates a
+:class:`_sql.Select` construct which is used for all SELECT queries.
+Passed to methods like :meth:`_future.Connection.execute` in Core and
+:meth:`_orm.Session.execute` in ORM, a SELECT statement is emitted in the
+current transaction, and the result rows are available via the returned
+:class:`_engine.Result` object.
+
+.. container:: orm-header
+
+    **ORM Readers** - the content here applies equally well to both Core and ORM
+    use, and basic ORM variant use cases are mentioned here as well. However, there
+    are many more ORM-specific features available; these are documented
+    at :ref:`queryguide_toplevel`.
+
+
+The select() SQL Expression Construct
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :func:`_sql.select` construct builds up a statement in the same way
+as that of :func:`_sql.insert`, using a :term:`generative` approach where
+each method builds more state onto the object. Like the other SQL constructs,
+it can be stringified in place::
+
+ >>> from sqlalchemy import select
+ >>> stmt = select(user_table).where(user_table.c.name == 'spongebob')
+ >>> print(stmt)
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = :name_1
+
+Also in the same manner as all other statement-level SQL constructs, to
+actually run the statement we pass it to an execution method.
+Since a SELECT statement returns
+rows we can always iterate the result object to get :class:`_engine.Row`
+objects back:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... for row in conn.execute(stmt):
+ ... print(row)
+ {opensql}BEGIN (implicit)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = ?
+ [...] ('spongebob',){stop}
+ (1, 'spongebob', 'Spongebob Squarepants')
+ {opensql}ROLLBACK{stop}
+
+When using the ORM, particularly with a :func:`_sql.select` construct that's
+composed against ORM entities, we will want to execute it using the
+:meth:`_orm.Session.execute` method on the :class:`_orm.Session`; using
+this approach, we continue to get :class:`_engine.Row` objects from the
+result, however these rows are now capable of including
+complete entities, such as instances of the ``User`` class, as column values:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = select(User).where(User.name == 'spongebob')
+ >>> with Session(engine) as session:
+ ... for row in session.execute(stmt):
+ ... print(row)
+ {opensql}BEGIN (implicit)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = ?
+ [...] ('spongebob',){stop}
+ (User(id=1, name='spongebob', fullname='Spongebob Squarepants'),)
+ {opensql}ROLLBACK{stop}
+
+The following sections will discuss the SELECT construct in more detail.
+
+
+Setting the COLUMNS and FROM clause
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :func:`_sql.select` function accepts positional elements representing any
+number of :class:`_schema.Column` and/or :class:`_schema.Table` expressions, as
+well as a wide range of compatible objects, which are resolved into a list of SQL
+expressions to be SELECTed from; these will be returned as columns in the result
+set. These elements also serve in simpler cases to create the FROM clause,
+which is inferred from the columns and table-like expressions passed::
+
+ >>> print(select(user_table))
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+
+To SELECT from individual columns using a Core approach,
+:class:`_schema.Column` objects are accessed from the :attr:`_schema.Table.c`
+accessor and can be sent directly; the FROM clause will be inferred as the set
+of all :class:`_schema.Table` and other :class:`_sql.FromClause` objects that
+are represented by those columns::
+
+ >>> print(select(user_table.c.name, user_table.c.fullname))
+ {opensql}SELECT user_account.name, user_account.fullname
+ FROM user_account
+
+.. _tutorial_selecting_orm_entities:
+
+Selecting ORM Entities and Columns
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ORM entities, such as our ``User`` class as well as the column-mapped
+attributes upon it such as ``User.name``, also participate in the SQL Expression
+Language system representing tables and columns. Below illustrates an
+example of SELECTing from the ``User`` entity, which ultimately renders
+in the same way as if we had used ``user_table`` directly::
+
+ >>> print(select(User))
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+
+To select from individual columns using ORM entities, the class-bound
+attributes can be passed directly which are resolved into the
+:class:`_schema.Column` or other SQL expression represented by each attribute::
+
+ >>> print(select(User.name, User.fullname))
+ {opensql}SELECT user_account.name, user_account.fullname
+ FROM user_account
+
+.. tip::
+
+    When ORM-related objects are used within the :class:`_sql.Select`
+    construct, they are resolved into the underlying :class:`_schema.Table` and
+    :class:`_schema.Column` and similar Core constructs they represent; at the
+    same time, they apply a **plugin** to the core :class:`_sql.Select`
+    construct such that a new set of ORM-specific behaviors may take
+    effect when the construct is being compiled.
+
+.. seealso::
+
+ :ref:`orm_queryguide_select_columns` - in the :ref:`queryguide_toplevel`
+
+Selecting from Labeled SQL Expressions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :meth:`_sql.ColumnElement.label` method as well as the same-named method
+available on ORM attributes provides a SQL label of a column or expression,
+allowing it to have a specific name in a result set. This can be helpful
+when referring to arbitrary SQL expressions in a result row by name:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy import func, cast
+ >>> stmt = (
+ ... select(
+ ... ("Username: " + user_table.c.name).label("username"),
+ ... ).order_by(user_table.c.name)
+ ... )
+ >>> with engine.connect() as conn:
+ ... for row in conn.execute(stmt):
+ ... print(f"{row.username}")
+ {opensql}BEGIN (implicit)
+ SELECT ? || user_account.name AS username
+ FROM user_account ORDER BY user_account.name
+ [...] ('Username: ',){stop}
+ Username: patrick
+ Username: sandy
+ Username: spongebob
+ {opensql}ROLLBACK{stop}
+
+.. _tutorial_select_where_clause:
+
+The WHERE clause
+^^^^^^^^^^^^^^^^
+
+SQLAlchemy allows us to compose SQL expressions, such as ``name = 'squidward'``
+or ``user_id > 10``, by making use of standard Python operators in
+conjunction with
+:class:`_schema.Column` and similar objects. For boolean expressions, most
+Python operators such as ``==``, ``!=``, ``<``, ``>=`` etc. generate new
+SQL Expression objects, rather than plain boolean True/False values::
+
+ >>> print(user_table.c.name == 'squidward')
+ user_account.name = :name_1
+
+ >>> print(address_table.c.user_id > 10)
+ address.user_id > :user_id_1
+
+
+We can use expressions like these to generate the WHERE clause by passing
+the resulting objects to the :meth:`_sql.Select.where` method::
+
+ >>> print(select(user_table).where(user_table.c.name == 'squidward'))
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = :name_1
+
+
+To produce multiple expressions joined by AND, the :meth:`_sql.Select.where`
+method may be invoked any number of times::
+
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... where(user_table.c.name == 'squidward').
+ ... where(address_table.c.user_id == user_table.c.id)
+ ... )
+ {opensql}SELECT address.email_address
+ FROM address, user_account
+ WHERE user_account.name = :name_1 AND address.user_id = user_account.id
+
+A single call to :meth:`_sql.Select.where` also accepts multiple expressions
+with the same effect::
+
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... where(
+ ... user_table.c.name == 'squidward',
+ ... address_table.c.user_id == user_table.c.id
+ ... )
+ ... )
+ {opensql}SELECT address.email_address
+ FROM address, user_account
+ WHERE user_account.name = :name_1 AND address.user_id = user_account.id
+
+"AND" and "OR" conjunctions are both available directly using the
+:func:`_sql.and_` and :func:`_sql.or_` functions, illustrated below in terms
+of ORM entities::
+
+ >>> from sqlalchemy import and_, or_
+ >>> print(
+ ... select(Address.email_address).
+ ... where(
+ ... and_(
+ ... or_(User.name == 'squidward', User.name == 'sandy'),
+ ... Address.user_id == User.id
+ ... )
+ ... )
+ ... )
+ {opensql}SELECT address.email_address
+ FROM address, user_account
+ WHERE (user_account.name = :name_1 OR user_account.name = :name_2)
+ AND address.user_id = user_account.id
+
+For simple "equality" comparisons against a single entity, there's also a
+popular method known as :meth:`_sql.Select.filter_by` which accepts keyword
+arguments that match to column keys or ORM attribute names. It will filter
+against the leftmost FROM clause or the last entity joined::
+
+ >>> print(
+ ... select(User).filter_by(name='spongebob', fullname='Spongebob Squarepants')
+ ... )
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = :name_1 AND user_account.fullname = :fullname_1
+
+
+.. seealso::
+
+
+ :doc:`/core/operators` - descriptions of most SQL operator functions in SQLAlchemy
+
+
+.. _tutorial_select_join:
+
+Explicit FROM clauses and JOINs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As mentioned previously, the FROM clause is usually **inferred**
+based on the expressions that we are setting in the columns
+clause as well as other elements of the :class:`_sql.Select`.
+
+If we set a single column from a particular :class:`_schema.Table`
+in the COLUMNS clause, it puts that :class:`_schema.Table` in the FROM
+clause as well::
+
+ >>> print(select(user_table.c.name))
+ {opensql}SELECT user_account.name
+ FROM user_account
+
+If we were to put columns from two tables, then we get a comma-separated FROM
+clause::
+
+ >>> print(select(user_table.c.name, address_table.c.email_address))
+ {opensql}SELECT user_account.name, address.email_address
+ FROM user_account, address
+
+In order to JOIN these two tables together, there are two methods that are
+most straightforward. The first is :meth:`_sql.Select.join_from`, which
+allows us to indicate the left and right side of the JOIN explicitly::
+
+ >>> print(
+ ... select(user_table.c.name, address_table.c.email_address).
+ ... join_from(user_table, address_table)
+ ... )
+ {opensql}SELECT user_account.name, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+
+The other is the :meth:`_sql.Select.join` method, which indicates only the
+right side of the JOIN; the left hand side is inferred::
+
+ >>> print(
+ ... select(user_table.c.name, address_table.c.email_address).
+ ... join(address_table)
+ ... )
+ {opensql}SELECT user_account.name, address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+.. sidebar:: The ON Clause is inferred
+
+ When using :meth:`_sql.Select.join_from` or :meth:`_sql.Select.join`, we may
+ observe that the ON clause of the join is also inferred for us in simple cases.
+ More on that in the next section.
+
+We also have the option to add elements to the FROM clause explicitly, if it is not
+inferred the way we want from the columns clause. We use the
+:meth:`_sql.Select.select_from` method to achieve this, as below
+where we establish ``user_table`` as the first element in the FROM
+clause and :meth:`_sql.Select.join` to establish ``address_table`` as
+the second::
+
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... select_from(user_table).join(address_table)
+ ... )
+ {opensql}SELECT address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+Another example where we might want to use :meth:`_sql.Select.select_from`
+is if our columns clause doesn't have enough information to provide for a
+FROM clause. For example, to SELECT from the common SQL expression
+``count(*)``, we use a SQLAlchemy element known as :attr:`_sql.func` to
+produce the SQL ``count()`` function::
+
+ >>> from sqlalchemy import func
+ >>> print (
+ ... select(func.count('*')).select_from(user_table)
+ ... )
+ {opensql}SELECT count(:count_2) AS count_1
+ FROM user_account
+
+.. _tutorial_select_join_onclause:
+
+Setting the ON Clause
+~~~~~~~~~~~~~~~~~~~~~
+
+The previous examples on JOIN illustrated that the :class:`_sql.Select` construct
+can join between two tables and produce the ON clause automatically. This
+occurs in those examples because the ``user_table`` and ``address_table``
+:class:`_schema.Table` objects include a single :class:`_schema.ForeignKeyConstraint`
+definition which is used to form this ON clause.
+
+If the left and right targets of the join do not have such a constraint, or
+there are multiple constraints in place, we need to specify the ON clause
+directly. Both :meth:`_sql.Select.join` and :meth:`_sql.Select.join_from`
+accept an additional argument for the ON clause, which is stated using the
+same SQL Expression mechanics as we saw in :ref:`tutorial_select_where_clause`::
+
+ >>> print(
+ ... select(address_table.c.email_address).
+ ... select_from(user_table).
+ ... join(address_table, user_table.c.id == address_table.c.user_id)
+ ... )
+ {opensql}SELECT address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+.. container:: orm-header
+
+    **ORM Tip** - there's another way to generate the ON clause when using ORM
+    entities, which is to make use of the :func:`_orm.relationship` construct
+    that can be seen in the mapping set up at :ref:`tutorial_declaring_mapped_classes`.
+    This is a whole subject unto itself, which is introduced more fully
+    at :ref:`tutorial_joining_relationships`.
+
+OUTER and FULL join
+~~~~~~~~~~~~~~~~~~~
+
+Both the :meth:`_sql.Select.join` and :meth:`_sql.Select.join_from` methods
+accept keyword arguments :paramref:`_sql.Select.join.isouter` and
+:paramref:`_sql.Select.join.full` which will render LEFT OUTER JOIN
+and FULL OUTER JOIN, respectively::
+
+ >>> print(
+ ... select(user_table).join(address_table, isouter=True)
+ ... )
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account LEFT OUTER JOIN address ON user_account.id = address.user_id
+
+ >>> print(
+ ... select(user_table).join(address_table, full=True)
+ ... )
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account FULL OUTER JOIN address ON user_account.id = address.user_id
+
+There is also a method :meth:`_sql.Select.outerjoin` that is equivalent to
+using ``.join(..., isouter=True)``.
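+
+For example, the following should render the same LEFT OUTER JOIN as in the
+``isouter=True`` example above::
+
+    >>> print(
+    ...     select(user_table).outerjoin(address_table)
+    ... )
+    {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+    FROM user_account LEFT OUTER JOIN address ON user_account.id = address.user_id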
+
+ORDER BY
+^^^^^^^^^
+
+The ORDER BY clause is constructed in terms
+of SQL Expression constructs typically based on :class:`_schema.Column` or
+similar objects. The :meth:`_sql.Select.order_by` method accepts one or
+more of these expressions positionally::
+
+ >>> print(select(user_table).order_by(user_table.c.name))
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account ORDER BY user_account.name
+
+Ascending / descending is available from the :meth:`_sql.ColumnElement.asc`
+and :meth:`_sql.ColumnElement.desc` modifiers, which are present
+from ORM-bound attributes as well::
+
+
+ >>> print(select(User).order_by(User.name.asc(), User.fullname.desc()))
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account ORDER BY user_account.name ASC, user_account.fullname DESC
+
+.. _tutorial_group_by_w_aggregates:
+
+Aggregate functions with GROUP BY / HAVING
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In SQL, aggregate functions allow column expressions across multiple rows
+to be aggregated together to produce a single result. Examples include
+counting, computing averages, as well as locating the maximum or minimum
+value in a set of values.
+
+SQLAlchemy provides for SQL functions in an open-ended way using a namespace
+known as :data:`_sql.func`. This is a special constructor object which
+will create new instances of :class:`_functions.Function` when given the name
+of a particular SQL function, which can be any name, as well as zero or
+more arguments to pass to the function, which are, like in all other cases,
+SQL Expression constructs. For example, to
+render the SQL COUNT() function against the ``user_account.id`` column,
+we call upon the ``count()`` name::
+
+ >>> from sqlalchemy import func
+ >>> count_fn = func.count(user_table.c.id)
+ >>> print(count_fn)
+ {opensql}count(user_account.id)
+
+When using aggregate functions in SQL, the GROUP BY clause is essential in that
+it allows rows to be partitioned into groups where aggregate functions will
+be applied to each group individually. When requesting non-aggregated columns
+in the COLUMNS clause of a SELECT statement, SQL requires that these columns
+all be subject to a GROUP BY clause, either directly or indirectly based on
+a primary key association. The HAVING clause is then used in a similar
+manner as the WHERE clause, except that it filters out rows based on aggregated
+values rather than direct row contents.
+
+SQLAlchemy provides for these two clauses using the :meth:`_sql.Select.group_by`
+and :meth:`_sql.Select.having` methods. Below we illustrate selecting
+user name fields as well as count of addresses, for those users that have more
+than one address:
+
+.. sourcecode:: python+sql
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... select(User.name, func.count(Address.id).label("count")).
+ ... join(Address).
+ ... group_by(User.name).
+ ... having(func.count(Address.id) > 1)
+ ... )
+ ... print(result.all())
+ {opensql}BEGIN (implicit)
+ SELECT user_account.name, count(address.id) AS count
+ FROM user_account JOIN address ON user_account.id = address.user_id GROUP BY user_account.name
+ HAVING count(address.id) > ?
+ [...] (1,){stop}
+ [('sandy', 2)]
+ {opensql}ROLLBACK{stop}
+
+Ordering or Grouping by a Label
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+An important technique, in particular on some database backends, is the ability
+to ORDER BY or GROUP BY an expression that is already stated in the columns
+clause, without re-stating the expression in the ORDER BY or GROUP BY clause
+and instead using the column name or labeled name from the COLUMNS clause.
+This form is available by passing the string text of the name to the
+:meth:`_sql.Select.order_by` or :meth:`_sql.Select.group_by` method. The text
+passed is **not rendered directly**; instead, it is matched to the name given
+to an expression in the columns clause and rendered as that expression in
+context, raising an error if no match is found. The unary modifiers
+:func:`.asc` and :func:`.desc` may also be used in this form:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy import func, desc
+ >>> stmt = select(
+ ... Address.user_id,
+ ... func.count(Address.id).label('num_addresses')).\
+ ... group_by("user_id").order_by("user_id", desc("num_addresses"))
+ >>> print(stmt)
+ {opensql}SELECT address.user_id, count(address.id) AS num_addresses
+ FROM address GROUP BY address.user_id ORDER BY address.user_id, num_addresses DESC
+
+.. _tutorial_using_aliases:
+
+Using Aliases
+^^^^^^^^^^^^^
+
+Now that we are selecting from multiple tables and using joins, we quickly
+run into the case where we need to refer to the same table multiple times
+in the FROM clause of a statement. We accomplish this using SQL **aliases**,
+which are a syntax that supplies an alternative name to a table or subquery
+by which it can be referred to in the statement.
+
+In the SQLAlchemy Expression Language, these "names" are instead represented by
+:class:`_sql.FromClause` objects known as the :class:`_sql.Alias` construct,
+which is constructed in Core using the :meth:`_sql.FromClause.alias`
+method. An :class:`_sql.Alias` construct is just like a :class:`_schema.Table`
+construct in that it also has a namespace of :class:`_schema.Column`
+objects within the :attr:`_sql.Alias.c` collection. The SELECT statement
+below for example returns all unique pairs of user names::
+
+ >>> user_alias_1 = user_table.alias()
+ >>> user_alias_2 = user_table.alias()
+ >>> print(
+ ... select(user_alias_1.c.name, user_alias_2.c.name).
+ ... join_from(user_alias_1, user_alias_2, user_alias_1.c.id > user_alias_2.c.id)
+ ... )
+ {opensql}SELECT user_account_1.name, user_account_2.name
+ FROM user_account AS user_account_1
+ JOIN user_account AS user_account_2 ON user_account_1.id > user_account_2.id
+
+.. _tutorial_orm_entity_aliases:
+
+ORM Entity Aliases
+~~~~~~~~~~~~~~~~~~
+
+The ORM equivalent of the :meth:`_sql.FromClause.alias` method is the
+ORM :func:`_orm.aliased` function, which may be applied to an entity
+such as ``User`` or ``Address``. This produces a :class:`_sql.Alias` object
+internally that's against the original mapped :class:`_schema.Table` object,
+while maintaining ORM functionality. The SELECT below selects from the
+``User`` entity all objects that include two particular email addresses::
+
+ >>> from sqlalchemy.orm import aliased
+ >>> address_alias_1 = aliased(Address)
+ >>> address_alias_2 = aliased(Address)
+ >>> print(
+ ... select(User).
+ ... join_from(User, address_alias_1).
+ ... where(address_alias_1.email_address == 'patrick@aol.com').
+ ... join_from(User, address_alias_2).
+ ... where(address_alias_2.email_address == 'patrick@gmail.com')
+ ... )
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN address AS address_1 ON user_account.id = address_1.user_id
+ JOIN address AS address_2 ON user_account.id = address_2.user_id
+ WHERE address_1.email_address = :email_address_1
+ AND address_2.email_address = :email_address_2
+
+.. tip::
+
+ As mentioned in :ref:`tutorial_select_join_onclause`, the ORM provides
+ for another way to join using the :func:`_orm.relationship` construct.
+ The above example using aliases is demonstrated using :func:`_orm.relationship`
+ at :ref:`tutorial_joining_relationships_aliased`.
+
+
+.. _tutorial_subqueries_ctes:
+
+Subqueries and CTEs
+^^^^^^^^^^^^^^^^^^^^
+
+A subquery in SQL is a SELECT statement that is rendered within parentheses and
+placed within the context of an enclosing statement, typically but not
+necessarily a SELECT statement.
+
+This section will cover a so-called "non-scalar" subquery, which is typically
+placed in the FROM clause of an enclosing SELECT. We will also cover the
+Common Table Expression or CTE, which is used in a similar way as a subquery,
+but includes additional features.
+
+SQLAlchemy uses the :class:`_sql.Subquery` object to represent a subquery and
+the :class:`_sql.CTE` to represent a CTE, usually obtained from the
+:meth:`_sql.Select.subquery` and :meth:`_sql.Select.cte` methods, respectively.
+Either object can be used as a FROM element inside of a larger
+:func:`_sql.select` construct.
+
+We can construct a :class:`_sql.Subquery` that will select an aggregate count
+of rows from the ``address`` table (aggregate functions and GROUP BY were
+introduced previously at :ref:`tutorial_group_by_w_aggregates`):
+
+ >>> subq = select(
+ ... func.count(address_table.c.id).label("count"),
+ ... address_table.c.user_id
+ ... ).group_by(address_table.c.user_id).subquery()
+
+Stringifying the subquery by itself without it being embedded inside of another
+:class:`_sql.Select` or other statement produces the plain SELECT statement
+without any enclosing parentheses::
+
+ >>> print(subq)
+ {opensql}SELECT count(address.id) AS count, address.user_id
+ FROM address GROUP BY address.user_id
+
+
+The :class:`_sql.Subquery` object behaves like any other FROM object such
+as a :class:`_schema.Table`, notably in that it includes a :attr:`_sql.Subquery.c`
+namespace of the columns which it selects. We can use this namespace to
+refer to both the ``user_id`` column as well as our custom labeled
+``count`` expression::
+
+ >>> print(select(subq.c.user_id, subq.c.count))
+ {opensql}SELECT anon_1.user_id, anon_1.count
+ FROM (SELECT count(address.id) AS count, address.user_id AS user_id
+ FROM address GROUP BY address.user_id) AS anon_1
+
+With a selection of rows contained within the ``subq`` object, we can apply
+the object to a larger :class:`_sql.Select` that will join the data to
+the ``user_account`` table::
+
+ >>> stmt = select(
+ ... user_table.c.name,
+ ... user_table.c.fullname,
+ ... subq.c.count
+ ... ).join_from(user_table, subq)
+
+ >>> print(stmt)
+ {opensql}SELECT user_account.name, user_account.fullname, anon_1.count
+ FROM user_account JOIN (SELECT count(address.id) AS count, address.user_id AS user_id
+ FROM address GROUP BY address.user_id) AS anon_1 ON user_account.id = anon_1.user_id
+
+In order to join from ``user_account`` to the subquery, we made use of the
+:meth:`_sql.Select.join_from` method. As has been illustrated previously, the
+ON clause of this join was again **inferred** based on foreign key constraints.
+Even though a SQL subquery does not itself have any constraints, SQLAlchemy can
+act upon constraints represented on the columns by determining that the
+``subq.c.user_id`` column is **derived** from the ``address_table.c.user_id``
+column, which does express a foreign key relationship back to the
+``user_table.c.id`` column which is then used to generate the ON clause.
+
+Common Table Expressions (CTEs)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Usage of the :class:`_sql.CTE` construct in SQLAlchemy is virtually
+the same as how the :class:`_sql.Subquery` construct is used. By changing
+the invocation of the :meth:`_sql.Select.subquery` method to use
+:meth:`_sql.Select.cte` instead, we can use the resulting object as a FROM
+element in the same way, but the rendered SQL uses the very different
+common table expression syntax::
+
+ >>> subq = select(
+ ... func.count(address_table.c.id).label("count"),
+ ... address_table.c.user_id
+ ... ).group_by(address_table.c.user_id).cte()
+
+ >>> stmt = select(
+ ... user_table.c.name,
+ ... user_table.c.fullname,
+ ... subq.c.count
+ ... ).join_from(user_table, subq)
+
+ >>> print(stmt)
+ {opensql}WITH anon_1 AS
+ (SELECT count(address.id) AS count, address.user_id AS user_id
+ FROM address GROUP BY address.user_id)
+ SELECT user_account.name, user_account.fullname, anon_1.count
+ FROM user_account JOIN anon_1 ON user_account.id = anon_1.user_id
+
+The :class:`_sql.CTE` construct also features the ability to be used
+in a "recursive" style, and may in more elaborate cases be composed from the
+RETURNING clause of an INSERT, UPDATE or DELETE statement. The docstring
+for :class:`_sql.CTE` includes details on these additional patterns.
+
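+As a brief sketch only (the references below include complete examples), a
+recursive CTE is requested by passing ``recursive=True`` to
+:meth:`_sql.Select.cte`, with the recursive portion then attached using a
+UNION ALL::
+
+ >>> from sqlalchemy import literal
+ >>> numbers = select(literal(1).label("n")).cte("numbers", recursive=True)
+ >>> numbers = numbers.union_all(select(numbers.c.n + 1).where(numbers.c.n < 5))
+ >>> stmt = select(numbers.c.n)  # the rendered SQL will use WITH RECURSIVE
+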
+.. seealso::
+
+ :meth:`_sql.Select.subquery` - further detail on subqueries
+
+ :meth:`_sql.Select.cte` - examples for CTE including how to use
+ RECURSIVE as well as DML-oriented CTEs
+
+ORM Entity Subqueries/CTEs
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the ORM, the :func:`_orm.aliased` construct may be used to associate an ORM
+entity, such as our ``User`` or ``Address`` class, with any :class:`_sql.FromClause`
+concept that represents a source of rows. The preceding section
+:ref:`tutorial_orm_entity_aliases` illustrates using :func:`_orm.aliased`
+to associate the mapped class with an :class:`_sql.Alias` of its
+mapped :class:`_schema.Table`. Here we illustrate :func:`_orm.aliased` doing the same
+thing against both a :class:`_sql.Subquery` as well as a :class:`_sql.CTE`
+generated against a :class:`_sql.Select` construct, which ultimately derives
+from that same mapped :class:`_schema.Table`.
+
+Below is an example of applying :func:`_orm.aliased` to the :class:`_sql.Subquery`
+construct, so that ORM entities can be extracted from its rows. The result
+shows a series of ``User`` and ``Address`` objects, where the data for
+each ``Address`` object ultimately came from a subquery against the
+``address`` table rather than that table directly:
+
+.. sourcecode:: python+sql
+
+ >>> subq = select(Address).where(~Address.email_address.like('%@aol.com')).subquery()
+ >>> address_subq = aliased(Address, subq)
+ >>> stmt = select(User, address_subq).join_from(User, address_subq).order_by(User.id, address_subq.id)
+ >>> with Session(engine) as session:
+ ... for user, address in session.execute(stmt):
+ ... print(f"{user} {address}")
+ {opensql}BEGIN (implicit)
+ SELECT user_account.id, user_account.name, user_account.fullname,
+ anon_1.id AS id_1, anon_1.email_address, anon_1.user_id
+ FROM user_account JOIN
+ (SELECT address.id AS id, address.email_address AS email_address, address.user_id AS user_id
+ FROM address
+ WHERE address.email_address NOT LIKE ?) AS anon_1 ON user_account.id = anon_1.user_id
+ ORDER BY user_account.id, anon_1.id
+ [...] ('%@aol.com',){stop}
+ User(id=1, name='spongebob', fullname='Spongebob Squarepants') Address(id=1, email_address='spongebob@sqlalchemy.org')
+ User(id=2, name='sandy', fullname='Sandy Cheeks') Address(id=2, email_address='sandy@sqlalchemy.org')
+ User(id=2, name='sandy', fullname='Sandy Cheeks') Address(id=3, email_address='sandy@squirrelpower.org')
+ {opensql}ROLLBACK{stop}
+
+Another example follows, which is exactly the same except it makes use of the
+:class:`_sql.CTE` construct instead:
+
+.. sourcecode:: python+sql
+
+ >>> cte = select(Address).where(~Address.email_address.like('%@aol.com')).cte()
+ >>> address_cte = aliased(Address, cte)
+ >>> stmt = select(User, address_cte).join_from(User, address_cte).order_by(User.id, address_cte.id)
+ >>> with Session(engine) as session:
+ ... for user, address in session.execute(stmt):
+ ... print(f"{user} {address}")
+ {opensql}BEGIN (implicit)
+ WITH anon_1 AS
+ (SELECT address.id AS id, address.email_address AS email_address, address.user_id AS user_id
+ FROM address
+ WHERE address.email_address NOT LIKE ?)
+ SELECT user_account.id, user_account.name, user_account.fullname,
+ anon_1.id AS id_1, anon_1.email_address, anon_1.user_id
+ FROM user_account
+ JOIN anon_1 ON user_account.id = anon_1.user_id
+ ORDER BY user_account.id, anon_1.id
+ [...] ('%@aol.com',){stop}
+ User(id=1, name='spongebob', fullname='Spongebob Squarepants') Address(id=1, email_address='spongebob@sqlalchemy.org')
+ User(id=2, name='sandy', fullname='Sandy Cheeks') Address(id=2, email_address='sandy@sqlalchemy.org')
+ User(id=2, name='sandy', fullname='Sandy Cheeks') Address(id=3, email_address='sandy@squirrelpower.org')
+ {opensql}ROLLBACK{stop}
+
+In both cases, the subquery and CTE were named at the SQL level using an
+"anonymous" name. In the Python code, we don't need to provide these names
+at all. The object identity of the :class:`_sql.Subquery` or :class:`_sql.CTE`
+instances serves as the syntactical identity of the object when rendered.
+
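+If a fixed name is preferred, one may be passed to :meth:`_sql.Select.subquery`
+or :meth:`_sql.Select.cte`; as a brief sketch, the statement below would render
+using ``address_counts`` in place of ``anon_1``::
+
+ >>> named_subq = select(
+ ...     func.count(address_table.c.id).label("count"),
+ ...     address_table.c.user_id
+ ... ).group_by(address_table.c.user_id).subquery("address_counts")
+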
+.. _tutorial_scalar_subquery:
+
+Scalar and Correlated Subqueries
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A scalar subquery is a subquery that returns exactly zero or one row and
+exactly one column. The subquery is then used in the COLUMNS or WHERE clause
+of an enclosing SELECT statement and is different from a regular subquery in
+that it is not used in the FROM clause. A :term:`correlated subquery` is a
+scalar subquery that refers to a table in the enclosing SELECT statement.
+
+SQLAlchemy represents the scalar subquery using the
+:class:`_sql.ScalarSelect` construct, which is part of the
+:class:`_sql.ColumnElement` expression hierarchy, in contrast to the regular
+subquery which is represented by the :class:`_sql.Subquery` construct, which is
+in the :class:`_sql.FromClause` hierarchy.
+
+Scalar subqueries are often, but not necessarily, used with aggregate functions,
+introduced previously at :ref:`tutorial_group_by_w_aggregates`. A scalar
+subquery is indicated explicitly by making use of the :meth:`_sql.Select.scalar_subquery`
+method as below. Its default string form, when stringified by itself,
+renders as an ordinary SELECT statement that selects from two tables::
+
+ >>> subq = select(func.count(address_table.c.id)).\
+ ... where(user_table.c.id == address_table.c.user_id).\
+ ... scalar_subquery()
+ >>> print(subq)
+ {opensql}(SELECT count(address.id) AS count_1
+ FROM address, user_account
+ WHERE user_account.id = address.user_id)
+
+The above ``subq`` object now falls within the :class:`_sql.ColumnElement`
+SQL expression hierarchy, in that it may be used like any other column
+expression::
+
+ >>> print(subq == 5)
+ {opensql}(SELECT count(address.id) AS count_1
+ FROM address, user_account
+ WHERE user_account.id = address.user_id) = :param_1
+
+
+Although the scalar subquery, when stringified by itself, renders both
+``user_account`` and ``address`` in its FROM clause, when embedding it
+into an enclosing :func:`_sql.select` construct that deals with the
+``user_account`` table, the ``user_account`` table is automatically
+**correlated**, meaning it does not render in the FROM clause of the subquery::
+
+ >>> stmt = select(user_table.c.name, subq.label("address_count"))
+ >>> print(stmt)
+ {opensql}SELECT user_account.name, (SELECT count(address.id) AS count_1
+ FROM address
+ WHERE user_account.id = address.user_id) AS address_count
+ FROM user_account
+
+Simple correlated subqueries will usually do the right thing.
+However, in the case where the correlation is ambiguous, SQLAlchemy will let
+us know that more clarity is needed::
+
+ >>> stmt = select(
+ ... user_table.c.name,
+ ... address_table.c.email_address,
+ ... subq.label("address_count")
+ ... ).\
+ ... join_from(user_table, address_table).\
+ ... order_by(user_table.c.id, address_table.c.id)
+ >>> print(stmt)
+ Traceback (most recent call last):
+ ...
+ InvalidRequestError: Select statement '<... Select object at ...>' returned
+ no FROM clauses due to auto-correlation; specify correlate(<tables>) to
+ control correlation manually.
+
+To indicate that ``user_table`` is the table we seek to correlate, we specify
+this using the :meth:`_sql.ScalarSelect.correlate` or
+:meth:`_sql.ScalarSelect.correlate_except` methods::
+
+ >>> subq = select(func.count(address_table.c.id)).\
+ ... where(user_table.c.id == address_table.c.user_id).\
+ ... scalar_subquery().correlate(user_table)
+
+The statement then can return the data for this column like any other:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... select(
+ ... user_table.c.name,
+ ... address_table.c.email_address,
+ ... subq.label("address_count")
+ ... ).
+ ... join_from(user_table, address_table).
+ ... order_by(user_table.c.id, address_table.c.id)
+ ... )
+ ... print(result.all())
+ {opensql}BEGIN (implicit)
+ SELECT user_account.name, address.email_address, (SELECT count(address.id) AS count_1
+ FROM address
+ WHERE user_account.id = address.user_id) AS address_count
+ FROM user_account JOIN address ON user_account.id = address.user_id ORDER BY user_account.id, address.id
+ [...] (){stop}
+ [('spongebob', 'spongebob@sqlalchemy.org', 1), ('sandy', 'sandy@sqlalchemy.org', 2),
+ ('sandy', 'sandy@squirrelpower.org', 2)]
+ {opensql}ROLLBACK{stop}
+
+.. _tutorial_exists:
+
+EXISTS subqueries
+^^^^^^^^^^^^^^^^^^
+
+The SQL EXISTS keyword is an operator that is used with :ref:`scalar subqueries
+<tutorial_scalar_subquery>` to return a boolean true or false depending on
+whether the SELECT statement would return a row. SQLAlchemy includes a variant of the
+:class:`_sql.ScalarSelect` object called :class:`_sql.Exists`, which will
+generate an EXISTS subquery and is most conveniently generated using the
+:meth:`_sql.SelectBase.exists` method. Below we produce an EXISTS so that we
+can return ``user_account`` rows that have more than one related row in
+``address``:
+
+.. sourcecode:: pycon+sql
+
+ >>> subq = (
+ ... select(func.count(address_table.c.id)).
+ ... where(user_table.c.id == address_table.c.user_id).
+ ... group_by(address_table.c.user_id).
+ ... having(func.count(address_table.c.id) > 1)
+ ... ).exists()
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... select(user_table.c.name).where(subq)
+ ... )
+ ... print(result.all())
+ {opensql}BEGIN (implicit)
+ SELECT user_account.name
+ FROM user_account
+ WHERE EXISTS (SELECT count(address.id) AS count_1
+ FROM address
+ WHERE user_account.id = address.user_id GROUP BY address.user_id
+ HAVING count(address.id) > ?)
+ [...] (1,){stop}
+ [('sandy',)]
+ {opensql}ROLLBACK{stop}
+
+The EXISTS construct is more often than not used as a negation, e.g. NOT EXISTS,
+as it provides a SQL-efficient form of locating rows for which a related
+table has no rows. Below we select user names that have no email addresses;
+note the negation operator (``~``) applied inside the enclosing SELECT's
+WHERE clause:
+
+.. sourcecode:: pycon+sql
+
+ >>> subq = (
+ ... select(address_table.c.id).
+ ... where(user_table.c.id == address_table.c.user_id)
+ ... ).exists()
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... select(user_table.c.name).where(~subq)
+ ... )
+ ... print(result.all())
+ {opensql}BEGIN (implicit)
+ SELECT user_account.name
+ FROM user_account
+ WHERE NOT (EXISTS (SELECT address.id
+ FROM address
+ WHERE user_account.id = address.user_id))
+ [...] (){stop}
+ [('patrick',)]
+ {opensql}ROLLBACK{stop}
+
+
+.. rst-class:: core-header, orm-addin
+
+.. _tutorial_core_update_delete:
+
+Core UPDATE and DELETE
+----------------------
+
+So far we've covered :class:`_sql.Insert`, so that we can get some data into
+our database, and then spent a lot of time on :class:`_sql.Select` which
+handles the broad range of usage patterns used for retrieving data from the
+database. In this section we will cover the :class:`_sql.Update` and
+:class:`_sql.Delete` constructs, which are used to modify existing rows
+as well as delete existing rows. This section will cover these constructs
+from a Core-centric perspective.
+
+
+.. container:: orm-header
+
+ **ORM Readers** - As was the case mentioned at :ref:`tutorial_core_insert`,
+ the :class:`_sql.Update` and :class:`_sql.Delete` operations when used with
+ the ORM are usually invoked internally from the :class:`_orm.Session`
+ object as part of the :term:`unit of work` process.
+
+ However, unlike :class:`_sql.Insert`, the :class:`_sql.Update` and
+ :class:`_sql.Delete` constructs can also be used directly with the ORM,
+ using a pattern known as "ORM-enabled update and delete"; for this reason,
+ familiarity with these constructs is useful for ORM use. Both styles of
+ use are discussed in the sections :ref:`tutorial_orm_updating` and
+ :ref:`tutorial_orm_deleting`.
+
+The update() SQL Expression Construct
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :func:`_sql.update` function generates a new instance of
+:class:`_sql.Update` which represents an UPDATE statement in SQL, that will
+update existing data in a table.
+
+Like the :func:`_sql.insert` construct, there is a "traditional" form of
+:func:`_sql.update`, which emits UPDATE against a single table at a time and
+does not return any rows. However, some backends support an UPDATE statement
+that may modify multiple tables at once, and the UPDATE statement also
+supports RETURNING such that columns contained in matched rows may be returned
+in the result set.
+
+A basic UPDATE looks like::
+
+ >>> from sqlalchemy import update
+ >>> stmt = (
+ ... update(user_table).where(user_table.c.name == 'patrick').
+ ... values(fullname='Patrick the Star')
+ ... )
+ >>> print(stmt)
+ {opensql}UPDATE user_account SET fullname=:fullname WHERE user_account.name = :name_1
+
+The :meth:`_sql.Update.values` method controls the contents of the SET elements
+of the UPDATE statement. This is the same method shared by the :class:`_sql.Insert`
+construct. Parameters can normally be passed using the column names as
+keyword arguments.
+
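+As a brief sketch, the same SET values may equivalently be supplied as a
+dictionary keyed on column names or :class:`_schema.Column` objects, rather
+than as keyword arguments::
+
+ >>> stmt = update(user_table).values({"fullname": "Patrick the Star"})
+ >>> print(stmt)
+ {opensql}UPDATE user_account SET fullname=:fullname
+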
+The :class:`_sql.Update` construct supports all the major SQL forms of UPDATE,
+including updates against expressions, where we can make use of
+:class:`_schema.Column` expressions::
+
+ >>> stmt = (
+ ... update(user_table).
+ ... values(fullname="Username: " + user_table.c.name)
+ ... )
+ >>> print(stmt)
+ {opensql}UPDATE user_account SET fullname=(:name_1 || user_account.name)
+
+To support UPDATE in an "executemany" context, where many parameter sets will
+be invoked against the same statement, the :func:`_sql.bindparam`
+construct may be used to set up bound parameters; these replace the places
+that literal values would normally go:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy import bindparam
+ >>> stmt = (
+ ... update(user_table).
+ ... where(user_table.c.name == bindparam('oldname')).
+ ... values(name=bindparam('newname'))
+ ... )
+ >>> with engine.begin() as conn:
+ ... conn.execute(
+ ... stmt,
+ ... [
+ ... {'oldname':'jack', 'newname':'ed'},
+ ... {'oldname':'wendy', 'newname':'mary'},
+ ... {'oldname':'jim', 'newname':'jake'},
+ ... ]
+ ... )
+ {opensql}BEGIN (implicit)
+ UPDATE user_account SET name=? WHERE user_account.name = ?
+ [...] (('ed', 'jack'), ('mary', 'wendy'), ('jake', 'jim'))
+ <sqlalchemy.engine.cursor.CursorResult object at 0x...>
+ COMMIT{stop}
+
+Other techniques which may be applied to UPDATE include:
+
+* **Correlated Updates**: a :ref:`correlated subquery <tutorial_scalar_subquery>`
+ may be used anywhere a column expression might be
+ placed::
+
+ >>> scalar_subq = (
+ ... select(address_table.c.email_address).
+ ... where(address_table.c.user_id == user_table.c.id).
+ ... order_by(address_table.c.id).
+ ... limit(1).
+ ... scalar_subquery()
+ ... )
+ >>> update_stmt = update(user_table).values(fullname=scalar_subq)
+ >>> print(update_stmt)
+ {opensql}UPDATE user_account SET fullname=(SELECT address.email_address
+ FROM address
+ WHERE address.user_id = user_account.id ORDER BY address.id
+ LIMIT :param_1)
+
+ ..
+
+
+* **UPDATE..FROM**: Some databases such as PostgreSQL and MySQL support a syntax
+ "UPDATE FROM" where additional tables may be stated in the FROM clause.
+ This syntax will be generated implicitly when additional tables are located
+ in the WHERE clause of the statement::
+
+ >>> update_stmt = (
+ ... update(user_table).
+ ... where(user_table.c.id == address_table.c.user_id).
+ ... where(address_table.c.email_address == 'patrick@aol.com').
+ ... values(fullname='Pat')
+ ... )
+ >>> print(update_stmt)
+ {opensql}UPDATE user_account SET fullname=:fullname FROM address
+ WHERE user_account.id = address.user_id AND address.email_address = :email_address_1
+
+ ..
+
+* **UPDATE..FROM updating multiple tables**: this is a MySQL-specific syntax which
+  requires that we refer to :class:`_schema.Table` objects in the VALUES
+  clause in order to update additional tables::
+
+ >>> update_stmt = (
+ ... update(user_table).
+ ... where(user_table.c.id == address_table.c.user_id).
+ ... where(address_table.c.email_address == 'patrick@aol.com').
+ ... values(
+ ... {
+ ... user_table.c.fullname: "Pat",
+ ... address_table.c.email_address: "pat@aol.com"
+ ... }
+ ... )
+ ... )
+ >>> from sqlalchemy.dialects import mysql
+ >>> print(update_stmt.compile(dialect=mysql.dialect()))
+ {opensql}UPDATE user_account, address
+ SET address.email_address=%s, user_account.fullname=%s
+ WHERE user_account.id = address.user_id AND address.email_address = %s
+
+ ..
+
+* **Parameter Ordered Updates**: Another MySQL-only behavior is that the order
+ of parameters in the SET clause of an UPDATE actually impacts the evaluation
+ of each expression. For this use case, the :meth:`_sql.Update.ordered_values`
+ method accepts a sequence of tuples so that this order may be controlled [1]_::
+
+ >>> update_stmt = (
+ ... update(some_table).
+ ... ordered_values(
+ ... (some_table.c.y, 20),
+ ... (some_table.c.x, some_table.c.y + 10)
+ ... )
+ ... )
+ >>> print(update_stmt)
+ {opensql}UPDATE some_table SET y=:y, x=(some_table.y + :y_1)
+
+ ..
+
+
+.. [1] While Python dictionaries are `guaranteed to be insert ordered
+ <https://mail.python.org/pipermail/python-dev/2017-December/151283.html>`_
+ as of Python 3.7, the
+   :meth:`_sql.Update.ordered_values` method still provides an additional
+ measure of clarity of intent when it is essential that the SET clause
+ of a MySQL UPDATE statement proceed in a specific way.
+
+
+The delete() SQL Expression Construct
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The :func:`_sql.delete` function generates a new instance of
+:class:`_sql.Delete` which represents a DELETE statement in SQL that will
+delete rows from a table.
+
+From an API perspective, the :func:`_sql.delete` construct is very similar to
+:func:`_sql.update`, traditionally returning no rows but
+allowing for a RETURNING variant.
+
+::
+
+ >>> from sqlalchemy import delete
+ >>> stmt = (
+ ... delete(user_table).where(user_table.c.name == 'patrick')
+ ... )
+ >>> print(stmt)
+ {opensql}DELETE FROM user_account WHERE user_account.name = :name_1
+
+Like :class:`_sql.Update`, :class:`_sql.Delete` supports the use of correlated
+subqueries in the WHERE clause as well as backend-specific multiple table
+syntaxes, such as ``DELETE FROM..USING`` on MySQL::
+
+ >>> delete_stmt = (
+ ... delete(user_table).
+ ... where(user_table.c.id == address_table.c.user_id).
+ ... where(address_table.c.email_address == 'patrick@aol.com')
+ ... )
+ >>> from sqlalchemy.dialects import mysql
+ >>> print(delete_stmt.compile(dialect=mysql.dialect()))
+ {opensql}DELETE FROM user_account USING user_account, address
+ WHERE user_account.id = address.user_id AND address.email_address = %s
+
+Getting Affected Row Count from UPDATE, DELETE
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Both :class:`_sql.Update` and :class:`_sql.Delete` support the ability to
+return the number of rows matched after the statement proceeds, for statements
+that are invoked using Core :class:`_engine.Connection`, i.e.
+:meth:`_engine.Connection.execute`. Per the caveats mentioned below, this value
+is available from the :attr:`_engine.CursorResult.rowcount` attribute:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.begin() as conn:
+ ... result = conn.execute(
+ ... update(user_table).
+ ... values(fullname="Patrick McStar").
+ ... where(user_table.c.name == 'patrick')
+ ... )
+ ... print(result.rowcount)
+ {opensql}BEGIN (implicit)
+ UPDATE user_account SET fullname=? WHERE user_account.name = ?
+ [...] ('Patrick McStar', 'patrick'){stop}
+ 1
+ {opensql}COMMIT{stop}
+
+.. tip::
+
+ The :class:`_engine.CursorResult` class is a subclass of
+ :class:`_engine.Result` which contains additional attributes that are
+ specific to the DBAPI ``cursor`` object. An instance of this subclass is
+ returned when a statement is invoked via the
+ :meth:`_engine.Connection.execute` method. When using the ORM, the
+ :meth:`_orm.Session.execute` method returns an object of this type for
+ all INSERT, UPDATE, and DELETE statements.
+
+Facts about :attr:`_engine.CursorResult.rowcount`:
+
+* The value returned is the number of rows **matched** by the WHERE clause of
+  the statement. It does not matter whether the rows were actually modified or not.
+
+* :attr:`_engine.CursorResult.rowcount` is not necessarily available for an UPDATE
+ or DELETE statement that uses RETURNING.
+
+* For an :ref:`executemany <tutorial_multiple_parameters>` execution,
+ :attr:`_engine.CursorResult.rowcount` may not be available either, which depends
+ highly on the DBAPI module in use as well as configured options. The
+ attribute :attr:`_engine.CursorResult.supports_sane_multi_rowcount` indicates
+ if this value will be available for the current backend in use.
+
+* Some drivers, particularly third party dialects for non-relational databases,
+ may not support :attr:`_engine.CursorResult.rowcount` at all. The
+  :attr:`_engine.CursorResult.supports_sane_rowcount` attribute will indicate this.
+
+* "rowcount" is used by the ORM :term:`unit of work` process to validate that
+ an UPDATE or DELETE statement matched the expected number of rows, and is
+ also essential for the ORM versioning feature documented at
+ :ref:`mapper_version_counter`.
+
+Using RETURNING with UPDATE, DELETE
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Like the :class:`_sql.Insert` construct, :class:`_sql.Update` and :class:`_sql.Delete`
+also support the RETURNING clause which is added by using the
+:meth:`_sql.Update.returning` and :meth:`_sql.Delete.returning` methods.
+When these methods are used on a backend that supports RETURNING, selected
+columns from all rows that match the WHERE criteria of the statement
+will be returned in the :class:`_engine.Result` object as rows that can
+be iterated::
+
+
+ >>> update_stmt = (
+ ... update(user_table).where(user_table.c.name == 'patrick').
+ ... values(fullname='Patrick the Star').
+ ... returning(user_table.c.id, user_table.c.name)
+ ... )
+ >>> print(update_stmt)
+ {opensql}UPDATE user_account SET fullname=:fullname
+ WHERE user_account.name = :name_1
+ RETURNING user_account.id, user_account.name
+
+ >>> delete_stmt = (
+ ... delete(user_table).where(user_table.c.name == 'patrick').
+ ... returning(user_table.c.id, user_table.c.name)
+ ... )
+ >>> print(delete_stmt)
+ {opensql}DELETE FROM user_account
+ WHERE user_account.name = :name_1
+ RETURNING user_account.id, user_account.name
+
+Further Reading for UPDATE, DELETE
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. seealso::
+
+ API documentation for UPDATE / DELETE:
+
+ * :class:`_sql.Update`
+
+ * :class:`_sql.Delete`
+
+ ORM-enabled UPDATE and DELETE:
+
+ * :ref:`tutorial_orm_enabled_update`
+
+ * :ref:`tutorial_orm_enabled_delete`
+
diff --git a/doc/build/tutorial/dbapi_transactions.rst b/doc/build/tutorial/dbapi_transactions.rst
new file mode 100644
index 000000000..24df53943
--- /dev/null
+++ b/doc/build/tutorial/dbapi_transactions.rst
@@ -0,0 +1,518 @@
+.. |prev| replace:: :doc:`engine`
+.. |next| replace:: :doc:`metadata`
+
+.. include:: tutorial_nav_include.rst
+
+
+.. _tutorial_working_with_transactions:
+
+Working with Transactions and the DBAPI
+========================================
+
+
+
+With the :class:`_future.Engine` object ready to go, we may now proceed
+to dive into the basic operation of an :class:`_future.Engine` and
+its primary interactive endpoints, the :class:`_future.Connection` and
+:class:`_engine.Result`. We will additionally introduce the ORM's
+:term:`facade` for these objects, known as the :class:`_orm.Session`.
+
+.. container:: orm-header
+
+ **Note to ORM readers**
+
+ When using the ORM, the :class:`_future.Engine` is managed by another
+ object called the :class:`_orm.Session`. The :class:`_orm.Session` in
+ modern SQLAlchemy emphasizes a transactional and SQL execution pattern that
+ is largely identical to that of the :class:`_future.Connection` discussed
+ below, so while this subsection is Core-centric, all of the concepts here
+ are essentially relevant to ORM use as well, and this subsection is recommended for all ORM
+ learners. The execution pattern used by the :class:`_future.Connection`
+ will be contrasted with that of the :class:`_orm.Session` at the end
+ of this section.
+
+As we have yet to introduce the SQLAlchemy Expression Language that is the
+primary feature of SQLAlchemy, we will make use of one simple construct within
+this package called the :func:`_sql.text` construct, which allows us to write
+SQL statements as **textual SQL**. Rest assured that textual SQL in
+day-to-day SQLAlchemy use is by far the exception rather than the rule for most
+tasks, even though it always remains fully available.
+
+.. rst-class:: core-header
+
+.. _tutorial_getting_connection:
+
+Getting a Connection
+---------------------
+
+The sole purpose of the :class:`_future.Engine` object from a user-facing
+perspective is to provide a unit of
+connectivity to the database called the :class:`_future.Connection`. When
+working with the Core directly, the :class:`_future.Connection` object
+is how all interaction with the database is done. As the :class:`_future.Connection`
+represents an open resource against the database, we want to always limit
+the scope of our use of this object to a specific context, and the best
+way to do that is to use the Python context manager form, also known as
+`the with statement <https://docs.python.org/3/reference/compound_stmts.html#with>`_.
+Below we illustrate "Hello World", using a textual SQL statement. Textual
+SQL is emitted using a construct called :func:`_sql.text` that will be discussed
+in more detail later:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy import text
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(text("select 'hello world'"))
+ ... print(result.all())
+ {opensql}BEGIN (implicit)
+ select 'hello world'
+ [...] ()
+ {stop}[('hello world',)]
+ {opensql}ROLLBACK{stop}
+
+In the above example, the context manager provided a database connection
+and also framed the operation inside of a transaction. The default behavior of
+the Python DBAPI is that a transaction is always in progress; when the
+scope of the connection is :term:`released`, a ROLLBACK is emitted to end the
+transaction. The transaction is **not committed automatically**; when we want
+to commit data we normally need to call :meth:`_future.Connection.commit`
+as we'll see in the next section.
+
+.. tip:: "autocommit" mode is available for special cases. The section
+ :ref:`dbapi_autocommit` discusses this.
+
+The result of our SELECT was also returned in an object called
+:class:`_engine.Result` that will be discussed later; however, for the moment
+we'll add that it's best to ensure this object is consumed within the
+"connect" block, and is not passed along outside of the scope of our connection.
+
+.. rst-class:: core-header
+
+.. _tutorial_committing_data:
+
+Committing Changes
+------------------
+
+We just learned that the DBAPI connection is non-autocommitting. What if
+we want to commit some data? We can alter our above example to create a
+table and insert some data, and the transaction is then committed using
+the :meth:`_future.Connection.commit` method, invoked **inside** the block
+where we acquired the :class:`_future.Connection` object:
+
+.. sourcecode:: pycon+sql
+
+ # "commit as you go"
+ >>> with engine.connect() as conn:
+ ... conn.execute(text("CREATE TABLE some_table (x int, y int)"))
+ ... conn.execute(
+ ... text("INSERT INTO some_table (x, y) VALUES (:x, :y)"),
+ ... [{"x": 1, "y": 1}, {"x": 2, "y": 4}]
+ ... )
+ ... conn.commit()
+ {opensql}BEGIN (implicit)
+ CREATE TABLE some_table (x int, y int)
+ [...] ()
+ <sqlalchemy.engine.cursor.CursorResult object at 0x...>
+ INSERT INTO some_table (x, y) VALUES (?, ?)
+ [...] ((1, 1), (2, 4))
+ <sqlalchemy.engine.cursor.CursorResult object at 0x...>
+ COMMIT
+
+Above, we emitted two SQL statements that are generally transactional, a
+"CREATE TABLE" statement [1]_ and an "INSERT" statement that's parameterized
+(the parameterization syntax above is discussed a few sections below in
+:ref:`tutorial_multiple_parameters`). As we want the work we've done to be
+committed within our block, we invoke the
+:meth:`_future.Connection.commit` method which commits the transaction. After
+we call this method inside the block, we can continue to run more SQL
+statements and if we choose we may call :meth:`_future.Connection.commit`
+again for subsequent statements. SQLAlchemy refers to this style as **commit as
+you go**.
+
+There is also another style of committing data, which is that we can declare
+our "connect" block to be a transaction block up front. For this mode of
+operation, we use the :meth:`_future.Engine.begin` method to acquire the
+connection, rather than the :meth:`_future.Engine.connect` method. This method
+will both manage the scope of the :class:`_future.Connection` and also
+enclose everything inside of a transaction with COMMIT at the end, assuming
+a successful block, or ROLLBACK in case of an exception being raised. This style
+may be referred to as **begin once**:
+
+.. sourcecode:: pycon+sql
+
+ # "begin once"
+ >>> with engine.begin() as conn:
+ ... conn.execute(
+ ... text("INSERT INTO some_table (x, y) VALUES (:x, :y)"),
+ ... [{"x": 6, "y": 8}, {"x": 9, "y": 10}]
+ ... )
+ {opensql}BEGIN (implicit)
+ INSERT INTO some_table (x, y) VALUES (?, ?)
+ [...] ((6, 8), (9, 10))
+ <sqlalchemy.engine.cursor.CursorResult object at 0x...>
+ COMMIT
+
+"Begin once" style is often preferred as it is more succinct and indicates the
+intention of the entire block up front. However, within this tutorial we will
+normally use "commit as you go" style as it is more flexible for demonstration
+purposes.
+
+.. topic:: What's "BEGIN (implicit)"?
+
+ You might have noticed the log line "BEGIN (implicit)" at the start of a
+ transaction block. "implicit" here means that SQLAlchemy **did not
+ actually send any command** to the database; it just considers this to be
+ the start of the DBAPI's implicit transaction. You can register
+ :ref:`event hooks <core_sql_events>` to intercept this event, for example.
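+
+   As a rough sketch only (see the linked section for the authoritative event
+   names and signatures), such a hook might be registered against the related
+   ``"begin"`` connection event::
+
+      from sqlalchemy import event
+
+      @event.listens_for(engine, "begin")
+      def receive_begin(conn):
+          # invoked when a transaction begins on a Connection
+          print("transaction begun on", conn)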
+
+
+.. [1] :term:`DDL` refers to the subset of SQL that instructs the database
+ to create, modify, or remove schema-level constructs such as tables. DDL
+ such as "CREATE TABLE" is recommended to be within a transaction block that
+   ends with COMMIT, as many databases use transactional DDL such that the
+ schema changes don't take place until the transaction is committed. However,
+ as we'll see later, we usually let SQLAlchemy run DDL sequences for us as
+ part of a higher level operation where we don't generally need to worry
+ about the COMMIT.
+
+
+.. rst-class:: core-header
+
+
+Basics of Statement Execution
+-----------------------------
+
+We have seen a few examples that run SQL statements against a database, making
+use of a method called :meth:`_future.Connection.execute`, in conjunction with
+an object called :func:`_sql.text`, and returning an object called
+:class:`_engine.Result`. In this section we'll illustrate more closely the
+mechanics and interactions of these components.
+
+.. container:: orm-header
+
+ Most of the content in this section applies equally well to modern ORM
+ use when using the :meth:`_orm.Session.execute` method, which works
+ very similarly to that of :meth:`_future.Connection.execute`, including that
+ ORM result rows are delivered using the same :class:`_engine.Result`
+ interface used by Core.
+
+.. rst-class:: orm-addin
+
+.. _tutorial_fetching_rows:
+
+Fetching Rows
+^^^^^^^^^^^^^
+
+We'll first illustrate the :class:`_engine.Result` object more closely by
+making use of the rows we've inserted previously, running a textual SELECT
+statement on the table we've created:
+
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(text("SELECT x, y FROM some_table"))
+ ... for row in result:
+ ... print(f"x: {row.x} y: {row.y}")
+ {opensql}BEGIN (implicit)
+ SELECT x, y FROM some_table
+ [...] ()
+ {stop}x: 1 y: 1
+ x: 2 y: 4
+ x: 6 y: 8
+ x: 9 y: 10
+ {opensql}ROLLBACK{stop}
+
+Above, the "SELECT" string we executed selected all rows from our table.
+The object returned is called :class:`_engine.Result` and represents an
+iterable object of result rows.
+
+:class:`_engine.Result` has lots of methods for
+fetching and transforming rows, such as the :meth:`_engine.Result.all`
+method illustrated previously, which returns a list of all :class:`_engine.Row`
+objects. It also implements the Python iterator interface so that we can
+iterate over the collection of :class:`_engine.Row` objects directly.
+
+The :class:`_engine.Row` objects themselves are intended to act like Python
+`named tuples
+<https://docs.python.org/3/library/collections.html#collections.namedtuple>`_.
+Below we illustrate a variety of ways to access rows.
+
+* **Tuple Assignment** - This is the most Python-idiomatic style, which is to assign variables
+ to each row positionally as they are received:
+
+ ::
+
+ result = conn.execute(text("select x, y from some_table"))
+
+ for x, y in result:
+ # ...
+
+* **Integer Index** - Tuples are Python sequences, so regular integer access is available too:
+
+ ::
+
+ result = conn.execute(text("select x, y from some_table"))
+
+ for row in result:
+ x = row[0]
+
+* **Attribute Name** - As these are Python named tuples, the tuples have dynamic attribute names
+ matching the names of each column. These names are normally the names that the
+ SQL statement assigns to the columns in each row. While they are usually
+ fairly predictable and can also be controlled by labels, in less defined cases
+ they may be subject to database-specific behaviors::
+
+ result = conn.execute(text("select x, y from some_table"))
+
+ for row in result:
+ y = row.y
+
+ # illustrate use with Python f-strings
+ print(f"Row: {row.x} {row.y}")
+
+ ..
+
+* **Mapping Access** - To receive rows as Python **mapping** objects, which is
+ essentially a read-only version of Python's interface to the common ``dict``
+ object, the :class:`_engine.Result` may be **transformed** into a
+ :class:`_engine.MappingResult` object using the
+ :meth:`_engine.Result.mappings` modifier; this is a result object that yields
+ dictionary-like :class:`_engine.RowMapping` objects rather than
+ :class:`_engine.Row` objects::
+
+ result = conn.execute(text("select x, y from some_table"))
+
+ for dict_row in result.mappings():
+ x = dict_row['x']
+ y = dict_row['y']
+
+ ..
+
+.. rst-class:: orm-addin
+
+.. _tutorial_sending_parameters:
+
+Sending Parameters
+^^^^^^^^^^^^^^^^^^
+
+SQL statements are usually accompanied by data that is to be passed with the
+statement itself, as we saw in the INSERT example previously. The
+:meth:`_future.Connection.execute` method therefore also accepts parameters,
+which are referred to as :term:`bound parameters`. A rudimentary example
+might be if we wanted to limit our SELECT statement only to rows that meet
+certain criteria, such as rows where the "y" value is greater than a certain
+value that is passed in to a function.
+
+In order to achieve this such that the SQL statement can remain fixed and
+that the driver can properly sanitize the value, we add a WHERE criteria to
+our statement that names a new parameter called "y"; the :func:`_sql.text`
+construct accepts these using a colon format "``:y``". The actual value for
+"``:y``" is then passed as the second argument to
+:meth:`_future.Connection.execute` in the form of a dictionary:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(
+ ... text("SELECT x, y FROM some_table WHERE y > :y"),
+ ... {"y": 2}
+ ... )
+ ... for row in result:
+ ... print(f"x: {row.x} y: {row.y}")
+ {opensql}BEGIN (implicit)
+ SELECT x, y FROM some_table WHERE y > ?
+ [...] (2,)
+ {stop}x: 2 y: 4
+ x: 6 y: 8
+ x: 9 y: 10
+ {opensql}ROLLBACK{stop}
+
+
+In the logged SQL output, we can see that the bound parameter ``:y`` was
+converted into a question mark when it was sent to the SQLite database.
+This is because the SQLite database driver uses a format called "qmark parameter style",
+which is one of several parameter formats allowed by the DBAPI specification.
+SQLAlchemy abstracts these formats into just one, which is the "named" format
+using a colon.
+
+.. _tutorial_multiple_parameters:
+
+Sending Multiple Parameters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In the example at :ref:`tutorial_committing_data`, we executed an INSERT
+statement where it appeared that we were able to INSERT multiple rows into the
+database at once. For statements that **operate upon data, but do not return
+result sets**, namely :term:`DML` statements such as "INSERT" which don't
+include a phrase like "RETURNING", we can send **multi params** to the
+:meth:`_future.Connection.execute` method by passing a list of dictionaries
+instead of a single dictionary, thus allowing the single SQL statement to
+be invoked against each parameter set individually:
+
+.. sourcecode:: pycon+sql
+
+ >>> with engine.connect() as conn:
+ ... conn.execute(
+ ... text("INSERT INTO some_table (x, y) VALUES (:x, :y)"),
+ ... [{"x": 11, "y": 12}, {"x": 13, "y": 14}]
+ ... )
+ ... conn.commit()
+ {opensql}BEGIN (implicit)
+ INSERT INTO some_table (x, y) VALUES (?, ?)
+ [...] ((11, 12), (13, 14))
+ <sqlalchemy.engine.cursor.CursorResult object at 0x...>
+ COMMIT
+
+Behind the scenes, the :class:`_future.Connection` object uses a DBAPI feature
+known as `cursor.executemany()
+<https://www.python.org/dev/peps/pep-0249/#id18>`_. This method performs the
+equivalent operation of invoking the given SQL statement against each parameter
+set individually. The DBAPI may optimize this operation in a variety of ways,
+by using prepared statements, or by concatenating the parameter sets into a
+single SQL statement in some cases. Some SQLAlchemy dialects may also use
+alternate APIs for this case, such as the :ref:`psycopg2 dialect for PostgreSQL
+<postgresql_psycopg2>` which uses more performant APIs
+for this purpose.
+
+.. tip:: You may have noticed this section isn't tagged as an ORM concept.
+ That's because the "multiple parameters" use case is **usually** used
+ for INSERT statements, which when using the ORM are invoked in a different
+ way. Multiple parameters also may be used with UPDATE and DELETE
+ statements to emit distinct UPDATE/DELETE operations on a per-row basis,
+ however again when using the ORM, there is a different technique
+ generally used for updating or deleting many individual rows separately.
+
+.. rst-class:: orm-addin
+
+.. _tutorial_bundling_parameters:
+
+Bundling Parameters with a Statement
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The two previous cases illustrate a series of parameters being passed to
+accompany a SQL statement. For single-parameter statement executions,
+SQLAlchemy's use of parameters is in fact more often than not done by
+**bundling** the parameters with the statement itself, which is a primary
+feature of the SQL Expression Language and makes for queries that can be
+composed naturally while still making use of parameterization in all cases.
+This concept will be discussed in much more detail in the sections that follow;
+for a brief preview, the :func:`_sql.text` construct itself being part of the
+SQL Expression Language supports this feature by using the
+:meth:`_sql.TextClause.bindparams` method; this is a :term:`generative` method that
+returns a new copy of the SQL construct with additional state added, in this
+case the parameter values we want to pass along:
+
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = text("SELECT x, y FROM some_table WHERE y > :y ORDER BY x, y").bindparams(y=6)
+ >>> with engine.connect() as conn:
+ ... result = conn.execute(stmt)
+ ... for row in result:
+ ... print(f"x: {row.x} y: {row.y}")
+ {opensql}BEGIN (implicit)
+ SELECT x, y FROM some_table WHERE y > ? ORDER BY x, y
+ [...] (6,)
+ {stop}x: 6 y: 8
+ x: 9 y: 10
+ x: 11 y: 12
+ x: 13 y: 14
+ {opensql}ROLLBACK{stop}
+
+
+The interesting thing to note above is that even though we passed only a single
+argument, ``stmt``, to the :meth:`_future.Connection.execute` method, the
+execution of the statement illustrated both the SQL string as well as the
+separate parameter tuple.
+
+.. rst-class:: orm-addin
+
+.. _tutorial_executing_orm_session:
+
+Executing with an ORM Session
+-----------------------------
+
+As mentioned previously, most of the patterns and examples above apply to
+use with the ORM as well, so here we will introduce this usage so that
+as the tutorial proceeds, we will be able to illustrate each pattern in
+terms of Core and ORM use together.
+
+The fundamental transactional / database interactive object when using the
+ORM is called the :class:`_orm.Session`. In modern SQLAlchemy, this object
+is used in a manner very similar to that of the :class:`_future.Connection`,
+and in fact as the :class:`_orm.Session` is used, it refers to a
+:class:`_future.Connection` internally which it uses to emit SQL.
+
+When the :class:`_orm.Session` is used with non-ORM constructs, it
+passes through the SQL statements we give it and does not generally do things
+much differently from how the :class:`_future.Connection` does directly, so
+we can illustrate it here in terms of the simple textual SQL
+operations we've already learned.
+
+The :class:`_orm.Session` has a few different creational patterns, but
+here we will illustrate the most basic one that tracks exactly with how
+the :class:`_future.Connection` is used, which is to construct it within
+a context manager:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy.orm import Session
+
+ >>> stmt = text("SELECT x, y FROM some_table WHERE y > :y ORDER BY x, y").bindparams(y=6)
+ >>> with Session(engine) as session:
+ ... result = session.execute(stmt)
+ ... for row in result:
+ ... print(f"x: {row.x} y: {row.y}")
+ {opensql}BEGIN (implicit)
+ SELECT x, y FROM some_table WHERE y > ? ORDER BY x, y
+ [...] (6,){stop}
+ x: 6 y: 8
+ x: 9 y: 10
+ x: 11 y: 12
+ x: 13 y: 14
+ {opensql}ROLLBACK{stop}
+
+The example above can be compared to the example in the preceding section
+in :ref:`tutorial_bundling_parameters` - we directly replace the call to
+``with engine.connect() as conn`` with ``with Session(engine) as session``,
+and then make use of the :meth:`_orm.Session.execute` method just like we
+do with the :meth:`_future.Connection.execute` method.
+
+Also, like the :class:`_future.Connection`, the :class:`_orm.Session` features
+"commit as you go" behavior using the :meth:`_orm.Session.commit` method,
+illustrated below using a textual UPDATE statement to alter some of
+our data:
+
+.. sourcecode:: pycon+sql
+
+ >>> with Session(engine) as session:
+ ... result = session.execute(
+ ... text("UPDATE some_table SET y=:y WHERE x=:x"),
+ ... [{"x": 9, "y":11}, {"x": 13, "y": 15}]
+ ... )
+ ... session.commit()
+ {opensql}BEGIN (implicit)
+ UPDATE some_table SET y=? WHERE x=?
+ [...] ((11, 9), (15, 13))
+ COMMIT{stop}
+
+Above, we invoked an UPDATE statement using the bound-parameter, "executemany"
+style of execution introduced at :ref:`tutorial_multiple_parameters`, ending
+the block with a "commit as you go" commit.
+
+.. tip:: The :class:`_orm.Session` doesn't actually hold onto the
+ :class:`_future.Connection` object after it ends the transaction. It
+ gets a new :class:`_future.Connection` from the :class:`_future.Engine`
+ when executing SQL against the database is next needed.
+
+The :class:`_orm.Session` obviously has a lot more tricks up its sleeve
+than that; however, understanding that it has an :meth:`_orm.Session.execute`
+method that's used the same way as :meth:`_future.Connection.execute` will
+get us started with the examples that follow later.
+
+
+
+
+
diff --git a/doc/build/tutorial/engine.rst b/doc/build/tutorial/engine.rst
new file mode 100644
index 000000000..55cd9acfd
--- /dev/null
+++ b/doc/build/tutorial/engine.rst
@@ -0,0 +1,67 @@
+.. |prev| replace:: :doc:`index`
+.. |next| replace:: :doc:`dbapi_transactions`
+
+.. include:: tutorial_nav_include.rst
+
+.. _tutorial_engine:
+
+Establishing Connectivity - the Engine
+==========================================
+
+
+The start of any SQLAlchemy application is an object called the
+:class:`_future.Engine`. This object acts as a central source of connections
+to a particular database, providing both a factory as well as a holding
+space called a :ref:`connection pool <pooling_toplevel>` for these database
+connections. The engine is typically a global object created just
+once for a particular database server, and is configured using a URL string
+which will describe how it should connect to the database host or backend.
+
+For this tutorial we will use an in-memory-only SQLite database. This is an
+easy way to test things without needing to have an actual pre-existing database
+set up. The :class:`_future.Engine` is created by using :func:`_sa.create_engine`, specifying
+the :paramref:`_sa.create_engine.future` flag set to ``True`` so that we make full use
+of :term:`2.0 style` usage:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy import create_engine
+ >>> engine = create_engine("sqlite+pysqlite:///:memory:", echo=True, future=True)
+
+The main argument to :func:`_sa.create_engine`
+is a string URL, above passed as the string ``"sqlite+pysqlite:///:memory:"``.
+This string indicates to the :class:`_future.Engine` three important
+facts:
+
+1. What kind of database are we communicating with? This is the ``sqlite``
+ portion above, which links in SQLAlchemy to an object known as the
+ :term:`dialect`.
+
+2. What :term:`DBAPI` are we using? The Python :term:`DBAPI` is a third party
+ driver that SQLAlchemy uses to interact with a particular database. In
+ this case, we're using the name ``pysqlite``, which in modern Python
+ use is the `sqlite3 <http://docs.python.org/library/sqlite3.html>`_ standard
+ library interface for SQLite.
+
+3. How do we locate the database? In this case, our URL includes the phrase
+ ``/:memory:``, which is an indicator to the ``sqlite3`` module that we
+ will be using an **in-memory-only** database. This kind of database
+ is perfect for experimenting as it does not require any server nor does
+ it need to create new files.
+
+.. sidebar:: Lazy Connecting
+
+ The :class:`_future.Engine`, when first returned by :func:`_sa.create_engine`,
+ has not actually tried to connect to the database yet; that happens
+ only the first time it is asked to perform a task against the database.
+ This is a software design pattern known as :term:`lazy initialization`.
+
+We have also specified a parameter :paramref:`_sa.create_engine.echo`, which
+will instruct the :class:`_future.Engine` to log all of the SQL it emits to a
+Python logger that will write to standard out. This flag is a shorthand way
+of setting up
+:ref:`Python logging more formally <dbengine_logging>` and is useful for
+experimentation in scripts. Many of the SQL examples will include this
+SQL logging output beneath a ``[SQL]`` link that, when clicked, will reveal
+the full SQL interaction.
+
diff --git a/doc/build/tutorial/further_reading.rst b/doc/build/tutorial/further_reading.rst
new file mode 100644
index 000000000..d8b792f03
--- /dev/null
+++ b/doc/build/tutorial/further_reading.rst
@@ -0,0 +1,44 @@
+.. |prev| replace:: :doc:`orm_related_objects`
+
+.. |tutorial_title| replace:: SQLAlchemy 1.4 / 2.0 Tutorial
+
+.. topic:: |tutorial_title|
+
+ This page is part of the :doc:`index`.
+
+ Previous: |prev|
+
+
+.. _tutorial_further_reading:
+
+Further Reading
+===============
+
+The sections below are the major top-level sections that discuss the concepts
+in this tutorial in much more detail, as well as describe many more features
+of each subsystem.
+
+Core Essential Reference
+
+* :ref:`connections_toplevel`
+
+* :ref:`schema_toplevel`
+
+* :ref:`expression_api_toplevel`
+
+* :ref:`types_toplevel`
+
+ORM Essential Reference
+
+* :ref:`mapper_config_toplevel`
+
+* :ref:`relationship_config_toplevel`
+
+* :ref:`session_toplevel`
+
+* :doc:`/orm/loading_objects`
+
+
+
+
+
diff --git a/doc/build/tutorial/index.rst b/doc/build/tutorial/index.rst
new file mode 100644
index 000000000..8547e7f1d
--- /dev/null
+++ b/doc/build/tutorial/index.rst
@@ -0,0 +1,165 @@
+.. |tutorial_title| replace:: SQLAlchemy 1.4 / 2.0 Tutorial
+.. |next| replace:: :doc:`engine`
+
+.. footer_topic:: |tutorial_title|
+
+ Next Section: |next|
+
+.. _unified_tutorial:
+
+.. rst-class:: orm_core
+
+=============================
+SQLAlchemy 1.4 / 2.0 Tutorial
+=============================
+
+.. admonition:: About this document
+
+ The new SQLAlchemy Tutorial is now integrated between Core and ORM and
+ serves as a unified introduction to SQLAlchemy as a whole. In the new
+ :term:`2.0 style` of working, fully available in the :ref:`1.4 release
+ <migration_14_toplevel>`, the ORM now uses Core-style querying with the
+ :func:`_sql.select` construct, and transactional semantics between Core
+ connections and ORM sessions are equivalent. Take note of the blue
+ border styles for each section, that will tell you how "ORM-ish" a
+ particular topic is!
+
+ Users who are already familiar with SQLAlchemy, and especially those
+ looking to migrate existing applications to work under SQLAlchemy 2.0
+ within the 1.4 transitional phase should check out the
+ :ref:`migration_20_toplevel` document as well.
+
+ For the newcomer, this document has a **lot** of detail, however at the
+ end they will be considered an **Alchemist**.
+
+SQLAlchemy is presented as two distinct APIs, one building on top of the other.
+These APIs are known as **Core** and **ORM**.
+
+.. container:: core-header
+
+ **SQLAlchemy Core** is the foundational architecture for SQLAlchemy as a
+ "database toolkit". The library provides tools for managing connectivity
+ to a database, interacting with database queries and results, and
+ programmatic construction of SQL statements.
+
+ Sections that have a **dark blue border on the right** will discuss
+ concepts that are **primarily Core-only**; when using the ORM, these
+ concepts are still in play but are less often explicit in user code.
+
+.. container:: orm-header
+
+ **SQLAlchemy ORM** builds upon the Core to provide optional **object
+ relational mapping** capabilities. The ORM provides an additional
+ configuration layer allowing user-defined Python classes to be **mapped**
+ to database tables and other constructs, as well as an object persistence
+ mechanism known as the **Session**. It then extends the Core-level
+ SQL Expression Language to allow SQL queries to be composed and invoked
+ in terms of user-defined objects.
+
+ Sections that have a **light blue border on the left** will discuss
+ concepts that are **primarily ORM-only**. Core-only users
+ can skip these.
+
+.. container:: core-header, orm-dependency
+
+ A section that has **both light and dark borders on both sides** will
+ discuss a **Core concept that is also used explicitly with the ORM**.
+
+
+Tutorial Overview
+=================
+
+The tutorial will present both concepts in the natural order in which they
+should be learned, first with a mostly Core-centric approach and then
+spanning out into more ORM-centric concepts.
+
+The major sections of this tutorial are as follows:
+
+.. toctree::
+ :hidden:
+ :maxdepth: 10
+
+ engine
+ dbapi_transactions
+ metadata
+ data
+ orm_data_manipulation
+ orm_related_objects
+ further_reading
+
+* :ref:`tutorial_engine` - all SQLAlchemy applications start with an
+ :class:`_engine.Engine` object; here's how to create one.
+
+* :ref:`tutorial_working_with_transactions` - the usage API of the
+  :class:`_engine.Engine` and its related objects :class:`_engine.Connection`
+  and :class:`_result.Result` is presented here. This content is Core-centric;
+  however, ORM users will want to be familiar with at least the
+  :class:`_result.Result` object.
+
+* :ref:`tutorial_working_with_metadata` - SQLAlchemy's SQL abstractions as well
+ as the ORM rely upon a system of defining database schema constructs as
+ Python objects. This section introduces how to do that from both a Core and
+ an ORM perspective.
+
+* :ref:`tutorial_working_with_data` - here we learn how to create, select,
+  update and delete data in the database. The so-called :term:`CRUD`
+  operations here are given in terms of SQLAlchemy Core with links out towards
+  their ORM counterparts. The SELECT operation, introduced in depth at
+  :ref:`tutorial_selecting_data`, applies equally well to Core and ORM.
+
+* :ref:`tutorial_orm_data_manipulation` covers the persistence framework of the
+ ORM; basically the ORM-centric ways to insert, update and delete, as well as
+ how to handle transactions.
+
+* :ref:`tutorial_orm_related_objects` introduces the concept of the
+ :func:`_orm.relationship` construct and provides a brief overview
+ of how it's used, with links to deeper documentation.
+
+* :ref:`tutorial_further_reading` lists a series of major top-level
+ documentation sections which fully document the concepts introduced in this
+ tutorial.
+
+
+.. rst-class:: core-header, orm-dependency
+
+Version Check
+-------------
+
+This tutorial is written using a system called `doctest
+<https://docs.python.org/3/library/doctest.html>`_. All of the code excerpts
+written with a ``>>>`` are actually run as part of SQLAlchemy's test suite, and
+the reader is invited to work with the code examples given in real time with
+their own Python interpreter.
+
+If running the examples, it is advised that the reader perform a quick check to
+verify that we are on **version 1.4** of SQLAlchemy:
+
+.. sourcecode:: pycon+sql
+
+ >>> import sqlalchemy
+ >>> sqlalchemy.__version__ # doctest: +SKIP
+ 1.4.0
+
+.. rst-class:: core-header, orm-dependency
+
+A Note on the Future
+---------------------
+
+This tutorial describes a new API that's released in SQLAlchemy 1.4 known
+as :term:`2.0 style`. The purpose of the 2.0-style API is to provide forwards
+compatibility with :ref:`SQLAlchemy 2.0 <migration_20_toplevel>`, which is
+planned as the next generation of SQLAlchemy.
+
+In order to provide the full 2.0 API, a new flag called ``future`` will be
+used, which will be seen as the tutorial describes the :class:`_engine.Engine`
+and :class:`_orm.Session` objects. These flags fully enable 2.0-compatibility
+mode and allow the code in the tutorial to proceed fully. When using the
+``future`` flag with the :func:`_sa.create_engine` function, the object
+returned is a subclass of :class:`sqlalchemy.engine.Engine` described as
+:class:`sqlalchemy.future.Engine`. This tutorial will be referring to
+:class:`sqlalchemy.future.Engine`.
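+
+To summarize, below is a minimal sketch of the two places where the ``future``
+flag appears; it is shown for orientation only, as each object is introduced
+properly in the sections that follow::
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.orm import Session
+
+    # 2.0-style Engine; create_engine() returns sqlalchemy.future.Engine
+    engine = create_engine("sqlite+pysqlite:///:memory:", future=True)
+
+    # 2.0-style Session; enables 2.0 transactional behaviors
+    with Session(engine, future=True) as session:
+        pass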
+
+
+
+
+
diff --git a/doc/build/tutorial/metadata.rst b/doc/build/tutorial/metadata.rst
new file mode 100644
index 000000000..969536c01
--- /dev/null
+++ b/doc/build/tutorial/metadata.rst
@@ -0,0 +1,526 @@
+.. |prev| replace:: :doc:`dbapi_transactions`
+.. |next| replace:: :doc:`data`
+
+.. include:: tutorial_nav_include.rst
+
+.. _tutorial_working_with_metadata:
+
+Working with Database Metadata
+==============================
+
+With engines and SQL execution behind us, we are ready to begin some Alchemy.
+The central element of both SQLAlchemy Core and ORM is the SQL Expression
+Language, which allows for fluent, composable construction of SQL queries.
+The foundation for these queries is the set of Python objects that represent database
+concepts like tables and columns. These objects are known collectively
+as :term:`database metadata`.
+
+The most common foundational objects for database metadata in SQLAlchemy are
+known as :class:`_schema.MetaData`, :class:`_schema.Table`, and :class:`_schema.Column`.
+The sections below will illustrate how these objects are used in both a
+Core-oriented style as well as an ORM-oriented style.
+
+.. container:: orm-header
+
+ **ORM readers, stay with us!**
+
+ As with other sections, Core users can skip the ORM sections, but ORM users
+ would best be familiar with these objects from both perspectives.
+
+
+.. rst-class:: core-header
+
+.. _tutorial_core_metadata:
+
+Setting up MetaData with Table objects
+---------------------------------------
+
+When we work with a relational database, the basic structure that we create and
+query from is known as a **table**. In SQLAlchemy, the "table" is represented
+by a Python object similarly named :class:`_schema.Table`.
+
+To start using the SQLAlchemy Expression Language,
+we will want to have :class:`_schema.Table` objects constructed that represent
+all of the database tables we are interested in working with. Each
+:class:`_schema.Table` may be **declared**, meaning we explicitly spell out
+in source code what the table looks like, or may be **reflected**, which means
+we generate the object based on what's already present in a particular database.
+The two approaches can also be blended in many ways.
+
+Whether we will declare or reflect our tables, we start out with a collection
+where we will place our tables, known as the :class:`_schema.MetaData`
+object. This object is essentially a :term:`facade` around a Python dictionary
+that stores a series of :class:`_schema.Table` objects keyed to their string
+name. Constructing this object looks like::
+
+ >>> from sqlalchemy import MetaData
+ >>> metadata = MetaData()
+
+Having a single :class:`_schema.MetaData` object for an entire application is
+the most common case, represented as a module-level variable in a single place
+in an application, often in a "models" or "dbschema" type of package. There
+can be multiple :class:`_schema.MetaData` collections as well; however,
+it's typically most helpful if a series of :class:`_schema.Table` objects that are
+related to each other all belong to a single :class:`_schema.MetaData` collection.
+
+
+Once we have a :class:`_schema.MetaData` object, we can declare some
+:class:`_schema.Table` objects. This tutorial will start with the classic
+SQLAlchemy tutorial model, that of the table ``user``, which would for
+example represent the users of a website, and the table ``address``,
+representing a list of email addresses associated with rows in the ``user``
+table. We normally assign each :class:`_schema.Table` object to a variable
+that will be how we will refer to the table in application code::
+
+ >>> from sqlalchemy import Table, Column, Integer, String
+ >>> user_table = Table(
+ ... "user_account",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('name', String(30)),
+ ... Column('fullname', String)
+ ... )
+
+We can observe that the above :class:`_schema.Table` construct looks a lot like
+a SQL CREATE TABLE statement, starting with the table name, then listing out
+each column, where each column has a name and a datatype. The objects we
+use above are:
+
+* :class:`_schema.Table` - represents a database table and assigns itself
+ to a :class:`_schema.MetaData` collection.
+
+* :class:`_schema.Column` - represents a column in a database table, and
+  assigns itself to a :class:`_schema.Table` object. The :class:`_schema.Column`
+  usually includes a string name and a type object. The collection of
+  :class:`_schema.Column` objects in terms of the parent :class:`_schema.Table`
+  is typically accessed via an associative array located at :attr:`_schema.Table.c`::
+
+ >>> user_table.c.name
+ Column('name', String(length=30), table=<user_account>)
+
+ >>> user_table.c.keys()
+ ['id', 'name', 'fullname']
+
+* :class:`_types.Integer`, :class:`_types.String` - these classes represent
+ SQL datatypes and can be passed to a :class:`_schema.Column` with or without
+ necessarily being instantiated. Above, we want to give a length of "30" to
+ the "name" column, so we instantiated ``String(30)``. But for "id" and
+ "fullname" we did not specify these, so we can send the class itself.
+
+.. seealso::
+
+ The reference and API documentation for :class:`_schema.MetaData`,
+ :class:`_schema.Table` and :class:`_schema.Column` is at :ref:`metadata_toplevel`.
+ The reference documentation for datatypes is at :ref:`types_toplevel`.
+
+In an upcoming section, we will illustrate one of the fundamental
+functions of :class:`_schema.Table` which
+is to generate :term:`DDL` on a particular database connection. But first
+we will declare a second :class:`_schema.Table`.
+
+.. rst-class:: core-header
+
+Declaring Simple Constraints
+-----------------------------
+
+The first :class:`_schema.Column` in the above ``user_table`` includes the
+:paramref:`_schema.Column.primary_key` parameter which is a shorthand technique
+of indicating that this :class:`_schema.Column` should be part of the primary
+key for this table. The primary key itself is normally declared implicitly
+and is represented by the :class:`_schema.PrimaryKeyConstraint` construct,
+which we can see on the :attr:`_schema.Table.primary_key`
+attribute on the :class:`_schema.Table` object::
+
+ >>> user_table.primary_key
+ PrimaryKeyConstraint(Column('id', Integer(), table=<user_account>, primary_key=True, nullable=False))
+
+The constraint that is most typically declared explicitly is the
+:class:`_schema.ForeignKeyConstraint` object that corresponds to a database
+:term:`foreign key constraint`. When we declare tables that are related to
+each other, SQLAlchemy uses the presence of these foreign key constraint
+declarations not only so that they are emitted within CREATE statements to
+the database, but also to assist in constructing SQL expressions.
+
+A :class:`_schema.ForeignKeyConstraint` that involves only a single column
+on the target table is typically declared using a column-level shorthand notation
+via the :class:`_schema.ForeignKey` object. Below we declare a second table
+``address`` that will have a foreign key constraint referring to the ``user``
+table::
+
+ >>> from sqlalchemy import ForeignKey
+ >>> address_table = Table(
+ ... "address",
+ ... metadata,
+ ... Column('id', Integer, primary_key=True),
+ ... Column('user_id', ForeignKey('user_account.id'), nullable=False),
+ ... Column('email_address', String, nullable=False)
+ ... )
+
+The table above also features a third kind of constraint, which in SQL is the
+"NOT NULL" constraint, indicated above using the :paramref:`_schema.Column.nullable`
+parameter.
+
+.. tip:: When using the :class:`_schema.ForeignKey` object within a
+ :class:`_schema.Column` definition, we can omit the datatype for that
+ :class:`_schema.Column`; it is automatically inferred from that of the
+ related column, in the above example the :class:`_types.Integer` datatype
+ of the ``user_account.id`` column.
+
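+For completeness, the column-level :class:`_schema.ForeignKey` shorthand shown
+above is equivalent to spelling out a table-level
+:class:`_schema.ForeignKeyConstraint`. If the ``address`` table were declared
+using that more verbose form instead, it would look like the sketch below
+(illustrative only, not part of the doctests)::
+
+    from sqlalchemy import ForeignKeyConstraint
+
+    address_table = Table(
+        "address",
+        metadata,
+        Column('id', Integer, primary_key=True),
+        # the datatype must now be stated explicitly, as there is no
+        # column-level ForeignKey from which to infer it
+        Column('user_id', Integer, nullable=False),
+        Column('email_address', String, nullable=False),
+        ForeignKeyConstraint(['user_id'], ['user_account.id'])
+    )
+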
+In the next section we will emit the completed DDL for the ``user`` and
+``address`` table to see the completed result.
+
+.. rst-class:: core-header, orm-dependency
+
+
+.. _tutorial_emitting_ddl:
+
+Emitting DDL to the Database
+----------------------------
+
+We've constructed a fairly elaborate object hierarchy to represent
+two database tables, starting at the root :class:`_schema.MetaData`
+object, then into two :class:`_schema.Table` objects, each of which hold
+onto a collection of :class:`_schema.Column` and :class:`_schema.Constraint`
+objects. This object structure will be at the center of most operations
+we perform with both Core and ORM going forward.
+
+The first useful thing we can do with this structure will be to emit CREATE
+TABLE statements, or :term:`DDL`, to our SQLite database so that we can insert
+and query data from them. We already have all the tools needed to do so, by
+invoking the
+:meth:`_schema.MetaData.create_all` method on our :class:`_schema.MetaData`,
+sending it the :class:`_future.Engine` that refers to the target database:
+
+.. sourcecode:: pycon+sql
+
+ >>> metadata.create_all(engine)
+ {opensql}BEGIN (implicit)
+ PRAGMA main.table_...info("user_account")
+ ...
+ PRAGMA main.table_...info("address")
+ ...
+ CREATE TABLE user_account (
+ id INTEGER NOT NULL,
+ name VARCHAR(30),
+ fullname VARCHAR,
+ PRIMARY KEY (id)
+ )
+ ...
+ CREATE TABLE address (
+ id INTEGER NOT NULL,
+ user_id INTEGER NOT NULL,
+ email_address VARCHAR NOT NULL,
+ PRIMARY KEY (id),
+ FOREIGN KEY(user_id) REFERENCES user_account (id)
+ )
+ ...
+ COMMIT
+
+The DDL create process by default includes some SQLite-specific PRAGMA statements
+that test for the existence of each table before emitting a CREATE. The full
+series of steps is also included within a BEGIN/COMMIT pair to accommodate
+transactional DDL (SQLite does actually support transactional DDL; however,
+the ``sqlite3`` database driver historically runs DDL in "autocommit" mode).
+
+The create process also takes care of emitting CREATE statements in the correct
+order; above, the FOREIGN KEY constraint is dependent on the ``user`` table
+existing, so the ``address`` table is created second. In more complicated
+dependency scenarios the FOREIGN KEY constraints may also be applied to tables
+after the fact using ALTER.
+
+The :class:`_schema.MetaData` object also features a
+:meth:`_schema.MetaData.drop_all` method that will emit DROP statements in the
+reverse order from which it would emit CREATE statements, in order to drop schema elements.
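+
+A minimal sketch of the create / drop round trip, as it might appear in a
+test suite fixture (illustrative only, not part of the doctests here)::
+
+    # emit CREATE TABLE statements for all tables in the collection
+    metadata.create_all(engine)
+
+    # ... exercise the schema ...
+
+    # emit DROP TABLE statements in reverse dependency order
+    metadata.drop_all(engine)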
+
+.. topic:: Migration tools are usually appropriate
+
+ Overall, the CREATE / DROP feature of :class:`_schema.MetaData` is useful
+ for test suites, small and/or new applications, and applications that use
+ short-lived databases. For management of an application database schema
+ over the long term however, a schema management tool such as `Alembic
+ <https://alembic.sqlalchemy.org>`_, which builds upon SQLAlchemy, is likely
+ a better choice, as it can manage and orchestrate the process of
+ incrementally altering a fixed database schema over time as the design of
+ the application changes.
+
+
+.. rst-class:: orm-header
+
+.. _tutorial_orm_table_metadata:
+
+Defining Table Metadata with the ORM
+------------------------------------
+
+This ORM-only section will provide an example of declaring the
+same database structure illustrated in the previous section, using a more
+ORM-centric configuration paradigm. When using
+the ORM, the process by which we declare :class:`_schema.Table` metadata
+is usually combined with the process of declaring :term:`mapped` classes.
+The mapped class is any Python class we'd like to create, which will then
+have attributes on it that will be linked to the columns in a database table.
+While there are a few varieties of how this is achieved, the most common
+style is known as
+:ref:`declarative <orm_declarative_mapper_config_toplevel>`, and allows us
+to declare our user-defined classes and :class:`_schema.Table` metadata
+at once.
+
+Setting up the Registry
+^^^^^^^^^^^^^^^^^^^^^^^
+
+When using the ORM, the :class:`_schema.MetaData` collection remains present,
+however it itself is contained within an ORM-only object known as the
+:class:`_orm.registry`. We create a :class:`_orm.registry` by constructing
+it::
+
+ >>> from sqlalchemy.orm import registry
+ >>> mapper_registry = registry()
+
+The above :class:`_orm.registry`, when constructed, automatically includes
+a :class:`_schema.MetaData` object that will store a collection of
+:class:`_schema.Table` objects::
+
+ >>> mapper_registry.metadata
+ MetaData()
+
+Instead of declaring :class:`_schema.Table` objects directly, we will now
+declare them indirectly through directives applied to our mapped classes. In
+the most common approach, each mapped class descends from a common base class
+known as the **declarative base**. We get a new declarative base from the
+:class:`_orm.registry` using the :meth:`_orm.registry.generate_base` method::
+
+ >>> Base = mapper_registry.generate_base()
+
+.. tip::
+
+ The steps of creating the :class:`_orm.registry` and "declarative base"
+ classes can be combined into one step using the historically familiar
+ :func:`_orm.declarative_base` function::
+
+ from sqlalchemy.orm import declarative_base
+ Base = declarative_base()
+
+ ..
+
+.. _tutorial_declaring_mapped_classes:
+
+Declaring Mapped Classes
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ``Base`` object above is a Python class which will serve as the base class
+for the ORM mapped classes we declare. We can now define ORM mapped classes
+for the ``user`` and ``address`` table in terms of new classes ``User`` and
+``Address``::
+
+ >>> from sqlalchemy.orm import relationship
+ >>> class User(Base):
+ ... __tablename__ = 'user_account'
+ ...
+ ... id = Column(Integer, primary_key=True)
+ ... name = Column(String(30))
+ ... fullname = Column(String)
+ ...
+ ... addresses = relationship("Address", back_populates="user")
+ ...
+ ... def __repr__(self):
+ ... return f"User(id={self.id!r}, name={self.name!r}, fullname={self.fullname!r})"
+
+ >>> class Address(Base):
+ ... __tablename__ = 'address'
+ ...
+ ... id = Column(Integer, primary_key=True)
+ ... email_address = Column(String, nullable=False)
+ ... user_id = Column(Integer, ForeignKey('user_account.id'))
+ ...
+ ... user = relationship("User", back_populates="addresses")
+ ...
+ ... def __repr__(self):
+ ... return f"Address(id={self.id!r}, email_address={self.email_address!r})"
+
+The above two classes are now our mapped classes, and are available for use in
+ORM persistence and query operations, which will be described later. But they
+also include :class:`_schema.Table` objects that were generated as part of the
+declarative mapping process, and are equivalent to the ones that we declared
+directly in the previous Core section. We can see these
+:class:`_schema.Table` objects from a declarative mapped class using the
+``.__table__`` attribute::
+
+ >>> User.__table__
+ Table('user_account', MetaData(),
+ Column('id', Integer(), table=<user_account>, primary_key=True, nullable=False),
+ Column('name', String(length=30), table=<user_account>),
+ Column('fullname', String(), table=<user_account>), schema=None)
+
+This :class:`_schema.Table` object was generated from the declarative process
+based on the ``.__tablename__`` attribute defined on each of our classes,
+as well as through the use of :class:`_schema.Column` objects assigned
+to class-level attributes within the classes. These :class:`_schema.Column`
+objects can usually be declared without an explicit "name" field inside
+the constructor, as the Declarative process will name them automatically
+based on the attribute name that was used.
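+
+As a quick hypothetical illustration (the ``Preference`` class and
+``user_preference`` table below are not part of this tutorial's model), an
+explicit string name may still be given to a :class:`_schema.Column` when the
+database column name should differ from the mapped attribute name::
+
+    class Preference(Base):
+        __tablename__ = 'user_preference'
+
+        id = Column(Integer, primary_key=True)
+
+        # the Python attribute is "pref_value"; the database column is "value"
+        pref_value = Column("value", String(50))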
+
+Other Mapped Class Details
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+For a few quick explanations of the classes above, note the following
+attributes:
+
+* **the classes have an automatically generated __init__() method** - both classes by default
+ receive an ``__init__()`` method that allows for parameterized construction
+ of the objects. We are free to provide our own ``__init__()`` method as well.
+ The ``__init__()`` allows us to create instances of ``User`` and ``Address``
+ passing attribute names, most of which above are linked directly to
+ :class:`_schema.Column` objects, as parameter names::
+
+ >>> sandy = User(name="sandy", fullname="Sandy Cheeks")
+
+ More detail on this method is at :ref:`mapped_class_default_constructor`.
+
+ ..
+
+* **we provided a __repr__() method** - this is **fully optional**, and is
+ strictly so that our custom classes have a descriptive string representation
+ and is not otherwise required::
+
+ >>> sandy
+ User(id=None, name='sandy', fullname='Sandy Cheeks')
+
+ ..
+
+ An interesting thing to note above is that the ``id`` attribute automatically
+ returns ``None`` when accessed, rather than raising ``AttributeError`` as
+ would be the usual Python behavior for missing attributes.
+
+* **we also included a bidirectional relationship** - this is another **fully optional**
+ construct, where we made use of an ORM construct called
+ :func:`_orm.relationship` on both classes, which indicates to the ORM that
+ these ``User`` and ``Address`` classes refer to each other in a :term:`one to
+ many` / :term:`many to one` relationship. The use of
+ :func:`_orm.relationship` above is so that we may demonstrate its behavior
+ later in this tutorial; it is **not required** in order to define the
+ :class:`_schema.Table` structure.
+
+
+Emitting DDL to the database
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This section has the same name as the earlier section :ref:`tutorial_emitting_ddl`,
+which discussed the process in terms of Core. This is because emitting DDL with our
+ORM mapped classes is not any different. If we wanted to emit DDL
+for the :class:`_schema.Table` objects we've created as part of
+our declaratively mapped classes, we still can use
+:meth:`_schema.MetaData.create_all` as before.
+
+In our case, we have already generated the ``user`` and ``address`` tables
+in our SQLite database. If we had not done so already, we would be free to
+make use of the :class:`_schema.MetaData` associated with our
+:class:`_orm.registry` and ORM declarative base class in order to do so,
+using :meth:`_schema.MetaData.create_all`::
+
+ # emit CREATE statements given ORM registry
+ mapper_registry.metadata.create_all(engine)
+
+ # the identical MetaData object is also present on the
+ # declarative base
+ Base.metadata.create_all(engine)
+
+
+Combining Core Table Declarations with ORM Declarative
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As an alternative approach to the mapping process shown previously
+at :ref:`tutorial_declaring_mapped_classes`, we may also make
+use of the :class:`_schema.Table` objects we created directly in the section
+:ref:`tutorial_core_metadata` in conjunction with
+declarative mapped classes from a :func:`_orm.declarative_base` generated base
+class.
+
+This form is called :ref:`hybrid table <orm_imperative_table_configuration>`,
+and it consists of assigning to the ``.__table__`` attribute directly, rather
+than having the declarative process generate it::
+
+ class User(Base):
+ __table__ = user_table
+
+ addresses = relationship("Address", back_populates="user")
+
+ def __repr__(self):
+ return f"User({self.name!r}, {self.fullname!r})"
+
+ class Address(Base):
+ __table__ = address_table
+
+ user = relationship("User", back_populates="addresses")
+
+ def __repr__(self):
+ return f"Address({self.email_address!r})"
+
+The above two classes are equivalent to those which we declared in the
+previous mapping example.
+
+The traditional "declarative base" approach using ``__tablename__`` to
+automatically generate :class:`_schema.Table` objects remains the most popular
+method to declare table metadata. However, disregarding the ORM mapping
+functionality it achieves, as far as table declaration goes it's merely a syntactical
+convenience on top of the :class:`_schema.Table` constructor.
+
+We will next refer to our ORM mapped classes above when we talk about data
+manipulation in terms of the ORM, in the section :ref:`tutorial_inserting_orm`.
+
+
+.. rst-class:: core-header
+
+.. _tutorial_table_reflection:
+
+Table Reflection
+-------------------------------
+
+To round out the section on working with table metadata, we will illustrate
+another operation that was mentioned at the beginning of the section,
+that of **table reflection**. Table reflection refers to the process of
+generating :class:`_schema.Table` and related objects by reading the current
+state of a database. Whereas in the previous sections we've been declaring
+:class:`_schema.Table` objects in Python and then emitting DDL to the database,
+the reflection process does it in reverse.
+
+As an example of reflection, we will create a new :class:`_schema.Table`
+object which represents the ``some_table`` table we created manually in
+the earlier sections of this document. There are again some varieties of
+how this is performed, however the most basic is to construct a
+:class:`_schema.Table` object, given the name of the table and a
+:class:`_schema.MetaData` collection to which it will belong, then
+instead of indicating individual :class:`_schema.Column` and
+:class:`_schema.Constraint` objects, pass it the target :class:`_future.Engine`
+using the :paramref:`_schema.Table.autoload_with` parameter:
+
+.. sourcecode:: pycon+sql
+
+ >>> some_table = Table("some_table", metadata, autoload_with=engine)
+ {opensql}BEGIN (implicit)
+ PRAGMA main.table_...info("some_table")
+ [raw sql] ()
+ SELECT sql FROM (SELECT * FROM sqlite_master UNION ALL SELECT * FROM sqlite_temp_master) WHERE name = ? AND type = 'table'
+ [raw sql] ('some_table',)
+ PRAGMA main.foreign_key_list("some_table")
+ ...
+ PRAGMA main.index_list("some_table")
+ ...
+ ROLLBACK{stop}
+
+At the end of the process, the ``some_table`` object now contains the
+information about the :class:`_schema.Column` objects present in the table, and
+the object is usable in exactly the same way as a :class:`_schema.Table` that
+we declared explicitly::
+
+ >>> some_table
+ Table('some_table', MetaData(),
+ Column('x', INTEGER(), table=<some_table>),
+ Column('y', INTEGER(), table=<some_table>),
+ schema=None)
+
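+As a related sketch (illustrative only, not part of the doctests), an entire
+schema may also be reflected in one step using :meth:`_schema.MetaData.reflect`,
+which populates the :class:`_schema.MetaData` collection with a
+:class:`_schema.Table` object for each table found in the database::
+
+    from sqlalchemy import MetaData
+
+    reflected_metadata = MetaData()
+    reflected_metadata.reflect(engine)
+
+    # reflected tables are available in the MetaData.tables dictionary
+    reflected_some_table = reflected_metadata.tables["some_table"]
+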
+.. seealso::
+
+ Read more about table and schema reflection at :ref:`metadata_reflection_toplevel`.
+
+ For ORM-related variants of table reflection, the section
+ :ref:`orm_declarative_reflected` includes an overview of the available
+ options.
diff --git a/doc/build/tutorial/orm_data_manipulation.rst b/doc/build/tutorial/orm_data_manipulation.rst
new file mode 100644
index 000000000..469d1096b
--- /dev/null
+++ b/doc/build/tutorial/orm_data_manipulation.rst
@@ -0,0 +1,577 @@
+.. |prev| replace:: :doc:`data`
+.. |next| replace:: :doc:`orm_related_objects`
+
+.. include:: tutorial_nav_include.rst
+
+.. rst-class:: orm-header
+
+.. _tutorial_orm_data_manipulation:
+
+Data Manipulation with the ORM
+==============================
+
+The previous section :ref:`tutorial_working_with_data` remained focused on
+the SQL Expression Language from a Core perspective, in order to provide
+continuity across the major SQL statement constructs. This section will
+then build out the lifecycle of the :class:`_orm.Session` and how it interacts
+with these constructs.
+
+**Prerequisite Sections** - the ORM-focused part of the tutorial builds upon
+three previous ORM-centric sections in this document:
+
+* :ref:`tutorial_executing_orm_session` - introduces how to make an ORM :class:`_orm.Session` object
+
+* :ref:`tutorial_orm_table_metadata` - where we set up our ORM mappings of the ``User`` and ``Address`` entities
+
+* :ref:`tutorial_selecting_orm_entities` - a few examples on how to run SELECT statements for entities like ``User``
+
+.. _tutorial_inserting_orm:
+
+Inserting Rows with the ORM
+---------------------------
+
+When using the ORM, the :class:`_orm.Session` object is responsible for
+constructing :class:`_sql.Insert` constructs and emitting them for us in a
+transaction. The way we instruct the :class:`_orm.Session` to do so is by
+**adding** object entries to it; the :class:`_orm.Session` then makes sure
+these new entries will be emitted to the database when they are needed, using
+a process known as a **flush**.
+
+Instances of Classes represent Rows
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Whereas in the previous example we emitted an INSERT using Python dictionaries
+to indicate the data we wanted to add, with the ORM we make direct use of the
+custom Python classes we defined, back at
+:ref:`tutorial_orm_table_metadata`. At the class level, the ``User`` and
+``Address`` classes served as a place to define what the corresponding
+database tables should look like. These classes also serve as extensible
+data objects that we use to create and manipulate rows within a transaction
+as well. Below we will create two ``User`` objects each representing a
+potential database row to be INSERTed::
+
+ >>> squidward = User(name="squidward", fullname="Squidward Tentacles")
+ >>> krabs = User(name="ehkrabs", fullname="Eugene H. Krabs")
+
+We are able to construct these objects using the names of the mapped columns as
+keyword arguments in the constructor. This is possible as the ``User`` class
+includes an automatically generated ``__init__()`` constructor that was
+provided by the ORM mapping so that we could create each object using column
+names as keys in the constructor.
+
+In a similar manner as in our Core examples of :class:`_sql.Insert`, we did not
+include a primary key (i.e. an entry for the ``id`` column), since we would
+like to make use of SQLite's auto-incrementing primary key feature which the
+ORM also integrates with. The value of the ``id`` attribute on the above
+objects, if we were to view it, displays itself as ``None``::
+
+ >>> squidward
+ User(id=None, name='squidward', fullname='Squidward Tentacles')
+
+The ``None`` value is provided by SQLAlchemy to indicate that the attribute
+has no value as of yet. SQLAlchemy-mapped attributes always return a value
+in Python and don't raise ``AttributeError`` when dealing with a new object
+that has not yet had a value assigned.
+
+At the moment, our two objects above are said to be in a state called
+:term:`transient` - they are not associated with any database state and are yet
+to be associated with a :class:`_orm.Session` object that can generate
+INSERT statements for them.
+
+Adding objects to a Session
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To illustrate the addition process step by step, we will create a
+:class:`_orm.Session` without using a context manager (and hence we must
+make sure we close it later!)::
+
+ >>> session = Session(engine)
+
+The objects are then added to the :class:`_orm.Session` using the
+:meth:`_orm.Session.add` method. When this is called, the objects are in a
+state known as :term:`pending` and have not been inserted yet::
+
+ >>> session.add(squidward)
+ >>> session.add(krabs)
+
+When we have pending objects, we can see this state by looking at a
+collection on the :class:`_orm.Session` called :attr:`_orm.Session.new`::
+
+ >>> session.new
+ IdentitySet([User(id=None, name='squidward', fullname='Squidward Tentacles'), User(id=None, name='ehkrabs', fullname='Eugene H. Krabs')])
+
+The above view is using a collection called :class:`.IdentitySet` that is
+essentially a Python set that hashes on object identity in all cases (i.e.,
+using the Python built-in ``id()`` function, rather than the Python ``hash()`` function).
+
+Flushing
+^^^^^^^^
+
+The :class:`_orm.Session` makes use of a pattern known as :term:`unit of work`.
+This generally means it accumulates changes one at a time, but does not actually
+communicate them to the database until needed. This allows it to make
+better decisions about how SQL DML should be emitted in the transaction based
+on a given set of pending changes. When it does emit SQL to the database
+to push out the current set of changes, the process is known as a **flush**.
+
+We can illustrate the flush process manually by calling the :meth:`_orm.Session.flush`
+method:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.flush()
+ {opensql}BEGIN (implicit)
+ INSERT INTO user_account (name, fullname) VALUES (?, ?)
+ [...] ('squidward', 'Squidward Tentacles')
+ INSERT INTO user_account (name, fullname) VALUES (?, ?)
+ [...] ('ehkrabs', 'Eugene H. Krabs')
+
+Above we observe the :class:`_orm.Session` was first called upon to emit
+SQL, so it created a new transaction and emitted the appropriate INSERT
+statements for the two objects. The transaction now **remains open**
+until we call the :meth:`_orm.Session.commit` method.
+
+While :meth:`_orm.Session.flush` may be used to manually push out pending
+changes to the current transaction, it is usually unnecessary as the
+:class:`_orm.Session` features a behavior known as **autoflush**, which
+we will illustrate later. It also flushes out changes whenever
+:meth:`_orm.Session.commit` is called.
+
+
+Autogenerated primary key attributes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Once the rows are inserted, the two Python objects we've created are in a
+state known as :term:`persistent`, where they are associated with the
+:class:`_orm.Session` object in which they were added or loaded, and feature lots of
+other behaviors that will be covered later.
+
+Another effect of the INSERT that occurred was that the ORM has retrieved the
+new primary key identifiers for each new object; internally it normally uses
+the same :attr:`_engine.CursorResult.inserted_primary_key` accessor we
+introduced previously. The ``squidward`` and ``krabs`` objects now have these new
+primary key identifiers associated with them and we can view them by accessing
+the ``id`` attribute::
+
+ >>> squidward.id
+ 4
+ >>> krabs.id
+ 5
+
+.. tip:: Why did the ORM emit two separate INSERT statements when it could have
+ used :ref:`executemany <tutorial_multiple_parameters>`? As we'll see in the
+ next section, the
+ :class:`_orm.Session` when flushing objects always needs to know the
+ primary key of newly inserted objects. If a feature such as SQLite's autoincrement is used
+ (other examples include PostgreSQL IDENTITY or SERIAL, using sequences,
+ etc.), the :attr:`_engine.CursorResult.inserted_primary_key` feature
+ usually requires that each INSERT is emitted one row at a time. If we had provided values for the primary keys ahead of
+ time, the ORM would have been able to optimize the operation better. Some
+ database backends such as :ref:`psycopg2 <postgresql_psycopg2>` can also
+ INSERT many rows at once while still being able to retrieve the primary key
+ values.
+
+Identity Map
+^^^^^^^^^^^^
+
+The primary key identities of the objects are significant to the :class:`_orm.Session`,
+as the objects are now linked to this identity in memory using a feature
+known as the :term:`identity map`. The identity map is an in-memory store
+that links all objects currently loaded in memory to their primary key
+identity. We can observe this by retrieving one of the above objects
+using the :meth:`_orm.Session.get` method, which will return an entry
+from the identity map if locally present, otherwise emitting a SELECT::
+
+ >>> some_squidward = session.get(User, 4)
+ >>> some_squidward
+ User(id=4, name='squidward', fullname='Squidward Tentacles')
+
+The important thing to note about the identity map is that it maintains a
+**unique instance** of a particular Python object per a particular database
+identity, within the scope of a particular :class:`_orm.Session` object. We
+may observe that the ``some_squidward`` refers to the **same object** as that
+of ``squidward`` previously::
+
+ >>> some_squidward is squidward
+ True
+
+The identity map is a critical feature that allows complex sets of objects
+to be manipulated within a transaction without things getting out of sync.
+
+
+Committing
+^^^^^^^^^^^
+
+There's much more to say about how the :class:`_orm.Session` works which will
+be discussed further. For now we will commit the transaction so that
+we can build up knowledge on how to SELECT rows before examining more ORM
+behaviors and features:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.commit()
+ COMMIT
+
+.. _tutorial_orm_updating:
+
+Updating ORM Objects
+--------------------
+
+In the preceding section :ref:`tutorial_core_update_delete`, we introduced the
+:class:`_sql.Update` construct that represents a SQL UPDATE statement. When
+using the ORM, there are two ways in which this construct is used. The primary
+way is that it is emitted automatically as part of the :term:`unit of work`
+process used by the :class:`_orm.Session`, where an UPDATE statement is emitted
+on a per-primary key basis corresponding to individual objects that have
+changes on them. The second way, known as an "ORM-enabled UPDATE", allows us
+to use the :class:`_sql.Update` construct with the
+:class:`_orm.Session` explicitly; this is described in the next section.
+
+Supposing we loaded the ``User`` object for the username ``sandy`` into
+a transaction (also showing off the :meth:`_sql.Select.filter_by` method
+as well as the :meth:`_engine.Result.scalar_one` method):
+
+.. sourcecode:: pycon+sql
+
+ {sql}>>> sandy = session.execute(select(User).filter_by(name="sandy")).scalar_one()
+ BEGIN (implicit)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = ?
+ [...] ('sandy',)
+
+The Python object ``sandy`` as mentioned before acts as a **proxy** for the
+row in the database, more specifically the database row **in terms of the
+current transaction**, that has the primary key identity of ``2``::
+
+ >>> sandy
+ User(id=2, name='sandy', fullname='Sandy Cheeks')
+
+If we alter the attributes of this object, the :class:`_orm.Session` tracks
+this change::
+
+ >>> sandy.fullname = "Sandy Squirrel"
+
+The object appears in a collection called :attr:`_orm.Session.dirty`, indicating
+the object is "dirty"::
+
+ >>> sandy in session.dirty
+ True
+
+When the :class:`_orm.Session` next emits a flush, an UPDATE will be emitted
+that updates this value in the database. As mentioned previously, a flush
+occurs automatically before we emit any SELECT, using a behavior known as
+**autoflush**. We can query directly for the ``User.fullname`` column
+from this row and we will get our updated value back:
+
+.. sourcecode:: pycon+sql
+
+ >>> sandy_fullname = session.execute(
+ ... select(User.fullname).where(User.id == 2)
+ ... ).scalar_one()
+ {opensql}UPDATE user_account SET fullname=? WHERE user_account.id = ?
+ [...] ('Sandy Squirrel', 2)
+ SELECT user_account.fullname
+ FROM user_account
+ WHERE user_account.id = ?
+ [...] (2,){stop}
+ >>> print(sandy_fullname)
+ Sandy Squirrel
+
+We can see above that we requested that the :class:`_orm.Session` execute
+a single :func:`_sql.select` statement. However, the SQL emitted shows
+that an UPDATE was emitted as well, which was the flush process pushing
+out pending changes. The ``sandy`` Python object is now no longer considered
+dirty::
+
+ >>> sandy in session.dirty
+ False
+
+However, note that we are **still in a transaction** and our changes have not
+been pushed to the database's permanent storage. Since Sandy's last name
+is in fact "Cheeks" and not "Squirrel", we will repair this mistake later when
+we roll back the transaction. But first we'll make some more data changes.
+
+
+.. seealso::
+
+ :ref:`session_flushing`- details the flush process as well as information
+ about the :paramref:`_orm.Session.autoflush` setting.
+
+.. _tutorial_orm_enabled_update:
+
+ORM-enabled UPDATE statements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As previously mentioned, there's a second way to emit UPDATE statements in
+terms of the ORM, known as an **ORM-enabled UPDATE statement**. This allows the use
+of a generic SQL UPDATE statement that can affect many rows at once. For example
+to emit an UPDATE that will change the ``User.fullname`` column based on
+a value in the ``User.name`` column:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.execute(
+ ... update(User).
+ ... where(User.name == "sandy").
+ ... values(fullname="Sandy Squirrel Extraodinaire")
+ ... )
+ {opensql}UPDATE user_account SET fullname=? WHERE user_account.name = ?
+ [...] ('Sandy Squirrel Extraodinaire', 'sandy'){stop}
+ <sqlalchemy.engine.cursor.CursorResult object ...>
+
+When invoking the ORM-enabled UPDATE statement, special logic is used to locate
+objects in the current session that match the given criteria, so that they
+are refreshed with the new data. Above, the ``sandy`` object identity
+was located in memory and refreshed::
+
+ >>> sandy.fullname
+ 'Sandy Squirrel Extraodinaire'
+
+The refresh logic is known as the ``synchronize_session`` option, and is described
+in detail in the section :ref:`orm_expression_update_delete`.
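+
+A sketch of selecting a synchronization strategy explicitly, using the
+statement-level execution option form described in
+:ref:`orm_expression_update_delete` (illustrative only, not part of the
+doctests here)::
+
+    from sqlalchemy import update
+
+    stmt = (
+        update(User)
+        .where(User.name == "sandy")
+        .values(fullname="Sandy Cheeks")
+        # "fetch" emits a SELECT first to locate the affected rows
+        .execution_options(synchronize_session="fetch")
+    )
+    session.execute(stmt)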
+
+.. seealso::
+
+ :ref:`orm_expression_update_delete` - describes ORM use of :func:`_sql.update`
+ and :func:`_sql.delete` as well as ORM synchronization options.
+
+
+.. _tutorial_orm_deleting:
+
+
+Deleting ORM Objects
+---------------------
+
+To round out the basic persistence operations, an individual ORM object
+may be marked for deletion by using the :meth:`_orm.Session.delete` method.
+Let's load up ``patrick`` from the database:
+
+.. sourcecode:: pycon+sql
+
+ {sql}>>> patrick = session.get(User, 3)
+ SELECT user_account.id AS user_account_id, user_account.name AS user_account_name,
+ user_account.fullname AS user_account_fullname
+ FROM user_account
+ WHERE user_account.id = ?
+ [...] (3,)
+
+If we mark ``patrick`` for deletion, as is the case with other operations,
+nothing actually happens yet until a flush proceeds::
+
+ >>> session.delete(patrick)
+
+Current ORM behavior is that ``patrick`` stays in the :class:`_orm.Session`
+until the flush proceeds, which as mentioned before occurs if we emit a query:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.execute(select(User).where(User.name == "patrick")).first()
+ {opensql}SELECT address.id AS address_id, address.email_address AS address_email_address,
+ address.user_id AS address_user_id
+ FROM address
+ WHERE ? = address.user_id
+ [...] (3,)
+ DELETE FROM user_account WHERE user_account.id = ?
+ [...] (3,)
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = ?
+ [...] ('patrick',)
+
+Above, the SELECT we asked to emit was preceded by a DELETE, which indicates
+that the pending deletion for ``patrick`` proceeded. There was also a ``SELECT``
+against the ``address`` table, which was prompted by the ORM looking for rows
+in this table that may be related to the target row; this is part of
+a behavior known as :term:`cascade`, and it can be tailored to work more
+efficiently by allowing the database to handle related rows in ``address``
+automatically; the section :ref:`cascade_delete` has all the detail on this.
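+
+A hypothetical sketch of how the ``User.addresses`` relationship might be
+configured so that related ``Address`` rows are handled along with a deleted
+parent ``User``; the options shown here are discussed fully in
+:ref:`cascade_delete`::
+
+    class User(Base):
+        __tablename__ = 'user_account'
+
+        # ... Column mappings as before ...
+
+        # delete related Address objects when the parent User is deleted;
+        # passive_deletes=True additionally lets an ON DELETE CASCADE rule
+        # on the foreign key do the work on the database side, if present
+        addresses = relationship(
+            "Address",
+            back_populates="user",
+            cascade="all, delete-orphan",
+            passive_deletes=True,
+        )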
+
+.. seealso::
+
+ :ref:`cascade_delete` - describes how to tune the behavior of
+ :meth:`_orm.Session.delete` in terms of how related rows in other tables
+ should be handled.
+
+Beyond that, the ``patrick`` object instance now being deleted is no longer
+considered to be persistent within the :class:`_orm.Session`, as is shown
+by the containment check::
+
+ >>> patrick in session
+ False
+
+However just like the UPDATEs we made to the ``sandy`` object, every change
+we've made here is local to an ongoing transaction, which won't become
+permanent if we don't commit it. As rolling the transaction back is actually
+more interesting at the moment, we will do that in the next section.
+
+.. _tutorial_orm_enabled_delete:
+
+ORM-enabled DELETE Statements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Like UPDATE operations, there is also an ORM-enabled version of DELETE which we can
+illustrate by using the :func:`_sql.delete` construct with
+:meth:`_orm.Session.execute`. It also has a feature by which **non expired**
+objects that match the given deletion criteria will be automatically marked
+as "deleted" in the :class:`_orm.Session`:
+
+.. sourcecode:: pycon+sql
+
+ >>> # refresh the target object for demonstration purposes
+ >>> # only, not needed for the DELETE
+ {sql}>>> squidward = session.get(User, 4)
+ SELECT user_account.id AS user_account_id, user_account.name AS user_account_name,
+ user_account.fullname AS user_account_fullname
+ FROM user_account
+ WHERE user_account.id = ?
+ [...] (4,){stop}
+
+ >>> session.execute(delete(User).where(User.name == "squidward"))
+ {opensql}DELETE FROM user_account WHERE user_account.name = ?
+ [...] ('squidward',){stop}
+ <sqlalchemy.engine.cursor.CursorResult object at 0x...>
+
+The ``squidward`` identity, like that of ``patrick``, is now also in a
+deleted state. Note that we had to re-load ``squidward`` above in order
+to demonstrate this; if the object were expired, the DELETE operation
+would not take the time to refresh expired objects just to see that they
+had been deleted::
+
+ >>> squidward in session
+ False
+
+
+
+Rolling Back
+-------------
+
+The :class:`_orm.Session` has a :meth:`_orm.Session.rollback` method that as
+expected emits a ROLLBACK on the SQL connection in progress. However, it also
+has an effect on the objects that are currently associated with the
+:class:`_orm.Session`, in our previous example the Python object ``sandy``.
+While we changed the ``.fullname`` of the ``sandy`` object to read ``"Sandy
+Squirrel"``, we want to roll back this change. Calling
+:meth:`_orm.Session.rollback` will not only roll back the transaction but also
+**expire** all objects currently associated with this :class:`_orm.Session`,
+which will have the effect that they will refresh themselves when next accessed
+using a process known as :term:`lazy loading`:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.rollback()
+ ROLLBACK
+
+To view the "expiration" process more closely, we may observe that the
+Python object ``sandy`` has no state left within its Python ``__dict__``,
+with the exception of a special SQLAlchemy internal state object::
+
+ >>> sandy.__dict__
+ {'_sa_instance_state': <sqlalchemy.orm.state.InstanceState object at 0x...>}
+
+This is the "expired" state; accessing the attribute again will autobegin
+a new transaction and refresh ``sandy`` with the current database row:
+
+.. sourcecode:: pycon+sql
+
+ >>> sandy.fullname
+ {opensql}BEGIN (implicit)
+ SELECT user_account.id AS user_account_id, user_account.name AS user_account_name,
+ user_account.fullname AS user_account_fullname
+ FROM user_account
+ WHERE user_account.id = ?
+ [...] (2,){stop}
+ 'Sandy Cheeks'
+
+We may now observe that the full database row was also populated into the
+``__dict__`` of the ``sandy`` object::
+
+ >>> sandy.__dict__ # doctest: +SKIP
+ {'_sa_instance_state': <sqlalchemy.orm.state.InstanceState object at 0x...>,
+ 'id': 2, 'name': 'sandy', 'fullname': 'Sandy Cheeks'}
+
+As for deleted objects: we noted earlier that ``patrick`` was no longer
+in the session; after the rollback, that object is restored to the :class:`_orm.Session` as well::
+
+ >>> patrick in session
+ True
+
+and of course the database data is present again as well:
+
+
+.. sourcecode:: pycon+sql
+
+ {sql}>>> session.execute(select(User).where(User.name == 'patrick')).scalar_one() is patrick
+ SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.name = ?
+ [...] ('patrick',){stop}
+ True
+
+
+Closing a Session
+------------------
+
+Within the above sections we used a :class:`_orm.Session` object outside
+of a Python context manager, that is, we didn't use the ``with`` statement.
+That's fine; however, if we are doing things this way, it's best that we explicitly
+close out the :class:`_orm.Session` when we are done with it:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.close()
+ {opensql}ROLLBACK
+
+Closing the :class:`_orm.Session`, which is what happens when we use it in
+a context manager as well, accomplishes the following things:
+
+* It :term:`releases` all connection resources to the connection pool, cancelling
+ out (e.g. rolling back) any transactions that were in progress.
+
+ This means that when we make use of a session to perform some read-only
+ tasks and then close it, we don't need to explicitly call upon
+ :meth:`_orm.Session.rollback` to make sure the transaction is rolled back;
+ the connection pool handles this.
+
+* It **expunges** all objects from the :class:`_orm.Session`.
+
+ This means that all the Python objects we had loaded for this :class:`_orm.Session`,
+ like ``sandy``, ``patrick`` and ``squidward``, are now in a state known
+ as :term:`detached`. In particular, we will note that objects that were still
+ in an :term:`expired` state, for example due to the call to :meth:`_orm.Session.commit`,
+ are now non-functional, as they don't contain the state of a current row and
+ are no longer associated with any database transaction in which to be
+ refreshed::
+
+ >>> squidward.name
+ Traceback (most recent call last):
+ ...
+ sqlalchemy.orm.exc.DetachedInstanceError: Instance <User at 0x...> is not bound to a Session; attribute refresh operation cannot proceed
+
+ The detached objects can be re-associated with the same, or a new
+ :class:`_orm.Session` using the :meth:`_orm.Session.add` method, which
+ will re-establish their relationship with their particular database row:
+
+ .. sourcecode:: pycon+sql
+
+ >>> session.add(squidward)
+ >>> squidward.name
+ {opensql}BEGIN (implicit)
+ SELECT user_account.id AS user_account_id, user_account.name AS user_account_name, user_account.fullname AS user_account_fullname
+ FROM user_account
+ WHERE user_account.id = ?
+ [...] (4,){stop}
+ 'squidward'
+
+ ..
+
+ .. tip::
+
+ Try to avoid using objects in their detached state, if possible. When the
+ :class:`_orm.Session` is closed, clean up references to all the
+ previously attached objects as well. For cases where detached objects
+ are necessary, typically the immediate display of just-committed objects
+ for a web application where the :class:`_orm.Session` is closed before
+ the view is rendered, set the :paramref:`_orm.Session.expire_on_commit`
+ flag to ``False``.
+ ..
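+
+The :paramref:`_orm.Session.expire_on_commit` parameter mentioned in the tip
+above is established when the :class:`_orm.Session` is first constructed; a
+minimal sketch (illustrative only; ``some_object`` is a placeholder)::
+
+    from sqlalchemy.orm import Session
+
+    # loaded objects keep their state after commit(), so they remain usable
+    # for read-only display even after the Session is closed
+    with Session(engine, expire_on_commit=False) as session:
+        session.add(some_object)
+        session.commit()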
diff --git a/doc/build/tutorial/orm_related_objects.rst b/doc/build/tutorial/orm_related_objects.rst
new file mode 100644
index 000000000..120492c28
--- /dev/null
+++ b/doc/build/tutorial/orm_related_objects.rst
@@ -0,0 +1,896 @@
+.. highlight:: pycon+sql
+
+.. |prev| replace:: :doc:`orm_data_manipulation`
+.. |next| replace:: :doc:`further_reading`
+
+.. include:: tutorial_nav_include.rst
+
+.. _tutorial_orm_related_objects:
+
+Working with Related Objects
+=============================
+
+In this section, we will cover one more essential ORM concept, which is that of
+how the ORM interacts with mapped classes that refer to other objects. In the
+section :ref:`tutorial_declaring_mapped_classes`, the mapped class examples
+made use of a construct called :func:`_orm.relationship`. This construct
+defines a linkage between two different mapped classes, or from a mapped class
+to itself, the latter of which is called a **self-referential** relationship.
+
+To describe the basic idea of :func:`_orm.relationship`, first we'll review
+the mapping in short form, omitting the :class:`_schema.Column` mappings
+and other directives:
+
+.. sourcecode:: python
+
+ from sqlalchemy.orm import relationship
+ class User(Base):
+ __tablename__ = 'user_account'
+
+ # ... Column mappings
+
+ addresses = relationship("Address", back_populates="user")
+
+
+ class Address(Base):
+ __tablename__ = 'address'
+
+ # ... Column mappings
+
+ user = relationship("User", back_populates="addresses")
+
+
+Above, the ``User`` class now has an attribute ``User.addresses`` and the
+``Address`` class has an attribute ``Address.user``. The
+:func:`_orm.relationship` construct will be used to inspect the table
+relationships between the :class:`_schema.Table` objects that are mapped to the
+``User`` and ``Address`` classes. As the :class:`_schema.Table` object
+representing the
+``address`` table has a :class:`_schema.ForeignKeyConstraint` which refers to
+the ``user_account`` table, the :func:`_orm.relationship` can determine
+unambiguously that there is a :term:`one to many` relationship along
+``User.addresses``, from ``User`` to ``Address``; one particular row in the ``user_account``
+table may be referred to by many rows in the ``address`` table.
+
+All one-to-many relationships naturally correspond to a :term:`many to one`
+relationship in the other direction, in this case the one noted by
+``Address.user``. The :paramref:`_orm.relationship.back_populates` parameter,
+seen above configured on both :func:`_orm.relationship` objects referring to
+the other name, establishes that each of these two :func:`_orm.relationship`
+constructs should be considered to be complementary to each other; we will see
+how this plays out in the next section.
+
+
+Persisting and Loading Relationships
+-------------------------------------
+
+We can start by illustrating what :func:`_orm.relationship` does to instances
+of objects. If we make a new ``User`` object, we can note that there is a
+Python list when we access the ``.addresses`` element::
+
+ >>> u1 = User(name='pkrabs', fullname='Pearl Krabs')
+ >>> u1.addresses
+ []
+
+This object is a SQLAlchemy-specific version of Python ``list`` which
+has the ability to track and respond to changes made to it. The collection
+also appeared automatically when we accessed the attribute, even though we never assigned it to the object.
+This is similar to the behavior noted at :ref:`tutorial_inserting_orm` where
+it was observed that column-based attributes to which we don't explicitly
+assign a value also display as ``None`` automatically, rather than raising
+an ``AttributeError`` as would be Python's usual behavior.
+
+As the ``u1`` object is still :term:`transient` and the ``list`` that we got
+from ``u1.addresses`` has not been mutated (i.e. appended or extended), it's
+not actually associated with the object yet, but as we make changes to it,
+it will become part of the state of the ``User`` object.
+
+The collection is specific to the ``Address`` class which is the only type
+of Python object that may be persisted within it. Using the ``list.append()``
+method we may add an ``Address`` object::
+
+ >>> a1 = Address(email_address="pearl.krabs@gmail.com")
+ >>> u1.addresses.append(a1)
+
+At this point, the ``u1.addresses`` collection as expected contains the
+new ``Address`` object::
+
+ >>> u1.addresses
+ [Address(id=None, email_address='pearl.krabs@gmail.com')]
+
+As we associated the ``Address`` object with the ``User.addresses`` collection
+of the ``u1`` instance, another behavior also occurred, which is that the
+``User.addresses`` relationship synchronized itself with the ``Address.user``
+relationship, such that we can navigate not only from the ``User`` object
+to the ``Address`` object, we can also navigate from the ``Address`` object
+back to the "parent" ``User`` object::
+
+ >>> a1.user
+ User(id=None, name='pkrabs', fullname='Pearl Krabs')
+
+This synchronization occurred as a result of our use of the
+:paramref:`_orm.relationship.back_populates` parameter between the two
+:func:`_orm.relationship` objects. This parameter names another
+:func:`_orm.relationship` for which complementary attribute assignment / list
+mutation should occur. It will work equally well in the other
+direction, which is that if we create another ``Address`` object and assign
+to its ``Address.user`` attribute, that ``Address`` becomes part of the
+``User.addresses`` collection on that ``User`` object::
+
+ >>> a2 = Address(email_address="pearl@aol.com", user=u1)
+ >>> u1.addresses
+ [Address(id=None, email_address='pearl.krabs@gmail.com'), Address(id=None, email_address='pearl@aol.com')]
+
+We actually made use of the ``user`` parameter as a keyword argument in the
+``Address`` constructor, which is accepted just like any other mapped attribute
+that was declared on the ``Address`` class. It is equivalent to assignment
+of the ``Address.user`` attribute after the fact::
+
+ # equivalent effect as a2 = Address(user=u1)
+ >>> a2.user = u1
+
+Cascading Objects into the Session
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+We now have a ``User`` and two ``Address`` objects that are associated in a
+bidirectional structure
+in memory, but as noted previously in :ref:`tutorial_inserting_orm`,
+these objects are said to be in the :term:`transient` state until they
+are associated with a :class:`_orm.Session` object.
+
+We make use of the :class:`_orm.Session` that's still ongoing, and note that
+when we apply the :meth:`_orm.Session.add` method to the lead ``User`` object,
+the related ``Address`` objects also get added to that same :class:`_orm.Session`::
+
+ >>> session.add(u1)
+ >>> u1 in session
+ True
+ >>> a1 in session
+ True
+ >>> a2 in session
+ True
+
+The above behavior, where the :class:`_orm.Session` received a ``User`` object,
+and followed along the ``User.addresses`` relationship to locate a related
+``Address`` object, is known as the **save-update cascade** and is discussed
+in detail in the ORM reference documentation at :ref:`unitofwork_cascades`.
+
+The three objects are now in the :term:`pending` state; this means they are
+ready to be the subject of an INSERT operation but this has not yet proceeded;
+all three objects have no primary key assigned yet, and in addition, the ``a1``
+and ``a2`` objects have an attribute called ``user_id`` which refers to the
+:class:`_schema.Column` that has a :class:`_schema.ForeignKeyConstraint`
+referring to the ``user_account.id`` column; these are also ``None`` as the
+objects are not yet associated with a real database row::
+
+ >>> print(u1.id)
+ None
+ >>> print(a1.user_id)
+ None
+
+It's at this stage that we can see the very great utility that the unit of
+work process provides; recall in the section :ref:`tutorial_core_insert_values_clause`,
+rows were inserted into the ``user_account`` and
+``address`` tables using some elaborate syntaxes in order to automatically
+associate the ``address.user_id`` columns with those of the ``user_account``
+rows. Additionally, it was necessary that we emit INSERT for ``user_account``
+rows first, before those of ``address``, since rows in ``address`` are
+**dependent** on their parent row in ``user_account`` for a value in their
+``user_id`` column.
+
+When using the :class:`_orm.Session`, all this tedium is handled for us and
+even the most die-hard SQL purist can benefit from automation of INSERT,
+UPDATE and DELETE statements. When we :meth:`_orm.Session.commit` the
+transaction, all steps are invoked in the correct order, and furthermore the
+newly generated primary key of the ``user_account`` row is applied to the
+``address.user_id`` column appropriately:
+
+.. sourcecode:: pycon+sql
+
+ >>> session.commit()
+ {opensql}INSERT INTO user_account (name, fullname) VALUES (?, ?)
+ [...] ('pkrabs', 'Pearl Krabs')
+ INSERT INTO address (email_address, user_id) VALUES (?, ?)
+ [...] ('pearl.krabs@gmail.com', 6)
+ INSERT INTO address (email_address, user_id) VALUES (?, ?)
+ [...] ('pearl@aol.com', 6)
+ COMMIT
+
+.. _tutorial_loading_relationships:
+
+Loading Relationships
+----------------------
+
+In the last step, we called :meth:`_orm.Session.commit` which emitted a COMMIT
+for the transaction, and then per
+:paramref:`_orm.Session.commit.expire_on_commit` expired all objects so that
+they refresh for the next transaction.
+
+When we next access an attribute on these objects, we'll see the SELECT
+emitted for the primary attributes of the row, such as when we view the
+newly generated primary key for the ``u1`` object:
+
+.. sourcecode:: pycon+sql
+
+ >>> u1.id
+ {opensql}BEGIN (implicit)
+ SELECT user_account.id AS user_account_id, user_account.name AS user_account_name,
+ user_account.fullname AS user_account_fullname
+ FROM user_account
+ WHERE user_account.id = ?
+ [...] (6,){stop}
+ 6
+
+The ``u1`` ``User`` object now has a persistent collection ``User.addresses``
+that we may also access. As this collection consists of an additional set
+of rows from the ``address`` table, when we access this collection as well
+we again see a :term:`lazy load` emitted in order to retrieve the objects:
+
+.. sourcecode:: pycon+sql
+
+ >>> u1.addresses
+ {opensql}SELECT address.id AS address_id, address.email_address AS address_email_address,
+ address.user_id AS address_user_id
+ FROM address
+ WHERE ? = address.user_id
+ [...] (6,){stop}
+ [Address(id=4, email_address='pearl.krabs@gmail.com'), Address(id=5, email_address='pearl@aol.com')]
+
+Collections and related attributes in the SQLAlchemy ORM are persistent in
+memory; once the collection or attribute is populated, SQL is no longer emitted
+until that collection or attribute is :term:`expired`. We may access
+``u1.addresses`` again as well as add or remove items and this will not
+incur any new SQL calls::
+
+ >>> u1.addresses
+ [Address(id=4, email_address='pearl.krabs@gmail.com'), Address(id=5, email_address='pearl@aol.com')]
+
+While lazy loading can quickly become expensive if
+we don't take explicit steps to optimize it, the network of lazy loading
+at least is fairly well optimized to not perform redundant work; as the
+``u1.addresses`` collection was refreshed, per the :term:`identity map`
+these are in fact the same
+``Address`` instances as the ``a1`` and ``a2`` objects we've been dealing with
+already, so we're done loading all attributes in this particular object
+graph::
+
+ >>> a1
+ Address(id=4, email_address='pearl.krabs@gmail.com')
+ >>> a2
+ Address(id=5, email_address='pearl@aol.com')
+
+The issue of how relationships load, or not, is an entire subject unto
+itself. Some additional introduction to these concepts appears later in this
+section at :ref:`tutorial_orm_loader_strategies`.
+
+.. _tutorial_select_relationships:
+
+Using Relationships in Queries
+-------------------------------
+
+The previous section introduced the behavior of the :func:`_orm.relationship`
+construct when working with **instances of a mapped class**, namely the
+``u1``, ``a1`` and ``a2`` instances of the ``User`` and ``Address`` classes.
+In this section, we introduce the behavior of :func:`_orm.relationship` as it
+applies to **class level behavior of a mapped class**, where it serves in
+several ways to help automate the construction of SQL queries.
+
+.. _tutorial_joining_relationships:
+
+Using Relationships to Join
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The sections :ref:`tutorial_select_join` and
+:ref:`tutorial_select_join_onclause` introduced the usage of the
+:meth:`_sql.Select.join` and :meth:`_sql.Select.join_from` methods to compose
+SQL JOIN clauses. In order to describe how to join between tables, these
+methods either **infer** the ON clause based on the presence of a single
+unambiguous :class:`_schema.ForeignKeyConstraint` object within the table
+metadata structure that links the two tables, or otherwise accept an
+explicit SQL Expression construct that indicates a specific ON clause.
+
+When using ORM entities, an additional mechanism is available to help us set up
+the ON clause of a join, which is to make use of the :func:`_orm.relationship`
+objects that we set up in our user mapping, as was demonstrated at
+:ref:`tutorial_declaring_mapped_classes`. The class-bound attribute
+corresponding to the :func:`_orm.relationship` may be passed as the **single
+argument** to :meth:`_sql.Select.join`, where it serves to indicate both the
+right side of the join as well as the ON clause at once::
+
+ >>> print(
+ ... select(Address.email_address).
+ ... select_from(User).
+ ... join(User.addresses)
+ ... )
+ {opensql}SELECT address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+The presence of an ORM :func:`_orm.relationship` on a mapping is not used
+by :meth:`_sql.Select.join` or :meth:`_sql.Select.join_from` if we don't
+specify it; it is **not used for ON clause
+inference**. This means, if we join from ``User`` to ``Address`` without an
+ON clause, it works because of the :class:`_schema.ForeignKeyConstraint`
+between the two mapped :class:`_schema.Table` objects, not because of the
+:func:`_orm.relationship` objects on the ``User`` and ``Address`` classes::
+
+ >>> print(
+ ... select(Address.email_address).
+ ... join_from(User, Address)
+ ... )
+ {opensql}SELECT address.email_address
+ FROM user_account JOIN address ON user_account.id = address.user_id
+
+.. _tutorial_joining_relationships_aliased:
+
+Joining between Aliased targets
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In the section :ref:`tutorial_orm_entity_aliases` we introduced the
+:func:`_orm.aliased` construct, which is used to apply a SQL alias to an
+ORM entity. When using a :func:`_orm.relationship` to help construct a SQL JOIN,
+the use case where the target of the join is an :func:`_orm.aliased` entity is
+suited by making use of the :meth:`_orm.PropComparator.of_type` modifier. To
+demonstrate, we will construct the same join illustrated at :ref:`tutorial_orm_entity_aliases`
+using the :func:`_orm.relationship` attributes to join instead::
+
+ >>> print(
+ ... select(User).
+ ... join(User.addresses.of_type(address_alias_1)).
+ ... where(address_alias_1.email_address == 'patrick@aol.com').
+ ... join(User.addresses.of_type(address_alias_2)).
+ ... where(address_alias_2.email_address == 'patrick@gmail.com')
+ ... )
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ JOIN address AS address_1 ON user_account.id = address_1.user_id
+ JOIN address AS address_2 ON user_account.id = address_2.user_id
+ WHERE address_1.email_address = :email_address_1
+ AND address_2.email_address = :email_address_2
+
+To make use of a :func:`_orm.relationship` to construct a join **from** an
+aliased entity, the attribute is available from the :func:`_orm.aliased`
+construct directly::
+
+ >>> user_alias_1 = aliased(User)
+ >>> print(
+ ... select(user_alias_1.name).
+ ... join(user_alias_1.addresses)
+ ... )
+ {opensql}SELECT user_account_1.name
+ FROM user_account AS user_account_1
+ JOIN address ON user_account_1.id = address.user_id
+
+.. _tutorial_joining_relationships_augmented:
+
+Augmenting the ON Criteria
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ON clause generated by the :func:`_orm.relationship` construct may
+also be augmented with additional criteria. This is useful both for
+quick ways to limit the scope of a particular join over a relationship path,
+and also for use cases like configuring loader strategies, introduced below
+at :ref:`tutorial_orm_loader_strategies`. The :meth:`_orm.PropComparator.and_`
+method accepts a series of SQL expressions positionally that will be joined
+to the ON clause of the JOIN via AND. For example, if we wanted to
+JOIN from ``User`` to ``Address`` but also limit the ON criteria to only certain
+email addresses:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = (
+ ... select(User.fullname).
+ ... join(User.addresses.and_(Address.email_address == 'pearl.krabs@gmail.com'))
+ ... )
+ >>> session.execute(stmt).all()
+ {opensql}SELECT user_account.fullname
+ FROM user_account
+ JOIN address ON user_account.id = address.user_id AND address.email_address = ?
+ [...] ('pearl.krabs@gmail.com',){stop}
+ [('Pearl Krabs',)]
+
+
+.. _tutorial_relationship_exists:
+
+EXISTS forms / has() / any()
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In the section :ref:`tutorial_exists`, we introduced the :class:`_sql.Exists`
+object that provides for the SQL EXISTS keyword in conjunction with a
+scalar subquery. The :func:`_orm.relationship` construct provides some
+helper methods that may be used to generate common EXISTS styles
+of queries in terms of the relationship.
+
+For a one-to-many relationship such as ``User.addresses``, an EXISTS against
+the ``address`` table that correlates back to the ``user_account`` table
+can be produced using :meth:`_orm.PropComparator.any`. This method accepts
+an optional WHERE criteria to limit the rows matched by the subquery:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = (
+ ... select(User.fullname).
+ ... where(User.addresses.any(Address.email_address == 'pearl.krabs@gmail.com'))
+ ... )
+ >>> session.execute(stmt).all()
+ {opensql}SELECT user_account.fullname
+ FROM user_account
+ WHERE EXISTS (SELECT 1
+ FROM address
+ WHERE user_account.id = address.user_id AND address.email_address = ?)
+ [...] ('pearl.krabs@gmail.com',){stop}
+ [('Pearl Krabs',)]
+
+As EXISTS tends to be more efficient for negative lookups, a common query
+is to locate entities where there are no related entities present. This
+is succinctly expressed using a phrase such as ``~User.addresses.any()``, to select
+for ``User`` entities that have no related ``Address`` rows:
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = (
+ ... select(User.fullname).
+ ... where(~User.addresses.any())
+ ... )
+ >>> session.execute(stmt).all()
+ {opensql}SELECT user_account.fullname
+ FROM user_account
+ WHERE NOT (EXISTS (SELECT 1
+ FROM address
+ WHERE user_account.id = address.user_id))
+ [...] (){stop}
+ [('Patrick McStar',), ('Squidward Tentacles',), ('Eugene H. Krabs',)]
+
+The :meth:`_orm.PropComparator.has` method works in mostly the same way as
+:meth:`_orm.PropComparator.any`, except that it's used for many-to-one
+relationships, such as if we wanted to locate all ``Address`` objects
+which belonged to "pearl":
+
+.. sourcecode:: pycon+sql
+
+ >>> stmt = (
+ ... select(Address.email_address).
+ ... where(Address.user.has(User.name=="pkrabs"))
+ ... )
+ >>> session.execute(stmt).all()
+ {opensql}SELECT address.email_address
+ FROM address
+ WHERE EXISTS (SELECT 1
+ FROM user_account
+ WHERE user_account.id = address.user_id AND user_account.name = ?)
+ [...] ('pkrabs',){stop}
+ [('pearl.krabs@gmail.com',), ('pearl@aol.com',)]
+
+.. _tutorial_relationship_operators:
+
+Common Relationship Operators
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are some additional varieties of SQL generation helpers that come with
+:func:`_orm.relationship`, including:
+
+* **many to one equals comparison** - a specific object instance can be
+  compared to a many-to-one relationship, to select rows where the
+ foreign key of the target entity matches the primary key value of the
+ object given::
+
+ >>> print(select(Address).where(Address.user == u1))
+ {opensql}SELECT address.id, address.email_address, address.user_id
+ FROM address
+ WHERE :param_1 = address.user_id
+
+ ..
+
+* **many to one not equals comparison** - the not equals operator may also
+ be used::
+
+ >>> print(select(Address).where(Address.user != u1))
+ {opensql}SELECT address.id, address.email_address, address.user_id
+ FROM address
+ WHERE address.user_id != :user_id_1 OR address.user_id IS NULL
+
+ ..
+
+* **object is contained in a one-to-many collection** - this is essentially
+  the one-to-many version of the "equals" comparison; it selects rows where the
+ primary key equals the value of the foreign key in a related object::
+
+ >>> print(select(User).where(User.addresses.contains(a1)))
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account
+ WHERE user_account.id = :param_1
+
+ ..
+
+* **An object has a particular parent from a one-to-many perspective** - the
+ :func:`_orm.with_parent` function produces a comparison that returns rows
+  which are referred to by a given parent; this is essentially the
+  same as using the ``==`` operator with the many-to-one side::
+
+ >>> from sqlalchemy.orm import with_parent
+ >>> print(select(Address).where(with_parent(u1, User.addresses)))
+ {opensql}SELECT address.id, address.email_address, address.user_id
+ FROM address
+ WHERE :param_1 = address.user_id
+
+ ..
+
+.. _tutorial_orm_loader_strategies:
+
+Loader Strategies
+-----------------
+
+In the section :ref:`tutorial_loading_relationships` we introduced the concept
+that when we work with instances of mapped objects, accessing the attributes
+that are mapped using :func:`_orm.relationship` will, in the default case, emit
+a :term:`lazy load` when the collection is not populated, in order to load
+the objects that should be present in this collection.
+
+Lazy loading is one of the most famous ORM patterns, and is also the one that
+is most controversial. When several dozen ORM objects in memory each refer to
+a handful of unloaded attributes, routine manipulation of these objects can
+spin off many additional queries that can add up (otherwise known as the
+:term:`N plus one problem`), and to make matters worse they are emitted
+implicitly. These implicit queries may not be noticed, may cause errors
+when they are attempted after there's no longer a database transaction
+available, or when using alternative concurrency patterns such as :ref:`asyncio
+<asyncio_toplevel>`, they actually won't work at all.
+
+At the same time, lazy loading is a vastly popular and useful pattern when it
+is compatible with the concurrency approach in use and isn't otherwise causing
+problems. For these reasons, SQLAlchemy's ORM places a lot of emphasis on
+being able to control and optimize this loading behavior.
+
+Above all, the first step in using ORM lazy loading effectively is to **test
+the application, turn on SQL echoing, and watch the SQL that is emitted**. If
+there seem to be lots of redundant SELECT statements that look very much like
+they could be rolled into one much more efficiently, if there are loads
+occurring inappropriately for objects that have been :term:`detached` from
+their :class:`_orm.Session`, that's when to look into using **loader
+strategies**.
+
+Loader strategies are represented as objects that may be associated with a
+SELECT statement using the :meth:`_sql.Select.options` method, e.g.:
+
+.. sourcecode:: python
+
+ for user_obj in session.execute(
+ select(User).options(selectinload(User.addresses))
+ ).scalars():
+ user_obj.addresses # access addresses collection already loaded
+
+They may be also configured as defaults for a :func:`_orm.relationship` using
+the :paramref:`_orm.relationship.lazy` option, e.g.:
+
+.. sourcecode:: python
+
+ from sqlalchemy.orm import relationship
+ class User(Base):
+ __tablename__ = 'user_account'
+
+ addresses = relationship("Address", back_populates="user", lazy="selectin")
+
+Each loader strategy object adds some kind of information to the statement that
+will be used later by the :class:`_orm.Session` when it is deciding how various
+attributes should be loaded and/or behave when they are accessed.
+
+The sections below will introduce a few of the most prominently used
+loader strategies.
+
+.. seealso::
+
+ Two sections in :ref:`loading_toplevel`:
+
+ * :ref:`relationship_lazy_option` - details on configuring the strategy
+ on :func:`_orm.relationship`
+
+ * :ref:`relationship_loader_options` - details on using query-time
+ loader strategies
+
+Selectin Load
+^^^^^^^^^^^^^^
+
+The most useful loader in modern SQLAlchemy is the
+:func:`_orm.selectinload` loader option. This option solves the most common
+form of the "N plus one" problem which is that of a set of objects that refer
+to related collections. :func:`_orm.selectinload` will ensure that a particular
+collection for a full series of objects is loaded up front using a single
+query. It does this using a SELECT form that in most cases can be emitted
+against the related table alone, without the introduction of JOINs or
+subqueries, and only queries for those parent objects for which the
+collection isn't already loaded. Below we illustrate :func:`_orm.selectinload`
+by loading all of the ``User`` objects and all of their related ``Address``
+objects; while we invoke :meth:`_orm.Session.execute` only once, given a
+:func:`_sql.select` construct, when the database is accessed, there are
+in fact two SELECT statements emitted, the second one being to fetch the
+related ``Address`` objects:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy.orm import selectinload
+ >>> stmt = (
+ ... select(User).options(selectinload(User.addresses)).order_by(User.id)
+ ... )
+ >>> for row in session.execute(stmt):
+ ... print(f"{row.User.name} ({', '.join(a.email_address for a in row.User.addresses)})")
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account ORDER BY user_account.id
+ [...] ()
+ SELECT address.user_id AS address_user_id, address.id AS address_id,
+ address.email_address AS address_email_address
+ FROM address
+ WHERE address.user_id IN (?, ?, ?, ?, ?, ?)
+ [...] (1, 2, 3, 4, 5, 6){stop}
+ spongebob (spongebob@sqlalchemy.org)
+ sandy (sandy@sqlalchemy.org, sandy@squirrelpower.org)
+ patrick ()
+ squidward ()
+ ehkrabs ()
+ pkrabs (pearl.krabs@gmail.com, pearl@aol.com)
+
+.. seealso::
+
+ :ref:`selectin_eager_loading` - in :ref:`loading_toplevel`
+
+Joined Load
+^^^^^^^^^^^
+
+The :func:`_orm.joinedload` eager load strategy is the oldest eager loader in
+SQLAlchemy, which augments the SELECT statement that's being passed to the
+database with a JOIN (which may be an outer or an inner join depending on
+options), which can then load in related objects.
+
+The :func:`_orm.joinedload` strategy is best suited towards loading
+related many-to-one objects, as this only requires that additional columns
+are added to a primary entity row that would be fetched in any case.
+For greater efficiency, it also accepts an option :paramref:`_orm.joinedload.innerjoin`
+so that an inner join instead of an outer join may be used for a case such
+as below where we know that all ``Address`` objects have an associated
+``User``:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy.orm import joinedload
+ >>> stmt = (
+ ... select(Address).options(joinedload(Address.user, innerjoin=True)).order_by(Address.id)
+ ... )
+ >>> for row in session.execute(stmt):
+ ... print(f"{row.Address.email_address} {row.Address.user.name}")
+ {opensql}SELECT address.id, address.email_address, address.user_id, user_account_1.id AS id_1,
+ user_account_1.name, user_account_1.fullname
+ FROM address
+ JOIN user_account AS user_account_1 ON user_account_1.id = address.user_id
+ ORDER BY address.id
+ [...] (){stop}
+ spongebob@sqlalchemy.org spongebob
+ sandy@sqlalchemy.org sandy
+ sandy@squirrelpower.org sandy
+ pearl.krabs@gmail.com pkrabs
+ pearl@aol.com pkrabs
+
+:func:`_orm.joinedload` also works for collections, however it has the effect
+of multiplying out primary rows per related item in a recursive way
+that grows the amount of data sent for a result set by orders of magnitude for
+nested collections and/or larger collections, so its use vs. another option
+such as :func:`_orm.selectinload` should be evaluated on a per-case basis.
+
+It's important to note that the WHERE and ORDER BY criteria of the enclosing
+:class:`_sql.Select` statement **do not target the table rendered by
+joinedload()**. Above, it can be seen in the SQL that an **anonymous alias**
+is applied to the ``user_account`` table such that it is not directly addressable
+in the query. This concept is discussed in more detail in the section
+:ref:`zen_of_eager_loading`.
+
+The ON clause rendered by :func:`_orm.joinedload` may be affected directly by
+using the :meth:`_orm.PropComparator.and_` method described previously at
+:ref:`tutorial_joining_relationships_augmented`; examples of this technique
+with loader strategies are further below at :ref:`tutorial_loader_strategy_augmented`.
+However, more generally, "joined eager loading" may be applied to a
+:class:`_sql.Select` that uses :meth:`_sql.Select.join` using the approach
+described in the next section,
+:ref:`tutorial_orm_loader_strategies_contains_eager`.
+
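+As a brief sketch of the first approach (a fuller example appears below at
+:ref:`tutorial_loader_strategy_augmented`), the extra criteria are attached to
+the relationship attribute passed to :func:`_orm.joinedload`; the email domain
+used here is only illustrative:
+
+.. sourcecode:: python
+
+    from sqlalchemy.orm import joinedload
+
+    # the extra criteria become part of the eager load's ON clause; only
+    # addresses at the illustrative "aol.com" domain are loaded into
+    # User.addresses, while the set of User rows returned is unaffected
+    stmt = select(User).options(
+        joinedload(User.addresses.and_(Address.email_address.endswith("aol.com")))
+    )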
+
+.. tip::
+
+ It's important to note that many-to-one eager loads are often not necessary,
+ as the "N plus one" problem is much less prevalent in the common case. When
+ many objects all refer to the same related object, such as many ``Address``
+  objects that each refer to the same ``User``, SQL will be emitted only once
+ for that ``User`` object using normal lazy loading. The lazy load routine
+ will look up the related object by primary key in the current
+ :class:`_orm.Session` without emitting any SQL when possible.
+
+
+.. seealso::
+
+ :ref:`joined_eager_loading` - in :ref:`loading_toplevel`
+
+.. _tutorial_orm_loader_strategies_contains_eager:
+
+Explicit Join + Eager load
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If we were to load ``Address`` rows while joining to the ``user_account`` table
+using a method such as :meth:`_sql.Select.join` to render the JOIN, we could
+also leverage that JOIN in order to eagerly load the contents of the
+``Address.user`` attribute on each ``Address`` object returned. This is
+essentially the same as using "joined eager loading", but rendering the JOIN
+ourselves. This common use case is achieved by using the
+:func:`_orm.contains_eager` option. This option is very similar to
+:func:`_orm.joinedload`, except that it assumes we have set up the JOIN
+ourselves, and it instead only indicates that additional columns in the COLUMNS
+clause should be loaded into related attributes on each returned object, for
+example:
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy.orm import contains_eager
+ >>> stmt = (
+ ... select(Address).
+ ... join(Address.user).
+ ... where(User.name == 'pkrabs').
+ ... options(contains_eager(Address.user)).order_by(Address.id)
+ ... )
+ >>> for row in session.execute(stmt):
+ ... print(f"{row.Address.email_address} {row.Address.user.name}")
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname,
+ address.id AS id_1, address.email_address, address.user_id
+ FROM address JOIN user_account ON user_account.id = address.user_id
+ WHERE user_account.name = ? ORDER BY address.id
+ [...] ('pkrabs',){stop}
+ pearl.krabs@gmail.com pkrabs
+ pearl@aol.com pkrabs
+
+Above, we both filtered the rows on ``user_account.name`` and also loaded
+rows from ``user_account`` into the ``Address.user`` attribute of the returned
+rows. If we had applied :func:`_orm.joinedload` separately, we would get a
+SQL query that unnecessarily joins twice::
+
+ >>> stmt = (
+ ... select(Address).
+ ... join(Address.user).
+ ... where(User.name == 'pkrabs').
+ ... options(joinedload(Address.user)).order_by(Address.id)
+ ... )
+ >>> print(stmt) # SELECT has a JOIN and LEFT OUTER JOIN unnecessarily
+ {opensql}SELECT address.id, address.email_address, address.user_id,
+ user_account_1.id AS id_1, user_account_1.name, user_account_1.fullname
+ FROM address JOIN user_account ON user_account.id = address.user_id
+ LEFT OUTER JOIN user_account AS user_account_1 ON user_account_1.id = address.user_id
+ WHERE user_account.name = :name_1 ORDER BY address.id
+
+.. seealso::
+
+ Two sections in :ref:`loading_toplevel`:
+
+ * :ref:`zen_of_eager_loading` - describes the above problem in detail
+
+ * :ref:`contains_eager` - using :func:`.contains_eager`
+
+.. _tutorial_loader_strategy_augmented:
+
+Augmenting Loader Strategy Paths
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In :ref:`tutorial_joining_relationships_augmented` we illustrated how to add
+arbitrary criteria to a JOIN rendered with :func:`_orm.relationship` to also
+include additional criteria in the ON clause. The :meth:`_orm.PropComparator.and_`
+method is in fact generally available for most loader options. For example,
+if we wanted to re-load the names of users and their email addresses, but omit
+the email addresses at the ``sqlalchemy.org`` domain, we can apply
+:meth:`_orm.PropComparator.and_` to the argument passed to
+:func:`_orm.selectinload` to add this criteria:
+
+
+.. sourcecode:: pycon+sql
+
+ >>> from sqlalchemy.orm import selectinload
+ >>> stmt = (
+ ... select(User).
+ ... options(
+ ... selectinload(
+ ... User.addresses.and_(
+ ... ~Address.email_address.endswith("sqlalchemy.org")
+ ... )
+ ... )
+ ... ).
+ ... order_by(User.id).
+ ... execution_options(populate_existing=True)
+ ... )
+ >>> for row in session.execute(stmt):
+ ... print(f"{row.User.name} ({', '.join(a.email_address for a in row.User.addresses)})")
+ {opensql}SELECT user_account.id, user_account.name, user_account.fullname
+ FROM user_account ORDER BY user_account.id
+ [...] ()
+ SELECT address.user_id AS address_user_id, address.id AS address_id,
+ address.email_address AS address_email_address
+ FROM address
+ WHERE address.user_id IN (?, ?, ?, ?, ?, ?)
+ AND (address.email_address NOT LIKE '%' || ?)
+ [...] (1, 2, 3, 4, 5, 6, 'sqlalchemy.org'){stop}
+ spongebob ()
+ sandy (sandy@squirrelpower.org)
+ patrick ()
+ squidward ()
+ ehkrabs ()
+ pkrabs (pearl.krabs@gmail.com, pearl@aol.com)
+
+
+A very important thing to note above is that a special option is added with
+``.execution_options(populate_existing=True)``. This option, which takes
+effect when rows are being fetched, indicates that the loader option we are
+using should **replace** the existing contents of collections on the objects,
+if they are already loaded. As we are working with a single
+:class:`_orm.Session` repeatedly, the objects we see being loaded above are the
+same Python instances as those that were first persisted at the start of the
+ORM section of this tutorial.
+
+
+.. seealso::
+
+ :ref:`loader_option_criteria` - in :ref:`loading_toplevel`
+
+ :ref:`orm_queryguide_populate_existing` - in :ref:`queryguide_toplevel`
+
+
+Raiseload
+^^^^^^^^^
+
+One additional loader strategy worth mentioning is :func:`_orm.raiseload`.
+This option is used to completely block an application from having the
+:term:`N plus one` problem at all by causing what would normally be a lazy
+load to raise instead. It has two variants, controlled via
+the :paramref:`_orm.raiseload.sql_only` option, which block either lazy loads
+that require SQL, or all "load" operations, including those which
+only need to consult the current :class:`_orm.Session`.
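+
+As a brief sketch of the query-option form (the mapping-level form is shown
+next), :func:`_orm.raiseload` accepts the relationship attribute to block,
+along with the :paramref:`_orm.raiseload.sql_only` flag:
+
+.. sourcecode:: python
+
+    from sqlalchemy.orm import raiseload
+
+    # raise if loading User.addresses would emit SQL; with sql_only=True,
+    # a load that can be satisfied from the current Session is still allowed
+    stmt = select(User).options(raiseload(User.addresses, sql_only=True))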
+
+One way to use :func:`_orm.raiseload` is to configure it on
+:func:`_orm.relationship` itself, by setting :paramref:`_orm.relationship.lazy`
+to the value ``"raise_on_sql"``, so that for a particular mapping, a certain
+relationship will never try to emit SQL:
+
+.. sourcecode:: python
+
+ class User(Base):
+ __tablename__ = 'user_account'
+
+ # ... Column mappings
+
+ addresses = relationship("Address", back_populates="user", lazy="raise_on_sql")
+
+
+ class Address(Base):
+ __tablename__ = 'address'
+
+ # ... Column mappings
+
+ user = relationship("User", back_populates="addresses", lazy="raise_on_sql")
+
+
+Using such a mapping, the application is blocked from lazy loading,
+indicating that a particular query would need to specify a loader strategy:
+
+.. sourcecode:: python
+
+ u1 = s.execute(select(User)).scalars().first()
+ u1.addresses
+ sqlalchemy.exc.InvalidRequestError: 'User.addresses' is not available due to lazy='raise_on_sql'
+
+
+The exception would indicate that this collection should be loaded up front
+instead:
+
+.. sourcecode:: python
+
+ u1 = s.execute(select(User).options(selectinload(User.addresses))).scalars().first()
+
+The ``lazy="raise_on_sql"`` option tries to be smart about many-to-one
+relationships as well; above, if the ``Address.user`` attribute of an
+``Address`` object were not loaded, but that ``User`` object were locally
+present in the same :class:`_orm.Session`, the "raiseload" strategy would not
+raise an error.
+
+.. seealso::
+
+ :ref:`prevent_lazy_with_raiseload` - in :ref:`loading_toplevel`
+
diff --git a/doc/build/tutorial/tutorial_nav_include.rst b/doc/build/tutorial/tutorial_nav_include.rst
new file mode 100644
index 000000000..c4ee772a8
--- /dev/null
+++ b/doc/build/tutorial/tutorial_nav_include.rst
@@ -0,0 +1,14 @@
+.. note *_include.rst is a naming convention in conf.py
+
+.. |tutorial_title| replace:: SQLAlchemy 1.4 / 2.0 Tutorial
+
+.. topic:: |tutorial_title|
+
+ This page is part of the :doc:`index`.
+
+ Previous: |prev| | Next: |next|
+
+.. footer_topic:: |tutorial_title|
+
+ Next Tutorial Section: |next|
+
diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py
index a7f71f5e5..fddbc501a 100644
--- a/lib/sqlalchemy/engine/interfaces.py
+++ b/lib/sqlalchemy/engine/interfaces.py
@@ -1139,7 +1139,8 @@ class CreateEnginePlugin(object):
:class:`_engine.URL` object should impliement the
:meth:`_engine.CreateEnginePlugin.update_url` method.
- :param kwargs: The keyword arguments passed to :func:`.create_engine`.
+ :param kwargs: The keyword arguments passed to
+ :func:`_sa.create_engine`.
"""
self.url = url
diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py
index cb452ac73..73b07e540 100644
--- a/lib/sqlalchemy/engine/result.py
+++ b/lib/sqlalchemy/engine/result.py
@@ -710,6 +710,10 @@ class Result(_WithKeys, ResultInternal):
:class:`.ResultProxy` interface. When using the ORM, a higher level
object called :class:`.ChunkedIteratorResult` is normally used.
+ .. seealso::
+
+ :ref:`tutorial_fetching_rows` - in the :doc:`/tutorial/index`
+
"""
_process_row = Row
diff --git a/lib/sqlalchemy/engine/row.py b/lib/sqlalchemy/engine/row.py
index 288f08e29..60954fcec 100644
--- a/lib/sqlalchemy/engine/row.py
+++ b/lib/sqlalchemy/engine/row.py
@@ -500,10 +500,14 @@ class RowMapping(BaseRow, collections_abc.Mapping):
"""A ``Mapping`` that maps column names and objects to :class:`.Row` values.
The :class:`.RowMapping` is available from a :class:`.Row` via the
- :attr:`.Row._mapping` attribute and supplies Python mapping (i.e.
- dictionary) access to the contents of the row. This includes support
- for testing of containment of specific keys (string column names or
- objects), as well as iteration of keys, values, and items::
+ :attr:`.Row._mapping` attribute, as well as from the iterable interface
+ provided by the :class:`.MappingResult` object returned by the
+ :meth:`_engine.Result.mappings` method.
+
+ :class:`.RowMapping` supplies Python mapping (i.e. dictionary) access to
+ the contents of the row. This includes support for testing of
+ containment of specific keys (string column names or objects), as well
+ as iteration of keys, values, and items::
for row in result:
if 'a' in row._mapping:
diff --git a/lib/sqlalchemy/orm/context.py b/lib/sqlalchemy/orm/context.py
index 5e9cf9cce..12759f018 100644
--- a/lib/sqlalchemy/orm/context.py
+++ b/lib/sqlalchemy/orm/context.py
@@ -216,6 +216,16 @@ class ORMCompileState(CompileState):
statement._execution_options,
)
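+        # when yield_per is in effect, whether as an execution option or via
+        # Query.yield_per(), also enable stream_results with a matching
+        # max_row_buffer so that the driver may stream rows where supported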
+ if "yield_per" in execution_options or load_options._yield_per:
+ execution_options = execution_options.union(
+ {
+ "stream_results": True,
+ "max_row_buffer": execution_options.get(
+ "yield_per", load_options._yield_per
+ ),
+ }
+ )
+
bind_arguments["clause"] = statement
# new in 1.4 - the coercions system is leveraged to allow the
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index b1ff1a049..64f561cbd 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -499,6 +499,8 @@ class PropComparator(operators.ColumnOperators):
.. seealso::
+ :ref:`orm_queryguide_join_on_augmented`
+
:ref:`loader_option_criteria`
:func:`.with_loader_criteria`
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index 277dda6fb..f79c19849 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -770,66 +770,18 @@ class Query(
(e.g. approximately 1000) is used, even with DBAPIs that buffer
rows (which are most).
- The :meth:`_query.Query.yield_per` method **is not compatible
- subqueryload eager loading or joinedload eager loading when
- using collections**. It is potentially compatible with "select in"
- eager loading, **provided the database driver supports multiple,
- independent cursors** (pysqlite and psycopg2 are known to work,
- MySQL and SQL Server ODBC drivers do not).
-
- Therefore in some cases, it may be helpful to disable
- eager loads, either unconditionally with
- :meth:`_query.Query.enable_eagerloads`::
-
- q = sess.query(Object).yield_per(100).enable_eagerloads(False)
-
- Or more selectively using :func:`.lazyload`; such as with
- an asterisk to specify the default loader scheme::
-
- q = sess.query(Object).yield_per(100).\
- options(lazyload('*'), joinedload(Object.some_related))
-
- .. warning::
-
- Use this method with caution; if the same instance is
- present in more than one batch of rows, end-user changes
- to attributes will be overwritten.
-
- In particular, it's usually impossible to use this setting
- with eagerly loaded collections (i.e. any lazy='joined' or
- 'subquery') since those collections will be cleared for a
- new load when encountered in a subsequent result batch.
- In the case of 'subquery' loading, the full result for all
- rows is fetched which generally defeats the purpose of
- :meth:`~sqlalchemy.orm.query.Query.yield_per`.
-
- Also note that while
- :meth:`~sqlalchemy.orm.query.Query.yield_per` will set the
- ``stream_results`` execution option to True, currently
- this is only understood by
- :mod:`~sqlalchemy.dialects.postgresql.psycopg2`,
- :mod:`~sqlalchemy.dialects.mysql.mysqldb` and
- :mod:`~sqlalchemy.dialects.mysql.pymysql` dialects
- which will stream results using server side cursors
- instead of pre-buffer all rows for this query. Other
- DBAPIs **pre-buffer all rows** before making them
- available. The memory use of raw database rows is much less
- than that of an ORM-mapped object, but should still be taken into
- consideration when benchmarking.
-
- .. seealso::
-
- :ref:`engine_stream_results`
+ As of SQLAlchemy 1.4, the :meth:`_orm.Query.yield_per` method is
+        equivalent to using the ``yield_per`` execution option at the ORM level.
+ See the section :ref:`orm_queryguide_yield_per` for further background
+ on this option.
"""
self.load_options += {"_yield_per": count}
- self._execution_options = self._execution_options.union(
- {"stream_results": True, "max_row_buffer": count}
- )
@util.deprecated_20(
":meth:`_orm.Query.get`",
alternative="The method is now available as :meth:`_orm.Session.get`",
+ becomes_legacy=True,
)
def get(self, ident):
"""Return an instance based on the given primary key identifier,
@@ -983,10 +935,10 @@ class Query(
def autoflush(self, setting):
"""Return a Query with a specific 'autoflush' setting.
- Note that a Session with autoflush=False will
- not autoflush, even if this flag is set to True at the
- Query level. Therefore this flag is usually used only
- to disable autoflush for a specific Query.
+ As of SQLAlchemy 1.4, the :meth:`_orm.Query.autoflush` method
+        is equivalent to using the ``autoflush`` execution option at the
+ ORM level. See the section :ref:`orm_queryguide_autoflush` for
+ further background on this option.
"""
self.load_options += {"_autoflush": setting}
@@ -997,22 +949,10 @@ class Query(
that will expire and refresh all instances
as they are loaded, or reused from the current :class:`.Session`.
- :meth:`.populate_existing` does not improve behavior when
- the ORM is used normally - the :class:`.Session` object's usual
- behavior of maintaining a transaction and expiring all attributes
- after rollback or commit handles object state automatically.
- This method is not intended for general use.
-
- .. versionadded:: 1.4
-
- The :meth:`.populate_existing` method is equivalent to passing the
- ``populate_existing=True`` option to the
- :meth:`_orm.Query.execution_options` method.
-
- .. seealso::
-
- :ref:`session_expire` - in the ORM :class:`_orm.Session`
- documentation
+ As of SQLAlchemy 1.4, the :meth:`_orm.Query.populate_existing` method
+        is equivalent to using the ``populate_existing`` execution option at the
+ ORM level. See the section :ref:`orm_queryguide_populate_existing` for
+ further background on this option.
"""
self.load_options += {"_populate_existing": True}
@@ -1031,6 +971,7 @@ class Query(
@util.deprecated_20(
":meth:`_orm.Query.with_parent`",
alternative="Use the :func:`_orm.with_parent` standalone construct.",
+ becomes_legacy=True,
)
@util.preload_module("sqlalchemy.orm.relationships")
def with_parent(self, instance, property=None, from_entity=None): # noqa
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index af70de101..2fc2ad68c 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -957,18 +957,8 @@ class Session(_SessionClassMethods):
:ref:`session_committing`
-
- :param future: if True, use 2.0 style behavior for the
- :meth:`_orm.Session.execute` method. Future mode includes the
- following behaviors:
-
- * The :class:`_engine.Result` object returned by the
- :meth:`_orm.Session.execute` method will return new-style tuple
- :class:`_engine.Row` objects
-
- * The :meth:`_orm.Session.execute` method will invoke ORM style
- queries given objects like :class:`_sql.Select`,
- :class:`_sql.Update` and :class:`_sql.Delete` against ORM entities
+ :param future: if True, use 2.0 style transactional and engine
+ behavior. Future mode includes the following behaviors:
* The :class:`_orm.Session` will not use "bound" metadata in order
to locate an :class:`_engine.Engine`; the engine or engines in use
@@ -984,9 +974,6 @@ class Session(_SessionClassMethods):
flag on a :func:`_orm.relationship` will always assume
"False" behavior.
- The "future" flag is also available on a per-execution basis
- using the :paramref:`_orm.Session.execute.future` flag.
-
.. versionadded:: 1.4
.. seealso::
@@ -1929,7 +1916,9 @@ class Session(_SessionClassMethods):
def query(self, *entities, **kwargs):
"""Return a new :class:`_query.Query` object corresponding to this
- :class:`.Session`."""
+ :class:`_orm.Session`.
+
+ """
return self._query_cls(entities, self, **kwargs)
diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py
index c923bf651..dc2aacbea 100644
--- a/lib/sqlalchemy/sql/dml.py
+++ b/lib/sqlalchemy/sql/dml.py
@@ -582,14 +582,6 @@ class ValuesBase(UpdateBase):
:meth:`_expression.Update.ordered_values`
- .. seealso::
-
- :ref:`inserts_and_updates` - SQL Expression
- Language Tutorial
-
- :func:`_expression.insert` - produce an ``INSERT`` statement
-
- :func:`_expression.update` - produce an ``UPDATE`` statement
"""
if self._select_names:
@@ -777,9 +769,7 @@ class Insert(ValuesBase):
The :class:`_expression.Insert` object is created using the
:func:`_expression.insert()` function.
- .. seealso::
-
- :ref:`coretutorial_insert_expressions`
+ .. note - the __init__() method delivers the docstring for this object
"""
@@ -834,10 +824,26 @@ class Insert(ValuesBase):
):
"""Construct an :class:`_expression.Insert` object.
+ E.g.::
+
+ from sqlalchemy import insert
+
+ stmt = (
+ insert(user_table).
+ values(name='username', fullname='Full Username')
+ )
+
Similar functionality is available via the
:meth:`_expression.TableClause.insert` method on
:class:`_schema.Table`.
+ .. seealso::
+
+ :ref:`coretutorial_insert_expressions` - in the 1.x tutorial
+
+ :ref:`tutorial_core_insert` - in the 2.0 tutorial
+
+
:param table: :class:`_expression.TableClause`
which is the subject of the
insert.
@@ -976,15 +982,15 @@ class DMLWhereBase(object):
_where_criteria = ()
@_generative
- def where(self, whereclause):
- """Return a new construct with the given expression added to
+ def where(self, *whereclause):
+ """Return a new construct with the given expression(s) added to
its WHERE clause, joined to the existing clause via AND, if any.
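+
+        E.g., a brief sketch of passing multiple criteria, which are joined
+        by AND; ``user_table`` here is an illustrative :class:`_schema.Table`::
+
+            stmt = (
+                update(user_table).
+                where(user_table.c.id == 5, user_table.c.name == 'patrick').
+                values(fullname='Patrick Star')
+            )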
"""
- self._where_criteria += (
- coercions.expect(roles.WhereHavingRole, whereclause),
- )
+ for criterion in list(whereclause):
+ where_criteria = coercions.expect(roles.WhereHavingRole, criterion)
+ self._where_criteria += (where_criteria,)
def filter(self, *criteria):
"""A synonym for the :meth:`_dml.DMLWhereBase.where` method.
@@ -1032,9 +1038,7 @@ class DMLWhereBase(object):
class Update(DMLWhereBase, ValuesBase):
"""Represent an Update construct.
- The :class:`_expression.Update`
- object is created using the :func:`update()`
- function.
+ .. note - the __init__() method delivers the docstring for this object
"""
@@ -1090,16 +1094,23 @@ class Update(DMLWhereBase, ValuesBase):
from sqlalchemy import update
- stmt = update(users).where(users.c.id==5).\
- values(name='user #5')
+ stmt = (
+ update(user_table).
+ where(user_table.c.id == 5).
+ values(name='user #5')
+ )
Similar functionality is available via the
:meth:`_expression.TableClause.update` method on
- :class:`_schema.Table`::
+ :class:`_schema.Table`.
+
+ .. seealso::
+
+ :ref:`inserts_and_updates` - in the 1.x tutorial
+
+ :ref:`tutorial_core_update_delete` - in the 2.0 tutorial
+
- stmt = users.update().\
- where(users.c.id==5).\
- values(name='user #5')
:param table: A :class:`_schema.Table`
object representing the database
@@ -1279,9 +1290,7 @@ class Update(DMLWhereBase, ValuesBase):
class Delete(DMLWhereBase, UpdateBase):
"""Represent a DELETE construct.
- The :class:`_expression.Delete`
- object is created using the :func:`delete()`
- function.
+ .. note - the __init__() method delivers the docstring for this object
"""
@@ -1317,10 +1326,26 @@ class Delete(DMLWhereBase, UpdateBase):
):
r"""Construct :class:`_expression.Delete` object.
+ E.g.::
+
+ from sqlalchemy import delete
+
+ stmt = (
+ delete(user_table).
+ where(user_table.c.id == 5)
+ )
+
Similar functionality is available via the
:meth:`_expression.TableClause.delete` method on
:class:`_schema.Table`.
+ .. seealso::
+
+ :ref:`inserts_and_updates` - in the 1.x tutorial
+
+ :ref:`tutorial_core_update_delete` - in the 2.0 tutorial
+
+
:param table: The table to delete rows from.
:param whereclause: A :class:`_expression.ClauseElement`
diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py
index e268abc8a..550cbea24 100644
--- a/lib/sqlalchemy/sql/elements.py
+++ b/lib/sqlalchemy/sql/elements.py
@@ -1655,7 +1655,6 @@ class TextClause(
:ref:`sqlexpression_text` - in the Core tutorial
- :ref:`orm_tutorial_literal_sql` - in the ORM tutorial
"""
return TextClause(text, bind=bind)
diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py
index d764002a6..ccb1dd7e9 100644
--- a/lib/sqlalchemy/sql/schema.py
+++ b/lib/sqlalchemy/sql/schema.py
@@ -638,13 +638,14 @@ class Table(DialectKWArgs, SchemaItem, TableClause):
)
insp = inspection.inspect(autoload_with)
- insp.reflect_table(
- self,
- include_columns,
- exclude_columns,
- resolve_fks,
- _extend_on=_extend_on,
- )
+ with insp._inspection_context() as conn_insp:
+ conn_insp.reflect_table(
+ self,
+ include_columns,
+ exclude_columns,
+ resolve_fks,
+ _extend_on=_extend_on,
+ )
@property
def _sorted_constraints(self):
@@ -2824,7 +2825,16 @@ class DefaultClause(FetchedValue):
class Constraint(DialectKWArgs, SchemaItem):
- """A table-level SQL constraint."""
+ """A table-level SQL constraint.
+
+ :class:`_schema.Constraint` serves as the base class for the series of
+ constraint objects that can be associated with :class:`_schema.Table`
+ objects, including :class:`_schema.PrimaryKeyConstraint`,
+    :class:`_schema.ForeignKeyConstraint`,
+ :class:`_schema.UniqueConstraint`, and
+ :class:`_schema.CheckConstraint`.
+
+ """
__visit_name__ = "constraint"
@@ -2856,28 +2866,18 @@ class Constraint(DialectKWArgs, SchemaItem):
.. versionadded:: 1.0.0
- :param _create_rule:
- a callable which is passed the DDLCompiler object during
- compilation. Returns True or False to signal inline generation of
- this Constraint.
-
- The AddConstraint and DropConstraint DDL constructs provide
- DDLElement's more comprehensive "conditional DDL" approach that is
- passed a database connection when DDL is being issued. _create_rule
- is instead called during any CREATE TABLE compilation, where there
- may not be any transaction/connection in progress. However, it
- allows conditional compilation of the constraint even for backends
- which do not support addition of constraints through ALTER TABLE,
- which currently includes SQLite.
-
- _create_rule is used by some types to create constraints.
- Currently, its call signature is subject to change at any time.
-
:param \**dialect_kw: Additional keyword arguments are dialect
specific, and passed in the form ``<dialectname>_<argname>``. See
the documentation regarding an individual dialect at
:ref:`dialect_toplevel` for detail on documented arguments.
+ :param _create_rule:
+ used internally by some datatypes that also create constraints.
+
+ :param _type_bound:
+ used internally to indicate that this constraint is associated with
+ a specific datatype.
+
"""
self.name = name
@@ -4158,7 +4158,10 @@ class MetaData(SchemaItem):
"""
def __repr__(self):
- return "MetaData(bind=%r)" % self.bind
+ if self.bind:
+ return "MetaData(bind=%r)" % self.bind
+ else:
+ return "MetaData()"
def __contains__(self, table_or_key):
if not isinstance(table_or_key, util.string_types):
diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py
index fd8832400..895a4532b 100644
--- a/lib/sqlalchemy/sql/selectable.py
+++ b/lib/sqlalchemy/sql/selectable.py
@@ -700,8 +700,7 @@ class FromClause(roles.AnonymizedFromClauseRole, Selectable):
@util.memoized_property
def columns(self):
"""A named-based collection of :class:`_expression.ColumnElement`
- objects
- maintained by this :class:`_expression.FromClause`.
+ objects maintained by this :class:`_expression.FromClause`.
The :attr:`.columns`, or :attr:`.c` collection, is the gateway
to the construction of SQL expressions using table-bound or
@@ -709,6 +708,8 @@ class FromClause(roles.AnonymizedFromClauseRole, Selectable):
select(mytable).where(mytable.c.somecolumn == 5)
+ :return: a :class:`.ColumnCollection` object.
+
"""
if "_columns" not in self.__dict__:
@@ -734,8 +735,12 @@ class FromClause(roles.AnonymizedFromClauseRole, Selectable):
@util.memoized_property
def primary_key(self):
- """Return the collection of :class:`_schema.Column` objects
- which comprise the primary key of this FromClause.
+ """Return the iterable collection of :class:`_schema.Column` objects
+ which comprise the primary key of this :class:`_selectable.FromClause`.
+
+ For a :class:`_schema.Table` object, this collection is represented
+ by the :class:`_schema.PrimaryKeyConstraint` which itself is an
+ iterable collection of :class:`_schema.Column` objects.
"""
self._init_collections()
@@ -771,7 +776,16 @@ class FromClause(roles.AnonymizedFromClauseRole, Selectable):
c = property(
attrgetter("columns"),
- doc="An alias for the :attr:`.columns` attribute.",
+ doc="""
+ A named-based collection of :class:`_expression.ColumnElement`
+ objects maintained by this :class:`_expression.FromClause`.
+
+ The :attr:`_sql.FromClause.c` attribute is an alias for the
+        :attr:`_sql.FromClause.columns` attribute.
+
+ :return: a :class:`.ColumnCollection`
+
+ """,
)
_select_iterable = property(attrgetter("columns"))
@@ -1227,7 +1241,9 @@ class Join(roles.DMLTableRole, FromClause):
)
def bind(self):
"""Return the bound engine associated with either the left or right
- side of this :class:`_sql.Join`."""
+ side of this :class:`_sql.Join`.
+
+ """
return self.left.bind or self.right.bind
@@ -1441,10 +1457,14 @@ class AliasedReturnsRows(NoInit, FromClause):
@property
def description(self):
+ name = self.name
+ if isinstance(name, _anonymous_label):
+ name = "anon_1"
+
if util.py3k:
- return self.name
+ return name
else:
- return self.name.encode("ascii", "backslashreplace")
+ return name.encode("ascii", "backslashreplace")
@property
def original(self):
@@ -1693,8 +1713,18 @@ class CTE(Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows):
"""Represent a Common Table Expression.
The :class:`_expression.CTE` object is obtained using the
- :meth:`_expression.SelectBase.cte` method from any selectable.
- See that method for complete examples.
+ :meth:`_sql.SelectBase.cte` method from any SELECT statement. A less often
+ available syntax also allows use of the :meth:`_sql.HasCTE.cte` method
+ present on :term:`DML` constructs such as :class:`_sql.Insert`,
+ :class:`_sql.Update` and
+ :class:`_sql.Delete`. See the :meth:`_sql.HasCTE.cte` method for
+ usage details on CTEs.
+
+ .. seealso::
+
+ :ref:`tutorial_subqueries_ctes` - in the 2.0 tutorial
+
+ :meth:`_sql.HasCTE.cte` - examples of calling styles
"""
@@ -1955,7 +1985,7 @@ class HasCTE(roles.HasCTERole):
.. seealso::
- :meth:`.orm.query.Query.cte` - ORM version of
+ :meth:`_orm.Query.cte` - ORM version of
:meth:`_expression.HasCTE.cte`.
"""
@@ -2546,10 +2576,29 @@ class SelectBase(
def as_scalar(self):
return self.scalar_subquery()
+ def exists(self):
+ """Return an :class:`_sql.Exists` representation of this selectable,
+ which can be used as a column expression.
+
+ The returned object is an instance of :class:`_sql.Exists`.
+
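+        E.g., a brief sketch, assuming ``user_table`` and ``address_table``
+        are :class:`_schema.Table` objects as used elsewhere in this
+        documentation::
+
+            exists_criteria = (
+                select(address_table.c.id).
+                where(user_table.c.id == address_table.c.user_id).
+                exists()
+            )
+
+            # use the EXISTS criteria within a WHERE clause
+            stmt = select(user_table.c.name).where(exists_criteria)
+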
+ .. seealso::
+
+ :func:`_sql.exists`
+
+ :ref:`tutorial_exists` - in the :term:`2.0 style` tutorial.
+
+ .. versionadded:: 1.4
+
+ """
+ return Exists(self)
+
def scalar_subquery(self):
"""Return a 'scalar' representation of this selectable, which can be
used as a column expression.
+ The returned object is an instance of :class:`_sql.ScalarSelect`.
+
Typically, a select statement which has only one column in its columns
clause is eligible to be used as a scalar expression. The scalar
subquery can then be used in the WHERE clause or columns clause of
@@ -2563,6 +2612,12 @@ class SelectBase(
.. versionchanged: 1.4 - the ``.as_scalar()`` method was renamed to
:meth:`_expression.SelectBase.scalar_subquery`.
+ .. seealso::
+
+ :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+ :ref:`scalar_selects` - in the 1.x tutorial
+
"""
if self._label_style is not LABEL_STYLE_NONE:
self = self._set_label_style(LABEL_STYLE_NONE)
@@ -3777,12 +3832,12 @@ class SelectState(util.MemoizedSlots, CompileState):
if not len(froms):
raise exc.InvalidRequestError(
- "Select statement '%s"
+ "Select statement '%r"
"' returned no FROM clauses "
"due to auto-correlation; "
"specify correlate(<tables>) "
"to control correlation "
- "manually." % self
+ "manually." % self.statement
)
return froms
@@ -3994,7 +4049,9 @@ class Select(
:func:`_sql.select`
- :ref:`coretutorial_selecting` - in the Core tutorial
+ :ref:`coretutorial_selecting` - in the 1.x tutorial
+
+ :ref:`tutorial_selecting_data` - in the 2.0 tutorial
"""
@@ -4491,8 +4548,8 @@ class Select(
.. seealso::
- :ref:`orm_tutorial_literal_sql` - usage examples in the
- ORM tutorial
+ :ref:`orm_queryguide_selecting_text` - usage examples in the
+ ORM Querying Guide
"""
meth = SelectState.get_plugin_class(self).from_statement
@@ -4548,6 +4605,10 @@ class Select(
.. seealso::
+ :ref:`tutorial_select_join` - in the :doc:`/tutorial/index`
+
+ :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel`
+
:meth:`_expression.Select.join_from`
:meth:`_expression.Select.outerjoin`
@@ -4599,6 +4660,10 @@ class Select(
.. seealso::
+ :ref:`tutorial_select_join` - in the :doc:`/tutorial/index`
+
+ :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel`
+
:meth:`_expression.Select.join`
""" # noqa: E501
@@ -4648,6 +4713,10 @@ class Select(
.. seealso::
+ :ref:`tutorial_select_join` - in the :doc:`/tutorial/index`
+
+ :ref:`orm_queryguide_joins` - in the :ref:`queryguide_toplevel`
+
:meth:`_expression.Select.join`
"""
@@ -4903,7 +4972,7 @@ class Select(
_whereclause = whereclause
@_generative
- def where(self, whereclause):
+ def where(self, *whereclause):
"""Return a new :func:`_expression.select` construct with
the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
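+
+ Multiple criteria may also be passed in a single call; they are
+ likewise joined via AND, e.g. (a brief sketch)::
+
+ stmt = select(table1.c.myid).where(
+ table1.c.myid == 12, table1.c.name == "foobar"
+ )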
@@ -4911,9 +4980,10 @@ class Select(
"""
assert isinstance(self._where_criteria, tuple)
- self._where_criteria += (
- coercions.expect(roles.WhereHavingRole, whereclause),
- )
+
+ for criterion in list(whereclause):
+ where_criteria = coercions.expect(roles.WhereHavingRole, criterion)
+ self._where_criteria += (where_criteria,)
@_generative
def having(self, having):
@@ -5400,6 +5470,24 @@ class Select(
class ScalarSelect(roles.InElementRole, Generative, Grouping):
+ """Represent a scalar subquery.
+
+ A :class:`_sql.ScalarSelect` is created by invoking the
+ :meth:`_sql.SelectBase.scalar_subquery` method. The object
+ then participates in other SQL expressions as a SQL column expression
+ within the :class:`_sql.ColumnElement` hierarchy.
+
+ .. seealso::
+
+ :meth:`_sql.SelectBase.scalar_subquery`
+
+ :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+ :ref:`scalar_selects` - in the 1.x tutorial
+
+ """
+
_from_objects = []
_is_from_container = True
_is_implicitly_boolean = False
@@ -5430,9 +5518,79 @@ class ScalarSelect(roles.InElementRole, Generative, Grouping):
def self_group(self, **kwargs):
return self
+ @_generative
+ def correlate(self, *fromclauses):
+ r"""Return a new :class:`_expression.ScalarSelect`
+ which will correlate the given FROM
+ clauses to that of an enclosing :class:`_expression.Select`.
+
+ This method is mirrored from the :meth:`_sql.Select.correlate` method
+ of the underlying :class:`_sql.Select`; it applies the
+ :meth:`_sql.Select.correlate` method to that statement, then returns
+ a new :class:`_sql.ScalarSelect` against the result.
+
+ .. versionadded:: 1.4 Previously, the
+ :meth:`_sql.ScalarSelect.correlate`
+ method was only available from :class:`_sql.Select`.
+
+ :param \*fromclauses: a list of one or more
+ :class:`_expression.FromClause`
+ constructs, or other compatible constructs (e.g. ORM-mapped
+ classes) to become part of the correlate collection.
+
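+ E.g., a brief sketch, applying correlation to a scalar subquery after
+ it is constructed (using the same hypothetical ``table1`` / ``table2``
+ as above)::
+
+ scalar_subq = (
+ select(func.count(table2.c.otherid)).
+ where(table2.c.otherid == table1.c.myid).
+ scalar_subquery().
+ correlate(table1)
+ )
+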
+ .. seealso::
+
+ :meth:`_expression.ScalarSelect.correlate_except`
+
+ :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+ :ref:`correlated_subqueries` - in the 1.x tutorial
+
+ """
+ self.element = self.element.correlate(*fromclauses)
+
+ @_generative
+ def correlate_except(self, *fromclauses):
+ r"""Return a new :class:`_expression.ScalarSelect`
+ which will omit the given FROM
+ clauses from the auto-correlation process.
+
+ This method is mirrored from the
+ :meth:`_sql.Select.correlate_except` method of the underlying
+ :class:`_sql.Select`; it applies the
+ :meth:`_sql.Select.correlate_except` method to that statement, then
+ returns a new :class:`_sql.ScalarSelect` against the result.
+
+ .. versionadded:: 1.4 Previously, the
+ :meth:`_sql.ScalarSelect.correlate_except`
+ method was only available from :class:`_sql.Select`.
+
+ :param \*fromclauses: a list of one or more
+ :class:`_expression.FromClause`
+ constructs, or other compatible constructs (e.g. ORM-mapped
+ classes) to become part of the correlate-exception collection.
+
+ .. seealso::
+
+ :meth:`_expression.ScalarSelect.correlate`
+
+ :ref:`tutorial_scalar_subquery` - in the 2.0 tutorial
+
+ :ref:`correlated_subqueries` - in the 1.x tutorial
+
+ """
+
+ self.element = self.element.correlate_except(*fromclauses)
+
class Exists(UnaryExpression):
- """Represent an ``EXISTS`` clause."""
+ """Represent an ``EXISTS`` clause.
+
+ See :func:`_sql.exists` for a description of usage.
+
+ """
_from_objects = []
inherit_cache = True
@@ -5440,12 +5598,23 @@ class Exists(UnaryExpression):
def __init__(self, *args, **kwargs):
"""Construct a new :class:`_expression.Exists` construct.
- The modern form of :func:`.exists` is to invoke with no arguments,
- which will produce an ``"EXISTS *"`` construct. A WHERE clause
- is then added using the :meth:`.Exists.where` method::
+ The :func:`_sql.exists` function can be invoked by itself to produce an
+ :class:`_sql.Exists` construct, which will accept simple WHERE
+ criteria::
exists_criteria = exists().where(table1.c.col1 == table2.c.col2)
+ However, for greater flexibility in constructing the SELECT, an
+ existing :class:`_sql.Select` construct may be converted to an
+ :class:`_sql.Exists`, most conveniently by making use of the
+ :meth:`_sql.SelectBase.exists` method::
+
+ exists_criteria = (
+ select(table2.c.col2).
+ where(table1.c.col1 == table2.c.col2).
+ exists()
+ )
+
The EXISTS criteria is then used inside of an enclosing SELECT::
stmt = select(table1.c.col1).where(exists_criteria)
@@ -5453,9 +5622,13 @@ class Exists(UnaryExpression):
The above statement will then be of the form::
SELECT col1 FROM table1 WHERE EXISTS
- (SELECT * FROM table2 WHERE table2.col2 = table1.col1)
+ (SELECT table2.col2 FROM table2 WHERE table2.col2 = table1.col1)
- """
+ .. seealso::
+
+ :ref:`tutorial_exists` - in the :term:`2.0 style` tutorial.
+
+ """ # noqa E501
if args and isinstance(args[0], (SelectBase, ScalarSelect)):
s = args[0]
else:
@@ -5524,6 +5697,13 @@ class Exists(UnaryExpression):
return Select._create_select_from_fromclause(self, [self], **kwargs)
def correlate(self, *fromclause):
+ """Apply correlation to the subquery noted by this :class:`_sql.Exists`.
+
+ .. seealso::
+
+ :meth:`_sql.ScalarSelect.correlate`
+
+ """
e = self._clone()
e.element = self._regroup(
lambda element: element.correlate(*fromclause)
@@ -5531,6 +5711,14 @@ class Exists(UnaryExpression):
return e
def correlate_except(self, *fromclause):
+ """Apply correlation to the subquery noted by this :class:`_sql.Exists`.
+
+ .. seealso::
+
+ :meth:`_sql.ScalarSelect.correlate_except`
+
+ """
+
e = self._clone()
e.element = self._regroup(
lambda element: element.correlate_except(*fromclause)
@@ -5544,6 +5732,11 @@ class Exists(UnaryExpression):
method of the select
statement contained.
+ .. note:: It is typically preferable to build a :class:`_sql.Select`
+ statement first, including the desired WHERE clause, then use the
+ :meth:`_sql.SelectBase.exists` method to produce an
+ :class:`_sql.Exists` object at once.
+
"""
e = self._clone()
e.element = self._regroup(lambda element: element.select_from(*froms))
@@ -5554,6 +5747,12 @@ class Exists(UnaryExpression):
given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
+
+ .. note:: It is typically preferable to build a :class:`_sql.Select`
+ statement first, including the desired WHERE clause, then use the
+ :meth:`_sql.SelectBase.exists` method to produce an
+ :class:`_sql.Exists` object at once.
+
"""
e = self._clone()
e.element = self._regroup(lambda element: element.where(clause))
diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py
index 614b70a41..bca6e9020 100644
--- a/lib/sqlalchemy/sql/type_api.py
+++ b/lib/sqlalchemy/sql/type_api.py
@@ -456,6 +456,28 @@ class TypeEngine(Traversible):
else:
return self.__class__
+ @classmethod
+ def _is_generic_type(cls):
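+ # heuristic: names of "generic" types (e.g. Integer, String) are
+ # CamelCase, while all-uppercase names (e.g. INTEGER, VARCHAR)
+ # indicate the uppercase SQL-standard types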
+ n = cls.__name__
+ return n.upper() != n
+
+ def _generic_type_affinity(self):
+
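+ # scan the MRO for the nearest "generic" type defined in the base
+ # sqltypes / type_api modules; TypeEngine / UserDefinedType
+ # themselves fall back to the NULLTYPE class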
+ for t in self.__class__.__mro__:
+ if (
+ t.__module__
+ in (
+ "sqlalchemy.sql.sqltypes",
+ "sqlalchemy.sql.type_api",
+ )
+ and t._is_generic_type()
+ ):
+ if t in (TypeEngine, UserDefinedType):
+ return NULLTYPE.__class__
+ return t
+ else:
+ return self.__class__
+
def dialect_impl(self, dialect):
"""Return a dialect-specific implementation for this
:class:`.TypeEngine`.
diff --git a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py
index 6c6518011..af6b382ae 100644
--- a/lib/sqlalchemy/testing/suite/test_unicode_ddl.py
+++ b/lib/sqlalchemy/testing/suite/test_unicode_ddl.py
@@ -188,7 +188,7 @@ class UnicodeSchemaTest(fixtures.TablesTest):
eq_(
repr(t),
(
- "Table('\\u6e2c\\u8a66', MetaData(bind=None), "
+ "Table('\\u6e2c\\u8a66', MetaData(), "
"Column('\\u6e2c\\u8a66_id', Integer(), "
"table=<\u6e2c\u8a66>), "
"schema=None)"
@@ -198,7 +198,7 @@ class UnicodeSchemaTest(fixtures.TablesTest):
eq_(
repr(t),
(
- "Table('測試', MetaData(bind=None), "
+ "Table('測試', MetaData(), "
"Column('測試_id', Integer(), "
"table=<測試>), "
"schema=None)"
diff --git a/lib/sqlalchemy/util/deprecations.py b/lib/sqlalchemy/util/deprecations.py
index 9f0ca0b1a..f46374601 100644
--- a/lib/sqlalchemy/util/deprecations.py
+++ b/lib/sqlalchemy/util/deprecations.py
@@ -81,10 +81,18 @@ def deprecated_cls(version, message, constructor="__init__"):
return decorate
-def deprecated_20_cls(clsname, alternative=None, constructor="__init__"):
+def deprecated_20_cls(
+ clsname, alternative=None, constructor="__init__", becomes_legacy=False
+):
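+ # becomes_legacy=True changes the deprecation message to state that the
+ # class "becomes a legacy construct" in 2.0 rather than "will be removed"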
message = (
".. deprecated:: 1.4 The %s class is considered legacy as of the "
- "1.x series of SQLAlchemy and will be removed in 2.0." % clsname
+ "1.x series of SQLAlchemy and %s in 2.0."
+ % (
+ clsname,
+ "will be removed"
+ if not becomes_legacy
+ else "becomes a legacy construct",
+ )
)
if alternative:
@@ -161,7 +169,7 @@ def moved_20(message, **kw):
)
-def deprecated_20(api_name, alternative=None, **kw):
+def deprecated_20(api_name, alternative=None, becomes_legacy=False, **kw):
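+ # becomes_legacy=True renders the message as "becomes a legacy construct"
+ # in 2.0, rather than "will be removed"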
type_reg = re.match("^:(attr|func|meth):", api_name)
if type_reg:
type_ = {"attr": "attribute", "func": "function", "meth": "method"}[
@@ -171,8 +179,14 @@ def deprecated_20(api_name, alternative=None, **kw):
type_ = "construct"
message = (
"The %s %s is considered legacy as of the "
- "1.x series of SQLAlchemy and will be removed in 2.0."
- % (api_name, type_)
+ "1.x series of SQLAlchemy and %s in 2.0."
+ % (
+ api_name,
+ type_,
+ "will be removed"
+ if not becomes_legacy
+ else "becomes a legacy construct",
+ )
)
if alternative:
diff --git a/test/base/test_tutorials.py b/test/base/test_tutorials.py
index 4ac3fb981..ea2c1e362 100644
--- a/test/base/test_tutorials.py
+++ b/test/base/test_tutorials.py
@@ -56,7 +56,10 @@ class DocTest(fixtures.TestBase):
self._teardown_create_table_patcher()
self._teardown_logger()
- def _run_doctest_for_content(self, name, content):
+ def _run_doctest(self, *fnames):
+ here = os.path.dirname(__file__)
+ sqla_base = os.path.normpath(os.path.join(here, "..", ".."))
+
optionflags = (
doctest.ELLIPSIS
| doctest.NORMALIZE_WHITESPACE
@@ -68,23 +71,33 @@ class DocTest(fixtures.TestBase):
optionflags=optionflags,
checker=_get_unicode_checker(),
)
- globs = {"print_function": print_function}
parser = doctest.DocTestParser()
- test = parser.get_doctest(content, globs, name, name, 0)
- runner.run(test)
- runner.summarize()
- assert not runner.failures
+ globs = {"print_function": print_function}
- def _run_doctest(self, fname):
- here = os.path.dirname(__file__)
- sqla_base = os.path.normpath(os.path.join(here, "..", ".."))
- path = os.path.join(sqla_base, "doc/build", fname)
- if not os.path.exists(path):
- config.skip_test("Can't find documentation file %r" % path)
- with open(path) as file_:
- content = file_.read()
- content = re.sub(r"{(?:stop|sql|opensql)}", "", content)
- self._run_doctest_for_content(fname, content)
+ for fname in fnames:
+ path = os.path.join(sqla_base, "doc/build", fname)
+ if not os.path.exists(path):
+ config.skip_test("Can't find documentation file %r" % path)
+ with open(path) as file_:
+ content = file_.read()
+ content = re.sub(r"{(?:stop|sql|opensql)}", "", content)
+
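+ # run each file in the same runner, keeping accumulated globals so
+ # that later tutorial files can refer to names defined in earlier ones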
+ test = parser.get_doctest(content, globs, fname, fname, 0)
+ runner.run(test, clear_globs=False)
+ runner.summarize()
+ globs.update(test.globs)
+ assert not runner.failures
+
+ def test_20_style(self):
+ self._run_doctest(
+ "tutorial/index.rst",
+ "tutorial/engine.rst",
+ "tutorial/dbapi_transactions.rst",
+ "tutorial/metadata.rst",
+ "tutorial/data.rst",
+ "tutorial/orm_data_manipulation.rst",
+ "tutorial/orm_related_objects.rst",
+ )
def test_orm(self):
self._run_doctest("orm/tutorial.rst")
@@ -93,6 +106,12 @@ class DocTest(fixtures.TestBase):
def test_core(self):
self._run_doctest("core/tutorial.rst")
+ def test_core_operators(self):
+ self._run_doctest("core/operators.rst")
+
+ def test_orm_queryguide(self):
+ self._run_doctest("orm/queryguide.rst")
+
# unicode checker courtesy pytest
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 63f50d4f9..8cca45b27 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -4453,19 +4453,55 @@ class YieldTest(_fixtures.FixtureTest):
except StopIteration:
pass
- def test_yield_per_and_execution_options(self):
+ def test_yield_per_and_execution_options_legacy(self):
self._eagerload_mappings()
User = self.classes.User
sess = create_session()
+
+ @event.listens_for(sess, "do_orm_execute")
+ def check(ctx):
+ eq_(ctx.load_options._yield_per, 15)
+ eq_(
+ {
+ k: v
+ for k, v in ctx.execution_options.items()
+ if not k.startswith("_")
+ },
+ {"max_row_buffer": 15, "stream_results": True, "foo": "bar"},
+ )
+
q = sess.query(User).yield_per(15)
q = q.execution_options(foo="bar")
- assert q.load_options._yield_per
- eq_(
- q._execution_options,
- {"stream_results": True, "foo": "bar", "max_row_buffer": 15},
- )
+
+ q.all()
+
+ def test_yield_per_and_execution_options(self):
+ self._eagerload_mappings()
+
+ User = self.classes.User
+
+ sess = create_session()
+
+ @event.listens_for(sess, "do_orm_execute")
+ def check(ctx):
+ eq_(ctx.load_options._yield_per, 15)
+ eq_(
+ {
+ k: v
+ for k, v in ctx.execution_options.items()
+ if not k.startswith("_")
+ },
+ {
+ "max_row_buffer": 15,
+ "stream_results": True,
+ "yield_per": 15,
+ },
+ )
+
+ stmt = select(User).execution_options(yield_per=15)
+ sess.execute(stmt)
def test_no_joinedload_opt(self):
self._eagerload_mappings()
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index d3f8b6a9f..8e5f4a43a 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -1092,6 +1092,25 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT NOT (NOT (EXISTS (SELECT 1))) AS anon_1",
)
+ def test_exists_method(self):
+ subq = (
+ select(func.count(table2.c.otherid))
+ .where(table2.c.otherid == table1.c.myid)
+ .correlate(table1)
+ .group_by(table2.c.otherid)
+ .having(func.count(table2.c.otherid) > 1)
+ .exists()
+ )
+
+ self.assert_compile(
+ table1.select().where(subq),
+ "SELECT mytable.myid, mytable.name, mytable.description "
+ "FROM mytable WHERE EXISTS (SELECT count(myothertable.otherid) "
+ "AS count_1 FROM myothertable WHERE myothertable.otherid = "
+ "mytable.myid GROUP BY myothertable.otherid "
+ "HAVING count(myothertable.otherid) > :count_2)",
+ )
+
def test_where_subquery(self):
s = (
select(addresses.c.street)
@@ -1693,6 +1712,15 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL):
"SELECT mytable.myid FROM mytable",
)
+ def test_where_multiple(self):
+ self.assert_compile(
+ select(table1.c.myid).where(
+ table1.c.myid == 12, table1.c.name == "foobar"
+ ),
+ "SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1 "
+ "AND mytable.name = :name_1",
+ )
+
def test_order_by_nulls(self):
self.assert_compile(
table2.select().order_by(
@@ -5084,6 +5112,12 @@ class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL):
select(t2, s1.correlate(t2).scalar_subquery())
)
+ def test_correlate_semiauto_column_correlate_from_subq(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_column_correlated(
+ select(t2, s1.scalar_subquery().correlate(t2))
+ )
+
def test_correlate_semiauto_from(self):
t1, t2, s1 = self._fixture()
self._assert_from_uncorrelated(select(t2, s1.correlate(t2).alias()))
@@ -5094,6 +5128,12 @@ class CorrelateTest(fixtures.TestBase, AssertsCompiledSQL):
select(t2).having(t2.c.a == s1.correlate(t2).scalar_subquery())
)
+ def test_correlate_semiauto_having_from_subq(self):
+ t1, t2, s1 = self._fixture()
+ self._assert_having_correlated(
+ select(t2).having(t2.c.a == s1.scalar_subquery().correlate(t2))
+ )
+
def test_correlate_except_inclusion_where(self):
t1, t2, s1 = self._fixture()
self._assert_where_correlated(
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 980e2886a..b2dae4b22 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -741,7 +741,7 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
),
(
Table("bar", MetaData(), Column("x", String)),
- "Table('bar', MetaData(bind=None), "
+ "Table('bar', MetaData(), "
"Column('x', String(), table=<bar>), schema=None)",
),
(
@@ -4138,13 +4138,13 @@ class CatchAllEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
"Column->t1",
"ForeignKeyConstraint->Table",
"ForeignKeyConstraint->t1",
- "Table->MetaData(bind=None)",
+ "Table->MetaData()",
"Table->MetaData",
"PrimaryKeyConstraint->Table",
"PrimaryKeyConstraint->t2",
"Column->Table",
"Column->t2",
- "Table->MetaData(bind=None)",
+ "Table->MetaData()",
],
)