summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.zuul.yaml50
-rw-r--r--oslo_db/sqlalchemy/compat/__init__.py38
-rw-r--r--oslo_db/sqlalchemy/enginefacade.py24
-rw-r--r--oslo_db/sqlalchemy/engines.py81
-rw-r--r--oslo_db/sqlalchemy/exc_filters.py27
-rw-r--r--oslo_db/sqlalchemy/provision.py33
-rw-r--r--oslo_db/sqlalchemy/test_fixtures.py2
-rw-r--r--oslo_db/sqlalchemy/update_match.py4
-rw-r--r--oslo_db/sqlalchemy/utils.py47
-rw-r--r--oslo_db/tests/sqlalchemy/test_enginefacade.py4
-rw-r--r--oslo_db/tests/sqlalchemy/test_exc_filters.py365
-rw-r--r--oslo_db/tests/sqlalchemy/test_provision.py9
-rw-r--r--oslo_db/tests/sqlalchemy/test_sqlalchemy.py89
-rw-r--r--oslo_db/tests/sqlalchemy/test_utils.py107
-rw-r--r--releasenotes/notes/sqlalchemy-20-0a193a01c70f805a.yaml11
-rw-r--r--setup.cfg4
16 files changed, 600 insertions, 295 deletions
diff --git a/.zuul.yaml b/.zuul.yaml
index d4c4242..b583097 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -1,8 +1,56 @@
+- job:
+ name: oslodb-tox-py39-tips
+ parent: openstack-tox-py39
+ description: |
+ Run unit tests for oslo.db with main branch of important libs.
+ Takes advantage of the base tox job's install-siblings feature.
+ # The job only tests the latest and shouldn't be run on the stable branches
+ branches: ^(?!stable)
+ required-projects:
+ - name: github.com/sqlalchemy/sqlalchemy
+ override-checkout: main
+ - name: github.com/sqlalchemy/alembic
+ override-checkout: main
+ vars:
+ # Set work dir to oslo.db so that if it's triggered by one of the
+ # other repos the tests will run in the same place
+ zuul_work_dir: src/opendev.org/openstack/oslo.db
+
+- job:
+ name: oslodb-tox-py310-tips
+ parent: openstack-tox-py310
+ description: |
+ Run unit tests for oslo.db with main branch of important libs.
+ Takes advantage of the base tox job's install-siblings feature.
+ # The job only tests the latest and shouldn't be run on the stable branches
+ branches: ^(?!stable)
+ required-projects:
+ - name: github.com/sqlalchemy/sqlalchemy
+ override-checkout: main
+ - name: github.com/sqlalchemy/alembic
+ override-checkout: main
+ vars:
+ # Set work dir to oslo.db so that if it's triggered by one of the
+ # other repos the tests will run in the same place
+ zuul_work_dir: src/opendev.org/openstack/oslo.db
+
+- project-template:
+ name: oslodb-tox-unit-tips
+ check:
+ jobs:
+ - oslodb-tox-py39-tips
+ - oslodb-tox-py310-tips
+ gate:
+ jobs:
+ - oslodb-tox-py39-tips
+ - oslodb-tox-py310-tips
+
- project:
templates:
+ - oslodb-tox-unit-tips
- check-requirements
- lib-forward-testing-python3
- - openstack-python3-antelope-jobs
+ - openstack-python3-jobs
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
diff --git a/oslo_db/sqlalchemy/compat/__init__.py b/oslo_db/sqlalchemy/compat/__init__.py
new file mode 100644
index 0000000..d209207
--- /dev/null
+++ b/oslo_db/sqlalchemy/compat/__init__.py
@@ -0,0 +1,38 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_utils import versionutils
+
+from sqlalchemy import __version__
+
+
+_vers = versionutils.convert_version_to_tuple(__version__)
+sqla_2 = _vers >= (2, )
+
+native_pre_ping_event_support = _vers >= (2, 0, 5)
+
+
+def dialect_from_exception_context(ctx):
+ if sqla_2:
+ # SQLAlchemy 2.0 still has context.engine, however if the
+ # exception context is called in the context of a ping handler,
+ # engine is not present. need to use dialect instead
+ return ctx.dialect
+ else:
+ return ctx.engine.dialect
+
+
+def driver_connection(connection):
+ if sqla_2:
+ return connection.connection.driver_connection
+ else:
+ return connection.connection.connection
diff --git a/oslo_db/sqlalchemy/enginefacade.py b/oslo_db/sqlalchemy/enginefacade.py
index 3a316b3..39fb061 100644
--- a/oslo_db/sqlalchemy/enginefacade.py
+++ b/oslo_db/sqlalchemy/enginefacade.py
@@ -164,7 +164,6 @@ class _TransactionFactory(object):
}
self._maker_cfg = {
'expire_on_commit': _Default(False),
- '__autocommit': False,
}
self._transaction_ctx_cfg = {
'rollback_reader_sessions': False,
@@ -468,7 +467,6 @@ class _TransactionFactory(object):
def _maker_args_for_conf(self, conf):
maker_args = self._args_for_conf(self._maker_cfg, conf)
- maker_args['autocommit'] = maker_args.pop('__autocommit')
return maker_args
def dispose_pool(self):
@@ -1238,9 +1236,6 @@ class LegacyEngineFacade(object):
:param sqlite_fk: enable foreign keys in SQLite
:type sqlite_fk: bool
- :param autocommit: use autocommit mode for created Session instances
- :type autocommit: bool
-
:param expire_on_commit: expire session objects on commit
:type expire_on_commit: bool
@@ -1282,22 +1277,14 @@ class LegacyEngineFacade(object):
"""
def __init__(self, sql_connection, slave_connection=None,
- sqlite_fk=False, autocommit=False,
- expire_on_commit=False, _conf=None, _factory=None, **kwargs):
+ sqlite_fk=False, expire_on_commit=False, _conf=None,
+ _factory=None, **kwargs):
warnings.warn(
"EngineFacade is deprecated; please use "
"oslo_db.sqlalchemy.enginefacade",
warning.OsloDBDeprecationWarning,
stacklevel=2)
- if autocommit is True:
- warnings.warn(
- 'autocommit support will be removed in SQLAlchemy 2.0 and '
- 'should not be relied on; please rework your code to remove '
- 'reliance on this feature',
- warning.OsloDBDeprecationWarning,
- stacklevel=2)
-
if _factory:
self._factory = _factory
else:
@@ -1305,7 +1292,6 @@ class LegacyEngineFacade(object):
self._factory.configure(
sqlite_fk=sqlite_fk,
- __autocommit=autocommit,
expire_on_commit=expire_on_commit,
**kwargs
)
@@ -1371,7 +1357,7 @@ class LegacyEngineFacade(object):
@classmethod
def from_config(cls, conf,
- sqlite_fk=False, autocommit=False, expire_on_commit=False):
+ sqlite_fk=False, expire_on_commit=False):
"""Initialize EngineFacade using oslo.config config instance options.
:param conf: oslo.config config instance
@@ -1380,9 +1366,6 @@ class LegacyEngineFacade(object):
:param sqlite_fk: enable foreign keys in SQLite
:type sqlite_fk: bool
- :param autocommit: use autocommit mode for created Session instances
- :type autocommit: bool
-
:param expire_on_commit: expire session objects on commit
:type expire_on_commit: bool
@@ -1391,5 +1374,4 @@ class LegacyEngineFacade(object):
return cls(
None,
sqlite_fk=sqlite_fk,
- autocommit=autocommit,
expire_on_commit=expire_on_commit, _conf=conf)
diff --git a/oslo_db/sqlalchemy/engines.py b/oslo_db/sqlalchemy/engines.py
index 31dabf6..7c36c8a 100644
--- a/oslo_db/sqlalchemy/engines.py
+++ b/oslo_db/sqlalchemy/engines.py
@@ -16,6 +16,7 @@
"""Core SQLAlchemy connectivity routines.
"""
+import functools
import itertools
import logging
import os
@@ -29,10 +30,11 @@ import sqlalchemy
from sqlalchemy import event
from sqlalchemy import exc
from sqlalchemy import pool
-from sqlalchemy.sql.expression import select
+from sqlalchemy import select
from oslo_db import exception
+from oslo_db.sqlalchemy import compat
from oslo_db.sqlalchemy import exc_filters
from oslo_db.sqlalchemy import ndb
from oslo_db.sqlalchemy import utils
@@ -57,6 +59,13 @@ def _connect_ping_listener(connection, branch):
Ping the server at transaction begin and transparently reconnect
if a disconnect exception occurs.
+
+ This listener is used up until SQLAlchemy 2.0.5. At 2.0.5, we use the
+ ``pool_pre_ping`` parameter instead of this event handler.
+
+ Note the current test suite in test_exc_filters still **tests** this
+ handler using all SQLAlchemy versions including 2.0.5 and greater.
+
"""
if branch:
return
@@ -94,6 +103,14 @@ def _connect_ping_listener(connection, branch):
connection.rollback()
+# SQLAlchemy 2.0 is compatible here, however oslo.db's test suite
+# raises for all deprecation errors, so we have to check for 2.0
+# and wrap out a parameter that is deprecated
+if compat.sqla_2:
+ _connect_ping_listener = functools.partial(
+ _connect_ping_listener, branch=False)
+
+
def _setup_logging(connection_debug=0):
"""setup_logging function maps SQL debug level to Python log level.
@@ -181,15 +198,18 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
json_deserializer=None, connection_parameters=None):
"""Return a new SQLAlchemy engine."""
- url = sqlalchemy.engine.url.make_url(sql_connection)
+ url = utils.make_url(sql_connection)
if connection_parameters:
url = _extend_url_parameters(url, connection_parameters)
_vet_url(url)
+ _native_pre_ping = compat.native_pre_ping_event_support
+
engine_args = {
- "pool_recycle": connection_recycle_time,
+ 'pool_recycle': connection_recycle_time,
+ 'pool_pre_ping': _native_pre_ping,
'connect_args': {},
'logging_name': logging_name
}
@@ -198,11 +218,13 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
_init_connection_args(
url, engine_args,
- max_pool_size=max_pool_size,
- max_overflow=max_overflow,
- pool_timeout=pool_timeout,
- json_serializer=json_serializer,
- json_deserializer=json_deserializer,
+ dict(
+ max_pool_size=max_pool_size,
+ max_overflow=max_overflow,
+ pool_timeout=pool_timeout,
+ json_serializer=json_serializer,
+ json_deserializer=json_deserializer,
+ )
)
engine = sqlalchemy.create_engine(url, **engine_args)
@@ -223,8 +245,10 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
# register alternate exception handler
exc_filters.register_engine(engine)
- # register engine connect handler
- event.listen(engine, "engine_connect", _connect_ping_listener)
+ if not _native_pre_ping:
+ # register engine connect handler.
+
+ event.listen(engine, "engine_connect", _connect_ping_listener)
# initial connect + test
# NOTE(viktors): the current implementation of _test_connection()
@@ -237,9 +261,16 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
@utils.dispatch_for_dialect('*', multiple=True)
-def _init_connection_args(
- url, engine_args,
- max_pool_size=None, max_overflow=None, pool_timeout=None, **kw):
+def _init_connection_args(url, engine_args, kw):
+
+ # (zzzeek) kw is passed by reference rather than as **kw so that the
+ # init_connection_args routines can modify the contents of what
+ # will be passed to create_engine, including removing arguments that
+ # don't apply. This allows things such as replacing QueuePool with
+ # NullPool, for example, as the latter pool would reject these parameters.
+ max_pool_size = kw.get("max_pool_size", None)
+ max_overflow = kw.get("max_overflow", None)
+ pool_timeout = kw.get("pool_timeout", None)
pool_class = url.get_dialect().get_pool_class(url)
if issubclass(pool_class, pool.QueuePool):
@@ -252,17 +283,25 @@ def _init_connection_args(
@_init_connection_args.dispatch_for("sqlite")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
pool_class = url.get_dialect().get_pool_class(url)
- # singletonthreadpool is used for :memory: connections;
- # replace it with StaticPool.
if issubclass(pool_class, pool.SingletonThreadPool):
+ # singletonthreadpool is used for :memory: connections;
+ # replace it with StaticPool.
engine_args["poolclass"] = pool.StaticPool
engine_args['connect_args']['check_same_thread'] = False
+ elif issubclass(pool_class, pool.QueuePool):
+ # SQLAlchemy 2.0 uses QueuePool for sqlite file DBs; put NullPool
+ # back to avoid compatibility issues
+ kw.pop("max_pool_size", None)
+ kw.pop("max_overflow", None)
+ engine_args.pop("max_pool_size", None)
+ engine_args.pop("max_overflow", None)
+ engine_args["poolclass"] = pool.NullPool
@_init_connection_args.dispatch_for("postgresql")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
if 'client_encoding' not in url.query:
# Set encoding using engine_args instead of connect_args since
# it's supported for PostgreSQL 8.*. More details at:
@@ -273,13 +312,13 @@ def _init_connection_args(url, engine_args, **kw):
@_init_connection_args.dispatch_for("mysql")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
if 'charset' not in url.query:
engine_args['connect_args']['charset'] = 'utf8'
@_init_connection_args.dispatch_for("mysql+mysqlconnector")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
# mysqlconnector engine (<1.0) incorrectly defaults to
# raise_on_warnings=True
# https://bitbucket.org/zzzeek/sqlalchemy/issue/2515
@@ -288,8 +327,7 @@ def _init_connection_args(url, engine_args, **kw):
@_init_connection_args.dispatch_for("mysql+mysqldb")
-@_init_connection_args.dispatch_for("mysql+oursql")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
# Those drivers require use_unicode=0 to avoid performance drop due
# to internal usage of Python unicode objects in the driver
# http://docs.sqlalchemy.org/en/rel_0_9/dialects/mysql.html
@@ -444,7 +482,6 @@ def _add_process_guards(engine):
"database connection, "
"which is being discarded and recreated.",
{"newproc": pid, "orig": connection_record.info['pid']})
- connection_record.connection = connection_proxy.connection = None
raise exc.DisconnectionError(
"Connection record belongs to pid %s, "
"attempting to check out in pid %s" %
diff --git a/oslo_db/sqlalchemy/exc_filters.py b/oslo_db/sqlalchemy/exc_filters.py
index e578987..420b5c7 100644
--- a/oslo_db/sqlalchemy/exc_filters.py
+++ b/oslo_db/sqlalchemy/exc_filters.py
@@ -20,7 +20,7 @@ from sqlalchemy import event
from sqlalchemy import exc as sqla_exc
from oslo_db import exception
-
+from oslo_db.sqlalchemy import compat
LOG = logging.getLogger(__name__)
@@ -377,6 +377,7 @@ def _raise_operational_errors_directly_filter(operational_error,
def _is_db_connection_error(operational_error, match, engine_name,
is_disconnect):
"""Detect the exception as indicating a recoverable error on connect."""
+
raise exception.DBConnectionError(operational_error)
@@ -423,13 +424,14 @@ def handler(context):
more specific exception class are attempted first.
"""
- def _dialect_registries(engine):
- if engine.dialect.name in _registry:
- yield _registry[engine.dialect.name]
+ def _dialect_registries(dialect):
+ if dialect.name in _registry:
+ yield _registry[dialect.name]
if '*' in _registry:
yield _registry['*']
- for per_dialect in _dialect_registries(context.engine):
+ dialect = compat.dialect_from_exception_context(context)
+ for per_dialect in _dialect_registries(dialect):
for exc in (
context.sqlalchemy_exception,
context.original_exception):
@@ -443,7 +445,7 @@ def handler(context):
fn(
exc,
match,
- context.engine.dialect.name,
+ dialect.name,
context.is_disconnect)
except exception.DBError as dbe:
if (
@@ -460,6 +462,19 @@ def handler(context):
if isinstance(
dbe, exception.DBConnectionError):
context.is_disconnect = True
+
+ # new in 2.0.5
+ if (
+ hasattr(context, "is_pre_ping") and
+ context.is_pre_ping
+ ):
+ # if this is a pre-ping, need to
+ # integrate with the built
+ # in pre-ping handler that doesn't know
+ # about DBConnectionError, just needs
+ # the updated status
+ return None
+
return dbe
diff --git a/oslo_db/sqlalchemy/provision.py b/oslo_db/sqlalchemy/provision.py
index 21eb90a..a6cc527 100644
--- a/oslo_db/sqlalchemy/provision.py
+++ b/oslo_db/sqlalchemy/provision.py
@@ -24,7 +24,6 @@ import re
import string
import sqlalchemy
-from sqlalchemy.engine import url as sa_url
from sqlalchemy import schema
from sqlalchemy import sql
import testresources
@@ -242,7 +241,6 @@ class Backend(object):
:raises: ``BackendNotAvailable`` if the backend is not available.
"""
-
if not self.verified:
try:
eng = self._ensure_backend_available(self.url)
@@ -259,7 +257,7 @@ class Backend(object):
@classmethod
def _ensure_backend_available(cls, url):
- url = sa_url.make_url(str(url))
+ url = utils.make_url(url)
try:
eng = sqlalchemy.create_engine(url)
except ImportError as i_e:
@@ -362,7 +360,7 @@ class Backend(object):
]
for url_str in configured_urls:
- url = sa_url.make_url(url_str)
+ url = utils.make_url(url_str)
m = re.match(r'([^+]+?)(?:\+(.+))?$', url.drivername)
database_type = m.group(1)
Backend.backends_by_database_type[database_type] = \
@@ -494,8 +492,7 @@ class BackendImpl(object, metaclass=abc.ABCMeta):
then emit a command to switch to the named database.
"""
-
- url = sa_url.make_url(str(base_url))
+ url = utils.make_url(base_url)
# TODO(zzzeek): remove hasattr() conditional in favor of "url.set()"
# when SQLAlchemy 1.4 is the minimum version in requirements
@@ -516,16 +513,14 @@ class MySQLBackendImpl(BackendImpl):
return "mysql+pymysql://openstack_citest:openstack_citest@localhost/"
def create_named_database(self, engine, ident, conditional=False):
- with engine.connect() as conn:
+ with engine.begin() as conn:
if not conditional or not self.database_exists(conn, ident):
- with conn.begin():
- conn.exec_driver_sql("CREATE DATABASE %s" % ident)
+ conn.exec_driver_sql("CREATE DATABASE %s" % ident)
def drop_named_database(self, engine, ident, conditional=False):
- with engine.connect() as conn:
+ with engine.begin() as conn:
if not conditional or self.database_exists(conn, ident):
- with conn.begin():
- conn.exec_driver_sql("DROP DATABASE %s" % ident)
+ conn.exec_driver_sql("DROP DATABASE %s" % ident)
def database_exists(self, engine, ident):
s = sql.text("SHOW DATABASES LIKE :ident")
@@ -571,7 +566,7 @@ class SQLiteBackendImpl(BackendImpl):
def provisioned_database_url(self, base_url, ident):
if base_url.database:
- return sa_url.make_url("sqlite:////tmp/%s.db" % ident)
+ return utils.make_url("sqlite:////tmp/%s.db" % ident)
else:
return base_url
@@ -586,19 +581,17 @@ class PostgresqlBackendImpl(BackendImpl):
isolation_level="AUTOCOMMIT",
) as conn:
if not conditional or not self.database_exists(conn, ident):
- with conn.begin():
- conn.exec_driver_sql("CREATE DATABASE %s" % ident)
+ conn.exec_driver_sql("CREATE DATABASE %s" % ident)
def drop_named_database(self, engine, ident, conditional=False):
with engine.connect().execution_options(
isolation_level="AUTOCOMMIT",
) as conn:
self._close_out_database_users(conn, ident)
- with conn.begin():
- if conditional:
- conn.exec_driver_sql("DROP DATABASE IF EXISTS %s" % ident)
- else:
- conn.exec_driver_sql("DROP DATABASE %s" % ident)
+ if conditional:
+ conn.exec_driver_sql("DROP DATABASE IF EXISTS %s" % ident)
+ else:
+ conn.exec_driver_sql("DROP DATABASE %s" % ident)
def drop_additional_objects(self, conn):
enums = [e['name'] for e in sqlalchemy.inspect(conn).get_enums()]
diff --git a/oslo_db/sqlalchemy/test_fixtures.py b/oslo_db/sqlalchemy/test_fixtures.py
index 8b69d3f..f7157c0 100644
--- a/oslo_db/sqlalchemy/test_fixtures.py
+++ b/oslo_db/sqlalchemy/test_fixtures.py
@@ -360,7 +360,7 @@ class AdHocDbFixture(SimpleDbFixture):
"""
def __init__(self, url=None):
if url:
- self.url = provision.sa_url.make_url(str(url))
+ self.url = utils.make_url(url)
driver = self.url.get_backend_name()
else:
driver = None
diff --git a/oslo_db/sqlalchemy/update_match.py b/oslo_db/sqlalchemy/update_match.py
index 559aa78..c2dd8d6 100644
--- a/oslo_db/sqlalchemy/update_match.py
+++ b/oslo_db/sqlalchemy/update_match.py
@@ -393,8 +393,8 @@ def update_returning_pk(query, values, surrogate_key):
mapper = inspect(entity).mapper
session = query.session
- bind = session.connection(mapper=mapper)
- if bind.dialect.implicit_returning:
+ bind = session.connection(bind_arguments=dict(mapper=mapper))
+ if bind.dialect.name == "postgresql":
pk_strategy = _pk_strategy_returning
elif bind.dialect.name == 'mysql' and \
len(mapper.primary_key) == 1 and \
diff --git a/oslo_db/sqlalchemy/utils.py b/oslo_db/sqlalchemy/utils.py
index ba0a607..58b2486 100644
--- a/oslo_db/sqlalchemy/utils.py
+++ b/oslo_db/sqlalchemy/utils.py
@@ -213,7 +213,7 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
null_order_by_stmt = {
"": None,
"nullsfirst": sort_key_attr.is_(None),
- "nullslast": sort_key_attr.isnot(None),
+ "nullslast": sort_key_attr.is_not(None),
}[null_sort_dir]
except KeyError:
raise ValueError(_("Unknown sort direction, "
@@ -1016,26 +1016,29 @@ def suspend_fk_constraints_for_col_alter(
yield
else:
with engine.connect() as conn:
- insp = inspect(conn)
- fks = []
- for ref_table_name in referents:
- for fk in insp.get_foreign_keys(ref_table_name):
- if not fk.get('name'):
- raise AssertionError("foreign key hasn't a name.")
- if fk['referred_table'] == table_name and \
- column_name in fk['referred_columns']:
- fk['source_table'] = ref_table_name
- if 'options' not in fk:
- fk['options'] = {}
- fks.append(fk)
-
- ctx = MigrationContext.configure(conn)
- op = Operations(ctx)
-
with conn.begin():
+ insp = inspect(conn)
+ fks = []
+ for ref_table_name in referents:
+ for fk in insp.get_foreign_keys(ref_table_name):
+ if not fk.get('name'):
+ raise AssertionError("foreign key hasn't a name.")
+ if fk['referred_table'] == table_name and \
+ column_name in fk['referred_columns']:
+ fk['source_table'] = ref_table_name
+ if 'options' not in fk:
+ fk['options'] = {}
+ fks.append(fk)
+
+ ctx = MigrationContext.configure(conn)
+ op = Operations(ctx)
+
for fk in fks:
op.drop_constraint(
- fk['name'], fk['source_table'], type_="foreignkey")
+ fk['name'],
+ fk['source_table'],
+ type_="foreignkey",
+ )
yield
@@ -1051,3 +1054,11 @@ def suspend_fk_constraints_for_col_alter(
deferrable=fk['options'].get('deferrable'),
initially=fk['options'].get('initially'),
)
+
+
+def make_url(target):
+ """Return a ``url.URL`` object"""
+ if isinstance(target, (str, sa_url.URL)):
+ return sa_url.make_url(target)
+ else:
+ return sa_url.make_url(str(target))
diff --git a/oslo_db/tests/sqlalchemy/test_enginefacade.py b/oslo_db/tests/sqlalchemy/test_enginefacade.py
index 4175880..c2b980f 100644
--- a/oslo_db/tests/sqlalchemy/test_enginefacade.py
+++ b/oslo_db/tests/sqlalchemy/test_enginefacade.py
@@ -357,11 +357,11 @@ class MockFacadeTest(test_base.BaseTestCase):
maker_factories = mock.Mock(side_effect=get_maker)
maker_factories(
- autocommit=False, engine=engines.writer,
+ engine=engines.writer,
expire_on_commit=False)
if self.slave_uri:
maker_factories(
- autocommit=False, engine=engines.async_reader,
+ engine=engines.async_reader,
expire_on_commit=False)
yield makers
diff --git a/oslo_db/tests/sqlalchemy/test_exc_filters.py b/oslo_db/tests/sqlalchemy/test_exc_filters.py
index 7e03fce..796ba6c 100644
--- a/oslo_db/tests/sqlalchemy/test_exc_filters.py
+++ b/oslo_db/tests/sqlalchemy/test_exc_filters.py
@@ -19,7 +19,6 @@ import itertools
from unittest import mock
import sqlalchemy as sqla
-from sqlalchemy.engine import url as sqla_url
from sqlalchemy import event
import sqlalchemy.exc
from sqlalchemy.orm import declarative_base
@@ -27,8 +26,10 @@ from sqlalchemy.orm import registry
from sqlalchemy import sql
from oslo_db import exception
+from oslo_db.sqlalchemy import compat
from oslo_db.sqlalchemy import engines
from oslo_db.sqlalchemy import exc_filters
+from oslo_db.sqlalchemy import utils
from oslo_db.tests import base as test_base
from oslo_db.tests.sqlalchemy import base as db_test_base
from oslo_db.tests import utils as test_utils
@@ -139,18 +140,29 @@ class TestsExceptionFilter(_SQLAExceptionMatcher, test_base.BaseTestCase):
# statement
self.engine.connect().close()
- with test_utils.nested(
+ patches = [
mock.patch.object(engine.dialect, "do_execute", do_execute),
# replace the whole DBAPI rather than patching "Error"
# as some DBAPIs might not be patchable (?)
mock.patch.object(engine.dialect,
"dbapi",
mock.Mock(Error=self.Error)),
+
mock.patch.object(engine.dialect, "name", dialect_name),
mock.patch.object(engine.dialect,
"is_disconnect",
lambda *args: is_disconnect)
- ):
+ ]
+ if compat.sqla_2:
+ patches.append(
+ mock.patch.object(
+ engine.dialect,
+ "loaded_dbapi",
+ mock.Mock(Error=self.Error),
+ )
+ )
+
+ with test_utils.nested(*patches):
yield
def _run_test(self, dialect_name, statement, raises, expected,
@@ -403,7 +415,7 @@ class TestNonExistentDatabase(
def setUp(self):
super(TestNonExistentDatabase, self).setUp()
- url = sqla_url.make_url(str(self.engine.url))
+ url = utils.make_url(self.engine.url)
# TODO(zzzeek): remove hasattr() conditional in favor of "url.set()"
# when SQLAlchemy 1.4 is the minimum version in requirements
@@ -419,7 +431,7 @@ class TestNonExistentDatabase(
matched = self.assertRaises(
exception.DBNonExistentDatabase,
engines.create_engine,
- sqla_url.make_url(
+ utils.make_url(
'sqlite:////non_existent_dir/non_existent_database')
)
self.assertIsNone(matched.database)
@@ -754,7 +766,7 @@ class TestExceptionCauseMySQLSavepoint(
session.execute(sql.text("select 1"))
# close underying DB connection
- session.connection().connection.connection.close()
+ compat.driver_connection(session.connection()).close()
# alternate approach, but same idea:
# conn_id = session.scalar("select connection_id()")
@@ -779,7 +791,7 @@ class TestExceptionCauseMySQLSavepoint(
session.execute(sql.text("select 1"))
# close underying DB connection
- session.connection().connection.connection.close()
+ compat.driver_connection(session.connection()).close()
# alternate approach, but same idea:
# conn_id = session.scalar("select connection_id()")
@@ -947,8 +959,8 @@ class TestDuplicate(TestsExceptionFilter):
class TestDeadlock(TestsExceptionFilter):
statement = ('SELECT quota_usages.created_at AS '
'quota_usages_created_at FROM quota_usages '
- 'WHERE quota_usages.project_id = %(project_id_1)s '
- 'AND quota_usages.deleted = %(deleted_1)s FOR UPDATE')
+ 'WHERE quota_usages.project_id = :project_id_1 '
+ 'AND quota_usages.deleted = :deleted_1 FOR UPDATE')
params = {
'project_id_1': '8891d4478bbf48ad992f050cdf55e9b5',
'deleted_1': 0
@@ -1178,41 +1190,16 @@ class IntegrationTest(db_test_base._DbTestCase):
self.assertIn("no such function", str(matched))
-class TestDBDisconnected(TestsExceptionFilter):
-
- @contextlib.contextmanager
- def _fixture(
- self,
- dialect_name, exception, num_disconnects, is_disconnect=True):
- engine = self.engine
-
- event.listen(engine, "engine_connect", engines._connect_ping_listener)
-
- real_do_execute = engine.dialect.do_execute
- counter = itertools.count(1)
-
- def fake_do_execute(self, *arg, **kw):
- if next(counter) > num_disconnects:
- return real_do_execute(self, *arg, **kw)
- else:
- raise exception
-
- with self._dbapi_fixture(dialect_name):
- with test_utils.nested(
- mock.patch.object(engine.dialect,
- "do_execute",
- fake_do_execute),
- mock.patch.object(engine.dialect,
- "is_disconnect",
- mock.Mock(return_value=is_disconnect))
- ):
- yield
+class TestDBDisconnectedFixture(TestsExceptionFilter):
+ native_pre_ping = False
def _test_ping_listener_disconnected(
self, dialect_name, exc_obj, is_disconnect=True,
):
- with self._fixture(dialect_name, exc_obj, 1, is_disconnect):
- conn = self.engine.connect()
+ with self._fixture(
+ dialect_name, exc_obj, False, is_disconnect,
+ ) as engine:
+ conn = engine.connect()
with conn.begin():
self.assertEqual(
1, conn.execute(sqla.select(1)).scalars().first(),
@@ -1221,19 +1208,145 @@ class TestDBDisconnected(TestsExceptionFilter):
self.assertFalse(conn.invalidated)
self.assertTrue(conn.in_transaction())
- with self._fixture(dialect_name, exc_obj, 2, is_disconnect):
+ with self._fixture(
+ dialect_name, exc_obj, True, is_disconnect,
+ ) as engine:
self.assertRaises(
exception.DBConnectionError,
- self.engine.connect
+ engine.connect
)
# test implicit execution
- with self._fixture(dialect_name, exc_obj, 1):
- with self.engine.connect() as conn:
+ with self._fixture(dialect_name, exc_obj, False) as engine:
+ with engine.connect() as conn:
self.assertEqual(
1, conn.execute(sqla.select(1)).scalars().first(),
)
+ @contextlib.contextmanager
+ def _fixture(
+ self,
+ dialect_name,
+ exception,
+ db_stays_down,
+ is_disconnect=True,
+ ):
+ """Fixture for testing the ping listener.
+
+ For SQLAlchemy 2.0, the mocking is placed more deeply in the
+ stack within the DBAPI connection / cursor so that we can also
+ effectively mock out the "pre ping" condition.
+
+ :param dialect_name: dialect to use. "postgresql" or "mysql"
+ :param exception: an exception class to raise
+ :param db_stays_down: if True, the database will stay down after the
+ first ping fails
+ :param is_disconnect: whether or not the SQLAlchemy dialect should
+ consider the exception object as a "disconnect error". OpenStack's
+ own exception handlers upgrade various DB exceptions to be
+ "disconnect" scenarios that SQLAlchemy itself does not, such as
+ some specific Galera error messages.
+
+ The importance of an exception being a "disconnect error" means that
+ SQLAlchemy knows it can discard the connection and then reconnect.
+ If the error is not a "disconnection error", then it raises.
+ """
+ connect_args = {}
+ patchers = []
+ db_disconnected = False
+
+ class DisconnectCursorMixin:
+ def execute(self, *arg, **kw):
+ if db_disconnected:
+ raise exception
+ else:
+ return super().execute(*arg, **kw)
+
+ if dialect_name == "postgresql":
+ import psycopg2.extensions
+
+ class Curs(DisconnectCursorMixin, psycopg2.extensions.cursor):
+ pass
+
+ connect_args = {"cursor_factory": Curs}
+
+ elif dialect_name == "mysql":
+ import pymysql
+
+ def fake_ping(self, *arg, **kw):
+ if db_disconnected:
+ raise exception
+ else:
+ return True
+
+ class Curs(DisconnectCursorMixin, pymysql.cursors.Cursor):
+ pass
+
+ connect_args = {"cursorclass": Curs}
+
+ patchers.append(
+ mock.patch.object(
+ pymysql.Connection, "ping", fake_ping
+ )
+ )
+ else:
+ raise NotImplementedError()
+
+ with mock.patch.object(
+ compat,
+ "native_pre_ping_event_support",
+ self.native_pre_ping,
+ ):
+ engine = engines.create_engine(
+ self.engine.url, max_retries=0)
+
+ # 1. override how we connect. if we want the DB to be down
+ # for the moment, but recover, reset db_disconnected after
+ # connect is called. If we want the DB to stay down, then
+ # make sure connect raises the error also.
+ @event.listens_for(engine, "do_connect")
+ def _connect(dialect, connrec, cargs, cparams):
+ nonlocal db_disconnected
+
+ # while we're here, add our cursor classes to the DBAPI
+ # connect args
+ cparams.update(connect_args)
+
+ if db_disconnected:
+ if db_stays_down:
+ raise exception
+ else:
+ db_disconnected = False
+
+ # 2. initialize the dialect with a first connect
+ conn = engine.connect()
+ conn.close()
+
+ # 3. add additional patchers
+ patchers.extend([
+ mock.patch.object(
+ engine.dialect.dbapi,
+ "Error",
+ self.Error,
+ ),
+ mock.patch.object(
+ engine.dialect,
+ "is_disconnect",
+ mock.Mock(return_value=is_disconnect),
+ ),
+ ])
+
+ with test_utils.nested(*patchers):
+ # "disconnect" the DB
+ db_disconnected = True
+ yield engine
+
+
+class MySQLPrePingHandlerTests(
+ db_test_base._MySQLOpportunisticTestCase,
+ TestDBDisconnectedFixture,
+):
+
def test_mariadb_error_1927(self):
for code in [1927]:
self._test_ping_listener_disconnected(
@@ -1286,6 +1399,26 @@ class TestDBDisconnected(TestsExceptionFilter):
is_disconnect=False
)
+ def test_mysql_w_disconnect_flag(self):
+ for code in [2002, 2003, 2002]:
+ self._test_ping_listener_disconnected(
+ "mysql",
+ self.OperationalError('%d MySQL server has gone away' % code)
+ )
+
+ def test_mysql_wo_disconnect_flag(self):
+ for code in [2002, 2003]:
+ self._test_ping_listener_disconnected(
+ "mysql",
+ self.OperationalError('%d MySQL server has gone away' % code),
+ is_disconnect=False
+ )
+
+
+class PostgreSQLPrePingHandlerTests(
+ db_test_base._PostgreSQLOpportunisticTestCase,
+ TestDBDisconnectedFixture):
+
def test_postgresql_ping_listener_disconnected(self):
self._test_ping_listener_disconnected(
"postgresql",
@@ -1302,79 +1435,18 @@ class TestDBDisconnected(TestsExceptionFilter):
)
-class TestDBConnectRetry(TestsExceptionFilter):
-
- def _run_test(self, dialect_name, exception, count, retries):
- counter = itertools.count()
-
- engine = self.engine
-
- # empty out the connection pool
- engine.dispose()
-
- connect_fn = engine.dialect.connect
-
- def cant_connect(*arg, **kw):
- if next(counter) < count:
- raise exception
- else:
- return connect_fn(*arg, **kw)
+if compat.sqla_2:
+ class MySQLNativePrePingTests(MySQLPrePingHandlerTests):
+ native_pre_ping = True
- with self._dbapi_fixture(dialect_name):
- with mock.patch.object(engine.dialect, "connect", cant_connect):
- return engines._test_connection(engine, retries, .01)
+ class PostgreSQLNativePrePingTests(PostgreSQLPrePingHandlerTests):
+ native_pre_ping = True
- def test_connect_no_retries(self):
- conn = self._run_test(
- "mysql",
- self.OperationalError("Error: (2003) something wrong"),
- 2, 0
- )
- # didnt connect because nothing was tried
- self.assertIsNone(conn)
- def test_connect_inifinite_retries(self):
- conn = self._run_test(
- "mysql",
- self.OperationalError("Error: (2003) something wrong"),
- 2, -1
- )
- # conn is good
- self.assertEqual(1, conn.scalar(sqla.select(1)))
-
- def test_connect_retry_past_failure(self):
- conn = self._run_test(
- "mysql",
- self.OperationalError("Error: (2003) something wrong"),
- 2, 3
- )
- # conn is good
- self.assertEqual(1, conn.scalar(sqla.select(1)))
-
- def test_connect_retry_not_candidate_exception(self):
- self.assertRaises(
- sqla.exc.OperationalError, # remember, we pass OperationalErrors
- # through at the moment :)
- self._run_test,
- "mysql",
- self.OperationalError("Error: (2015) I can't connect period"),
- 2, 3
- )
-
- def test_connect_retry_stops_infailure(self):
- self.assertRaises(
- exception.DBConnectionError,
- self._run_test,
- "mysql",
- self.OperationalError("Error: (2003) something wrong"),
- 3, 2
- )
-
-
-class TestDBConnectPingWrapping(TestsExceptionFilter):
+class TestDBConnectPingListener(TestsExceptionFilter):
def setUp(self):
- super(TestDBConnectPingWrapping, self).setUp()
+ super().setUp()
event.listen(
self.engine, "engine_connect", engines._connect_ping_listener)
@@ -1463,6 +1535,75 @@ class TestDBConnectPingWrapping(TestsExceptionFilter):
)
+class TestDBConnectRetry(TestsExceptionFilter):
+
+ def _run_test(self, dialect_name, exception, count, retries):
+ counter = itertools.count()
+
+ engine = self.engine
+
+ # empty out the connection pool
+ engine.dispose()
+
+ connect_fn = engine.dialect.connect
+
+ def cant_connect(*arg, **kw):
+ if next(counter) < count:
+ raise exception
+ else:
+ return connect_fn(*arg, **kw)
+
+ with self._dbapi_fixture(dialect_name):
+ with mock.patch.object(engine.dialect, "connect", cant_connect):
+ return engines._test_connection(engine, retries, .01)
+
+ def test_connect_no_retries(self):
+ conn = self._run_test(
+ "mysql",
+ self.OperationalError("Error: (2003) something wrong"),
+ 2, 0
+ )
+        # didn't connect because nothing was tried
+ self.assertIsNone(conn)
+
+ def test_connect_inifinite_retries(self):
+ conn = self._run_test(
+ "mysql",
+ self.OperationalError("Error: (2003) something wrong"),
+ 2, -1
+ )
+ # conn is good
+ self.assertEqual(1, conn.scalar(sqla.select(1)))
+
+ def test_connect_retry_past_failure(self):
+ conn = self._run_test(
+ "mysql",
+ self.OperationalError("Error: (2003) something wrong"),
+ 2, 3
+ )
+ # conn is good
+ self.assertEqual(1, conn.scalar(sqla.select(1)))
+
+ def test_connect_retry_not_candidate_exception(self):
+ self.assertRaises(
+ sqla.exc.OperationalError, # remember, we pass OperationalErrors
+ # through at the moment :)
+ self._run_test,
+ "mysql",
+ self.OperationalError("Error: (2015) I can't connect period"),
+ 2, 3
+ )
+
+ def test_connect_retry_stops_infailure(self):
+ self.assertRaises(
+ exception.DBConnectionError,
+ self._run_test,
+ "mysql",
+ self.OperationalError("Error: (2003) something wrong"),
+ 3, 2
+ )
+
+
class TestsErrorHandler(TestsExceptionFilter):
def test_multiple_error_handlers(self):
handler = mock.MagicMock(return_value=None)
diff --git a/oslo_db/tests/sqlalchemy/test_provision.py b/oslo_db/tests/sqlalchemy/test_provision.py
index a6cedce..e0ab39f 100644
--- a/oslo_db/tests/sqlalchemy/test_provision.py
+++ b/oslo_db/tests/sqlalchemy/test_provision.py
@@ -13,6 +13,7 @@
import os
from unittest import mock
+from sqlalchemy.engine import url as sqla_url
from sqlalchemy import exc as sa_exc
from sqlalchemy import inspect
from sqlalchemy import schema
@@ -156,8 +157,8 @@ class AdHocURLTest(test_base.BaseTestCase):
fixture.setUp()
self.assertEqual(
- str(enginefacade._context_manager._factory._writer_engine.url),
- "sqlite:///foo.db"
+ enginefacade._context_manager._factory._writer_engine.url,
+ sqla_url.make_url("sqlite:///foo.db")
)
self.assertTrue(os.path.exists("foo.db"))
@@ -176,14 +177,14 @@ class AdHocURLTest(test_base.BaseTestCase):
self.addCleanup(
mysql_backend.drop_named_database, "adhoc_test"
)
- url = str(mysql_backend.provisioned_database_url("adhoc_test"))
+ url = mysql_backend.provisioned_database_url("adhoc_test")
fixture = test_fixtures.AdHocDbFixture(url)
fixture.setUp()
self.assertEqual(
- str(enginefacade._context_manager._factory._writer_engine.url),
+ enginefacade._context_manager._factory._writer_engine.url,
url
)
diff --git a/oslo_db/tests/sqlalchemy/test_sqlalchemy.py b/oslo_db/tests/sqlalchemy/test_sqlalchemy.py
index 8488edf..aa66ae5 100644
--- a/oslo_db/tests/sqlalchemy/test_sqlalchemy.py
+++ b/oslo_db/tests/sqlalchemy/test_sqlalchemy.py
@@ -23,22 +23,23 @@ from unittest import mock
import fixtures
from oslo_config import cfg
-from oslo_utils import versionutils
import sqlalchemy
from sqlalchemy.engine import base as base_engine
from sqlalchemy import exc
+from sqlalchemy.pool import NullPool
from sqlalchemy import sql
from sqlalchemy import Column, MetaData, Table
-from sqlalchemy.engine import url
from sqlalchemy import Integer, String
from sqlalchemy.orm import declarative_base
from oslo_db import exception
from oslo_db import options as db_options
+from oslo_db.sqlalchemy import compat
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import engines
from oslo_db.sqlalchemy import models
from oslo_db.sqlalchemy import session
+from oslo_db.sqlalchemy import utils
from oslo_db.tests import base as test_base
from oslo_db.tests.sqlalchemy import base as db_test_base
@@ -388,9 +389,8 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
self.assertIsNot(ses1, ses2)
def test_get_session_arguments_override_default_settings(self):
- ses = self.facade.get_session(autocommit=False, expire_on_commit=True)
+ ses = self.facade.get_session(expire_on_commit=True)
- self.assertFalse(ses.autocommit)
self.assertTrue(ses.expire_on_commit)
@mock.patch('oslo_db.sqlalchemy.orm.get_maker')
@@ -410,7 +410,6 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
conf.set_override(optname, optvalue, group='database')
session.EngineFacade.from_config(conf,
- autocommit=False,
expire_on_commit=True)
create_engine.assert_called_once_with(
@@ -435,7 +434,6 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
logging_name=mock.ANY,
)
get_maker.assert_called_once_with(engine=create_engine(),
- autocommit=False,
expire_on_commit=True)
def test_slave_connection(self):
@@ -696,22 +694,24 @@ class CreateEngineTest(test_base.BaseTestCase):
def test_queuepool_args(self):
engines._init_connection_args(
- url.make_url("mysql+pymysql://u:p@host/test"), self.args,
- max_pool_size=10, max_overflow=10)
+ utils.make_url("mysql+pymysql://u:p@host/test"),
+ self.args,
+ {'max_pool_size': 10, 'max_overflow': 10},
+ )
self.assertEqual(10, self.args['pool_size'])
self.assertEqual(10, self.args['max_overflow'])
def test_sqlite_memory_pool_args(self):
for _url in ("sqlite://", "sqlite:///:memory:"):
engines._init_connection_args(
- url.make_url(_url), self.args,
- max_pool_size=10, max_overflow=10)
+ utils.make_url(_url),
+ self.args,
+ {'max_pool_size': 10, 'max_overflow': 10},
+ )
- # queuepool arguments are not peresnet
- self.assertNotIn(
- 'pool_size', self.args)
- self.assertNotIn(
- 'max_overflow', self.args)
+ # queuepool arguments are not present
+ self.assertNotIn('pool_size', self.args)
+ self.assertNotIn('max_overflow', self.args)
self.assertEqual(False,
self.args['connect_args']['check_same_thread'])
@@ -721,8 +721,10 @@ class CreateEngineTest(test_base.BaseTestCase):
def test_sqlite_file_pool_args(self):
engines._init_connection_args(
- url.make_url("sqlite:///somefile.db"), self.args,
- max_pool_size=10, max_overflow=10)
+ utils.make_url("sqlite:///somefile.db"),
+ self.args,
+ {'max_pool_size': 10, 'max_overflow': 10},
+ )
# queuepool arguments are not peresnet
self.assertNotIn('pool_size', self.args)
@@ -731,9 +733,12 @@ class CreateEngineTest(test_base.BaseTestCase):
self.assertFalse(self.args['connect_args'])
- # NullPool is the default for file based connections,
- # no need to specify this
- self.assertNotIn('poolclass', self.args)
+ if not compat.sqla_2:
+ # NullPool is the default for file based connections,
+ # no need to specify this
+ self.assertNotIn('poolclass', self.args)
+ else:
+ self.assertIs(self.args["poolclass"], NullPool)
def _test_mysql_connect_args_default(self, connect_args):
self.assertEqual({'charset': 'utf8', 'use_unicode': 1},
@@ -741,42 +746,37 @@ class CreateEngineTest(test_base.BaseTestCase):
def test_mysql_connect_args_default(self):
engines._init_connection_args(
- url.make_url("mysql://u:p@host/test"), self.args)
- self._test_mysql_connect_args_default(self.args['connect_args'])
-
- def test_mysql_oursql_connect_args_default(self):
- engines._init_connection_args(
- url.make_url("mysql+oursql://u:p@host/test"), self.args)
+ utils.make_url("mysql://u:p@host/test"), self.args, {})
self._test_mysql_connect_args_default(self.args['connect_args'])
def test_mysql_pymysql_connect_args_default(self):
engines._init_connection_args(
- url.make_url("mysql+pymysql://u:p@host/test"), self.args)
+ utils.make_url("mysql+pymysql://u:p@host/test"), self.args, {})
self.assertEqual({'charset': 'utf8'}, self.args['connect_args'])
def test_mysql_mysqldb_connect_args_default(self):
engines._init_connection_args(
- url.make_url("mysql+mysqldb://u:p@host/test"), self.args)
+ utils.make_url("mysql+mysqldb://u:p@host/test"), self.args, {})
self._test_mysql_connect_args_default(self.args['connect_args'])
def test_postgresql_connect_args_default(self):
engines._init_connection_args(
- url.make_url("postgresql://u:p@host/test"), self.args)
+ utils.make_url("postgresql://u:p@host/test"), self.args, {})
self.assertEqual('utf8', self.args['client_encoding'])
self.assertFalse(self.args['connect_args'])
def test_mysqlconnector_raise_on_warnings_default(self):
engines._init_connection_args(
- url.make_url("mysql+mysqlconnector://u:p@host/test"),
- self.args)
+ utils.make_url("mysql+mysqlconnector://u:p@host/test"),
+ self.args, {})
self.assertEqual(False, self.args['connect_args']['raise_on_warnings'])
def test_mysqlconnector_raise_on_warnings_override(self):
engines._init_connection_args(
- url.make_url(
+ utils.make_url(
"mysql+mysqlconnector://u:p@host/test"
"?raise_on_warnings=true"),
- self.args
+ self.args, {}
)
self.assertNotIn('raise_on_warnings', self.args['connect_args'])
@@ -806,14 +806,14 @@ class CreateEngineTest(test_base.BaseTestCase):
warn_interpolate):
engines._vet_url(
- url.make_url("mysql://scott:tiger@some_host/some_db"))
- engines._vet_url(url.make_url(
+ utils.make_url("mysql://scott:tiger@some_host/some_db"))
+ engines._vet_url(utils.make_url(
"mysql+mysqldb://scott:tiger@some_host/some_db"))
- engines._vet_url(url.make_url(
+ engines._vet_url(utils.make_url(
"mysql+pymysql://scott:tiger@some_host/some_db"))
- engines._vet_url(url.make_url(
+ engines._vet_url(utils.make_url(
"postgresql+psycopg2://scott:tiger@some_host/some_db"))
- engines._vet_url(url.make_url(
+ engines._vet_url(utils.make_url(
"postgresql://scott:tiger@some_host/some_db"))
self.assertEqual(
@@ -851,18 +851,18 @@ class ProcessGuardTest(db_test_base._DbTestCase):
with mock.patch("os.getpid", get_parent_pid):
with self.engine.connect() as conn:
- dbapi_id = id(conn.connection.connection)
+ dbapi_id = id(compat.driver_connection(conn))
with mock.patch("os.getpid", get_child_pid):
with self.engine.connect() as conn:
- new_dbapi_id = id(conn.connection.connection)
+ new_dbapi_id = id(compat.driver_connection(conn))
self.assertNotEqual(dbapi_id, new_dbapi_id)
# ensure it doesn't trip again
with mock.patch("os.getpid", get_child_pid):
with self.engine.connect() as conn:
- newer_dbapi_id = id(conn.connection.connection)
+ newer_dbapi_id = id(compat.driver_connection(conn))
self.assertEqual(new_dbapi_id, newer_dbapi_id)
@@ -906,13 +906,12 @@ class MySQLConnectPingListenerTest(db_test_base._MySQLOpportunisticTestCase):
with self.engine.begin() as conn:
self.assertTrue(isinstance(conn._transaction,
base_engine.RootTransaction))
- engines._connect_ping_listener(conn, False)
# TODO(ralonsoh): drop this check once SQLAlchemy minimum
# version is 2.0.
- sqla_version = versionutils.convert_version_to_tuple(
- sqlalchemy.__version__)
- if sqla_version[0] >= 2:
+ if compat.sqla_2:
+ engines._connect_ping_listener(conn)
self.assertIsNone(conn._transaction)
else:
+ engines._connect_ping_listener(conn, False)
self.assertTrue(isinstance(conn._transaction,
base_engine.RootTransaction))
diff --git a/oslo_db/tests/sqlalchemy/test_utils.py b/oslo_db/tests/sqlalchemy/test_utils.py
index 8fe11c5..059d015 100644
--- a/oslo_db/tests/sqlalchemy/test_utils.py
+++ b/oslo_db/tests/sqlalchemy/test_utils.py
@@ -23,11 +23,10 @@ from sqlalchemy import CheckConstraint
from sqlalchemy import MetaData, Table, Column
from sqlalchemy import ForeignKey, ForeignKeyConstraint
from sqlalchemy.dialects.postgresql import psycopg2
-from sqlalchemy.engine import url as sa_url
from sqlalchemy.exc import OperationalError
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import declarative_base
-from sqlalchemy.orm import mapper
+from sqlalchemy.orm import registry
from sqlalchemy.orm import Session
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import sql
@@ -132,7 +131,8 @@ fake_table = Table(
Column('key', String(50))
)
-mapper(FakeTableClassicalyMapped, fake_table)
+reg = registry()
+reg.map_imperatively(FakeTableClassicalyMapped, fake_table)
class FakeModel(object):
@@ -312,10 +312,16 @@ class TestPaginateQuery(test_base.BaseTestCase):
'another_crit',
]
- with mock.patch.object(self.model.user_id, 'isnot') as mock_isnot, \
- mock.patch.object(self.model.user_id, 'is_') as mock_is_a, \
- mock.patch.object(self.model.project_id, 'is_') as mock_is_b:
- mock_isnot.return_value = 'asc_null_1'
+ with mock.patch.object(
+ self.model.user_id.comparator.expression, 'is_not'
+ ) as mock_is_not, \
+ mock.patch.object(
+ self.model.user_id.comparator.expression, 'is_'
+ ) as mock_is_a, \
+ mock.patch.object(
+ self.model.project_id.comparator.expression, 'is_'
+ ) as mock_is_b:
+ mock_is_not.return_value = 'asc_null_1'
mock_is_a.side_effect = [
'desc_null_filter_1',
'desc_null_filter_2',
@@ -331,7 +337,7 @@ class TestPaginateQuery(test_base.BaseTestCase):
sort_dirs=[
'asc-nullslast', 'desc-nullsfirst'])
- mock_isnot.assert_called_once_with(None)
+ mock_is_not.assert_called_once_with(None)
mock_is_a.assert_has_calls([
mock.call(None),
mock.call(None),
@@ -385,11 +391,17 @@ class TestPaginateQuery(test_base.BaseTestCase):
mock_and.return_value = 'some_crit'
mock_or.side_effect = ['or_1', 'some_f']
- with mock.patch.object(self.model.user_id, 'isnot') as mock_isnot, \
- mock.patch.object(self.model.updated_at, 'is_') as mock_is_a, \
- mock.patch.object(self.model.user_id, 'is_') as mock_is_b:
-
- mock_isnot.return_value = 'asc_null_1'
+ with mock.patch.object(
+ self.model.user_id.comparator.expression, 'is_not'
+ ) as mock_is_not, \
+ mock.patch.object(
+ self.model.updated_at.comparator.expression, 'is_'
+ ) as mock_is_a, \
+ mock.patch.object(
+ self.model.user_id.comparator.expression, 'is_'
+ ) as mock_is_b:
+
+ mock_is_not.return_value = 'asc_null_1'
mock_is_a.return_value = 'desc_null_1'
mock_is_b.side_effect = ['asc_null_filter_1', 'asc_null_filter_2']
@@ -398,7 +410,7 @@ class TestPaginateQuery(test_base.BaseTestCase):
marker=self.marker,
sort_dirs=[
'asc-nullslast', 'desc-nullsfirst'])
- mock_isnot.assert_called_once_with(None)
+ mock_is_not.assert_called_once_with(None)
mock_is_a.assert_called_once_with(None)
mock_is_b.assert_has_calls([mock.call(None), mock.call(None)])
@@ -446,12 +458,20 @@ class TestPaginateQuery(test_base.BaseTestCase):
]
self.query.filter.return_value = self.query
- with mock.patch.object(self.model.user_id, 'isnot') as mock_isnot, \
- mock.patch.object(self.model.updated_at, 'is_') as mock_is_a, \
- mock.patch.object(self.model.user_id, 'is_') as mock_is_b, \
- mock.patch.object(self.model.project_id, 'is_') as mock_is_c:
-
- mock_isnot.return_value = 'asc_null_1'
+ with mock.patch.object(
+ self.model.user_id.comparator.expression, 'is_not'
+ ) as mock_is_not, \
+ mock.patch.object(
+ self.model.updated_at.comparator.expression, 'is_'
+ ) as mock_is_a, \
+ mock.patch.object(
+ self.model.user_id.comparator.expression, 'is_'
+ ) as mock_is_b, \
+ mock.patch.object(
+ self.model.project_id.comparator.expression, 'is_'
+ ) as mock_is_c:
+
+ mock_is_not.return_value = 'asc_null_1'
mock_is_a.return_value = 'desc_null_1'
mock_is_b.side_effect = ['asc_null_filter_1', 'asc_null_filter_2']
mock_is_c.side_effect = ['desc_null_3', 'desc_null_filter_3']
@@ -462,7 +482,7 @@ class TestPaginateQuery(test_base.BaseTestCase):
sort_dirs=['asc-nullslast', 'desc-nullsfirst',
'desc-nullsfirst'])
- mock_isnot.assert_called_once_with(None)
+ mock_is_not.assert_called_once_with(None)
mock_is_a.assert_called_once_with(None)
mock_is_b.assert_has_calls([mock.call(None), mock.call(None)])
mock_is_c.assert_has_calls([mock.call(None), mock.call(None)])
@@ -933,12 +953,12 @@ class TestConnectionUtils(test_base.BaseTestCase):
def setUp(self):
super(TestConnectionUtils, self).setUp()
- self.full_credentials = {'backend': 'postgresql',
+ self.full_credentials = {'backend': 'postgresql+psycopg2',
'database': 'test',
'user': 'dude',
'passwd': 'pass'}
- self.connect_string = 'postgresql://dude:pass@localhost/test'
+ self.connect_string = 'postgresql+psycopg2://dude:pass@localhost/test'
# NOTE(rpodolyaka): mock the dialect parts, so that we don't depend
# on psycopg2 (or any other DBAPI implementation) in these tests
@@ -946,8 +966,20 @@ class TestConnectionUtils(test_base.BaseTestCase):
@classmethod
def fake_dbapi(cls):
return mock.MagicMock()
- patch_dbapi = mock.patch.object(psycopg2.PGDialect_psycopg2, 'dbapi',
- new=fake_dbapi)
+
+ class OurDialect(psycopg2.PGDialect_psycopg2):
+ def dbapi(self):
+ return fake_dbapi
+
+ def import_dbapi(self):
+ return fake_dbapi
+
+ patch_dbapi = mock.patch.object(
+ psycopg2,
+ "PGDialect_psycopg2",
+ new=OurDialect,
+ )
+
patch_dbapi.start()
self.addCleanup(patch_dbapi.stop)
@@ -966,7 +998,7 @@ class TestConnectionUtils(test_base.BaseTestCase):
self.connect_string)
self.assertIsInstance(eng, sqlalchemy.engine.base.Engine)
- self.assertEqual(self.connect_string, str(eng.url))
+ self.assertEqual(utils.make_url(self.connect_string), eng.url)
mock_connect.assert_called_once()
fake_connection.close.assert_called_once()
@@ -983,10 +1015,10 @@ class TestConnectionUtils(test_base.BaseTestCase):
provision.Backend._ensure_backend_available,
self.connect_string)
self.assertEqual(
- "Backend 'postgresql' is unavailable: "
+ "Backend 'postgresql+psycopg2' is unavailable: "
"Could not connect", str(exc))
self.assertEqual(
- "The postgresql backend is unavailable: %s" % err,
+ "The postgresql+psycopg2 backend is unavailable: %s" % err,
log.output.strip())
def test_ensure_backend_available_no_dbapi_raises(self):
@@ -1001,13 +1033,13 @@ class TestConnectionUtils(test_base.BaseTestCase):
self.connect_string)
mock_create.assert_called_once_with(
- sa_url.make_url(self.connect_string))
+ utils.make_url(self.connect_string))
self.assertEqual(
- "Backend 'postgresql' is unavailable: "
+ "Backend 'postgresql+psycopg2' is unavailable: "
"No DBAPI installed", str(exc))
self.assertEqual(
- "The postgresql backend is unavailable: Can't import "
+ "The postgresql+psycopg2 backend is unavailable: Can't import "
"DBAPI module foobar", log.output.strip())
def test_get_db_connection_info(self):
@@ -1356,10 +1388,9 @@ class TestDialectFunctionDispatcher(test_base.BaseTestCase):
)
)
- mysqldb_url = sqlalchemy.engine.url.make_url("mysql+mysqldb://")
- mysqlconnector_url = sqlalchemy.engine.url.make_url(
- "mysql+mysqlconnector://")
- sqlite_url = sqlalchemy.engine.url.make_url("sqlite://")
+ mysqldb_url = utils.make_url("mysql+mysqldb://")
+ mysqlconnector_url = utils.make_url("mysql+mysqlconnector://")
+ sqlite_url = utils.make_url("sqlite://")
dispatcher(mysqldb_url, 1)
dispatcher(mysqlconnector_url, 2)
@@ -1395,8 +1426,7 @@ class TestDialectFunctionDispatcher(test_base.BaseTestCase):
)
def test_url_pymysql(self):
- url = sqlalchemy.engine.url.make_url(
- "mysql+pymysql://scott:tiger@localhost/test")
+ url = utils.make_url("mysql+pymysql://scott:tiger@localhost/test")
dispatcher, callable_fn = self._single_fixture()
dispatcher(url, 15)
@@ -1406,8 +1436,7 @@ class TestDialectFunctionDispatcher(test_base.BaseTestCase):
)
def test_url_mysql_generic(self):
- url = sqlalchemy.engine.url.make_url(
- "mysql://scott:tiger@localhost/test")
+ url = utils.make_url("mysql://scott:tiger@localhost/test")
dispatcher, callable_fn = self._single_fixture()
dispatcher(url, 15)
diff --git a/releasenotes/notes/sqlalchemy-20-0a193a01c70f805a.yaml b/releasenotes/notes/sqlalchemy-20-0a193a01c70f805a.yaml
new file mode 100644
index 0000000..b2c8d62
--- /dev/null
+++ b/releasenotes/notes/sqlalchemy-20-0a193a01c70f805a.yaml
@@ -0,0 +1,11 @@
+---
+features:
+ - |
+ oslo.db now supports SQLAlchemy 2.0.
+  - |
+    A new ``oslo_db.sqlalchemy.compat`` module has been added. This provides a
+    number of shims for handling differences between SQLAlchemy 1.x and 2.x.
+upgrade:
+ - |
+ The ability to create engine facades that used autocommit, first deprecated
+ in 12.1.0, has now been removed. This is not supported in SQLAlchemy 2.x.
diff --git a/setup.cfg b/setup.cfg
index 9cd074d..b085450 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,7 +6,7 @@ description_file =
author = OpenStack
author_email = openstack-discuss@lists.openstack.org
home_page = https://docs.openstack.org/oslo.db/latest
-python_requires = >=3.8
+python_requires = >=3.9
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
@@ -15,8 +15,8 @@ classifier =
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 3
- Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython