summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--doc/build/changelog/changelog_09.rst10
-rw-r--r--doc/build/changelog/changelog_10.rst10
-rw-r--r--doc/build/orm/mapper_config.rst6
-rw-r--r--lib/sqlalchemy/dialects/postgresql/pg8000.py14
-rw-r--r--lib/sqlalchemy/orm/identity.py5
-rw-r--r--lib/sqlalchemy/orm/mapper.py50
-rw-r--r--lib/sqlalchemy/orm/persistence.py422
-rw-r--r--lib/sqlalchemy/testing/engines.py112
-rw-r--r--lib/sqlalchemy/testing/plugin/pytestplugin.py3
-rw-r--r--lib/sqlalchemy/testing/profiling.py216
-rw-r--r--lib/sqlalchemy/testing/replay_fixture.py167
-rw-r--r--lib/sqlalchemy/util/_collections.py14
-rw-r--r--setup.cfg2
-rw-r--r--test/aaa_profiling/test_compiler.py2
-rw-r--r--test/aaa_profiling/test_zoomark.py155
-rw-r--r--test/aaa_profiling/test_zoomark_orm.py233
-rw-r--r--test/engine/test_reconnect.py110
-rw-r--r--test/engine/test_transaction.py7
-rw-r--r--test/orm/test_dynamic.py10
-rw-r--r--test/orm/test_unitofworkv2.py48
-rw-r--r--test/profiles.txt617
-rw-r--r--test/requirements.py11
22 files changed, 985 insertions, 1239 deletions
diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst
index 0f92fb254..b6eec2e9d 100644
--- a/doc/build/changelog/changelog_09.rst
+++ b/doc/build/changelog/changelog_09.rst
@@ -14,6 +14,16 @@
:version: 0.9.8
.. change::
+ :tags: feature, postgresql, pg8000
+ :versions: 1.0.0
+ :pullreq: github:125
+
+ Support is added for "sane multi row count" with the pg8000 driver,
+ which applies mostly to when using versioning with the ORM.
+ The feature is version-detected based on pg8000 1.9.14 or greater
+ in use. Pull request courtesy Tony Locke.
+
+ .. change::
:tags: bug, engine
:versions: 1.0.0
:tickets: 3165
diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst
index fb639ddf7..1cbbec3b3 100644
--- a/doc/build/changelog/changelog_10.rst
+++ b/doc/build/changelog/changelog_10.rst
@@ -17,6 +17,16 @@
:version: 1.0.0
.. change::
+ :tags: bug, orm
+ :tickets: 3167
+
+ Fixed bug where attribute "set" events or columns with
+ ``@validates`` would have events triggered within the flush process,
+ when those columns were the targets of a "fetch and populate"
+ operation, such as an autoincremented primary key, a Python side
+ default, or a server-side default "eagerly" fetched via RETURNING.
+
+ .. change::
:tags: bug, orm, py3k
The :class:`.IdentityMap` exposed from :class:`.Session.identity`
diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst
index 9139b53f0..d0679c721 100644
--- a/doc/build/orm/mapper_config.rst
+++ b/doc/build/orm/mapper_config.rst
@@ -667,6 +667,12 @@ issued when the ORM is populating the object::
assert '@' in address
return address
+.. versionchanged:: 1.0.0 - validators are no longer triggered within
+ the flush process when the newly fetched values for primary key
+ columns as well as some python- or server-side defaults are fetched.
+ Prior to 1.0, validators may be triggered in those cases as well.
+
+
Validators also receive collection append events, when items are added to a
collection::
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 68da5b6d7..4ccc90208 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -119,7 +119,7 @@ class PGDialect_pg8000(PGDialect):
supports_unicode_binds = True
default_paramstyle = 'format'
- supports_sane_multi_rowcount = False
+ supports_sane_multi_rowcount = True
execution_ctx_cls = PGExecutionContext_pg8000
statement_compiler = PGCompiler_pg8000
preparer = PGIdentifierPreparer_pg8000
@@ -133,6 +133,16 @@ class PGDialect_pg8000(PGDialect):
}
)
+ def initialize(self, connection):
+ if self.dbapi and hasattr(self.dbapi, '__version__'):
+ self._dbapi_version = tuple([
+ int(x) for x in
+ self.dbapi.__version__.split(".")])
+ else:
+ self._dbapi_version = (99, 99, 99)
+ self.supports_sane_multi_rowcount = self._dbapi_version >= (1, 9, 14)
+ super(PGDialect_pg8000, self).initialize(connection)
+
@classmethod
def dbapi(cls):
return __import__('pg8000')
@@ -172,11 +182,9 @@ class PGDialect_pg8000(PGDialect):
)
def do_begin_twophase(self, connection, xid):
- print("begin twophase", xid)
connection.connection.tpc_begin((0, xid, ''))
def do_prepare_twophase(self, connection, xid):
- print("prepare twophase", xid)
connection.connection.tpc_prepare()
def do_rollback_twophase(
diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py
index 4425fc3a6..0fa541194 100644
--- a/lib/sqlalchemy/orm/identity.py
+++ b/lib/sqlalchemy/orm/identity.py
@@ -181,7 +181,10 @@ class WeakInstanceDict(IdentityMap):
return iter(self.values())
def all_states(self):
- return self._dict.values()
+ if util.py2k:
+ return self._dict.values()
+ else:
+ return list(self._dict.values())
def discard(self, state):
if state.key in self._dict:
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index fc15769cd..89c092b58 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1189,14 +1189,6 @@ class Mapper(InspectionAttr):
util.ordered_column_set(t.c).\
intersection(all_cols)
- # determine cols that aren't expressed within our tables; mark these
- # as "read only" properties which are refreshed upon INSERT/UPDATE
- self._readonly_props = set(
- self._columntoproperty[col]
- for col in self._columntoproperty
- if not hasattr(col, 'table') or
- col.table not in self._cols_by_table)
-
# if explicit PK argument sent, add those columns to the
# primary key mappings
if self._primary_key_argument:
@@ -1247,6 +1239,15 @@ class Mapper(InspectionAttr):
self.primary_key = tuple(primary_key)
self._log("Identified primary key columns: %s", primary_key)
+ # determine cols that aren't expressed within our tables; mark these
+ # as "read only" properties which are refreshed upon INSERT/UPDATE
+ self._readonly_props = set(
+ self._columntoproperty[col]
+ for col in self._columntoproperty
+ if self._columntoproperty[col] not in self._primary_key_props and
+ (not hasattr(col, 'table') or
+ col.table not in self._cols_by_table))
+
def _configure_properties(self):
# Column and other ClauseElement objects which are mapped
@@ -1893,14 +1894,27 @@ class Mapper(InspectionAttr):
"""
@_memoized_configured_property
- def _col_to_propkey(self):
+ def _insert_cols_as_none(self):
return dict(
(
table,
- [
- (col, self._columntoproperty[col].key)
+ frozenset(
+ col.key for col in columns
+ if not col.primary_key and
+ not col.server_default and not col.default)
+ )
+ for table, columns in self._cols_by_table.items()
+ )
+
+ @_memoized_configured_property
+ def _propkey_to_col(self):
+ return dict(
+ (
+ table,
+ dict(
+ (self._columntoproperty[col].key, col)
for col in columns
- ]
+ )
)
for table, columns in self._cols_by_table.items()
)
@@ -2342,18 +2356,26 @@ class Mapper(InspectionAttr):
dict_ = state.dict
manager = state.manager
return [
- manager[self._columntoproperty[col].key].
+ manager[prop.key].
impl.get(state, dict_,
attributes.PASSIVE_RETURN_NEVER_SET)
- for col in self.primary_key
+ for prop in self._primary_key_props
]
+ @_memoized_configured_property
+ def _primary_key_props(self):
+ return [self._columntoproperty[col] for col in self.primary_key]
+
def _get_state_attr_by_column(
self, state, dict_, column,
passive=attributes.PASSIVE_RETURN_NEVER_SET):
prop = self._columntoproperty[column]
return state.manager[prop.key].impl.get(state, dict_, passive=passive)
+ def _set_committed_state_attr_by_column(self, state, dict_, column, value):
+ prop = self._columntoproperty[column]
+ state.manager[prop.key].impl.set_committed_value(state, dict_, value)
+
def _set_state_attr_by_column(self, state, dict_, column, value):
prop = self._columntoproperty[column]
state.manager[prop.key].impl.set(state, dict_, value, None)
diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py
index 782d94dc8..8d3e90cf4 100644
--- a/lib/sqlalchemy/orm/persistence.py
+++ b/lib/sqlalchemy/orm/persistence.py
@@ -132,33 +132,60 @@ def save_obj(
save_obj(base_mapper, [state], uowtransaction, single=True)
return
- states_to_insert, states_to_update = _organize_states_for_save(
- base_mapper,
- states,
- uowtransaction)
-
+ states_to_update = []
+ states_to_insert = []
cached_connections = _cached_connection_dict(base_mapper)
+ for (state, dict_, mapper, connection,
+ has_identity, row_switch) in _organize_states_for_save(
+ base_mapper, states, uowtransaction
+ ):
+ if has_identity or row_switch:
+ states_to_update.append(
+ (state, dict_, mapper, connection,
+ has_identity, row_switch)
+ )
+ else:
+ states_to_insert.append(
+ (state, dict_, mapper, connection,
+ has_identity, row_switch)
+ )
+
for table, mapper in base_mapper._sorted_tables.items():
- insert = _collect_insert_commands(base_mapper, uowtransaction,
- table, states_to_insert)
+ if table not in mapper._pks_by_table:
+ continue
+ insert = (
+ (state, state_dict, sub_mapper, connection)
+ for state, state_dict, sub_mapper, connection, has_identity,
+ row_switch in states_to_insert
+ if table in sub_mapper._pks_by_table
+ )
+ insert = _collect_insert_commands(table, insert)
- update = _collect_update_commands(base_mapper, uowtransaction,
- table, states_to_update)
+ update = (
+ (state, state_dict, sub_mapper, connection, row_switch)
+ for state, state_dict, sub_mapper, connection, has_identity,
+ row_switch in states_to_update
+ if table in sub_mapper._pks_by_table
+ )
+ update = _collect_update_commands(uowtransaction, table, update)
- if update:
- _emit_update_statements(base_mapper, uowtransaction,
- cached_connections,
- mapper, table, update)
+ _emit_update_statements(base_mapper, uowtransaction,
+ cached_connections,
+ mapper, table, update)
- if insert:
- _emit_insert_statements(base_mapper, uowtransaction,
- cached_connections,
- mapper, table, insert)
+ _emit_insert_statements(base_mapper, uowtransaction,
+ cached_connections,
+ mapper, table, insert)
_finalize_insert_update_commands(
base_mapper, uowtransaction,
- states_to_insert, states_to_update)
+ (
+ (state, state_dict, mapper, connection, has_identity)
+ for state, state_dict, mapper, connection, has_identity,
+ row_switch in states_to_insert + states_to_update
+ )
+ )
def post_update(base_mapper, states, uowtransaction, post_update_cols):
@@ -168,19 +195,28 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols):
"""
cached_connections = _cached_connection_dict(base_mapper)
- states_to_update = _organize_states_for_post_update(
+ states_to_update = list(_organize_states_for_post_update(
base_mapper,
- states, uowtransaction)
+ states, uowtransaction))
for table, mapper in base_mapper._sorted_tables.items():
+ if table not in mapper._pks_by_table:
+ continue
+
+ update = (
+ (state, state_dict, sub_mapper, connection)
+ for
+ state, state_dict, sub_mapper, connection in states_to_update
+ if table in sub_mapper._pks_by_table
+ )
+
update = _collect_post_update_commands(base_mapper, uowtransaction,
- table, states_to_update,
+ table, update,
post_update_cols)
- if update:
- _emit_post_update_statements(base_mapper, uowtransaction,
- cached_connections,
- mapper, table, update)
+ _emit_post_update_statements(base_mapper, uowtransaction,
+ cached_connections,
+ mapper, table, update)
def delete_obj(base_mapper, states, uowtransaction):
@@ -193,18 +229,27 @@ def delete_obj(base_mapper, states, uowtransaction):
cached_connections = _cached_connection_dict(base_mapper)
- states_to_delete = _organize_states_for_delete(
+ states_to_delete = list(_organize_states_for_delete(
base_mapper,
states,
- uowtransaction)
+ uowtransaction))
table_to_mapper = base_mapper._sorted_tables
for table in reversed(list(table_to_mapper.keys())):
- delete = _collect_delete_commands(base_mapper, uowtransaction,
- table, states_to_delete)
-
mapper = table_to_mapper[table]
+ if table not in mapper._pks_by_table:
+ continue
+
+ delete = (
+ (state, state_dict, sub_mapper, connection)
+ for state, state_dict, sub_mapper, has_identity, connection
+ in states_to_delete if table in sub_mapper._pks_by_table
+ and has_identity
+ )
+
+ delete = _collect_delete_commands(base_mapper, uowtransaction,
+ table, delete)
_emit_delete_statements(base_mapper, uowtransaction,
cached_connections, mapper, table, delete)
@@ -226,10 +271,6 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
"""
- states_to_insert = []
- states_to_update = []
- instance_key = None
-
for state, dict_, mapper, connection in _connections_for_states(
base_mapper, uowtransaction,
states):
@@ -275,18 +316,8 @@ def _organize_states_for_save(base_mapper, states, uowtransaction):
uowtransaction.remove_state_actions(existing)
row_switch = existing
- if not has_identity and not row_switch:
- states_to_insert.append(
- (state, dict_, mapper, connection,
- has_identity, row_switch)
- )
- else:
- states_to_update.append(
- (state, dict_, mapper, connection,
- has_identity, row_switch)
- )
-
- return states_to_insert, states_to_update
+ yield (state, dict_, mapper, connection,
+ has_identity, row_switch)
def _organize_states_for_post_update(base_mapper, states,
@@ -299,8 +330,7 @@ def _organize_states_for_post_update(base_mapper, states,
the execution per state.
"""
- return list(_connections_for_states(base_mapper, uowtransaction,
- states))
+ return _connections_for_states(base_mapper, uowtransaction, states)
def _organize_states_for_delete(base_mapper, states, uowtransaction):
@@ -311,52 +341,46 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction):
mapper, the connection to use for the execution per state.
"""
- states_to_delete = []
-
for state, dict_, mapper, connection in _connections_for_states(
base_mapper, uowtransaction,
states):
mapper.dispatch.before_delete(mapper, connection, state)
- states_to_delete.append((state, dict_, mapper,
- bool(state.key), connection))
- return states_to_delete
+ yield state, dict_, mapper, bool(state.key), connection
-def _collect_insert_commands(base_mapper, uowtransaction, table,
- states_to_insert):
+def _collect_insert_commands(table, states_to_insert):
"""Identify sets of values to use in INSERT statements for a
list of states.
"""
- insert = []
- for state, state_dict, mapper, connection, has_identity, \
- row_switch in states_to_insert:
+ for state, state_dict, mapper, connection in states_to_insert:
- if table not in mapper._pks_by_table:
- continue
+ # assert table in mapper._pks_by_table
params = {}
value_params = {}
- for col, propkey in mapper._col_to_propkey[table]:
- if propkey in state_dict:
- value = state_dict[propkey]
- if isinstance(value, sql.ClauseElement):
- value_params[col.key] = value
- elif value is not None or (
- not col.primary_key and
- not col.server_default and
- not col.default):
- params[col.key] = value
+
+ propkey_to_col = mapper._propkey_to_col[table]
+
+ for propkey in set(propkey_to_col).intersection(state_dict):
+ value = state_dict[propkey]
+ col = propkey_to_col[propkey]
+ if value is None:
+ continue
+ elif isinstance(value, sql.ClauseElement):
+ value_params[col.key] = value
else:
- if not col.server_default \
- and not col.default and not col.primary_key:
- params[col.key] = None
+ params[col.key] = value
+
+ for colkey in mapper._insert_cols_as_none[table].\
+ difference(params).difference(value_params):
+ params[colkey] = None
has_all_pks = mapper._pk_keys_by_table[table].issubset(params)
- if base_mapper.eager_defaults:
+ if mapper.base_mapper.eager_defaults:
has_all_defaults = mapper._server_default_cols[table].\
issubset(params)
else:
@@ -368,14 +392,13 @@ def _collect_insert_commands(base_mapper, uowtransaction, table,
params[mapper.version_id_col.key] = \
mapper.version_id_generator(None)
- insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks,
- has_all_defaults))
- return insert
+ yield (
+ state, state_dict, params, mapper,
+ connection, value_params, has_all_pks,
+ has_all_defaults)
-def _collect_update_commands(base_mapper, uowtransaction,
- table, states_to_update):
+def _collect_update_commands(uowtransaction, table, states_to_update):
"""Identify sets of values to use in UPDATE statements for a
list of states.
@@ -387,110 +410,82 @@ def _collect_update_commands(base_mapper, uowtransaction,
"""
- update = []
- for state, state_dict, mapper, connection, has_identity, \
- row_switch in states_to_update:
- if table not in mapper._pks_by_table:
- continue
+ for state, state_dict, mapper, connection, row_switch in states_to_update:
+
+ # assert table in mapper._pks_by_table
pks = mapper._pks_by_table[table]
params = {}
value_params = {}
- hasdata = hasnull = False
+ propkey_to_col = mapper._propkey_to_col[table]
- for col in mapper._cols_by_table[table]:
- if col is mapper.version_id_col:
- params[col._label] = \
- mapper._get_committed_state_attr_by_column(
- row_switch or state,
- row_switch and row_switch.dict
- or state_dict,
- col)
+ for propkey in set(propkey_to_col).intersection(state.committed_state):
+ value = state_dict[propkey]
+ col = propkey_to_col[propkey]
- prop = mapper._columntoproperty[col]
- history = state.manager[prop.key].impl.get_history(
- state, state_dict, attributes.PASSIVE_NO_INITIALIZE
- )
- if history.added:
- params[col.key] = history.added[0]
- hasdata = True
+ if not state.manager[propkey].impl.is_equal(
+ value, state.committed_state[propkey]):
+ if isinstance(value, sql.ClauseElement):
+ value_params[col] = value
+ else:
+ params[col.key] = value
+
+ if mapper.version_id_col is not None:
+ col = mapper.version_id_col
+ params[col._label] = \
+ mapper._get_committed_state_attr_by_column(
+ row_switch if row_switch else state,
+ row_switch.dict if row_switch else state_dict,
+ col)
+
+ if col.key not in params and \
+ mapper.version_id_generator is not False:
+ val = mapper.version_id_generator(params[col._label])
+ params[col.key] = val
+
+ if not (params or value_params):
+ continue
+
+ pk_params = {}
+ for col in pks:
+ propkey = mapper._columntoproperty[col].key
+ history = state.manager[propkey].impl.get_history(
+ state, state_dict, attributes.PASSIVE_OFF)
+
+ if row_switch and not history.deleted and history.added:
+ # row switch present. convert a row that thought
+ # it would be an INSERT into an UPDATE, by removing
+ # the PK value from the SET clause and instead putting
+ # it in the WHERE clause.
+ del params[col.key]
+ pk_params[col._label] = history.added[0]
+ elif history.added:
+ # we're updating the PK value.
+ assert history.deleted, (
+ "New PK value without an old one not "
+ "possible for an UPDATE")
+ # check if an UPDATE of the PK value
+ # has already occurred as a result of ON UPDATE CASCADE.
+ # If so, use the new value to locate the row.
+ if ("pk_cascaded", state, col) in uowtransaction.attributes:
+ pk_params[col._label] = history.added[0]
else:
- if mapper.version_id_generator is not False:
- val = mapper.version_id_generator(params[col._label])
- params[col.key] = val
-
- # HACK: check for history, in case the
- # history is only
- # in a different table than the one
- # where the version_id_col is.
- for prop in mapper._columntoproperty.values():
- history = (
- state.manager[prop.key].impl.get_history(
- state, state_dict,
- attributes.PASSIVE_NO_INITIALIZE))
- if history.added:
- hasdata = True
+ # else, use the old value to locate the row
+ pk_params[col._label] = history.deleted[0]
else:
- prop = mapper._columntoproperty[col]
- history = state.manager[prop.key].impl.get_history(
- state, state_dict,
- attributes.PASSIVE_OFF if col in pks else
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- if isinstance(history.added[0],
- sql.ClauseElement):
- value_params[col] = history.added[0]
- else:
- value = history.added[0]
- params[col.key] = value
-
- if col in pks:
- if history.deleted and \
- not row_switch:
- # if passive_updates and sync detected
- # this was a pk->pk sync, use the new
- # value to locate the row, since the
- # DB would already have set this
- if ("pk_cascaded", state, col) in \
- uowtransaction.attributes:
- value = history.added[0]
- params[col._label] = value
- else:
- # use the old value to
- # locate the row
- value = history.deleted[0]
- params[col._label] = value
- hasdata = True
- else:
- # row switch logic can reach us here
- # remove the pk from the update params
- # so the update doesn't
- # attempt to include the pk in the
- # update statement
- del params[col.key]
- value = history.added[0]
- params[col._label] = value
- if value is None:
- hasnull = True
- else:
- hasdata = True
- elif col in pks:
- value = history.unchanged[0]
- if value is None:
- hasnull = True
- params[col._label] = value
+ pk_params[col._label] = history.unchanged[0]
- if hasdata:
- if hasnull:
+ if params or value_params:
+ if None in pk_params.values():
raise orm_exc.FlushError(
- "Can't update table "
- "using NULL for primary "
+ "Can't update table using NULL for primary "
"key value")
- update.append((state, state_dict, params, mapper,
- connection, value_params))
- return update
+ params.update(pk_params)
+ yield (
+ state, state_dict, params, mapper,
+ connection, value_params)
def _collect_post_update_commands(base_mapper, uowtransaction, table,
@@ -500,10 +495,10 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
"""
- update = []
for state, state_dict, mapper, connection in states_to_update:
- if table not in mapper._pks_by_table:
- continue
+
+ # assert table in mapper._pks_by_table
+
pks = mapper._pks_by_table[table]
params = {}
hasdata = False
@@ -525,9 +520,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table,
params[col.key] = value
hasdata = True
if hasdata:
- update.append((state, state_dict, params, mapper,
- connection))
- return update
+ yield params, connection
def _collect_delete_commands(base_mapper, uowtransaction, table,
@@ -535,15 +528,11 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
"""Identify values to use in DELETE statements for a list of
states to be deleted."""
- delete = util.defaultdict(list)
+ for state, state_dict, mapper, connection in states_to_delete:
- for state, state_dict, mapper, has_identity, connection \
- in states_to_delete:
- if not has_identity or table not in mapper._pks_by_table:
- continue
+ # assert table in mapper._pks_by_table
params = {}
- delete[connection].append(params)
for col in mapper._pks_by_table[table]:
params[col.key] = \
value = \
@@ -561,7 +550,7 @@ def _collect_delete_commands(base_mapper, uowtransaction, table,
mapper._get_committed_state_attr_by_column(
state, state_dict,
mapper.version_id_col)
- return delete
+ yield params, connection
def _emit_update_statements(base_mapper, uowtransaction,
@@ -602,8 +591,7 @@ def _emit_update_statements(base_mapper, uowtransaction,
lambda rec: (
rec[4],
tuple(sorted(rec[2])),
- bool(rec[5]))
- ):
+ bool(rec[5]))):
rows = 0
records = list(records)
@@ -625,12 +613,29 @@ def _emit_update_statements(base_mapper, uowtransaction,
value_params)
rows += c.rowcount
else:
- multiparams = [rec[2] for rec in records]
- c = cached_connections[connection].\
- execute(statement, multiparams)
+ if needs_version_id and \
+ not connection.dialect.supports_sane_multi_rowcount and \
+ connection.dialect.supports_sane_rowcount:
+ for state, state_dict, params, mapper, \
+ connection, value_params in records:
+ c = cached_connections[connection].\
+ execute(statement, params)
+ _postfetch(
+ mapper,
+ uowtransaction,
+ table,
+ state,
+ state_dict,
+ c,
+ c.context.compiled_parameters[0],
+ value_params)
+ rows += c.rowcount
+ else:
+ multiparams = [rec[2] for rec in records]
+ c = cached_connections[connection].\
+ execute(statement, multiparams)
- rows += c.rowcount
- if bookkeeping:
+ rows += c.rowcount
for state, state_dict, params, mapper, \
connection, value_params in records:
_postfetch(
@@ -726,13 +731,7 @@ def _emit_insert_statements(base_mapper, uowtransaction,
mapper._pks_by_table[table]):
prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
- # TODO: would rather say:
- # state_dict[prop.key] = pk
- mapper_rec._set_state_attr_by_column(
- state,
- state_dict,
- col, pk)
-
+ state_dict[prop.key] = pk
_postfetch(
mapper_rec,
uowtransaction,
@@ -765,11 +764,10 @@ def _emit_post_update_statements(base_mapper, uowtransaction,
# also group them into common (connection, cols) sets
# to support executemany().
for key, grouper in groupby(
- update, lambda rec: (rec[4], list(rec[2].keys()))
+ update, lambda rec: (rec[1], sorted(rec[0]))
):
connection = key[0]
- multiparams = [params for state, state_dict,
- params, mapper, conn in grouper]
+ multiparams = [params for params, conn in grouper]
cached_connections[connection].\
execute(statement, multiparams)
@@ -799,8 +797,15 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
return table.delete(clause)
- for connection, del_objects in delete.items():
- statement = base_mapper._memo(('delete', table), delete_stmt)
+ statement = base_mapper._memo(('delete', table), delete_stmt)
+ for connection, recs in groupby(
+ delete,
+ lambda rec: rec[1]
+ ):
+ del_objects = [
+ params
+ for params, connection in recs
+ ]
connection = cached_connections[connection]
@@ -853,15 +858,12 @@ def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
)
-def _finalize_insert_update_commands(base_mapper, uowtransaction,
- states_to_insert, states_to_update):
+def _finalize_insert_update_commands(base_mapper, uowtransaction, states):
"""finalize state on states that have been inserted or updated,
including calling after_insert/after_update events.
"""
- for state, state_dict, mapper, connection, has_identity, \
- row_switch in states_to_insert + \
- states_to_update:
+ for state, state_dict, mapper, connection, has_identity in states:
if mapper._readonly_props:
readonly = state.unmodified_intersection(
@@ -917,11 +919,11 @@ def _postfetch(mapper, uowtransaction, table,
for col in returning_cols:
if col.primary_key:
continue
- mapper._set_state_attr_by_column(state, dict_, col, row[col])
+ dict_[mapper._columntoproperty[col].key] = row[col]
for c in prefetch_cols:
if c.key in params and c in mapper._columntoproperty:
- mapper._set_state_attr_by_column(state, dict_, c, params[c.key])
+ dict_[mapper._columntoproperty[c].key] = params[c.key]
if postfetch_cols:
state._expire_attributes(state.dict,
diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py
index 9052df570..67c13231e 100644
--- a/lib/sqlalchemy/testing/engines.py
+++ b/lib/sqlalchemy/testing/engines.py
@@ -7,15 +7,12 @@
from __future__ import absolute_import
-import types
import weakref
-from collections import deque
from . import config
from .util import decorator
from .. import event, pool
import re
import warnings
-from .. import util
class ConnectionKiller(object):
@@ -339,112 +336,3 @@ def proxying_engine(conn_cls=DBAPIProxyConnection,
return testing_engine(options={'creator': mock_conn})
-class ReplayableSession(object):
- """A simple record/playback tool.
-
- This is *not* a mock testing class. It only records a session for later
- playback and makes no assertions on call consistency whatsoever. It's
- unlikely to be suitable for anything other than DB-API recording.
-
- """
-
- Callable = object()
- NoAttribute = object()
-
- if util.py2k:
- Natives = set([getattr(types, t)
- for t in dir(types) if not t.startswith('_')]).\
- difference([getattr(types, t)
- for t in ('FunctionType', 'BuiltinFunctionType',
- 'MethodType', 'BuiltinMethodType',
- 'LambdaType', 'UnboundMethodType',)])
- else:
- Natives = set([getattr(types, t)
- for t in dir(types) if not t.startswith('_')]).\
- union([type(t) if not isinstance(t, type)
- else t for t in __builtins__.values()]).\
- difference([getattr(types, t)
- for t in ('FunctionType', 'BuiltinFunctionType',
- 'MethodType', 'BuiltinMethodType',
- 'LambdaType', )])
-
- def __init__(self):
- self.buffer = deque()
-
- def recorder(self, base):
- return self.Recorder(self.buffer, base)
-
- def player(self):
- return self.Player(self.buffer)
-
- class Recorder(object):
- def __init__(self, buffer, subject):
- self._buffer = buffer
- self._subject = subject
-
- def __call__(self, *args, **kw):
- subject, buffer = [object.__getattribute__(self, x)
- for x in ('_subject', '_buffer')]
-
- result = subject(*args, **kw)
- if type(result) not in ReplayableSession.Natives:
- buffer.append(ReplayableSession.Callable)
- return type(self)(buffer, result)
- else:
- buffer.append(result)
- return result
-
- @property
- def _sqla_unwrap(self):
- return self._subject
-
- def __getattribute__(self, key):
- try:
- return object.__getattribute__(self, key)
- except AttributeError:
- pass
-
- subject, buffer = [object.__getattribute__(self, x)
- for x in ('_subject', '_buffer')]
- try:
- result = type(subject).__getattribute__(subject, key)
- except AttributeError:
- buffer.append(ReplayableSession.NoAttribute)
- raise
- else:
- if type(result) not in ReplayableSession.Natives:
- buffer.append(ReplayableSession.Callable)
- return type(self)(buffer, result)
- else:
- buffer.append(result)
- return result
-
- class Player(object):
- def __init__(self, buffer):
- self._buffer = buffer
-
- def __call__(self, *args, **kw):
- buffer = object.__getattribute__(self, '_buffer')
- result = buffer.popleft()
- if result is ReplayableSession.Callable:
- return self
- else:
- return result
-
- @property
- def _sqla_unwrap(self):
- return None
-
- def __getattribute__(self, key):
- try:
- return object.__getattribute__(self, key)
- except AttributeError:
- pass
- buffer = object.__getattribute__(self, '_buffer')
- result = buffer.popleft()
- if result is ReplayableSession.Callable:
- return self
- elif result is ReplayableSession.NoAttribute:
- raise AttributeError(key)
- else:
- return result
diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py
index f4c9efd55..005942913 100644
--- a/lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -74,6 +74,9 @@ def pytest_collection_modifyitems(session, config, items):
# new classes to a module on the fly.
rebuilt_items = collections.defaultdict(list)
+ items[:] = [
+ item for item in
+ items if isinstance(item.parent, pytest.Instance)]
test_classes = set(item.parent for item in items)
for test_class in test_classes:
for sub_cls in plugin_base.generate_sub_tests(
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index 75baec987..fcb888f86 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -14,13 +14,12 @@ in a more fine-grained way than nose's profiling plugin.
import os
import sys
-from .util import gc_collect, decorator
+from .util import gc_collect
from . import config
from .plugin.plugin_base import SkipTest
import pstats
-import time
import collections
-from .. import util
+import contextlib
try:
import cProfile
@@ -30,64 +29,8 @@ from ..util import jython, pypy, win32, update_wrapper
_current_test = None
-
-def profiled(target=None, **target_opts):
- """Function profiling.
-
- @profiled()
- or
- @profiled(report=True, sort=('calls',), limit=20)
-
- Outputs profiling info for a decorated function.
-
- """
-
- profile_config = {'targets': set(),
- 'report': True,
- 'print_callers': False,
- 'print_callees': False,
- 'graphic': False,
- 'sort': ('time', 'calls'),
- 'limit': None}
- if target is None:
- target = 'anonymous_target'
-
- @decorator
- def decorate(fn, *args, **kw):
- elapsed, load_stats, result = _profile(
- fn, *args, **kw)
-
- graphic = target_opts.get('graphic', profile_config['graphic'])
- if graphic:
- os.system("runsnake %s" % filename)
- else:
- report = target_opts.get('report', profile_config['report'])
- if report:
- sort_ = target_opts.get('sort', profile_config['sort'])
- limit = target_opts.get('limit', profile_config['limit'])
- print(("Profile report for target '%s'" % (
- target, )
- ))
-
- stats = load_stats()
- stats.sort_stats(*sort_)
- if limit:
- stats.print_stats(limit)
- else:
- stats.print_stats()
-
- print_callers = target_opts.get(
- 'print_callers', profile_config['print_callers'])
- if print_callers:
- stats.print_callers()
-
- print_callees = target_opts.get(
- 'print_callees', profile_config['print_callees'])
- if print_callees:
- stats.print_callees()
-
- return result
- return decorate
+# ProfileStatsFile instance, set up in plugin_base
+_profile_stats = None
class ProfileStatsFile(object):
@@ -177,20 +120,23 @@ class ProfileStatsFile(object):
self._write()
def _header(self):
- return \
- "# %s\n"\
- "# This file is written out on a per-environment basis.\n"\
- "# For each test in aaa_profiling, the corresponding function and \n"\
- "# environment is located within this file. If it doesn't exist,\n"\
- "# the test is skipped.\n"\
- "# If a callcount does exist, it is compared to what we received. \n"\
- "# assertions are raised if the counts do not match.\n"\
- "# \n"\
- "# To add a new callcount test, apply the function_call_count \n"\
- "# decorator and re-run the tests using the --write-profiles \n"\
- "# option - this file will be rewritten including the new count.\n"\
- "# \n"\
- "" % (self.fname)
+ return (
+ "# %s\n"
+ "# This file is written out on a per-environment basis.\n"
+ "# For each test in aaa_profiling, the corresponding "
+ "function and \n"
+ "# environment is located within this file. "
+ "If it doesn't exist,\n"
+ "# the test is skipped.\n"
+ "# If a callcount does exist, it is compared "
+ "to what we received. \n"
+ "# assertions are raised if the counts do not match.\n"
+ "# \n"
+ "# To add a new callcount test, apply the function_call_count \n"
+ "# decorator and re-run the tests using the --write-profiles \n"
+ "# option - this file will be rewritten including the new count.\n"
+ "# \n"
+ ) % (self.fname)
def _read(self):
try:
@@ -239,72 +185,66 @@ def function_call_count(variance=0.05):
def decorate(fn):
def wrap(*args, **kw):
-
- if cProfile is None:
- raise SkipTest("cProfile is not installed")
-
- if not _profile_stats.has_stats() and not _profile_stats.write:
- # run the function anyway, to support dependent tests
- # (not a great idea but we have these in test_zoomark)
- fn(*args, **kw)
- raise SkipTest("No profiling stats available on this "
- "platform for this function. Run tests with "
- "--write-profiles to add statistics to %s for "
- "this platform." % _profile_stats.short_fname)
-
- gc_collect()
-
- timespent, load_stats, fn_result = _profile(
- fn, *args, **kw
- )
- stats = load_stats()
- callcount = stats.total_calls
-
- expected = _profile_stats.result(callcount)
- if expected is None:
- expected_count = None
- else:
- line_no, expected_count = expected
-
- print(("Pstats calls: %d Expected %s" % (
- callcount,
- expected_count
- )
- ))
- stats.print_stats()
- # stats.print_callers()
-
- if expected_count:
- deviance = int(callcount * variance)
- failed = abs(callcount - expected_count) > deviance
-
- if failed:
- if _profile_stats.write:
- _profile_stats.replace(callcount)
- else:
- raise AssertionError(
- "Adjusted function call count %s not within %s%% "
- "of expected %s. Rerun with --write-profiles to "
- "regenerate this callcount."
- % (
- callcount, (variance * 100),
- expected_count))
- return fn_result
+ with count_functions(variance=variance):
+ return fn(*args, **kw)
return update_wrapper(wrap, fn)
return decorate
-def _profile(fn, *args, **kw):
- filename = "%s.prof" % fn.__name__
-
- def load_stats():
- st = pstats.Stats(filename)
- os.unlink(filename)
- return st
+@contextlib.contextmanager
+def count_functions(variance=0.05):
+ if cProfile is None:
+ raise SkipTest("cProfile is not installed")
+
+ if not _profile_stats.has_stats() and not _profile_stats.write:
+ raise SkipTest("No profiling stats available on this "
+ "platform for this function. Run tests with "
+ "--write-profiles to add statistics to %s for "
+ "this platform." % _profile_stats.short_fname)
+
+ gc_collect()
+
+ pr = cProfile.Profile()
+ pr.enable()
+ #began = time.time()
+ yield
+ #ended = time.time()
+ pr.disable()
+
+ #s = compat.StringIO()
+ stats = pstats.Stats(pr, stream=sys.stdout)
+
+ #timespent = ended - began
+ callcount = stats.total_calls
+
+ expected = _profile_stats.result(callcount)
+ if expected is None:
+ expected_count = None
+ else:
+ line_no, expected_count = expected
+
+ print(("Pstats calls: %d Expected %s" % (
+ callcount,
+ expected_count
+ )
+ ))
+ stats.sort_stats("cumulative")
+ stats.print_stats()
+
+ if expected_count:
+ deviance = int(callcount * variance)
+ failed = abs(callcount - expected_count) > deviance
+
+ if failed:
+ if _profile_stats.write:
+ _profile_stats.replace(callcount)
+ else:
+ raise AssertionError(
+ "Adjusted function call count %s not within %s%% "
+ "of expected %s. Rerun with --write-profiles to "
+ "regenerate this callcount."
+ % (
+ callcount, (variance * 100),
+ expected_count))
- began = time.time()
- cProfile.runctx('result = fn(*args, **kw)', globals(), locals(),
- filename=filename)
- ended = time.time()
- return ended - began, load_stats, locals()['result']
diff --git a/lib/sqlalchemy/testing/replay_fixture.py b/lib/sqlalchemy/testing/replay_fixture.py
new file mode 100644
index 000000000..b8a0f6df1
--- /dev/null
+++ b/lib/sqlalchemy/testing/replay_fixture.py
@@ -0,0 +1,167 @@
+from . import fixtures
+from . import profiling
+from .. import util
+import types
+from collections import deque
+import contextlib
+from . import config
+from sqlalchemy import MetaData
+from sqlalchemy import create_engine
+from sqlalchemy.orm import Session
+
+
+class ReplayFixtureTest(fixtures.TestBase):
+
+ @contextlib.contextmanager
+ def _dummy_ctx(self, *arg, **kw):
+ yield
+
+ def test_invocation(self):
+
+ dbapi_session = ReplayableSession()
+ creator = config.db.pool._creator
+ recorder = lambda: dbapi_session.recorder(creator())
+ engine = create_engine(
+ config.db.url, creator=recorder,
+ use_native_hstore=False)
+ self.metadata = MetaData(engine)
+ self.engine = engine
+ self.session = Session(engine)
+
+ self.setup_engine()
+ self._run_steps(ctx=self._dummy_ctx)
+ self.teardown_engine()
+ engine.dispose()
+
+ player = lambda: dbapi_session.player()
+ engine = create_engine(
+ config.db.url, creator=player,
+ use_native_hstore=False)
+
+ self.metadata = MetaData(engine)
+ self.engine = engine
+ self.session = Session(engine)
+
+ self.setup_engine()
+ self._run_steps(ctx=profiling.count_functions)
+ self.teardown_engine()
+
+ def setup_engine(self):
+ pass
+
+ def teardown_engine(self):
+ pass
+
+ def _run_steps(self, ctx):
+ raise NotImplementedError()
+
+
+class ReplayableSession(object):
+ """A simple record/playback tool.
+
+ This is *not* a mock testing class. It only records a session for later
+ playback and makes no assertions on call consistency whatsoever. It's
+ unlikely to be suitable for anything other than DB-API recording.
+
+ """
+
+ Callable = object()
+ NoAttribute = object()
+
+ if util.py2k:
+ Natives = set([getattr(types, t)
+ for t in dir(types) if not t.startswith('_')]).\
+ difference([getattr(types, t)
+ for t in ('FunctionType', 'BuiltinFunctionType',
+ 'MethodType', 'BuiltinMethodType',
+ 'LambdaType', 'UnboundMethodType',)])
+ else:
+ Natives = set([getattr(types, t)
+ for t in dir(types) if not t.startswith('_')]).\
+ union([type(t) if not isinstance(t, type)
+ else t for t in __builtins__.values()]).\
+ difference([getattr(types, t)
+ for t in ('FunctionType', 'BuiltinFunctionType',
+ 'MethodType', 'BuiltinMethodType',
+ 'LambdaType', )])
+
+ def __init__(self):
+ self.buffer = deque()
+
+ def recorder(self, base):
+ return self.Recorder(self.buffer, base)
+
+ def player(self):
+ return self.Player(self.buffer)
+
+ class Recorder(object):
+ def __init__(self, buffer, subject):
+ self._buffer = buffer
+ self._subject = subject
+
+ def __call__(self, *args, **kw):
+ subject, buffer = [object.__getattribute__(self, x)
+ for x in ('_subject', '_buffer')]
+
+ result = subject(*args, **kw)
+ if type(result) not in ReplayableSession.Natives:
+ buffer.append(ReplayableSession.Callable)
+ return type(self)(buffer, result)
+ else:
+ buffer.append(result)
+ return result
+
+ @property
+ def _sqla_unwrap(self):
+ return self._subject
+
+ def __getattribute__(self, key):
+ try:
+ return object.__getattribute__(self, key)
+ except AttributeError:
+ pass
+
+ subject, buffer = [object.__getattribute__(self, x)
+ for x in ('_subject', '_buffer')]
+ try:
+ result = type(subject).__getattribute__(subject, key)
+ except AttributeError:
+ buffer.append(ReplayableSession.NoAttribute)
+ raise
+ else:
+ if type(result) not in ReplayableSession.Natives:
+ buffer.append(ReplayableSession.Callable)
+ return type(self)(buffer, result)
+ else:
+ buffer.append(result)
+ return result
+
+ class Player(object):
+ def __init__(self, buffer):
+ self._buffer = buffer
+
+ def __call__(self, *args, **kw):
+ buffer = object.__getattribute__(self, '_buffer')
+ result = buffer.popleft()
+ if result is ReplayableSession.Callable:
+ return self
+ else:
+ return result
+
+ @property
+ def _sqla_unwrap(self):
+ return None
+
+ def __getattribute__(self, key):
+ try:
+ return object.__getattribute__(self, key)
+ except AttributeError:
+ pass
+ buffer = object.__getattribute__(self, '_buffer')
+ result = buffer.popleft()
+ if result is ReplayableSession.Callable:
+ return self
+ elif result is ReplayableSession.NoAttribute:
+ raise AttributeError(key)
+ else:
+ return result
diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py
index fa27897a1..0904d454e 100644
--- a/lib/sqlalchemy/util/_collections.py
+++ b/lib/sqlalchemy/util/_collections.py
@@ -13,7 +13,6 @@ import operator
from .compat import threading, itertools_filterfalse
from . import py2k
import types
-from collections import MutableMapping
EMPTY_SET = frozenset()
@@ -265,13 +264,18 @@ class OrderedDict(dict):
def __iter__(self):
return iter(self._list)
- keys = MutableMapping.keys
- values = MutableMapping.values
- items = MutableMapping.items
+ def keys(self):
+ return list(self)
+
+ def values(self):
+ return [self[key] for key in self._list]
+
+ def items(self):
+ return [(key, self[key]) for key in self._list]
if py2k:
def itervalues(self):
- return iter([self[key] for key in self._list])
+ return iter(self.values())
def iterkeys(self):
return iter(self)
diff --git a/setup.cfg b/setup.cfg
index 7517220a6..698c4b037 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,7 +9,7 @@ first-package-wins = true
where = test
[pytest]
-addopts= --tb native -v -r fxX
+addopts= --tb native -v -r fxX --maxfail=25
python_files=test/*test_*.py
[upload]
diff --git a/test/aaa_profiling/test_compiler.py b/test/aaa_profiling/test_compiler.py
index 47a412e73..5eece4602 100644
--- a/test/aaa_profiling/test_compiler.py
+++ b/test/aaa_profiling/test_compiler.py
@@ -42,7 +42,7 @@ class CompileTest(fixtures.TestBase, AssertsExecutionResults):
def test_insert(self):
t1.insert().compile(dialect=self.dialect)
- @profiling.function_call_count()
+ @profiling.function_call_count(variance=.15)
def test_update(self):
t1.update().compile(dialect=self.dialect)
diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py
index 4c4708503..5b8a0f785 100644
--- a/test/aaa_profiling/test_zoomark.py
+++ b/test/aaa_profiling/test_zoomark.py
@@ -7,43 +7,42 @@ An adaptation of Robert Brewer's ZooMark speed tests. """
import datetime
from sqlalchemy import Table, Column, Integer, Unicode, Date, \
- DateTime, Time, Float, MetaData, Sequence, ForeignKey, create_engine, \
+ DateTime, Time, Float, Sequence, ForeignKey, \
select, join, and_, outerjoin, func
-from sqlalchemy.testing import fixtures, engines, profiling
-from sqlalchemy import testing
-ITERATIONS = 1
-dbapi_session = engines.ReplayableSession()
-metadata = None
-
+from sqlalchemy.testing import replay_fixture
-class ZooMarkTest(fixtures.TestBase):
+ITERATIONS = 1
- """Runs the ZooMark and squawks if method counts vary from the norm.
- Each test has an associated `call_range`, the total number of
- accepted function calls made during the test. The count can vary
- between Python 2.4 and 2.5.
+class ZooMarkTest(replay_fixture.ReplayFixtureTest):
- Unlike a unit test, this is a ordered collection of steps. Running
- components individually will fail.
+ """Runs the ZooMark and squawks if method counts vary from the norm."""
- """
__requires__ = 'cpython',
__only_on__ = 'postgresql+psycopg2'
- def test_baseline_0_setup(self):
- global metadata
- creator = testing.db.pool._creator
- recorder = lambda: dbapi_session.recorder(creator())
- engine = engines.testing_engine(options={'creator': recorder,
- 'use_reaper': False})
- metadata = MetaData(engine)
- engine.connect()
-
- def test_baseline_1_create_tables(self):
+ def _run_steps(self, ctx):
+ self._baseline_1_create_tables()
+ with ctx():
+ self._baseline_1a_populate()
+ with ctx():
+ self._baseline_2_insert()
+ with ctx():
+ self._baseline_3_properties()
+ with ctx():
+ self._baseline_4_expressions()
+ with ctx():
+ self._baseline_5_aggregates()
+ with ctx():
+ self._baseline_6_editing()
+ with ctx():
+ self._baseline_7_multiview()
+ self._baseline_8_drop()
+
+ def _baseline_1_create_tables(self):
Table(
'Zoo',
- metadata,
+ self.metadata,
Column('ID', Integer, Sequence('zoo_id_seq'),
primary_key=True, index=True),
Column('Name', Unicode(255)),
@@ -54,7 +53,7 @@ class ZooMarkTest(fixtures.TestBase):
)
Table(
'Animal',
- metadata,
+ self.metadata,
Column('ID', Integer, Sequence('animal_id_seq'),
primary_key=True),
Column('ZooID', Integer, ForeignKey('Zoo.ID'), index=True),
@@ -67,12 +66,12 @@ class ZooMarkTest(fixtures.TestBase):
Column('PreferredFoodID', Integer),
Column('AlternateFoodID', Integer),
)
- metadata.create_all()
+ self.metadata.create_all()
- def test_baseline_1a_populate(self):
- Zoo = metadata.tables['Zoo']
- Animal = metadata.tables['Animal']
- engine = metadata.bind
+ def _baseline_1a_populate(self):
+ Zoo = self.metadata.tables['Zoo']
+ Animal = self.metadata.tables['Animal']
+ engine = self.metadata.bind
wap = engine.execute(Zoo.insert(), Name='Wild Animal Park',
Founded=datetime.date(2000, 1, 1),
Opens=datetime.time(8, 15, 59),
@@ -137,16 +136,16 @@ class ZooMarkTest(fixtures.TestBase):
engine.execute(Animal.insert(inline=True), Species='Ape',
Name='Hua Mei', Legs=2, MotherID=bai_yun)
- def test_baseline_2_insert(self):
- Animal = metadata.tables['Animal']
+ def _baseline_2_insert(self):
+ Animal = self.metadata.tables['Animal']
i = Animal.insert(inline=True)
for x in range(ITERATIONS):
i.execute(Species='Tick', Name='Tick %d' % x, Legs=8)
- def test_baseline_3_properties(self):
- Zoo = metadata.tables['Zoo']
- Animal = metadata.tables['Animal']
- engine = metadata.bind
+ def _baseline_3_properties(self):
+ Zoo = self.metadata.tables['Zoo']
+ Animal = self.metadata.tables['Animal']
+ engine = self.metadata.bind
def fullobject(select):
"""Iterate over the full result row."""
@@ -171,10 +170,10 @@ class ZooMarkTest(fixtures.TestBase):
fullobject(Animal.select(Animal.c.Legs == 1000000))
fullobject(Animal.select(Animal.c.Species == 'Tick'))
- def test_baseline_4_expressions(self):
- Zoo = metadata.tables['Zoo']
- Animal = metadata.tables['Animal']
- engine = metadata.bind
+ def _baseline_4_expressions(self):
+ Zoo = self.metadata.tables['Zoo']
+ Animal = self.metadata.tables['Animal']
+ engine = self.metadata.bind
def fulltable(select):
"""Iterate over the full result table."""
@@ -280,10 +279,10 @@ class ZooMarkTest(fixtures.TestBase):
'day',
Animal.c.LastEscape) == 21))) == 1
- def test_baseline_5_aggregates(self):
- Animal = metadata.tables['Animal']
- Zoo = metadata.tables['Zoo']
- engine = metadata.bind
+ def _baseline_5_aggregates(self):
+ Animal = self.metadata.tables['Animal']
+ Zoo = self.metadata.tables['Zoo']
+ engine = self.metadata.bind
for x in range(ITERATIONS):
@@ -327,9 +326,9 @@ class ZooMarkTest(fixtures.TestBase):
distinct=True)).fetchall()]
legs.sort()
- def test_baseline_6_editing(self):
- Zoo = metadata.tables['Zoo']
- engine = metadata.bind
+ def _baseline_6_editing(self):
+ Zoo = self.metadata.tables['Zoo']
+ engine = self.metadata.bind
for x in range(ITERATIONS):
# Edit
@@ -364,10 +363,10 @@ class ZooMarkTest(fixtures.TestBase):
)).first()
assert SDZ['Founded'] == datetime.date(1935, 9, 13)
- def test_baseline_7_multiview(self):
- Zoo = metadata.tables['Zoo']
- Animal = metadata.tables['Animal']
- engine = metadata.bind
+ def _baseline_7_multiview(self):
+ Zoo = self.metadata.tables['Zoo']
+ Animal = self.metadata.tables['Animal']
+ engine = self.metadata.bind
def fulltable(select):
"""Iterate over the full result table."""
@@ -403,52 +402,6 @@ class ZooMarkTest(fixtures.TestBase):
Zoo.c.Name, Animal.c.Species],
from_obj=[outerjoin(Animal, Zoo)]))
- def test_baseline_8_drop(self):
- metadata.drop_all()
-
- # Now, run all of these tests again with the DB-API driver factored
- # out: the ReplayableSession playback stands in for the database.
- #
- # How awkward is this in a unittest framework? Very.
-
- def test_profile_0(self):
- global metadata
- player = lambda: dbapi_session.player()
- engine = create_engine('postgresql:///', creator=player,
- use_native_hstore=False)
- metadata = MetaData(engine)
- engine.connect()
-
- def test_profile_1_create_tables(self):
- self.test_baseline_1_create_tables()
-
- @profiling.function_call_count()
- def test_profile_1a_populate(self):
- self.test_baseline_1a_populate()
-
- @profiling.function_call_count()
- def test_profile_2_insert(self):
- self.test_baseline_2_insert()
-
- @profiling.function_call_count()
- def test_profile_3_properties(self):
- self.test_baseline_3_properties()
-
- @profiling.function_call_count()
- def test_profile_4_expressions(self):
- self.test_baseline_4_expressions()
-
- @profiling.function_call_count()
- def test_profile_5_aggregates(self):
- self.test_baseline_5_aggregates()
-
- @profiling.function_call_count()
- def test_profile_6_editing(self):
- self.test_baseline_6_editing()
-
- @profiling.function_call_count()
- def test_profile_7_multiview(self):
- self.test_baseline_7_multiview()
+ def _baseline_8_drop(self):
+ self.metadata.drop_all()
- def test_profile_8_drop(self):
- self.test_baseline_8_drop()
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index 6b781af9b..500d7c2cb 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -7,48 +7,52 @@ An adaptation of Robert Brewer's ZooMark speed tests. """
import datetime
from sqlalchemy import Table, Column, Integer, Unicode, Date, \
- DateTime, Time, Float, MetaData, Sequence, ForeignKey, create_engine, \
+ DateTime, Time, Float, Sequence, ForeignKey, \
select, and_, func
-from sqlalchemy.orm import sessionmaker, mapper
-from sqlalchemy.testing import fixtures, engines, profiling
-from sqlalchemy import testing
+from sqlalchemy.orm import mapper
+from sqlalchemy.testing import replay_fixture
+
ITERATIONS = 1
-dbapi_session = engines.ReplayableSession()
-metadata = None
Zoo = Animal = session = None
-class ZooMarkTest(fixtures.TestBase):
+class ZooMarkTest(replay_fixture.ReplayFixtureTest):
"""Runs the ZooMark and squawks if method counts vary from the norm.
- Each test has an associated `call_range`, the total number of
- accepted function calls made during the test. The count can vary
- between Python 2.4 and 2.5.
-
- Unlike a unit test, this is a ordered collection of steps. Running
- components individually will fail.
"""
__requires__ = 'cpython',
__only_on__ = 'postgresql+psycopg2'
- def test_baseline_0_setup(self):
- global metadata, session
- creator = testing.db.pool._creator
- recorder = lambda: dbapi_session.recorder(creator())
- engine = engines.testing_engine(
- options={'creator': recorder, 'use_reaper': False})
- metadata = MetaData(engine)
- session = sessionmaker(engine)()
- engine.connect()
-
- def test_baseline_1_create_tables(self):
+ def _run_steps(self, ctx):
+ #self._baseline_1_create_tables()
+ with ctx():
+ self._baseline_1a_populate()
+ with ctx():
+ self._baseline_2_insert()
+ with ctx():
+ self._baseline_3_properties()
+ with ctx():
+ self._baseline_4_expressions()
+ with ctx():
+ self._baseline_5_aggregates()
+ with ctx():
+ self._baseline_6_editing()
+ #self._baseline_7_drop()
+
+ def setup_engine(self):
+ self._baseline_1_create_tables()
+
+ def teardown_engine(self):
+ self._baseline_7_drop()
+
+ def _baseline_1_create_tables(self):
zoo = Table(
'Zoo',
- metadata,
+ self.metadata,
Column('ID', Integer, Sequence('zoo_id_seq'),
primary_key=True, index=True),
Column('Name', Unicode(255)),
@@ -59,7 +63,7 @@ class ZooMarkTest(fixtures.TestBase):
)
animal = Table(
'Animal',
- metadata,
+ self.metadata,
Column('ID', Integer, Sequence('animal_id_seq'),
primary_key=True),
Column('ZooID', Integer, ForeignKey('Zoo.ID'), index=True),
@@ -72,7 +76,7 @@ class ZooMarkTest(fixtures.TestBase):
Column('PreferredFoodID', Integer),
Column('AlternateFoodID', Integer),
)
- metadata.create_all()
+ self.metadata.create_all()
global Zoo, Animal
class Zoo(object):
@@ -90,131 +94,129 @@ class ZooMarkTest(fixtures.TestBase):
mapper(Zoo, zoo)
mapper(Animal, animal)
- def test_baseline_1a_populate(self):
+ def _baseline_1a_populate(self):
wap = Zoo(
Name='Wild Animal Park', Founded=datetime.date(
2000, 1, 1), Opens=datetime.time(
8, 15, 59), LastEscape=datetime.datetime(
2004, 7, 29, 5, 6, 7, ), Admission=4.95)
- session.add(wap)
+ self.session.add(wap)
sdz = Zoo(
Name='San Diego Zoo', Founded=datetime.date(
1835, 9, 13), Opens=datetime.time(
9, 0, 0), Admission=0)
- session.add(sdz)
+ self.session.add(sdz)
bio = Zoo(Name='Montr\xe9al Biod\xf4me',
Founded=datetime.date(1992, 6, 19),
Opens=datetime.time(9, 0, 0), Admission=11.75)
- session.add(bio)
+ self.session.add(bio)
seaworld = Zoo(Name='Sea_World', Admission=60)
- session.add(seaworld)
+ self.session.add(seaworld)
# Let's add a crazy futuristic Zoo to test large date values.
lp = Zoo(Name='Luna Park', Founded=datetime.date(2072, 7, 17),
Opens=datetime.time(0, 0, 0), Admission=134.95)
- session.add(lp)
- session.flush()
+ self.session.add(lp)
# Animals
leopard = Animal(Species='Leopard', Lifespan=73.5)
- session.add(leopard)
+ self.session.add(leopard)
leopard.ZooID = wap.ID
leopard.LastEscape = \
datetime.datetime(2004, 12, 21, 8, 15, 0, 999907, )
- session.add(Animal(Species='Lion', ZooID=wap.ID))
- session.add(Animal(Species='Slug', Legs=1, Lifespan=.75))
- session.add(Animal(Species='Tiger', ZooID=sdz.ID))
+ self.session.add(Animal(Species='Lion', ZooID=wap.ID))
+ self.session.add(Animal(Species='Slug', Legs=1, Lifespan=.75))
+ self.session.add(Animal(Species='Tiger', ZooID=sdz.ID))
# Override Legs.default with itself just to make sure it works.
- session.add(Animal(Species='Bear', Legs=4))
- session.add(Animal(Species='Ostrich', Legs=2, Lifespan=103.2))
- session.add(Animal(Species='Centipede', Legs=100))
- session.add(Animal(Species='Emperor Penguin', Legs=2,
+ self.session.add(Animal(Species='Bear', Legs=4))
+ self.session.add(Animal(Species='Ostrich', Legs=2, Lifespan=103.2))
+ self.session.add(Animal(Species='Centipede', Legs=100))
+ self.session.add(Animal(Species='Emperor Penguin', Legs=2,
ZooID=seaworld.ID))
- session.add(Animal(Species='Adelie Penguin', Legs=2,
+ self.session.add(Animal(Species='Adelie Penguin', Legs=2,
ZooID=seaworld.ID))
- session.add(Animal(Species='Millipede', Legs=1000000,
+ self.session.add(Animal(Species='Millipede', Legs=1000000,
ZooID=sdz.ID))
# Add a mother and child to test relationships
bai_yun = Animal(Species='Ape', Nameu='Bai Yun', Legs=2)
- session.add(bai_yun)
- session.add(Animal(Species='Ape', Name='Hua Mei', Legs=2,
+ self.session.add(bai_yun)
+ self.session.add(Animal(Species='Ape', Name='Hua Mei', Legs=2,
MotherID=bai_yun.ID))
- session.flush()
- session.commit()
+ self.session.commit()
- def test_baseline_2_insert(self):
+ def _baseline_2_insert(self):
for x in range(ITERATIONS):
- session.add(Animal(Species='Tick', Name='Tick %d' % x,
+ self.session.add(Animal(Species='Tick', Name='Tick %d' % x,
Legs=8))
- session.flush()
+ self.session.flush()
- def test_baseline_3_properties(self):
+ def _baseline_3_properties(self):
for x in range(ITERATIONS):
# Zoos
- list(session.query(Zoo).filter(
+ list(self.session.query(Zoo).filter(
Zoo.Name == 'Wild Animal Park'))
list(
- session.query(Zoo).filter(
+ self.session.query(Zoo).filter(
Zoo.Founded == datetime.date(
1835,
9,
13)))
list(
- session.query(Zoo).filter(
+ self.session.query(Zoo).filter(
Zoo.Name == 'Montr\xe9al Biod\xf4me'))
- list(session.query(Zoo).filter(Zoo.Admission == float(60)))
+ list(self.session.query(Zoo).filter(Zoo.Admission == float(60)))
# Animals
- list(session.query(Animal).filter(Animal.Species == 'Leopard'))
- list(session.query(Animal).filter(Animal.Species == 'Ostrich'))
- list(session.query(Animal).filter(Animal.Legs == 1000000))
- list(session.query(Animal).filter(Animal.Species == 'Tick'))
+ list(self.session.query(Animal).filter(Animal.Species == 'Leopard'))
+ list(self.session.query(Animal).filter(Animal.Species == 'Ostrich'))
+ list(self.session.query(Animal).filter(Animal.Legs == 1000000))
+ list(self.session.query(Animal).filter(Animal.Species == 'Tick'))
- def test_baseline_4_expressions(self):
+ def _baseline_4_expressions(self):
for x in range(ITERATIONS):
- assert len(list(session.query(Zoo))) == 5
- assert len(list(session.query(Animal))) == ITERATIONS + 12
- assert len(list(session.query(Animal).filter(Animal.Legs
+ assert len(list(self.session.query(Zoo))) == 5
+ assert len(list(self.session.query(Animal))) == ITERATIONS + 12
+ assert len(list(self.session.query(Animal).filter(Animal.Legs
== 4))) == 4
- assert len(list(session.query(Animal).filter(Animal.Legs
+ assert len(list(self.session.query(Animal).filter(Animal.Legs
== 2))) == 5
assert len(
list(
- session.query(Animal).filter(
+ self.session.query(Animal).filter(
and_(
Animal.Legs >= 2,
Animal.Legs < 20)))) == ITERATIONS + 9
- assert len(list(session.query(Animal).filter(Animal.Legs
+ assert len(list(self.session.query(Animal).filter(Animal.Legs
> 10))) == 2
- assert len(list(session.query(Animal).filter(Animal.Lifespan
+ assert len(list(self.session.query(Animal).filter(Animal.Lifespan
> 70))) == 2
- assert len(list(session.query(Animal).
+ assert len(list(self.session.query(Animal).
filter(Animal.Species.like('L%')))) == 2
- assert len(list(session.query(Animal).
+ assert len(list(self.session.query(Animal).
filter(Animal.Species.like('%pede')))) == 2
- assert len(list(session.query(Animal).filter(Animal.LastEscape
+ assert len(list(self.session.query(Animal).filter(Animal.LastEscape
!= None))) == 1
assert len(
list(
- session.query(Animal).filter(
+ self.session.query(Animal).filter(
Animal.LastEscape == None))) == ITERATIONS + 11
# In operator (containedby)
- assert len(list(session.query(Animal).filter(
+ assert len(list(self.session.query(Animal).filter(
Animal.Species.like('%pede%')))) == 2
assert len(
list(
- session.query(Animal). filter(
+ self.session.query(Animal). filter(
Animal.Species.in_(
('Lion', 'Tiger', 'Bear'))))) == 3
@@ -224,17 +226,17 @@ class ZooMarkTest(fixtures.TestBase):
pet, pet2 = thing(), thing()
pet.Name, pet2.Name = 'Slug', 'Ostrich'
- assert len(list(session.query(Animal).
+ assert len(list(self.session.query(Animal).
filter(Animal.Species.in_((pet.Name,
pet2.Name))))) == 2
# logic and other functions
name = 'Lion'
- assert len(list(session.query(Animal).
+ assert len(list(self.session.query(Animal).
filter(func.length(Animal.Species)
== len(name)))) == ITERATIONS + 3
- assert len(list(session.query(Animal).
+ assert len(list(self.session.query(Animal).
filter(Animal.Species.like('%i%'
)))) == ITERATIONS + 7
@@ -242,29 +244,29 @@ class ZooMarkTest(fixtures.TestBase):
assert len(
list(
- session.query(Zoo).filter(
+ self.session.query(Zoo).filter(
and_(
Zoo.Founded != None,
Zoo.Founded < func.now())))) == 3
- assert len(list(session.query(Animal).filter(Animal.LastEscape
+ assert len(list(self.session.query(Animal).filter(Animal.LastEscape
== func.now()))) == 0
- assert len(list(session.query(Animal).filter(
+ assert len(list(self.session.query(Animal).filter(
func.date_part('year', Animal.LastEscape) == 2004))) == 1
assert len(
list(
- session.query(Animal). filter(
+ self.session.query(Animal). filter(
func.date_part(
'month',
Animal.LastEscape) == 12))) == 1
- assert len(list(session.query(Animal).filter(
+ assert len(list(self.session.query(Animal).filter(
func.date_part('day', Animal.LastEscape) == 21))) == 1
- def test_baseline_5_aggregates(self):
- Animal = metadata.tables['Animal']
- Zoo = metadata.tables['Zoo']
+ def _baseline_5_aggregates(self):
+ Animal = self.metadata.tables['Animal']
+ Zoo = self.metadata.tables['Zoo']
# TODO: convert to ORM
- engine = metadata.bind
+ engine = self.metadata.bind
for x in range(ITERATIONS):
# views
@@ -307,12 +309,12 @@ class ZooMarkTest(fixtures.TestBase):
distinct=True)).fetchall()]
legs.sort()
- def test_baseline_6_editing(self):
+ def _baseline_6_editing(self):
for x in range(ITERATIONS):
# Edit
- SDZ = session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
+ SDZ = self.session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
).one()
SDZ.Name = 'The San Diego Zoo'
SDZ.Founded = datetime.date(1900, 1, 1)
@@ -321,7 +323,7 @@ class ZooMarkTest(fixtures.TestBase):
# Test edits
- SDZ = session.query(Zoo).filter(Zoo.Name
+ SDZ = self.session.query(Zoo).filter(Zoo.Name
== 'The San Diego Zoo').one()
assert SDZ.Founded == datetime.date(1900, 1, 1), SDZ.Founded
@@ -334,55 +336,12 @@ class ZooMarkTest(fixtures.TestBase):
# Test re-edits
- SDZ = session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
+ SDZ = self.session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
).one()
assert SDZ.Founded == datetime.date(1835, 9, 13), \
SDZ.Founded
- def test_baseline_7_drop(self):
- session.rollback()
- metadata.drop_all()
-
- # Now, run all of these tests again with the DB-API driver factored
- # out: the ReplayableSession playback stands in for the database.
- #
- # How awkward is this in a unittest framework? Very.
-
- def test_profile_0(self):
- global metadata, session
- player = lambda: dbapi_session.player()
- engine = create_engine('postgresql:///', creator=player,
- use_native_hstore=False)
- metadata = MetaData(engine)
- session = sessionmaker(engine)()
- engine.connect()
-
- def test_profile_1_create_tables(self):
- self.test_baseline_1_create_tables()
-
- @profiling.function_call_count()
- def test_profile_1a_populate(self):
- self.test_baseline_1a_populate()
-
- @profiling.function_call_count()
- def test_profile_2_insert(self):
- self.test_baseline_2_insert()
-
- @profiling.function_call_count()
- def test_profile_3_properties(self):
- self.test_baseline_3_properties()
-
- @profiling.function_call_count()
- def test_profile_4_expressions(self):
- self.test_baseline_4_expressions()
-
- @profiling.function_call_count()
- def test_profile_5_aggregates(self):
- self.test_baseline_5_aggregates()
-
- @profiling.function_call_count()
- def test_profile_6_editing(self):
- self.test_baseline_6_editing()
-
- def test_profile_7_drop(self):
- self.test_baseline_7_drop()
+ def _baseline_7_drop(self):
+ self.session.rollback()
+ self.metadata.drop_all()
+
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index f92b874da..c82cca5a1 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -1,23 +1,24 @@
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
import time
-from sqlalchemy import select, MetaData, Integer, String, create_engine, pool
+from sqlalchemy import (
+ select, MetaData, Integer, String, create_engine, pool, exc, util)
from sqlalchemy.testing.schema import Table, Column
import sqlalchemy as tsa
from sqlalchemy import testing
from sqlalchemy.testing import engines
-from sqlalchemy.testing.util import gc_collect
-from sqlalchemy import exc, util
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.engines import testing_engine
-from sqlalchemy.testing import is_not_
from sqlalchemy.testing.mock import Mock, call
+
class MockError(Exception):
pass
+
class MockDisconnect(MockError):
pass
+
def mock_connection():
def mock_cursor():
def execute(*args, **kwargs):
@@ -25,10 +26,12 @@ def mock_connection():
raise MockDisconnect("Lost the DB connection on execute")
elif conn.explode in ('execute_no_disconnect', ):
raise MockError(
- "something broke on execute but we didn't lose the connection")
+ "something broke on execute but we didn't lose the "
+ "connection")
elif conn.explode in ('rollback', 'rollback_no_disconnect'):
raise MockError(
- "something broke on execute but we didn't lose the connection")
+ "something broke on execute but we didn't lose the "
+ "connection")
elif args and "SELECT" in args[0]:
cursor.description = [('foo', None, None, None, None, None)]
else:
@@ -38,9 +41,8 @@ def mock_connection():
cursor.fetchall = cursor.fetchone = \
Mock(side_effect=MockError("cursor closed"))
cursor = Mock(
- execute=Mock(side_effect=execute),
- close=Mock(side_effect=close)
- )
+ execute=Mock(side_effect=execute),
+ close=Mock(side_effect=close))
return cursor
def cursor():
@@ -52,18 +54,20 @@ def mock_connection():
raise MockDisconnect("Lost the DB connection on rollback")
if conn.explode == 'rollback_no_disconnect':
raise MockError(
- "something broke on rollback but we didn't lose the connection")
+ "something broke on rollback but we didn't lose the "
+ "connection")
else:
return
conn = Mock(
- rollback=Mock(side_effect=rollback),
- cursor=Mock(side_effect=cursor())
- )
+ rollback=Mock(side_effect=rollback),
+ cursor=Mock(side_effect=cursor()))
return conn
+
def MockDBAPI():
connections = []
+
def connect():
while True:
conn = mock_connection()
@@ -80,13 +84,12 @@ def MockDBAPI():
connections[:] = []
return Mock(
- connect=Mock(side_effect=connect()),
- shutdown=Mock(side_effect=shutdown),
- dispose=Mock(side_effect=dispose),
- paramstyle='named',
- connections=connections,
- Error=MockError
- )
+ connect=Mock(side_effect=connect()),
+ shutdown=Mock(side_effect=shutdown),
+ dispose=Mock(side_effect=dispose),
+ paramstyle='named',
+ connections=connections,
+ Error=MockError)
class MockReconnectTest(fixtures.TestBase):
@@ -94,13 +97,14 @@ class MockReconnectTest(fixtures.TestBase):
self.dbapi = MockDBAPI()
self.db = testing_engine(
- 'postgresql://foo:bar@localhost/test',
- options=dict(module=self.dbapi, _initialize=False))
+ 'postgresql://foo:bar@localhost/test',
+ options=dict(module=self.dbapi, _initialize=False))
- self.mock_connect = call(host='localhost', password='bar',
- user='foo', database='test')
+ self.mock_connect = call(
+ host='localhost', password='bar', user='foo', database='test')
# monkeypatch disconnect checker
- self.db.dialect.is_disconnect = lambda e, conn, cursor: isinstance(e, MockDisconnect)
+ self.db.dialect.is_disconnect = \
+ lambda e, conn, cursor: isinstance(e, MockDisconnect)
def teardown(self):
self.dbapi.dispose()
@@ -194,10 +198,8 @@ class MockReconnectTest(fixtures.TestBase):
assert_raises_message(
tsa.exc.InvalidRequestError,
- "Can't reconnect until invalid transaction is "
- "rolled back",
- trans.commit
- )
+ "Can't reconnect until invalid transaction is rolled back",
+ trans.commit)
assert trans.is_active
trans.rollback()
@@ -351,16 +353,16 @@ class MockReconnectTest(fixtures.TestBase):
)
def test_dialect_initialize_once(self):
- from sqlalchemy.engine.base import Engine
from sqlalchemy.engine.url import URL
from sqlalchemy.engine.default import DefaultDialect
- from sqlalchemy.pool import QueuePool
dbapi = self.dbapi
mock_dialect = Mock()
+
class MyURL(URL):
def get_dialect(self):
return Dialect
+
class Dialect(DefaultDialect):
initialize = Mock()
@@ -371,7 +373,6 @@ class MockReconnectTest(fixtures.TestBase):
eq_(Dialect.initialize.call_count, 1)
-
class CursorErrTest(fixtures.TestBase):
# this isn't really a "reconnect" test, it's more of
# a generic "recovery". maybe this test suite should have been
@@ -394,29 +395,24 @@ class CursorErrTest(fixtures.TestBase):
description=[],
close=Mock(side_effect=Exception("explode")),
)
+
def connect():
while True:
yield Mock(
- spec=['cursor', 'commit', 'rollback', 'close'],
- cursor=Mock(side_effect=cursor()),
- )
+ spec=['cursor', 'commit', 'rollback', 'close'],
+ cursor=Mock(side_effect=cursor()),)
return Mock(
- Error = DBAPIError,
- paramstyle='qmark',
- connect=Mock(side_effect=connect())
- )
+ Error=DBAPIError, paramstyle='qmark',
+ connect=Mock(side_effect=connect()))
dbapi = MockDBAPI()
from sqlalchemy.engine import default
url = Mock(
- get_dialect=lambda: default.DefaultDialect,
- translate_connect_args=lambda: {},
- query={},
- )
+ get_dialect=lambda: default.DefaultDialect,
+ translate_connect_args=lambda: {}, query={},)
eng = testing_engine(
- url,
- options=dict(module=dbapi, _initialize=initialize))
+ url, options=dict(module=dbapi, _initialize=initialize))
eng.pool.logger = Mock()
return eng
@@ -508,7 +504,6 @@ class RealReconnectTest(fixtures.TestBase):
# pool isn't replaced
assert self.engine.pool is p2
-
def test_ensure_is_disconnect_gets_connection(self):
def is_disconnect(e, conn, cursor):
# connection is still present
@@ -556,6 +551,7 @@ class RealReconnectTest(fixtures.TestBase):
"Crashes on py3k+cx_oracle")
def test_explode_in_initializer(self):
engine = engines.testing_engine()
+
def broken_initialize(connection):
connection.execute("select fake_stuff from _fake_table")
@@ -569,6 +565,7 @@ class RealReconnectTest(fixtures.TestBase):
"Crashes on py3k+cx_oracle")
def test_explode_in_initializer_disconnect(self):
engine = engines.testing_engine()
+
def broken_initialize(connection):
connection.execute("select fake_stuff from _fake_table")
@@ -584,7 +581,6 @@ class RealReconnectTest(fixtures.TestBase):
# invalidate() also doesn't screw up
assert_raises(exc.DBAPIError, engine.connect)
-
def test_null_pool(self):
engine = \
engines.reconnecting_engine(options=dict(poolclass=pool.NullPool))
@@ -623,10 +619,8 @@ class RealReconnectTest(fixtures.TestBase):
assert trans.is_active
assert_raises_message(
tsa.exc.StatementError,
- "Can't reconnect until invalid transaction is "\
- "rolled back",
- conn.execute, select([1])
- )
+ "Can't reconnect until invalid transaction is rolled back",
+ conn.execute, select([1]))
assert trans.is_active
assert_raises_message(
tsa.exc.InvalidRequestError,
@@ -640,13 +634,14 @@ class RealReconnectTest(fixtures.TestBase):
eq_(conn.execute(select([1])).scalar(), 1)
assert not conn.invalidated
+
class RecycleTest(fixtures.TestBase):
__backend__ = True
def test_basic(self):
for threadlocal in False, True:
engine = engines.reconnecting_engine(
- options={'pool_threadlocal': threadlocal})
+ options={'pool_threadlocal': threadlocal})
conn = engine.contextual_connect()
eq_(conn.execute(select([1])).scalar(), 1)
@@ -671,13 +666,15 @@ class RecycleTest(fixtures.TestBase):
eq_(conn.execute(select([1])).scalar(), 1)
conn.close()
+
class InvalidateDuringResultTest(fixtures.TestBase):
__backend__ = True
def setup(self):
self.engine = engines.reconnecting_engine()
self.meta = MetaData(self.engine)
- table = Table('sometable', self.meta,
+ table = Table(
+ 'sometable', self.meta,
Column('id', Integer, primary_key=True),
Column('name', String(50)))
self.meta.create_all()
@@ -690,10 +687,8 @@ class InvalidateDuringResultTest(fixtures.TestBase):
self.engine.dispose()
@testing.fails_if([
- '+mysqlconnector', '+mysqldb',
- '+cymysql', '+pymysql', '+pg8000'
- ], "Buffers the result set and doesn't check for "
- "connection close")
+ '+mysqlconnector', '+mysqldb', '+cymysql', '+pymysql', '+pg8000'],
+ "Buffers the result set and doesn't check for connection close")
def test_invalidate_on_results(self):
conn = self.engine.connect()
result = conn.execute('select * from sometable')
@@ -702,4 +697,3 @@ class InvalidateDuringResultTest(fixtures.TestBase):
self.engine.test_shutdown()
_assert_invalidated(result.fetchone)
assert conn.invalidated
-
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index f9744444d..8a5303642 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -347,9 +347,10 @@ class TransactionTest(fixtures.TestBase):
connection.invalidate()
connection2 = testing.db.connect()
- eq_(connection2.execute(select([users.c.user_id]).
- order_by(users.c.user_id)).fetchall(),
- [])
+ eq_(
+ connection2.execution_options(autocommit=True).
+ execute(select([users.c.user_id]).
+ order_by(users.c.user_id)).fetchall(), [])
recoverables = connection2.recover_twophase()
assert transaction.xid in recoverables
connection2.commit_prepared(transaction.xid, recover=True)
diff --git a/test/orm/test_dynamic.py b/test/orm/test_dynamic.py
index bc47ba3f3..950ff1953 100644
--- a/test/orm/test_dynamic.py
+++ b/test/orm/test_dynamic.py
@@ -510,10 +510,6 @@ class UOWTest(
testing.db,
sess.flush,
CompiledSQL(
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id = :param_1",
- lambda ctx: [{"param_1": u1_id}]),
- CompiledSQL(
"SELECT addresses.id AS addresses_id, addresses.email_address "
"AS addresses_email_address FROM addresses "
"WHERE addresses.id = :param_1",
@@ -523,7 +519,11 @@ class UOWTest(
"UPDATE addresses SET user_id=:user_id WHERE addresses.id = "
":addresses_id",
lambda ctx: [{'addresses_id': a2_id, 'user_id': None}]
- )
+ ),
+ CompiledSQL(
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :param_1",
+ lambda ctx: [{"param_1": u1_id}]),
)
def test_rollback(self):
diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py
index c643e6a87..374a77237 100644
--- a/test/orm/test_unitofworkv2.py
+++ b/test/orm/test_unitofworkv2.py
@@ -9,8 +9,9 @@ from sqlalchemy import Integer, String, ForeignKey, func
from sqlalchemy.orm import mapper, relationship, backref, \
create_session, unitofwork, attributes,\
Session, exc as orm_exc
-
+from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
+from sqlalchemy import event
class AssertsUOW(object):
@@ -1276,6 +1277,8 @@ class RowswitchAccountingTest(fixtures.MappedTest):
old = attributes.get_history(p3, 'child')[2][0]
assert old in sess
+ # essentially no SQL should emit here,
+ # because we've replaced the row with another identical one
sess.flush()
assert p3.child._sa_instance_state.session_id == sess.hash_key
@@ -1703,3 +1706,46 @@ class LoadersUsingCommittedTest(UOWTest):
sess.flush()
except AvoidReferencialError:
pass
+
+
+class NoAttrEventInFlushTest(fixtures.MappedTest):
+ """test [ticket:3167]"""
+
+ __backend__ = True
+
+ @classmethod
+ def define_tables(cls, metadata):
+ Table(
+ 'test', metadata,
+ Column('id', Integer, primary_key=True,
+ test_needs_autoincrement=True),
+ Column('prefetch_val', Integer, default=5),
+ Column('returning_val', Integer, server_default="5")
+ )
+
+ @classmethod
+ def setup_classes(cls):
+ class Thing(cls.Basic):
+ pass
+
+ @classmethod
+ def setup_mappers(cls):
+ Thing = cls.classes.Thing
+
+ mapper(Thing, cls.tables.test, eager_defaults=True)
+
+ def test_no_attr_events_flush(self):
+ Thing = self.classes.Thing
+ mock = Mock()
+ event.listen(Thing.id, "set", mock.id)
+ event.listen(Thing.prefetch_val, "set", mock.prefetch_val)
+ event.listen(Thing.returning_val, "set", mock.prefetch_val)
+ t1 = Thing()
+ s = Session()
+ s.add(t1)
+ s.flush()
+
+ eq_(len(mock.mock_calls), 0)
+ eq_(t1.id, 1)
+ eq_(t1.prefetch_val, 5)
+ eq_(t1.returning_val, 5)
diff --git a/test/profiles.txt b/test/profiles.txt
index 59ce23db3..ca84cdc26 100644
--- a/test/profiles.txt
+++ b/test/profiles.txt
@@ -13,507 +13,248 @@
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_cextensions 73
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_nocextensions 73
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 73
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 73
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 73
-test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 73
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_mysql_mysqlconnector_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_postgresql_psycopg2_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_nocextensions 78
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_cextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqlconnector_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_cextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_sqlite_pysqlite_nocextensions 74
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_mysql_mysqlconnector_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_postgresql_psycopg2_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_insert 3.3_sqlite_pysqlite_nocextensions 77
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_cextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_nocextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 151
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_mysqlconnector_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 166
-test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_nocextensions 166
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_cextensions 152
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqlconnector_nocextensions 152
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 152
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_nocextensions 152
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_cextensions 152
+test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_sqlite_pysqlite_nocextensions 152
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_cextensions 165
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_mysql_mysqlconnector_nocextensions 165
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_cextensions 165
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 165
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 165
+test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 165
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_cextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_nocextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 185
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_cextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_nocextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_cextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_mysqlconnector_nocextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 200
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_nocextensions 200
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_cextensions 186
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqlconnector_nocextensions 186
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 186
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_nocextensions 186
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_cextensions 186
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_sqlite_pysqlite_nocextensions 186
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_cextensions 199
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_mysql_mysqlconnector_nocextensions 199
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_cextensions 199
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 199
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 199
+test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 199
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_cextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_nocextensions 78
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 76
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_cextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_nocextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_cextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_mysql_mysqlconnector_nocextensions 81
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_postgresql_psycopg2_nocextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_cextensions 79
-test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_cextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqlconnector_nocextensions 79
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_cextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_sqlite_pysqlite_nocextensions 77
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_cextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_mysql_mysqlconnector_nocextensions 80
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_cextensions 78
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_postgresql_psycopg2_nocextensions 78
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_cextensions 78
+test.aaa_profiling.test_compiler.CompileTest.test_update 3.3_sqlite_pysqlite_nocextensions 78
# TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_cextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_nocextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 147
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_cextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_nocextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_cextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_mysql_mysqlconnector_nocextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_cextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_postgresql_psycopg2_nocextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_cextensions 149
-test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_pysqlite_nocextensions 149
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_cextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqlconnector_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_cextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_sqlite_pysqlite_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_cextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_mysql_mysqlconnector_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_cextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_postgresql_psycopg2_nocextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_cextensions 148
+test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.3_sqlite_pysqlite_nocextensions 148
# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_cextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4265
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_mysql_mysqlconnector_nocextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_postgresql_psycopg2_nocextensions 4266
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_sqlite_pysqlite_nocextensions 4260
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_cextensions 4266
test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.3_sqlite_pysqlite_nocextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_mysql_mysqlconnector_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_postgresql_psycopg2_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_cextensions 4266
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_pysqlite_nocextensions 4266
# TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6525
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6525
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_cextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_mysql_mysqlconnector_nocextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_cextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_postgresql_psycopg2_nocextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_mysql_mysqlconnector_cextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_postgresql_psycopg2_cextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_cextensions 6527
-test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.4_sqlite_pysqlite_nocextensions 6527
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_cextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_sqlite_pysqlite_nocextensions 6426
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_cextensions 6428
+test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 3.3_sqlite_pysqlite_nocextensions 6428
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 31372
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 40389
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_cextensions 111690
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_mysql_mysqlconnector_nocextensions 120693
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_cextensions 32222
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 41225
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32411
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 41414
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_mysqlconnector_cextensions 91564
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 32222
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 32411
-test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 41414
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_cextensions 31373
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_sqlite_pysqlite_nocextensions 40336
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 32398
+test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 41401
# TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 31164
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 34169
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_cextensions 57315
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_mysql_mysqlconnector_nocextensions 60318
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_cextensions 32099
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 35102
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32210
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 35213
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_mysqlconnector_cextensions 55266
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 32099
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 32210
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 35213
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_cextensions 31165
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_sqlite_pysqlite_nocextensions 34170
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 32197
+test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 35200
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_cextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_mysql_mysqlconnector_nocextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_cextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_postgresql_psycopg2_nocextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_mysql_mysqlconnector_cextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_postgresql_psycopg2_cextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_cextensions 18987
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.4_sqlite_pysqlite_nocextensions 18987
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_cextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_sqlite_pysqlite_nocextensions 17988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_cextensions 18988
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 3.3_sqlite_pysqlite_nocextensions 18988
# TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162360
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 165110
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_cextensions 203865
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_mysql_mysqlconnector_nocextensions 205567
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_cextensions 127615
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 129365
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 170115
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171865
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_mysqlconnector_cextensions 184817
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 127567
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 170067
-test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 171865
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_cextensions 162315
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_sqlite_pysqlite_nocextensions 165111
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 169566
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 171364
# TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22448
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22662
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_cextensions 26042
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_mysql_mysqlconnector_nocextensions 26246
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_cextensions 20541
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 20685
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23330
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23534
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_mysqlconnector_cextensions 24861
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 20377
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 23282
-test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 23452
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_cextensions 22288
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_sqlite_pysqlite_nocextensions 22530
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 23067
+test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 23271
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1600
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1625
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_cextensions 2268
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_mysql_mysqlconnector_nocextensions 2283
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_cextensions 1394
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1409
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1669
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1684
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_mysqlconnector_cextensions 2139
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1394
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1669
-test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1684
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_cextensions 1601
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_sqlite_pysqlite_nocextensions 1626
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1656
+test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1671
# TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 116,17
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 116,17
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_cextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_mysql_mysqlconnector_nocextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_cextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_mysqlconnector_cextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 128,18
-test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 128,18
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_cextensions 117,18
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_sqlite_pysqlite_nocextensions 117,18
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 122,19
+test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 122,19
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 90
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 90
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_mysql_mysqlconnector_nocextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_nocextensions 77
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_cextensions 91
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_sqlite_pysqlite_nocextensions 91
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 78
+test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 78
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 30
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 30
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_cextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_mysql_mysqlconnector_nocextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_cextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_postgresql_psycopg2_nocextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_mysql_mysqlconnector_cextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_postgresql_psycopg2_cextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_cextensions 23
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqlite_nocextensions 23
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_cextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_sqlite_pysqlite_nocextensions 31
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_cextensions 24
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.3_sqlite_pysqlite_nocextensions 24
# TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 7
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 7
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_mysql_mysqlconnector_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_postgresql_psycopg2_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_mysql_mysqlconnector_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_postgresql_psycopg2_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_cextensions 8
-test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sqlite_pysqlite_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_cextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_sqlite_pysqlite_nocextensions 8
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_cextensions 9
+test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.3_sqlite_pysqlite_nocextensions 9
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_nocextensions 44
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 44
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 44
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_mysql_mysqlconnector_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_postgresql_psycopg2_nocextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_cextensions 42
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.4_sqlite_pysqlite_nocextensions 42
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_cextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqlconnector_nocextensions 45
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_nocextensions 45
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_cextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_sqlite_pysqlite_nocextensions 45
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_cextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_mysql_mysqlconnector_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_cextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_postgresql_psycopg2_nocextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_cextensions 43
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 3.3_sqlite_pysqlite_nocextensions 43
# TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_nocextensions 79
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 79
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 79
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_nocextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_mysql_mysqlconnector_nocextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_postgresql_psycopg2_nocextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_cextensions 77
-test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_sqlite_pysqlite_nocextensions 77
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_cextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqlconnector_nocextensions 80
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_nocextensions 80
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_cextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_sqlite_pysqlite_nocextensions 80
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_cextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_mysql_mysqlconnector_nocextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_cextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_postgresql_psycopg2_nocextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_cextensions 78
+test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.3_sqlite_pysqlite_nocextensions 78
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_cextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_nocextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 14
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_mysql_mysqlconnector_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_postgresql_psycopg2_nocextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_cextensions 15
-test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4_sqlite_pysqlite_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqlconnector_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_cextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_sqlite_pysqlite_nocextensions 15
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_mysql_mysqlconnector_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_postgresql_psycopg2_nocextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_cextensions 16
+test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.3_sqlite_pysqlite_nocextensions 16
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_cextensions 92958
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_nocextensions 107978
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20500
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35520
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 456
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15476
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_cextensions 109145
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_nocextensions 123145
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 498
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14498
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 471
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14471
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_cextensions 79885
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_mysqlconnector_nocextensions 93885
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 498
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14498
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 471
-test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_nocextensions 14471
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_cextensions 92959
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqlconnector_nocextensions 107979
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20501
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_nocextensions 35521
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_cextensions 457
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_sqlite_pysqlite_nocextensions 15477
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_cextensions 109136
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_mysql_mysqlconnector_nocextensions 123136
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_cextensions 489
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14489
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 462
+test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14462
# TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_cextensions 92958
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_nocextensions 107978
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20500
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35520
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 456
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15476
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_cextensions 109145
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_nocextensions 123145
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 498
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14498
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 471
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14471
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_cextensions 79885
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_mysqlconnector_nocextensions 93885
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 498
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14498
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 471
-test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_nocextensions 14471
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 5562
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 5606
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_cextensions 5381
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 5403
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.4_postgresql_psycopg2_cextensions 5381
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_1a_populate 3.4_postgresql_psycopg2_nocextensions 5403
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 277
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 277
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_cextensions 269
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 269
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.4_postgresql_psycopg2_cextensions 269
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_2_insert 3.4_postgresql_psycopg2_nocextensions 269
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 3697
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 3929
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_cextensions 3641
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 3737
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.4_postgresql_psycopg2_cextensions 3641
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_3_properties 3.4_postgresql_psycopg2_nocextensions 3737
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 11893
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 13595
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_cextensions 11751
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 12923
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.4_postgresql_psycopg2_cextensions 11751
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_4_expressions 3.4_postgresql_psycopg2_nocextensions 12923
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1106
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1223
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_cextensions 1077
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1171
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.4_postgresql_psycopg2_cextensions 1077
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_5_aggregates 3.4_postgresql_psycopg2_nocextensions 1171
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 1968
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 2011
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_cextensions 1913
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 1920
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.4_postgresql_psycopg2_cextensions 1913
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_6_editing 3.4_postgresql_psycopg2_nocextensions 1920
-
-# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview
-
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_cextensions 2433
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 2.7_postgresql_psycopg2_nocextensions 2692
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_cextensions 2449
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.3_postgresql_psycopg2_nocextensions 2641
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.4_postgresql_psycopg2_cextensions 2449
-test.aaa_profiling.test_zoomark.ZooMarkTest.test_profile_7_multiview 3.4_postgresql_psycopg2_nocextensions 2641
-
-# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate
-
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_cextensions 6276
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 2.7_postgresql_psycopg2_nocextensions 6395
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_cextensions 6412
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.3_postgresql_psycopg2_nocextensions 6497
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.4_postgresql_psycopg2_cextensions 6412
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_1a_populate 3.4_postgresql_psycopg2_nocextensions 6497
-
-# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert
-
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_cextensions 403
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 2.7_postgresql_psycopg2_nocextensions 410
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_cextensions 401
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.3_postgresql_psycopg2_nocextensions 406
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.4_postgresql_psycopg2_cextensions 401
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_2_insert 3.4_postgresql_psycopg2_nocextensions 406
-
-# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties
-
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_cextensions 6878
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 2.7_postgresql_psycopg2_nocextensions 7110
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_cextensions 7008
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.3_postgresql_psycopg2_nocextensions 7112
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.4_postgresql_psycopg2_cextensions 7008
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_3_properties 3.4_postgresql_psycopg2_nocextensions 7112
-
-# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions
-
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_cextensions 19521
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 2.7_postgresql_psycopg2_nocextensions 20952
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_cextensions 19868
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.3_postgresql_psycopg2_nocextensions 20895
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.4_postgresql_psycopg2_cextensions 19868
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_4_expressions 3.4_postgresql_psycopg2_nocextensions 20895
-
-# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates
-
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_cextensions 1118
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 2.7_postgresql_psycopg2_nocextensions 1226
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_cextensions 1091
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.3_postgresql_psycopg2_nocextensions 1177
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.4_postgresql_psycopg2_cextensions 1091
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_5_aggregates 3.4_postgresql_psycopg2_nocextensions 1177
-
-# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing
-
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_cextensions 2733
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 2.7_postgresql_psycopg2_nocextensions 2796
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_cextensions 2784
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.3_postgresql_psycopg2_nocextensions 2811
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.4_postgresql_psycopg2_cextensions 2784
-test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_profile_6_editing 3.4_postgresql_psycopg2_nocextensions 2811
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_cextensions 92959
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqlconnector_nocextensions 107979
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20501
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_nocextensions 35521
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_cextensions 457
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_sqlite_pysqlite_nocextensions 15477
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_cextensions 109136
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_mysql_mysqlconnector_nocextensions 123136
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_cextensions 489
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14489
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 462
+test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14462
+
+# TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation
+
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5562,277,3697,11893,1106,1968,2433
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5606,277,3929,13595,1223,2011,2692
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5238,259,3577,11529,1077,1886,2439
+test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5260,259,3673,12701,1171,1893,2631
+
+# TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation
+
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_cextensions 5908,396,6878,19521,1118,2725
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5999,401,7110,20952,1226,2790
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5816,383,6928,19676,1091,2753
+test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5886,388,7032,20703,1177,2782
diff --git a/test/requirements.py b/test/requirements.py
index e8705d145..7eeabef2b 100644
--- a/test/requirements.py
+++ b/test/requirements.py
@@ -363,20 +363,9 @@ class DefaultRequirements(SuiteRequirements):
'need separate XA implementation'),
exclude('mysql', '<', (5, 0, 3),
'two-phase xact not supported by database'),
- no_support("postgresql+pg8000", "not supported and/or hangs")
])
@property
- def graceful_disconnects(self):
- """Target driver must raise a DBAPI-level exception, such as
- InterfaceError, when the underlying connection has been closed
- and the execute() method is called.
- """
- return fails_on(
- "postgresql+pg8000", "Driver crashes"
- )
-
- @property
def views(self):
"""Target database must support VIEWs."""