157 files changed, 7809 insertions, 1732 deletions
diff --git a/MANIFEST.in b/MANIFEST.in index 23aa88268..9b5b29936 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,5 +9,5 @@ recursive-include test *.py *.dat # don't come in if --with-cextensions isn't specified. recursive-include lib *.c *.txt -include README* AUTHORS LICENSE distribute_setup.py sa2to3.py ez_setup.py sqla_nose.py CHANGES* +include README* AUTHORS LICENSE distribute_setup.py sa2to3.py ez_setup.py sqla_nose.py CHANGES* tox.ini prune doc/build/output diff --git a/doc/build/Makefile b/doc/build/Makefile index 4de8f98b6..cd39b7288 100644 --- a/doc/build/Makefile +++ b/doc/build/Makefile @@ -2,7 +2,7 @@ # # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = -v SPHINXBUILD = sphinx-build PAPER = BUILDDIR = output diff --git a/doc/build/changelog/changelog_09.rst b/doc/build/changelog/changelog_09.rst index ab1292f49..2d2964ba4 100644 --- a/doc/build/changelog/changelog_09.rst +++ b/doc/build/changelog/changelog_09.rst @@ -15,6 +15,71 @@ :version: 0.9.10 .. change:: + :tags: feature, sql + :tickets: 3418 + :versions: 1.0.5 + + Added official support for a CTE used by the SELECT present + inside of :meth:`.Insert.from_select`. This behavior worked + accidentally up until 0.9.9, when it no longer worked due to + unrelated changes as part of :ticket:`3248`. Note that this + is the rendering of the WITH clause after the INSERT, before the + SELECT; the full functionality of CTEs rendered at the top + level of INSERT, UPDATE, DELETE is a new feature targeted for a + later release. + + .. change:: + :tags: bug, ext + :tickets: 3408 + :versions: 1.0.4 + + Fixed bug where, when using the extended attribute instrumentation system, + the correct exception would not be raised when :func:`.class_mapper` + were called with an invalid input that also happened to not + be weakly referenceable, such as an integer. + + .. change:: + :tags: bug, tests, pypy + :tickets: 3406 + :versions: 1.0.4 + + Fixed an import that prevented "pypy setup.py test" from working + correctly. + + .. change:: + :tags: bug, engine + :tickets: 3375 + :versions: 1.0.1 + + Added the string value ``"none"`` to those accepted by the + :paramref:`.Pool.reset_on_return` parameter as a synonym for ``None``, + so that string values can be used for all settings, allowing + utilities like :func:`.engine_from_config` to be usable without + issue. + + .. change:: + :tags: bug, sql + :tickets: 3362 + :versions: 1.0.0 + + Fixed issue where a :class:`.MetaData` object that used a naming + convention would not properly work with pickle. The attribute was + skipped, leading to inconsistencies and failures if the unpickled + :class:`.MetaData` object were used to base additional tables + from. + + .. change:: + :tags: bug, postgresql + :tickets: 3354 + :versions: 1.0.0 + + Fixed a long-standing bug where the :class:`.Enum` type as used + with the psycopg2 dialect in conjunction with non-ASCII values + and ``native_enum=False`` would fail to decode return results properly. + This stemmed from when the PG :class:`.postgresql.ENUM` type used + to be a standalone type without a "non native" option. + + .. change:: :tags: bug, orm :tickets: 3349 @@ -77,6 +142,9 @@ Compared to the existing entry for ``"type"``, it will always be a mapped entity, even if extracted from a column expression, or None if the given expression is a pure core expression. + See also :ticket:`3403` which repaired a regression in this feature + which was unreleased in 0.9.10 but was released in the 1.0 version. + .. 
changelog:: :version: 0.9.9 diff --git a/doc/build/changelog/changelog_10.rst b/doc/build/changelog/changelog_10.rst index 95bb7d0f3..8ac3d5844 100644 --- a/doc/build/changelog/changelog_10.rst +++ b/doc/build/changelog/changelog_10.rst @@ -16,7 +16,821 @@ :start-line: 5 .. changelog:: + :version: 1.0.7 + + .. change:: + :tags: bug, orm + :tickets: 3469 + + Fixed 1.0 regression where value objects that override + ``__eq__()`` to return a non-boolean-capable object, such as + some geoalchemy types as well as numpy types, were being tested + for ``bool()`` during a unit of work update operation, where in + 0.9 the return value of ``__eq__()`` was tested against "is True" + to guard against this. + + .. change:: + :tags: bug, orm + :tickets: 3468 + + Fixed 1.0 regression where a "deferred" attribute would not populate + correctly if it were loaded within the "optimized inheritance load", + which is a special SELECT emitted in the case of joined table + inheritance used to populate expired or unloaded attributes against + a joined table without loading the base table. This is related to + the fact that SQLA 1.0 no longer guesses about loading deferred + columns and must be directed explicitly. + + .. change:: + :tags: bug, orm + :tickets: 3466 + + Fixed 1.0 regression where the "parent entity" of a synonym- + mapped attribute on top of an :func:`.aliased` object would + resolve to the original mapper, not the :func:`.aliased` + version of it, thereby causing problems for a :class:`.Query` + that relies on this attribute (e.g. it's the only representative + attribute given in the constructor) to figure out the correct FROM + clause for the query. + +.. changelog:: + :version: 1.0.6 + :released: June 25, 2015 + + .. change:: + :tags: bug, orm + :tickets: 3465 + + Fixed a major regression in the 1.0 series where the version_id_counter + feature would cause an object's version counter to be incremented + when there was no net change to the object's row, but instead an object + related to it via relationship (e.g. typically many-to-one) + were associated or de-associated with it, resulting in an UPDATE + statement that updates the object's version counter and nothing else. + In the use case where the relatively recent "server side" and/or + "programmatic/conditional" version counter feature were used + (e.g. setting version_id_generator to False), the bug could cause an + UPDATE without a valid SET clause to be emitted. + + .. change:: + :tags: bug, mssql + :tickets: 3464 + + Fixed issue when using :class:`.VARBINARY` type in conjunction with + an INSERT of NULL + pyodbc; pyodbc requires a special + object be passed in order to persist NULL. As the :class:`.VARBINARY` + type is now usually the default for :class:`.LargeBinary` due to + :ticket:`3039`, this issue is partially a regression in 1.0. + The pymssql driver appears to be unaffected. + + .. change:: + :tags: bug, postgresql, pypy + :tickets: 3439 + + Re-fixed this issue first released in 1.0.5 to fix psycopg2cffi + JSONB support once again, as they suddenly + switched on unconditional decoding of JSONB types in version 2.7.1. + Version detection now specifies 2.7.1 as where we should expect + the DBAPI to do json encoding for us. + + .. change:: + :tags: feature, postgresql + :tickets: 3455 + :pullreq: github:179 + + Added support for storage parameters under CREATE INDEX, using + a new keyword argument ``postgresql_with``. 
Also added support for + reflection of both the ``postgresql_with`` flag as well + as the ``postgresql_using`` flag, which will now be set on + :class:`.Index` objects that are reflected, as well as present + in a new "dialect_options" dictionary in the result of + :meth:`.Inspector.get_indexes`. Pull request courtesy Pete Hollobon. + + .. seealso:: + + :ref:`postgresql_index_storage` + + .. change:: + :tags: bug, orm + :tickets: 3462 + + Fixed 1.0 regression where the enhanced behavior of single-inheritance + joins of :ticket:`3222` takes place inappropriately + for a JOIN along explicit join criteria with a single-inheritance + subclass that does not make use of any discriminator, resulting + in an additional "AND NULL" clause. + + .. change:: + :tags: bug, postgresql + :tickets: 3454 + + Repaired the :class:`.ExcludeConstraint` construct to support common + features that other objects like :class:`.Index` now do, namely that + the column expression may be specified as an arbitrary SQL + expression such as :obj:`.cast` or :obj:`.text`. + + .. change:: + :tags: feature, postgresql + :pullreq: github:182 + + Added new execution option ``max_row_buffer`` which is interpreted + by the psycopg2 dialect when the ``stream_results`` option is + used, which sets a limit on the size of the row buffer that may be + allocated. This value is also provided based on the integer + value sent to :meth:`.Query.yield_per`. Pull request courtesy + mcclurem. + + .. change:: + :tags: bug, orm + :tickets: 3451 + :pullreq: github:181 + + Fixed bug in new :meth:`.Session.bulk_update_mappings` feature where + the primary key columns used in the WHERE clause to locate the row + would also be included in the SET clause, setting their value to + themselves unnecessarily. Pull request courtesy Patrick Hayes. + + .. change:: + :tags: bug, orm + :tickets: 3448 + + Fixed an unexpected-use regression whereby custom :class:`.Comparator` + objects that made use of the ``__clause_element__()`` method and + returned an object that was an ORM-mapped + :class:`.InstrumentedAttribute` and not explicitly a + :class:`.ColumnElement` would fail to be correctly + handled when passed as an expression to :meth:`.Session.query`. + The logic in 0.9 happened to succeed on this, so this use case is now + supported. + + .. change:: + :tags: bug, sql + :tickets: 3445 + + Fixed a bug where clause adaption as applied to a :class:`.Label` + object would fail to accommodate the labeled SQL expression + in all cases, such that any SQL operation that made use of + :meth:`.Label.self_group` would use the original unadapted + expression. One effect of this would be that an ORM :func:`.aliased` + construct would not fully accommodate attributes mapped by + :obj:`.column_property`, such that the un-aliased table could + leak out when the property were used in some kinds of SQL + comparisons. + + .. change:: + :tags: bug, documentation + :tickets: 2077 + + Fixed an internal "memoization" routine for method types such + that a Python descriptor is no longer used; repairs inspectability + of these methods including support for Sphinx documentation. + +.. changelog:: + :version: 1.0.5 + :released: June 7, 2015 + + .. change:: + :tags: feature, engine + + Added new engine event :meth:`.ConnectionEvents.engine_disposed`. + Called after the :meth:`.Engine.dispose` method is called.
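A minimal sketch of hooking this event (assumes SQLAlchemy 1.0.5 or later; the listener function name is illustrative)::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "engine_disposed")
    def receive_engine_disposed(engine):
        # invoked once Engine.dispose() has replaced the engine's
        # connection pool with a fresh, empty one
        print("engine disposed: %s" % engine)

    engine.dispose()  # the listener fires here

+ + .. 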
change:: + :tags: bug, postgresql, pypy + :tickets: 3439 + + Repaired some typing and test issues related to the pypy + psycopg2cffi dialect, in particular that the current 2.7.0 version + does not have native support for the JSONB type. The version detection + for psycopg2 features has been tuned into a specific sub-version + for psycopg2cffi. Additionally, test coverage has been enabled + for the full series of psycopg2 features under psycopg2cffi. + + .. change:: + :tags: feature, ext + :pullreq: bitbucket:54 + + Added support for ``*args`` to be passed to the baked query + initial callable, in the same way that ``*args`` are supported + for the :meth:`.BakedQuery.add_criteria` and + :meth:`.BakedQuery.with_criteria` methods. Initial PR courtesy + Naoki INADA. + + .. change:: + :tags: bug, engine + :tickets: 3435 + + Fixed bug where known boolean values used by + :func:`.engine_from_config` were not being parsed correctly; + these included ``pool_threadlocal`` and the psycopg2 argument + ``use_native_unicode``. + + .. change:: + :tags: bug, mssql + :tickets: 3424, 3430 + + Added a new dialect flag to the MSSQL dialect + ``legacy_schema_aliasing`` which when set to False will disable a + very old and obsolete behavior, that of the compiler's + attempt to turn all schema-qualified table names into alias names, + to work around old and no longer locatable issues where SQL + server could not parse a multi-part identifier name in all + circumstances. The behavior prevented more + sophisticated statements from working correctly, including those which + use hints, as well as CRUD statements that embed correlated SELECT + statements. Rather than continue to repair the feature to work + with more complex statements, it's better to just disable it + as it should no longer be needed for any modern SQL server + version. The flag defaults to True for the 1.0.x series, leaving + current behavior unchanged for this version series. In the 1.1 + series, it will default to False. For the 1.0 series, + when not set to either value explicitly, a warning is emitted + when a schema-qualified table is first used in a statement, which + suggests that the flag be set to False for all modern SQL Server + versions. + + .. seealso:: + + :ref:`legacy_schema_rendering` + + .. change:: + :tags: feature, engine + :tickets: 3379 + + Adjustments to the engine plugin hook, such that the + :meth:`.URL.get_dialect` method will continue to return the + ultimate :class:`.Dialect` object when a dialect plugin is used, + without the need for the caller to be aware of the + :meth:`.Dialect.get_dialect_cls` method. + + + .. change:: + :tags: bug, ext + :tickets: 3427 + + Fixed regression in the :mod:`sqlalchemy.ext.mutable` extension + as a result of the bugfix for :ticket:`3167`, + where attribute and validation events are no longer + called within the flush process. The mutable + extension was relying upon this behavior in the case where a column + level Python-side default were responsible for generating the new value + on INSERT or UPDATE, or when a value were fetched from the RETURNING + clause for "eager defaults" mode. The new value would not be subject + to any event when populated and the mutable extension could not + establish proper coercion or history listening. A new event + :meth:`.InstanceEvents.refresh_flush` is added which the mutable + extension now makes use of for this use case. + + .. 
change:: + :tags: feature, orm + :tickets: 3427 + + Added new event :meth:`.InstanceEvents.refresh_flush`, invoked + when an INSERT or UPDATE level default value fetched via RETURNING + or Python-side default is invoked within the flush process. This + is to provide a hook that is no longer present as a result of + :ticket:`3167`, where attribute and validation events are no longer + called within the flush process. + + .. change:: + :tags: feature, ext + :tickets: 3427 + + Added a new semi-public method to :class:`.MutableBase`, + :meth:`.MutableBase._get_listen_keys`. Overriding this method + is needed in the case where a :class:`.MutableBase` subclass needs + events to propagate for attribute keys other than the key with which + the mutable type is associated, when intercepting the + :meth:`.InstanceEvents.refresh` or + :meth:`.InstanceEvents.refresh_flush` events. The current example of + this is composites using :class:`.MutableComposite`. + + .. change:: + :tags: bug, engine + :tickets: 3421 + + Added support for the case of the misbehaving DBAPI that has + pep-249 exception names linked to exception classes of an entirely + different name, preventing SQLAlchemy's own exception wrapping from + wrapping the error appropriately. + The SQLAlchemy dialect in use needs to implement a new + accessor :attr:`.DefaultDialect.dbapi_exception_translation_map` + to support this feature; this is implemented now for the py-postgresql + dialect. + + .. change:: + :tags: bug, orm + :tickets: 3420 + + The "lightweight named tuple" used when a :class:`.Query` returns + rows failed to implement ``__slots__`` correctly such that it still + had a ``__dict__``. This is resolved, but in the extremely + unlikely case someone was assigning values to the returned tuples, + that will no longer work. + + .. change:: + :tags: bug, engine + :tickets: 3419 + + Fixed bug involving the case when pool checkout event handlers are used + and connection attempts made in the handler itself fail, where + the owning connection record would not be freed until the stack trace + of the connect error itself were freed. For the case where a test + pool of only a single connection were used, this means the pool would + be fully checked out until that stack trace were freed. This mostly + impacts very specific debugging scenarios and is unlikely to have been + noticeable in any production application. The fix applies an + explicit checkin of the record before re-raising the caught exception. + + +.. changelog:: + :version: 1.0.4 + :released: May 7, 2015 + + .. change:: + :tags: bug, orm + :tickets: 3416 + + Fixed unexpected-use regression where in the odd case that the + primaryjoin of a relationship involved comparison to an unhashable + type such as an HSTORE, lazy loads would fail due to a hash-oriented + check on the statement parameters, modified in 1.0 as a result of + :ticket:`3061` to use hashing and modified in :ticket:`3368` + to occur in cases more common than "load on pending". + The values are now checked for the ``__hash__`` attribute beforehand. + + .. change:: + :tags: bug, orm + :tickets: 3412, 3347 + + Liberalized an assertion that was added as part of :ticket:`3347` + to protect against unknown conditions when splicing inner joins + together within joined eager loads with ``innerjoin=True``; if + some of the joins use a "secondary" table, the assertion needs to + unwrap further joins in order to pass. + + .. 
change:: + :tags: bug, schema + :tickets: 3411 + + Fixed bug in enhanced constraint-attachment logic introduced in + :ticket:`3341` where in the unusual case of a constraint that refers + to a mixture of :class:`.Column` objects and string column names + at the same time, the auto-attach-on-column-attach logic will be + skipped; for the constraint to be auto-attached in this case, + all columns must be assembled on the target table up front. + Added a new section to the migration document regarding the + original feature as well as this change. + + .. seealso:: + + :ref:`change_3341` + + .. change:: + :tags: bug, orm + :tickets: 3409, 3320 + + Repaired / added to tests yet more expressions that were reported + as failing with the new 'entity' key value added to + :attr:`.Query.column_descriptions`, the logic to discover the "from" + clause is again reworked to accommodate columns from aliased classes, + as well as to report the correct value for the "aliased" flag in these + cases. + + +.. changelog:: + :version: 1.0.3 + :released: April 30, 2015 + + .. change:: + :tags: bug, orm, pypy + :tickets: 3405 + + Fixed regression from 0.9.10 prior to release due to :ticket:`3349` + where the check for query state on :meth:`.Query.update` or + :meth:`.Query.delete` compared the empty tuple to itself using ``is``, + which fails on PyPy to produce ``True`` in this case; this would + erroneously emit a warning in 0.9 and raise an exception in 1.0. + + .. change:: + :tags: feature, engine + :tickets: 3379 + + New features added to support engine/pool plugins with advanced + functionality. Added a new "soft invalidate" feature to the + connection pool at the level of the checked out connection wrapper + as well as the :class:`._ConnectionRecord`. This works similarly + to a modern pool invalidation in that connections aren't actively + closed, but are recycled only on next checkout; this is essentially + a per-connection version of that feature. A new event + :class:`.PoolEvents.soft_invalidate` is added to complement it. + + Also added new flag + :attr:`.ExceptionContext.invalidate_pool_on_disconnect`. + Allows an error handler within :meth:`.ConnectionEvents.handle_error` + to maintain a "disconnect" condition, but to handle calling invalidate + on individual connections in a specific manner within the event. + + .. change:: + :tags: feature, engine + :tickets: 3355 + + Added new event :class:`.DialectEvents.do_connect`, which allows + interception / replacement of when the :meth:`.Dialect.connect` + hook is called to create a DBAPI connection. Also added + dialect plugin hooks :meth:`.Dialect.get_dialect_cls` and + :meth:`.Dialect.engine_created` which allow external plugins to + add events to existing dialects using entry points. + + .. change:: + :tags: bug, orm + :tickets: 3403, 3320 + + Fixed regression from 0.9.10 prior to release where the new addition + of ``entity`` to the :attr:`.Query.column_descriptions` accessor + would fail if the target entity was produced from a core selectable + such as a :class:`.Table` or :class:`.CTE` object. + + .. change:: + :tags: feature, sql + + Added a placeholder method :meth:`.TypeEngine.compare_against_backend` + which is now consumed by Alembic migrations as of 0.7.6. User-defined + types can implement this method to assist in the comparison of + a type against one reflected from the database.
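As a sketch, a user-defined type might implement this hook roughly as follows (the ``MyGeometry`` type is hypothetical; per the hook's contract, return True for a match, False for a mismatch, or None if no comparison is possible)::

    from sqlalchemy.types import UserDefinedType

    class MyGeometry(UserDefinedType):
        def get_col_spec(self):
            return "GEOMETRY"

        def compare_against_backend(self, dialect, conn_type):
            # conn_type is the type object reflected from the
            # database; report whether it matches this type
            return isinstance(conn_type, MyGeometry)

+ + .. 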
change:: + :tags: bug, orm + :tickets: 3402 + + Fixed regression within the flush process that occurred when an attribute were + set to a SQL expression for an UPDATE and the SQL expression, when + compared to the previous value of the attribute, produced a SQL + comparison other than ``==`` or ``!=``; the exception "Boolean value + of this clause is not defined" would raise. The fix ensures that + the unit of work will not interpret the SQL expression in this way. + + .. change:: + :tags: bug, ext + :tickets: 3397 + + Fixed bug in association proxy where an any()/has() + on a relationship->scalar non-object attribute comparison would fail, + e.g. + ``filter(Parent.some_collection_to_attribute.any(Child.attr == 'foo'))`` + + .. change:: + :tags: bug, sql + :tickets: 3396 + + Fixed bug where the truncation of long labels in SQL could produce + a label that overlapped another label that is not truncated; this is + because the length threshold for truncation was greater than + the portion of the label that remains after truncation. These + two values have now been made the same: label_length - 6. + The effect here is that shorter column labels will be "truncated" + where they would not have been truncated before. + + .. change:: + :tags: bug, orm + :tickets: 3392 + + Fixed unexpected use regression due to :ticket:`2992` where + textual elements placed + into the :meth:`.Query.order_by` clause in conjunction with joined + eager loading would be added to the columns clause of the inner query + in such a way that they were assumed to be table-bound column names, + in the case where the joined eager load needs to wrap the query + in a subquery to accommodate a limit/offset. + + Originally, the behavior here was intentional, in that a query such + as ``query(User).order_by('name').limit(1)`` + would order by ``user.name`` even if the query was modified by + joined eager loading to be within a subquery, as ``'name'`` would + be interpreted as a symbol to be located within the FROM clauses, + in this case ``User.name``, which would then be copied into the + columns clause to ensure it were present for ORDER BY. However, the + feature fails to anticipate the case where ``order_by("name")`` refers + to a specific label name present in the local columns clause already + and not a name bound to a selectable in the FROM clause. + + Beyond that, the feature also fails for deprecated cases such as + ``order_by("name desc")``, which, while it emits a + warning that :func:`.text` should be used here (note that the issue + does not impact cases where :func:`.text` is used explicitly), + still produces a different query than previously where the "name desc" + expression is copied into the columns clause inappropriately. The + resolution is such that the "joined eager loading" aspect of the + feature will skip over these so-called "label reference" expressions + when augmenting the inner columns clause, as though they were + :func:`.text` constructs already. + + .. change:: + :tags: bug, sql + :tickets: 3391 + + Fixed regression due to :ticket:`3282` where the ``tables`` collection + passed as a keyword argument to the :meth:`.DDLEvents.before_create`, + :meth:`.DDLEvents.after_create`, :meth:`.DDLEvents.before_drop`, and + :meth:`.DDLEvents.after_drop` events would no longer be a list + of tables, but instead a list of tuples which contained a second + entry with foreign keys to be added or dropped. 
As the ``tables`` + collection, while documented as not necessarily stable, has come + to be relied upon, this change is considered a regression. + Additionally, in some cases for "drop", this collection would + be an iterator that would cause the operation to fail if + prematurely iterated. The collection is now a list of table + objects in all cases and test coverage for the format of this + collection is now added. + + + .. change:: + :tags: bug, orm + :tickets: 3388 + + Fixed a regression regarding the :meth:`.MapperEvents.instrument_class` + event where its invocation was moved to be after the class manager's + instrumentation of the class, which is the opposite of what the + documentation for the event explicitly states. The rationale for the + switch was due to Declarative taking the step of setting up + the full "instrumentation manager" for a class before it was mapped + for the purpose of the new ``@declared_attr`` features + described in :ref:`feature_3150`, but the change was also made + against the classical use of :func:`.mapper` for consistency. + However, SQLSoup relies upon the instrumentation event happening + before any instrumentation under classical mapping. + The behavior is reverted in the case of classical and declarative + mapping, the latter implemented by using a simple memoization + without using the class manager. + + .. change:: + :tags: bug, orm + :tickets: 3387 + + Fixed issue in new :meth:`.QueryEvents.before_compile` event where + changes made to the :class:`.Query` object's collection of entities + to load within the event would render in the SQL, but would not + be reflected during the loading process. + +.. changelog:: + :version: 1.0.2 + :released: April 24, 2015 + + .. change:: + :tags: bug, sql + :tickets: 3338, 3385 + + Fixed a regression that was incorrectly fixed in 1.0.0b4 + (hence becoming two regressions); reports that + SELECT statements would GROUP BY a label name and fail were misconstrued + to mean that certain backends such as SQL Server should not be emitting + ORDER BY or GROUP BY on a simple label name at all; when in fact, + we had forgotten that 0.9 was already emitting ORDER BY on a simple + label name for all backends, as described in :ref:`migration_1068`, + even though 1.0 includes a rewrite of this logic as part of + :ticket:`2992`. As far + as emitting GROUP BY against a simple label, even Postgresql has + cases where it will raise an error even though the label to group + on should be apparent, so it is clear that GROUP BY should never + be rendered in this way automatically. + + In 1.0.2, SQL Server, Firebird and others will again emit ORDER BY on + a simple label name when passed a + :class:`.Label` construct that is also present in the columns clause. + Additionally, no backend will emit GROUP BY against the simple label + name only when passed a :class:`.Label` construct. + + .. change:: + :tags: bug, orm, declarative + :tickets: 3383 + + Fixed unexpected use regression regarding the declarative + ``__declare_first__`` and ``__declare_last__`` accessors where these + would no longer be called on the superclass of the declarative base. + +.. changelog:: + :version: 1.0.1 + :released: April 23, 2015 + + .. change:: + :tags: bug, firebird + :tickets: 3380 + :pullreq: github:168 + + Fixed a regression due to :ticket:`3034` where limit/offset + clauses were not properly interpreted by the Firebird dialect. + Pull request courtesy effem-git. + + .. 
change:: + :tags: bug, firebird + :tickets: 3381 + + Fixed support for "literal_binds" mode when using limit/offset + with Firebird, so that the values are again rendered inline when + this is selected. Related to :ticket:`3034`. + + .. change:: + :tags: bug, sqlite + :tickets: 3378 + + Fixed a regression due to :ticket:`3282`, where due to the fact that + we attempt to assume the availability of ALTER when creating/dropping + schemas, in the case of SQLite we simply said to not worry about + foreign keys at all, since ALTER is not available, when creating + and dropping tables. This meant that the sorting of tables was + basically skipped in the case of SQLite, and for the vast majority + of SQLite use cases, this is not an issue. + + However, users who were doing DROPs on SQLite + with tables that contained data and with referential integrity + turned on would then experience errors, as the + dependency sorting *does* matter in the case of DROP with + enforced constraints, when those tables have data (SQLite will still + happily let you create foreign keys to nonexistent tables and drop + tables referring to existing ones with constraints enabled, as long as + there's no data being referenced). + + In order to maintain the new feature of :ticket:`3282` while still + allowing a SQLite DROP operation to maintain ordering, we now + do the sort with full FKs taken under consideration, and if we encounter + an unresolvable cycle, only *then* do we forego attempting to sort + the tables; we instead emit a warning and go with the unsorted list. + If an environment needs both ordered DROPs *and* has foreign key + cycles, then the warning notes they will need to restore the + ``use_alter`` flag to their :class:`.ForeignKey` and + :class:`.ForeignKeyConstraint` objects so that just those objects will + be omitted from the dependency sort. + + .. seealso:: + + :ref:`feature_3282` - contains an updated note about SQLite. + + .. change:: + :tags: bug, sql + :tickets: 3372 + + Fixed issue where a straight SELECT EXISTS query would fail to + assign the proper result type of Boolean to the result mapping, and + instead would leak column types from within the query into the + result map. This issue exists in 0.9 and earlier as well, however + has less of an impact in those versions. In 1.0, due to :ticket:`918` + this becomes a regression in that we now rely upon the result mapping + to be very accurate, else we can assign result-type processors to + the wrong column. In all versions, this issue also has the effect + that a simple EXISTS will not apply the Boolean type handler, leading + to simple 1/0 values for backends without native boolean instead of + True/False. The fix includes that an EXISTS columns argument + will be anon-labeled like other column expressions; a similar fix is + implemented for pure-boolean expressions like ``not_(True())``. + + .. change:: + :tags: bug, orm + :tickets: 3374 + + Fixed issue where a query of the form + ``query(B).filter(B.a != A(id=7))`` would render the ``NEVER_SET`` + symbol, when + given a transient object. For a persistent object, it would + always use the persisted database value and not the currently + set value. Assuming autoflush is turned on, this usually would + not be apparent for persistent values, as any pending changes + would be flushed first in any case. However, this is inconsistent + vs. 
the logic used for the non-negated comparison, + ``query(B).filter(B.a == A(id=7))``, which does use the + current value and additionally allows comparisons to transient + objects. The comparison now uses the current value and not + the database-persisted value. + + Unlike the other ``NEVER_SET`` issues that are repaired as regressions + caused by :ticket:`3061` in this release, this particular issue is + present at least as far back as 0.8 and possibly earlier, however it + was discovered as a result of repairing the related ``NEVER_SET`` + issues. + + .. seealso:: + + :ref:`bug_3374` + + .. change:: + :tags: bug, orm + :tickets: 3371 + + Fixed unexpected use regression caused by :ticket:`3061` where + the NEVER_SET + symbol could leak into relationship-oriented queries, including + ``filter()`` and ``with_parent()`` queries. The ``None`` symbol + is returned in all cases, however many of these queries have never + been correctly supported in any case, and produce comparisons + to NULL without using the IS operator. For this reason, a warning + is also added to that subset of relationship queries that don't + currently provide for ``IS NULL``. + + .. seealso:: + + :ref:`bug_3371` + + + .. change:: + :tags: bug, orm + :tickets: 3368 + + Fixed a regression caused by :ticket:`3061` where the + NEVER_SET symbol could leak into a lazyload query, subsequent + to the flush of a pending object. This would occur typically + for a many-to-one relationship that does not use a simple + "get" strategy. The good news is that the fix improves efficiency + vs. 0.9, because we can now skip the SELECT statement entirely + when we detect NEVER_SET symbols present in the parameters; prior to + :ticket:`3061`, we couldn't discern if the None here were set or not. + + +.. changelog:: + :version: 1.0.0 + :released: April 16, 2015 + + .. change:: + :tags: bug, orm + :tickets: 3367 + + Identified an inconsistency when handling :meth:`.Query.join` to the + same target more than once; it implicitly dedupes only in the case of + a relationship join, and due to :ticket:`3233`, in 1.0 a join + to the same table twice behaves differently than 0.9 in that it no + longer erroneously aliases. To help document this change, + the verbiage regarding :ticket:`3233` in the migration notes has + been generalized, and a warning has been added when :meth:`.Query.join` + is called against the same target relationship more than once. + + .. change:: + :tags: bug, orm + :tickets: 3364 + + Made a small improvement to the heuristics of relationship when + determining remote side with semi-self-referential (e.g. two joined + inheritance subclasses referring to each other), non-simple join conditions + such that the parententity is taken into account and can reduce the + need for using the ``remote()`` annotation; this can restore some + cases that might have worked without the annotation prior to 0.9.4 + via :ticket:`2948`. + + .. change:: + :tags: bug, mssql + :tickets: 3360 + + Fixed a regression where the "last inserted id" mechanics would + fail to store the correct value for MSSQL on an INSERT where the + primary key value was present in the insert params before execution, + as well as in the case where an INSERT from SELECT would state the + target columns as column objects, instead of string keys. + + + .. change:: + :tags: bug, mssql + :pullreq: github:166 + + Using the ``Binary`` constructor now present in pymssql rather than + patching one in. Pull request courtesy Ramiro Morales. + + .. 
change:: + :tags: bug, tests + :tickets: 3356 + + Fixed the pathing used when tests run; for sqla_nose.py and py.test, + the "./lib" prefix is again inserted at the head of sys.path but + only if sys.flags.no_user_site isn't set; this makes it act just + like the way Python puts "." in the current path by default. + For tox, we are setting the PYTHONNOUSERSITE flag now. + + .. change:: + :tags: feature, sql + :tickets: 3084 + :pullreq: bitbucket:47 + + The topological sorting used to sort :class:`.Table` objects + and available via the :attr:`.MetaData.sorted_tables` collection + will now produce a **deterministic** ordering; that is, the same + ordering each time given a set of tables with particular names + and dependencies. This is to help with comparison of DDL scripts + and other use cases. The tables are sent to the topological sort + sorted by name, and the topological sort itself will process + the incoming data in an ordered fashion. Pull request + courtesy Sebastian Bank. + + .. seealso:: + + :ref:`feature_3084` + + .. change:: + :tags: feature, orm + :pullreq: github:164 + + Added new argument :paramref:`.Query.update.update_args` which allows + kw arguments such as ``mysql_limit`` to be passed to the underlying + :class:`.Update` construct. Pull request courtesy Amir Sadoughi. + +.. changelog:: :version: 1.0.0b5 + :released: April 3, 2015 .. change:: :tags: bug, orm @@ -90,6 +904,9 @@ GROUP BY expressions. The flag is also turned off defensively for the Firebird and Sybase dialects. + .. note:: this resolution was incorrect, please see version 1.0.2 + for a rework of this resolution. + .. change:: :tags: feature, schema :tickets: 3341 @@ -102,6 +919,10 @@ same time the columns are associated with the table. This in particular helps in some edge cases in declarative but is also of general use. + .. seealso:: + + :ref:`change_3341` + .. change:: :tags: bug, sql :tickets: 3340 @@ -150,7 +971,7 @@ courtesy Thomas Grainger. .. change:: - :tags: change, ext, declarative + :tags: change, orm, declarative :tickets: 3331 Loosened some restrictions that were added to ``@declared_attr`` @@ -186,8 +1007,8 @@ :tickets: 3327 :pullreq: github:160 - Fixed 1.0 regression from pullreq github:137 where Py2K unicode - literals (e.g. ``u""``) would not be accepted by the + Fixed unexpected use regression from pullreq github:137 where + Py2K unicode literals (e.g. ``u""``) would not be accepted by the :paramref:`.relationship.cascade` option. Pull request courtesy Julien Castets. @@ -202,7 +1023,7 @@ on compatibility concerns, see :doc:`/changelog/migration_10`. .. change:: - :tags: feature, extensions + :tags: feature, ext :tickets: 3054 Added a new extension suite :mod:`sqlalchemy.ext.baked`. This @@ -277,7 +1098,7 @@ continued after the error raise occurred. .. change:: - :tags: bug, ext + :tags: bug, orm, declarative :tickets: 3219, 3240 Fixed bug where using an ``__abstract__`` mixin in the middle @@ -1030,7 +1851,7 @@ all transactional status and operations. .. change:: - :tags: bug, declarative + :tags: bug, orm, declarative :tickets: 2670 A relationship set up with :class:`.declared_attr` on @@ -1043,7 +1864,7 @@ :ref:`feature_3150` .. 
change:: - :tags: feature, declarative + :tags: feature, orm, declarative :tickets: 3150 The :class:`.declared_attr` construct has newly improved diff --git a/doc/build/changelog/migration_09.rst b/doc/build/changelog/migration_09.rst index 4904dcfdf..b07aed925 100644 --- a/doc/build/changelog/migration_09.rst +++ b/doc/build/changelog/migration_09.rst @@ -9,7 +9,7 @@ What's New in SQLAlchemy 0.9? and SQLAlchemy version 0.9, which had its first production release on December 30, 2013. - Document last updated: February 28, 2014 + Document last updated: June 10, 2015 Introduction ============ @@ -402,6 +402,70 @@ This is a small change demonstrated as follows:: Behavioral Changes - Core ========================= +Type objects no longer accept ignored keyword arguments +------------------------------------------------------- + +Up through the 0.8 series, most type objects accepted arbitrary keyword +arguments which were silently ignored:: + + from sqlalchemy import Date, Integer + + # storage_format argument here has no effect on any backend; + # it needs to be on the SQLite-specific type + d = Date(storage_format="%(day)02d.%(month)02d.%(year)04d") + + # display_width argument here has no effect on any backend; + # it needs to be on the MySQL-specific type + i = Integer(display_width=5) + +This was a very old bug for which a deprecation warning was added to the +0.8 series, but because nobody ever runs Python with the "-W" flag, it +was mostly never seen:: + + + $ python -W always::DeprecationWarning ~/dev/sqlalchemy/test.py + /Users/classic/dev/sqlalchemy/test.py:5: SADeprecationWarning: Passing arguments to + type object constructor <class 'sqlalchemy.types.Date'> is deprecated + d = Date(storage_format="%(day)02d.%(month)02d.%(year)04d") + /Users/classic/dev/sqlalchemy/test.py:9: SADeprecationWarning: Passing arguments to + type object constructor <class 'sqlalchemy.types.Integer'> is deprecated + i = Integer(display_width=5) + +As of the 0.9 series the "catch all" constructor is removed from +:class:`.TypeEngine`, and these meaningless arguments are no longer accepted. + +The correct way to make use of dialect-specific arguments such as +``storage_format`` and ``display_width`` is to use the appropriate +dialect-specific types:: + + from sqlalchemy.dialects.sqlite import DATE + from sqlalchemy.dialects.mysql import INTEGER + + d = DATE(storage_format="%(day)02d.%(month)02d.%(year)04d") + + i = INTEGER(display_width=5) + +What about the case where we want the dialect-agnostic type also? We +use the :meth:`.TypeEngine.with_variant` method:: + + from sqlalchemy import Date, Integer + from sqlalchemy.dialects.sqlite import DATE + from sqlalchemy.dialects.mysql import INTEGER + + d = Date().with_variant( + DATE(storage_format="%(day)02d.%(month)02d.%(year)04d"), + "sqlite" + ) + + i = Integer().with_variant( + INTEGER(display_width=5), + "mysql" + ) + +:meth:`.TypeEngine.with_variant` isn't new, it was added in SQLAlchemy +0.7.2. So code that is running on the 0.8 series can be corrected to use +this approach and tested before upgrading to 0.9. + ``None`` can no longer be used as a "partial AND" constructor -------------------------------------------------------------- diff --git a/doc/build/changelog/migration_10.rst b/doc/build/changelog/migration_10.rst index f4ead01aa..08e26fd4b 100644 --- a/doc/build/changelog/migration_10.rst +++ b/doc/build/changelog/migration_10.rst @@ -6,9 +6,9 @@ What's New in SQLAlchemy 1.0? 
This document describes changes between SQLAlchemy version 0.9, undergoing maintenance releases as of May, 2014, - and SQLAlchemy version 1.0, as of yet unreleased. + and SQLAlchemy version 1.0, released in April, 2015. - Document last updated: March 17, 2015 + Document last updated: June 9, 2015 Introduction ============ @@ -609,8 +609,8 @@ than the integer value. .. _feature_3282: -The ``use_alter`` flag on ``ForeignKeyConstraint`` is no longer needed ---------------------------------------------------------------------- +The ``use_alter`` flag on ``ForeignKeyConstraint`` is (usually) no longer needed +-------------------------------------------------------------------------------- The :meth:`.MetaData.create_all` and :meth:`.MetaData.drop_all` methods will now make use of a system that automatically renders an ALTER statement @@ -629,6 +629,16 @@ The :paramref:`.ForeignKeyConstraint.use_alter` and the same effect of establishing those constraints for which ALTER is required during a CREATE/DROP scenario. +As of version 1.0.1, special logic takes over in the case of SQLite, which +does not support ALTER, in the case that during a DROP, the given tables have +an unresolvable cycle; in this case a warning is emitted, and the tables +are dropped with **no** ordering, which is usually fine on SQLite unless +constraints are enabled. To resolve the warning and proceed with at least +a partial ordering on a SQLite database, particularly one where constraints +are enabled, re-apply "use_alter" flags to those +:class:`.ForeignKey` and :class:`.ForeignKeyConstraint` objects which should +be explicitly omitted from the sort. + .. seealso:: :ref:`use_alter` - full description of the new behavior. @@ -723,6 +733,95 @@ now make use of all CHECK constraint conventions. :ticket:`3299` +.. _change_3341: + +Constraints referring to unattached Columns can auto-attach to the Table when their referred columns are attached +----------------------------------------------------------------------------------------------------------------- + +Since at least version 0.8, a :class:`.Constraint` has had the ability to +"auto-attach" itself to a :class:`.Table` based on being passed table-attached columns:: + + from sqlalchemy import Table, Column, MetaData, Integer, UniqueConstraint + + m = MetaData() + + t = Table('t', m, + Column('a', Integer), + Column('b', Integer) + ) + + uq = UniqueConstraint(t.c.a, t.c.b) # will auto-attach to Table + + assert uq in t.constraints + +In order to assist with some cases that tend to come up with declarative, +this same auto-attachment logic can now function even if the :class:`.Column` +objects are not yet associated with the :class:`.Table`; additional events +are established such that when those :class:`.Column` objects are associated, +the :class:`.Constraint` is also added:: + + from sqlalchemy import Table, Column, MetaData, Integer, UniqueConstraint + + m = MetaData() + + a = Column('a', Integer) + b = Column('b', Integer) + + uq = UniqueConstraint(a, b) + + t = Table('t', m, a, b) + + assert uq in t.constraints # constraint auto-attached + +The above feature was a late add as of version 1.0.0b3. 
A fix as of + version 1.0.4 for :ticket:`3411` ensures that this logic + does not occur if the :class:`.Constraint` refers to a mixture of + :class:`.Column` objects and string column names; as we do not yet have + tracking for the addition of names to a :class:`.Table`:: + + from sqlalchemy import Table, Column, MetaData, Integer, UniqueConstraint + + m = MetaData() + + a = Column('a', Integer) + b = Column('b', Integer) + + uq = UniqueConstraint(a, 'b') + + t = Table('t', m, a, b) + + # constraint *not* auto-attached, as we do not have tracking + # to locate when a name 'b' becomes available on the table + assert uq not in t.constraints + +Above, the attachment event for column "a" to table "t" will fire off before +column "b" is attached (as "a" is stated in the :class:`.Table` constructor +before "b"), and the constraint will fail to locate "b" if it were to attempt +an attachment. For consistency, if the constraint refers to any string names, +the auto-attach-on-column-attach logic is skipped. + +The original auto-attach logic of course remains in place, if the :class:`.Table` +already contains all the target :class:`.Column` objects at the time +the :class:`.Constraint` is constructed:: + + from sqlalchemy import Table, Column, MetaData, Integer, UniqueConstraint + + m = MetaData() + + a = Column('a', Integer) + b = Column('b', Integer) + + + t = Table('t', m, a, b) + + uq = UniqueConstraint(a, 'b') + + # constraint auto-attached normally as in older versions + assert uq in t.constraints + + +:ticket:`3341` +:ticket:`3411` .. _change_2051: @@ -955,6 +1054,117 @@ to by string name as well:: :ticket:`3228` +.. _bug_3371: + +Warnings emitted when comparing objects with None values to relationships +------------------------------------------------------------------------- + +This change is new as of 1.0.1. Some users are performing +queries that are essentially of this form:: + + session.query(Address).filter(Address.user == User(id=None)) + +This pattern is not currently supported in SQLAlchemy. For all versions, +it emits SQL resembling:: + + SELECT address.id AS address_id, address.user_id AS address_user_id, + address.email_address AS address_email_address + FROM address WHERE ? = address.user_id + (None,) + +Note above, there is a comparison ``WHERE ? = address.user_id`` where the +bound value ``?`` is receiving ``None``, or ``NULL`` in SQL. **This will +always return False in SQL**. The comparison here would in theory +generate SQL as follows:: + + SELECT address.id AS address_id, address.user_id AS address_user_id, + address.email_address AS address_email_address + FROM address WHERE address.user_id IS NULL + +But right now, **it does not**. Applications which are relying upon the +fact that "NULL = NULL" produces False in all cases run the risk that +someday, SQLAlchemy might fix this issue to generate "IS NULL", and the queries +will then produce different results. Therefore with this kind of operation, +you will see a warning:: + + SAWarning: Got None for value of column user.id; this is unsupported + for a relationship comparison and will not currently produce an + IS comparison (but may in a future release) + +Note that this pattern was broken in most cases for release 1.0.0 including +all of the betas; a value like ``SYMBOL('NEVER_SET')`` would be generated. +This issue has been fixed, but as a result of identifying this pattern, +the warning is now there so that we can more safely repair this broken +behavior (now captured in :ticket:`3373`) in a future release. 
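In the meantime, an IS NULL comparison against a many-to-one relationship can be spelled by comparing to ``None`` directly; a sketch, assuming the ``Address.user`` relationship and ``user_id`` column illustrated above::

    # typically renders "address.user_id IS NULL"
    session.query(Address).filter(Address.user == None)

    # or, against the foreign key column itself
    session.query(Address).filter(Address.user_id.is_(None))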
+ +:ticket:`3371` + +.. _bug_3374: + +A "negated contains or equals" relationship comparison will use the current value of attributes, not the database value +------------------------------------------------------------------------------------------------------------------------- + +This change is new as of 1.0.1; while we would have preferred for this to be in 1.0.0, +it only became apparent as a result of :ticket:`3371`. + +Given a mapping:: + + class A(Base): + __tablename__ = 'a' + id = Column(Integer, primary_key=True) + + class B(Base): + __tablename__ = 'b' + id = Column(Integer, primary_key=True) + a_id = Column(ForeignKey('a.id')) + a = relationship("A") + +Given ``A``, with primary key of 7, but which we changed to be 10 +without flushing:: + + s = Session(autoflush=False) + a1 = A(id=7) + s.add(a1) + s.commit() + + a1.id = 10 + +A query against a many-to-one relationship with this object as the target +will use the value 10 in the bound parameters:: + + s.query(B).filter(B.a == a1) + +Produces:: + + SELECT b.id AS b_id, b.a_id AS b_a_id + FROM b + WHERE ? = b.a_id + (10,) + +However, before this change, the negation of this criteria would **not** use +10, it would use 7, unless the object were flushed first:: + + s.query(B).filter(B.a != a1) + +Produces (in 0.9 and all versions prior to 1.0.1):: + + SELECT b.id AS b_id, b.a_id AS b_a_id + FROM b + WHERE b.a_id != ? OR b.a_id IS NULL + (7,) + +For a transient object, it would produce a broken query:: + + SELECT b.id, b.a_id + FROM b + WHERE b.a_id != :a_id_1 OR b.a_id IS NULL + {u'a_id_1': symbol('NEVER_SET')} + +This inconsistency has been repaired, and in all queries the current attribute +value, in this example ``10``, will now be used. + +:ticket:`3374` + .. _migration_3061: Changes to attribute events and other operations regarding attributes that have no pre-existing value @@ -1014,7 +1224,8 @@ INSERT statement in relational databases considers a missing value to be the same as NULL in most cases. Whether SQLAlchemy received a history event for a particular attribute set to None or not would usually not matter; as the difference between sending None/NULL or not wouldn't have an impact. -However, as :ticket:`3060` illustrates, there are some seldom edge cases +However, as :ticket:`3060` (described here in :ref:`migration_3060`) +illustrates, there are some seldom edge cases where we do in fact want to positively have ``None`` set. Also, allowing the attribute event here means it's now possible to create "default value" functions for ORM mapped attributes. @@ -1032,6 +1243,58 @@ symbol, and no change to the object's state occurs. :ticket:`3061` +.. _migration_3060: + +Priority of attribute changes on relationship-bound attributes vs. FK-bound may appear to change +------------------------------------------------------------------------------------------------ + +As a side effect of :ticket:`3060`, setting a relationship-bound attribute to ``None`` +is now a tracked history event which refers to the intention of persisting +``None`` to that attribute. As it has always been the case that setting a +relationship-bound attribute will trump direct assignment to the foreign key +attributes, a change in behavior can be seen here when assigning None. 
Given a mapping:: + + class A(Base): + __tablename__ = 'table_a' + + id = Column(Integer, primary_key=True) + + class B(Base): + __tablename__ = 'table_b' + + id = Column(Integer, primary_key=True) + a_id = Column(ForeignKey('table_a.id')) + a = relationship(A) + +In 1.0, the relationship-bound attribute takes precedence over the FK-bound +attribute in all cases, whether or not +the value we assign is a reference to an ``A`` object or is ``None``. +In 0.9, the behavior is inconsistent and +only takes effect if a value is assigned; the None is not considered:: + + a1 = A(id=1) + a2 = A(id=2) + session.add_all([a1, a2]) + session.flush() + + b1 = B() + b1.a = a1 # we expect a_id to be '1'; takes precedence in 0.9 and 1.0 + + b2 = B() + b2.a = None # we expect a_id to be None; takes precedence only in 1.0 + + b1.a_id = 2 + b2.a_id = 2 + + session.add_all([b1, b2]) + session.commit() + + assert b1.a is a1 # passes in both 0.9 and 1.0 + assert b2.a is None # passes in 1.0, in 0.9 it's a2 + +:ticket:`3060` + .. _bug_3139: session.expunge() will fully detach an object that's been deleted @@ -1092,18 +1355,83 @@ joined loader options can still be used:: .. _bug_3233: -Single inheritance join targets will no longer sometimes implicitly alias themselves ------------------------------------------------------------------------------------- +Changes and fixes in handling of duplicate join targets +-------------------------------------------------------- -This is a bug where an unexpected and inconsistent behavior would occur -in some scenarios when joining to a single-table-inheritance entity. The -difficulty this might cause is that the query is supposed to raise an error, -as it is invalid SQL, however the bug would cause an alias to be added which -makes the query "work". The issue is confusing because this aliasing -is not applied consistently and could change based on the nature of the query -preceding the join. +Changes here encompass bugs where an unexpected and inconsistent +behavior would occur in some scenarios when joining to an entity +twice, or to multiple single-table entities against the same table, +without using a relationship-based ON clause, as well as when joining +multiple times to the same target relationship. -A simple example is:: + +Starting with a mapping as:: + + from sqlalchemy import Integer, Column, String, ForeignKey + from sqlalchemy.orm import Session, relationship + from sqlalchemy.ext.declarative import declarative_base + + Base = declarative_base() + + class A(Base): + __tablename__ = 'a' + id = Column(Integer, primary_key=True) + bs = relationship("B") + + class B(Base): + __tablename__ = 'b' + id = Column(Integer, primary_key=True) + a_id = Column(ForeignKey('a.id')) + +A query that joins to ``A.bs`` twice:: + + print s.query(A).join(A.bs).join(A.bs) + +Will render:: + + SELECT a.id AS a_id + FROM a JOIN b ON a.id = b.a_id + +The query deduplicates the redundant ``A.bs`` because it is attempting +to support a case like the following:: + + s.query(A).join(A.bs).\ + filter(B.foo == 'bar').\ + reset_joinpoint().join(A.bs, B.cs).filter(C.bar == 'bat') + +That is, the ``A.bs`` is part of a "path". As part of :ticket:`3367`, +arriving at the same endpoint twice without it being part of a +larger path will now emit a warning:: + + SAWarning: Pathed join target A.bs has already been joined to; skipping + +The bigger change involves when joining to an entity without using a +relationship-bound path. 
If we join to ``B`` twice:: + + print s.query(A).join(B, B.a_id == A.id).join(B, B.a_id == A.id) + +In 0.9, this would render as follows:: + + SELECT a.id AS a_id + FROM a JOIN b ON b.a_id = a.id JOIN b AS b_1 ON b_1.a_id = a.id + +This is problematic since the aliasing is implicit and in the case of different +ON clauses can lead to unpredictable results. + +In 1.0, no automatic aliasing is applied and we get:: + + SELECT a.id AS a_id + FROM a JOIN b ON b.a_id = a.id JOIN b ON b.a_id = a.id + +This will raise an error from the database. While it might be nice if +the "duplicate join target" acted identically if we joined both from +redundant relationships vs. redundant non-relationship based targets, +for now we are only changing the behavior in the more serious case where +implicit aliasing would have occurred previously, and only emitting a warning +in the relationship case. Ultimately, joining to the same thing twice without +any aliasing to disambiguate should raise an error in all cases. + +The change also has an impact on single-table inheritance targets. Using +a mapping as follows:: from sqlalchemy import Integer, Column, String, ForeignKey from sqlalchemy.orm import Session, relationship @@ -1151,7 +1479,8 @@ the identical SQL:: WHERE a.type IN (:type_2) The above SQL is invalid, as it renders "a" within the FROM list twice. -The bug however would occur with the second query only and render this instead:: +However, the implicit aliasing bug would occur with the second query only +and render this instead:: SELECT a.id AS a_id, a.type AS a_type FROM a JOIN b ON b.a_id = a.id JOIN a AS a_1 @@ -1173,6 +1502,7 @@ as all the subclasses normally refer to the same table:: print s.query(ASub1).join(B, ASub1.b).join(asub2_alias, B.a.of_type(asub2_alias)) :ticket:`3233` +:ticket:`3367` Deferred Columns No Longer Implicitly Undefer @@ -1290,7 +1620,7 @@ join into a subquery as a join target on SQLite. :ticket:`3008` -.. _change_3429: +.. _change_3249: Subqueries no longer applied to uselist=False joined eager loads ---------------------------------------------------------------- @@ -1339,6 +1669,20 @@ has always emitted a warning here and ignored additional results for :ticket:`3249` +query.update() / query.delete() raises if used with join(), select_from(), from_self() +-------------------------------------------------------------------------------------- + +A warning is emitted in SQLAlchemy 0.9.10 (not yet released as of +June 9, 2015) when the :meth:`.Query.update` or :meth:`.Query.delete` methods +are invoked against a query which has also called upon :meth:`.Query.join`, +:meth:`.Query.outerjoin`, +:meth:`.Query.select_from` or :meth:`.Query.from_self`. These are unsupported +use cases which silently fail in the 0.9 series up until 0.9.10, where a warning +is emitted. In 1.0, these cases raise an exception. + +:ticket:`3349` + + query.update() with ``synchronize_session='evaluate'`` raises on multi-table update ----------------------------------------------------------------------------------- @@ -1810,6 +2154,22 @@ columns regardless of how the object was constructed or its current state. +.. _feature_3084: + +MetaData.sorted_tables accessor is "deterministic" +----------------------------------------------------- + +The sorting of tables resulting from the :attr:`.MetaData.sorted_tables` +accessor is "deterministic"; the ordering should be the same in all cases +regardless of Python hashing. 
This is done by first sorting the tables +by name before passing them to the topological algorithm, which maintains +that ordering as it iterates. + +Note that this change does **not** yet apply to the ordering applied +when emitting :meth:`.MetaData.create_all` or :meth:`.MetaData.drop_all`. + +:ticket:`3084` + .. _bug_3170: null(), false() and true() constants are no longer singletons diff --git a/doc/build/conf.py b/doc/build/conf.py index 35204b1b9..fa9be2d25 100644 --- a/doc/build/conf.py +++ b/doc/build/conf.py @@ -13,6 +13,25 @@ import sys import os +import traceback + +def force_install_reqs(): + import logging + + log = logging.getLogger("pip") + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter(logging.Formatter("[pip] %(message)s")) + log.addHandler(handler) + log.setLevel(logging.INFO) + + log.info("READTHEDOCS is set, force-installing requirements.txt") + + from pip.commands import install + req = os.path.join(os.path.dirname(__file__), "requirements.txt") + cmd = install.InstallCommand() + options, args = cmd.parse_args(["-v", "-U", "-r", req]) + cmd.run(options, args) + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -23,6 +42,19 @@ sys.path.insert(0, os.path.abspath('.')) import sqlalchemy +# attempt to force pip to definitely get the latest +# versions of libraries, see +# https://github.com/rtfd/readthedocs.org/issues/1293 +rtd = os.environ.get('READTHEDOCS', None) == 'True' +if rtd: + try: + force_install_reqs() + except: + traceback.print_exc() + + + + # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. @@ -106,9 +138,9 @@ copyright = u'2007-2015, the SQLAlchemy authors and contributors' # The short X.Y version. version = "1.0" # The full version, including alpha/beta/rc tags. -release = "1.0.0b4" +release = "1.0.6" -release_date = "March 29, 2015" +release_date = "June 25, 2015" site_base = os.environ.get("RTD_SITE_BASE", "http://www.sqlalchemy.org") site_adapter_template = "docs_adapter.mako" diff --git a/doc/build/core/connections.rst b/doc/build/core/connections.rst index b6770bb82..72e1d6a61 100644 --- a/doc/build/core/connections.rst +++ b/doc/build/core/connections.rst @@ -368,6 +368,74 @@ the SQL statement. When the :class:`.ResultProxy` is closed, the underlying :class:`.Connection` is closed for us, resulting in the DBAPI connection being returned to the pool with transactional resources removed. +.. _engine_disposal: + +Engine Disposal +=============== + +The :class:`.Engine` refers to a connection pool, which means under normal +circumstances, there are open database connections present while the +:class:`.Engine` object is still resident in memory. When an :class:`.Engine` +is garbage collected, its connection pool is no longer referred to by +that :class:`.Engine`, and assuming none of its connections are still checked +out, the pool and its connections will also be garbage collected, which has the +effect of closing out the actual database connections as well. But otherwise, +the :class:`.Engine` will hold onto open database connections assuming +it uses the normally default pool implementation of :class:`.QueuePool`. + +The :class:`.Engine` is intended to normally be a permanent +fixture established up-front and maintained throughout the lifespan of an +application. 
It is **not** intended to be created and disposed on a +per-connection basis; it is instead a registry that maintains both a pool +of connections as well as configurational information about the database +and DBAPI in use, as well as some degree of internal caching of per-database +resources. + +However, there are many cases where it is desirable that all connection resources +referred to by the :class:`.Engine` be completely closed out. It's +generally not a good idea to rely on Python garbage collection for this +to occur for these cases; instead, the :class:`.Engine` can be explicitly disposed using +the :meth:`.Engine.dispose` method. This disposes of the engine's +underlying connection pool and replaces it with a new one that's empty. +Provided that the :class:`.Engine` +is discarded at this point and no longer used, all **checked-in** connections +which it refers to will also be fully closed. + +Valid use cases for calling :meth:`.Engine.dispose` include: + +* When a program wants to release any remaining checked-in connections + held by the connection pool and expects to no longer be connected + to that database at all for any future operations. + +* When a program uses multiprocessing or ``fork()``, and an + :class:`.Engine` object is copied to the child process, + :meth:`.Engine.dispose` should be called so that the engine creates + brand new database connections local to that fork. Database connections + generally do **not** travel across process boundaries. + +* Within test suites or multitenancy scenarios where many + ad-hoc, short-lived :class:`.Engine` objects may be created and disposed. + + +Connections that are **checked out** are **not** discarded when the +engine is disposed or garbage collected, as these connections are still +strongly referenced elsewhere by the application. +However, after :meth:`.Engine.dispose` is called, those +connections are no longer associated with that :class:`.Engine`; when they +are closed, they will be returned to their now-orphaned connection pool +which will ultimately be garbage collected, once all connections which refer +to it are also no longer referenced anywhere. +Since this process is not easy to control, it is strongly recommended that +:meth:`.Engine.dispose` is called only after all checked out connections +are checked in or otherwise de-associated from their pool. + +An alternative for applications that are negatively impacted by the +:class:`.Engine` object's use of connection pooling is to disable pooling +entirely. This typically incurs only a modest performance impact upon the +use of new connections, and means that when a connection is checked in, +it is entirely closed out and is not held in memory. See :ref:`pool_switching` +for guidelines on how to disable pooling. + .. _threadlocal_strategy: Using the Threadlocal Execution Strategy diff --git a/doc/build/core/defaults.rst b/doc/build/core/defaults.rst index 1d55cd6c6..4166ac449 100644 --- a/doc/build/core/defaults.rst +++ b/doc/build/core/defaults.rst @@ -325,6 +325,46 @@ executed standalone like a SQL expression, which has the effect of calling its seq = Sequence('some_sequence') nextid = connection.execute(seq) +Associating a Sequence as the Server Side Default +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When we associate a :class:`.Sequence` with a :class:`.Column` as above, +this association is an **in-Python only** association. The CREATE TABLE +that would be generated for our :class:`.Table` would not refer to this +sequence. 
If we want the sequence to be used as a server-side default, +meaning it takes place even if we emit INSERT commands to the table from +the SQL commandline, we can use the :paramref:`.Column.server_default` +parameter in conjunction with the value-generation function of the +sequence, available from the :meth:`.Sequence.next_value` method:: + + cart_id_seq = Sequence('cart_id_seq') + table = Table("cartitems", meta, + Column( + "cart_id", Integer, cart_id_seq, + server_default=cart_id_seq.next_value(), primary_key=True), + Column("description", String(40)), + Column("createdate", DateTime()) + ) + +The above metadata will generate a CREATE TABLE statement on Postgresql as:: + + CREATE TABLE cartitems ( + cart_id INTEGER DEFAULT nextval('cart_id_seq') NOT NULL, + description VARCHAR(40), + createdate TIMESTAMP WITHOUT TIME ZONE, + PRIMARY KEY (cart_id) + ) + +We place the :class:`.Sequence` also as a Python-side default above, that +is, it is mentioned twice in the :class:`.Column` definition. Depending +on the backend in use, this may not be strictly necessary, for example +on the Postgresql backend the Core will use ``RETURNING`` to access the +newly generated primary key value in any case. However, for the best +compatibility, :class:`.Sequence` was originally intended to be a Python-side +directive first and foremost so it's probably a good idea to specify it +in this way as well. + + Default Objects API ------------------- diff --git a/doc/build/core/metadata.rst b/doc/build/core/metadata.rst index e46217c17..c04de158b 100644 --- a/doc/build/core/metadata.rst +++ b/doc/build/core/metadata.rst @@ -30,10 +30,10 @@ The remaining positional arguments are mostly :class:`~sqlalchemy.schema.Column` objects describing each column:: user = Table('user', metadata, - Column('user_id', Integer, primary_key = True), - Column('user_name', String(16), nullable = False), + Column('user_id', Integer, primary_key=True), + Column('user_name', String(16), nullable=False), Column('email_address', String(60)), - Column('password', String(20), nullable = False) + Column('password', String(20), nullable=False) ) Above, a table called ``user`` is described, which contains four columns. The @@ -151,10 +151,10 @@ will issue the CREATE statements: metadata = MetaData() user = Table('user', metadata, - Column('user_id', Integer, primary_key = True), - Column('user_name', String(16), nullable = False), + Column('user_id', Integer, primary_key=True), + Column('user_name', String(16), nullable=False), Column('email_address', String(60), key='email'), - Column('password', String(20), nullable = False) + Column('password', String(20), nullable=False) ) user_prefs = Table('user_prefs', metadata, @@ -291,7 +291,7 @@ example, MySQL has different table backend types, including "MyISAM" and ``mysql_engine``:: addresses = Table('engine_email_addresses', meta, - Column('address_id', Integer, primary_key = True), + Column('address_id', Integer, primary_key=True), Column('remote_user_id', Integer, ForeignKey(users.c.user_id)), Column('email_address', String(20)), mysql_engine='InnoDB' diff --git a/doc/build/core/pooling.rst b/doc/build/core/pooling.rst index 0dbf835d9..ce6d443f9 100644 --- a/doc/build/core/pooling.rst +++ b/doc/build/core/pooling.rst @@ -56,6 +56,8 @@ queued up - the pool would only grow to that size if the application actually used five connections concurrently, in which case the usage of a small pool is an entirely appropriate default behavior. +.. 
_pool_switching: + Switching Pool Implementations ------------------------------ diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst index b4f185ac2..cc2a97625 100644 --- a/doc/build/core/tutorial.rst +++ b/doc/build/core/tutorial.rst @@ -364,6 +364,10 @@ statement is compiled against the **first** dictionary in the list, and it's assumed that all subsequent argument dictionaries are compatible with that statement. +The "executemany" style of invocation is available for each of the +:func:`.insert`, :func:`.update` and :func:`.delete` constructs. + + .. _coretutorial_selecting: Selecting @@ -1754,7 +1758,7 @@ that can be specified: COMMIT {stop}<sqlalchemy.engine.result.ResultProxy object at 0x...> -When using :meth:`~.TableClause.update` in an "execute many" context, +When using :meth:`~.TableClause.update` in an "executemany" context, we may wish to also use explicitly named bound parameters in the WHERE clause. Again, :func:`~.expression.bindparam` is the construct used to achieve this: diff --git a/doc/build/faq/sessions.rst b/doc/build/faq/sessions.rst index 300b4bdbc..e3aae00ce 100644 --- a/doc/build/faq/sessions.rst +++ b/doc/build/faq/sessions.rst @@ -7,6 +7,72 @@ Sessions / Queries :backlinks: none +I'm re-loading data with my Session but it isn't seeing changes that I committed elsewhere +------------------------------------------------------------------------------------------ + +The main issue regarding this behavior is that the session acts as though +the transaction is in the *serializable* isolation state, even if it's not +(and it usually is not). In practical terms, this means that the session +does not alter any data that it's already read within the scope of a transaction. + +If the term "isolation level" is unfamiliar, then you first need to read this link: + +`Isolation Level <https://en.wikipedia.org/wiki/Isolation_%28database_systems%29>`_ + +In short, serializable isolation level generally means +that once you SELECT a series of rows in a transaction, you will get +*the identical data* back each time you re-emit that SELECT. If you are in +the next-lower isolation level, "repeatable read", you'll +see newly added rows (and no longer see deleted rows), but for rows that +you've *already* loaded, you won't see any change. Only if you are in a +lower isolation level, e.g. "read committed", does it become possible to +see a row of data change its value. + +For information on controlling the isolation level when using the +SQLAlchemy ORM, see :ref:`session_transaction_isolation`. + +To simplify things dramatically, the :class:`.Session` itself works in +terms of a completely isolated transaction, and doesn't overwrite any mapped attributes +it's already read unless you tell it to. The use case of trying to re-read +data you've already loaded in an ongoing transaction is an *uncommon* use +case that in many cases has no effect, so this is considered to be the +exception, not the norm; to work within this exception, several methods +are provided to allow specific data to be reloaded within the context +of an ongoing transaction. + +To understand what we mean by "the transaction" when we talk about the +:class:`.Session`, your :class:`.Session` is intended to only work within +a transaction. An overview of this is at :ref:`unitofwork_transaction`. 
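+
+As a side note, a minimal sketch of setting the isolation level engine-wide
+(the connection URL here is hypothetical, and ``isolation_level`` as a
+:func:`.create_engine` argument assumes a backend that accepts it, such as
+psycopg2 per :ref:`psycopg2_isolation_level`)::
+
+    from sqlalchemy import create_engine
+    from sqlalchemy.orm import sessionmaker
+
+    # "READ COMMITTED" means each new transaction can see rows
+    # committed by other transactions in the meantime
+    engine = create_engine(
+        "postgresql+psycopg2://scott:tiger@localhost/test",
+        isolation_level="READ COMMITTED")
+
+    Session = sessionmaker(bind=engine)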
+
+Once we've figured out what our isolation level is, and we think that
+our isolation level is set at a low enough level so that if we re-SELECT a row,
+we should see new data in our :class:`.Session`, how do we see it?
+
+Three ways, from most common to least:
+
+1. We simply end our transaction and start a new one on next access
+   with our :class:`.Session` by calling :meth:`.Session.commit` (note
+   that if the :class:`.Session` is in the lesser-used "autocommit"
+   mode, there would be a call to :meth:`.Session.begin` as well). The
+   vast majority of applications and use cases do not have any issues
+   with not being able to "see" data in other transactions because
+   they stick to this pattern, which is at the core of the best practice of
+   **short lived transactions**.
+   See :ref:`session_faq_whentocreate` for some thoughts on this.
+
+2. We tell our :class:`.Session` to re-read rows that it has already read,
+   either when we next query for them using :meth:`.Session.expire_all`
+   or :meth:`.Session.expire`, or immediately on an object using
+   :meth:`.Session.refresh`. See :ref:`session_expire` for detail on this.
+
+3. We can run whole queries while setting them to definitely overwrite
+   already-loaded objects as they read rows by using
+   :meth:`.Query.populate_existing`.
+
+But remember, **the ORM cannot see changes in rows if our isolation
+level is repeatable read or higher, unless we start a new transaction**.
+

"This Session's transaction has been rolled back due to a previous exception during flush." (or similar)
---------------------------------------------------------------------------------------------------------

diff --git a/doc/build/orm/extensions/declarative/mixins.rst b/doc/build/orm/extensions/declarative/mixins.rst
index 1b3364c2e..917c55f88 100644
--- a/doc/build/orm/extensions/declarative/mixins.rst
+++ b/doc/build/orm/extensions/declarative/mixins.rst
@@ -138,8 +138,7 @@ point at which the ``User`` class is constructed, and the declarative
extension can use the resulting :class:`.Column` object as
returned by the method without the need to copy it.

-.. versionchanged:: > 0.6.5
-    Rename 0.6.5 ``sqlalchemy.util.classproperty``
+.. versionchanged:: 0.6.5  Rename ``sqlalchemy.util.classproperty``
    into :class:`~.declared_attr`.

Columns generated by :class:`~.declared_attr` can also be
diff --git a/doc/build/orm/mapping_styles.rst b/doc/build/orm/mapping_styles.rst
index 7571ce650..52c478361 100644
--- a/doc/build/orm/mapping_styles.rst
+++ b/doc/build/orm/mapping_styles.rst
@@ -120,7 +120,7 @@ user-defined class, linked together with a :func:`.mapper`. When we talk about
"the behavior of :func:`.mapper`", this includes when using the Declarative system
as well - it's still used, just behind the scenes.

-Runtime Intropsection of Mappings, Objects
+Runtime Introspection of Mappings, Objects
==========================================

The :class:`.Mapper` object is available from any mapped class, regardless
diff --git a/doc/build/orm/session_basics.rst b/doc/build/orm/session_basics.rst
index 8919864ca..dd1162216 100644
--- a/doc/build/orm/session_basics.rst
+++ b/doc/build/orm/session_basics.rst
@@ -158,7 +158,7 @@ Session Frequently Asked Questions
===================================

By this point, many users already have questions about sessions.
-This section presents a mini-FAQ (note that we have also a `real FAQ </faq/index>`)
+This section presents a mini-FAQ (note that we have also a :doc:`real FAQ </faq/index>`)
of the most basic issues one is presented with when using a :class:`.Session`.

When do I make a :class:`.sessionmaker`?
@@ -192,9 +192,15 @@ When do I construct a :class:`.Session`, when do I commit it, and when do I clos

.. topic:: tl;dr;

-    As a general rule, keep the lifecycle of the session **separate and
-    external** from functions and objects that access and/or manipulate
-    database data.
+    1. As a general rule, keep the lifecycle of the session **separate and
+       external** from functions and objects that access and/or manipulate
+       database data. This will greatly help with achieving a predictable
+       and consistent transactional scope.
+
+    2. Make sure you have a clear notion of where transactions
+       begin and end, and keep transactions **short**, meaning, they end
+       at the conclusion of a sequence of operations, instead of being held
+       open indefinitely.

A :class:`.Session` is typically constructed at the beginning of a logical
operation where database access is potentially anticipated.
diff --git a/doc/build/orm/session_transaction.rst b/doc/build/orm/session_transaction.rst
index 24a844650..bca3e944f 100644
--- a/doc/build/orm/session_transaction.rst
+++ b/doc/build/orm/session_transaction.rst
@@ -484,7 +484,9 @@ everything is rolled back.

    from sqlalchemy import event

+
    class SomeTest(TestCase):
+
        def setUp(self):
            # connect to the database
            self.connection = engine.connect()
@@ -502,7 +504,12 @@ everything is rolled back.
            @event.listens_for(self.session, "after_transaction_end")
            def restart_savepoint(session, transaction):
                if transaction.nested and not transaction._parent.nested:
-                    session.begin_nested()
+                    # ensure that state is expired the way
+                    # session.commit() at the top level normally does
+                    # (optional step)
+                    session.expire_all()
+
+                    session.begin_nested()

        # ... the tearDown() method stays the same
diff --git a/doc/build/requirements.txt b/doc/build/requirements.txt
index 3f87e68ea..d1eb23d0f 100644
--- a/doc/build/requirements.txt
+++ b/doc/build/requirements.txt
@@ -1,3 +1,3 @@
changelog>=0.3.4
sphinx-paramlinks>=0.2.2
-git+https://bitbucket.org/zzzeek/zzzeeksphinx.git
+git+https://bitbucket.org/zzzeek/zzzeeksphinx.git@HEAD#egg=zzzeeksphinx
diff --git a/lib/sqlalchemy/__init__.py b/lib/sqlalchemy/__init__.py
index 709ba3246..093e90bbf 100644
--- a/lib/sqlalchemy/__init__.py
+++ b/lib/sqlalchemy/__init__.py
@@ -120,7 +120,7 @@ from .schema import (
from .inspection import inspect
from .engine import create_engine, engine_from_config

-__version__ = '1.0.0b5'
+__version__ = '1.0.7'


def __go(lcls):
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index 9d8630d3c..c34829cd3 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -293,22 +293,22 @@ class FBCompiler(sql.compiler.SQLCompiler):
    def visit_sequence(self, seq):
        return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)

-    def get_select_precolumns(self, select):
+    def get_select_precolumns(self, select, **kw):
        """Called when building a ``SELECT`` statement, position is just
        before column list. Firebird puts the limit and offset right
        after the ``SELECT``...
""" result = "" - if select._limit_clause: - result += "FIRST %s " % self.process(select._limit_clause) - if select._offset_clause: - result += "SKIP %s " % self.process(select._offset_clause) + if select._limit_clause is not None: + result += "FIRST %s " % self.process(select._limit_clause, **kw) + if select._offset_clause is not None: + result += "SKIP %s " % self.process(select._offset_clause, **kw) if select._distinct: result += "DISTINCT " return result - def limit_clause(self, select): + def limit_clause(self, select, **kw): """Already taken care of in the `get_select_precolumns` method.""" return "" @@ -394,8 +394,6 @@ class FBDialect(default.DefaultDialect): requires_name_normalize = True supports_empty_insert = False - supports_simple_order_by_label = False - statement_compiler = FBCompiler ddl_compiler = FBDDLCompiler preparer = FBIdentifierPreparer diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 26b794712..bd41c19bf 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -166,6 +166,55 @@ how SQLAlchemy handles this: This is an auxilliary use case suitable for testing and bulk insert scenarios. +.. _legacy_schema_rendering: + +Rendering of SQL statements that include schema qualifiers +--------------------------------------------------------- + +When using :class:`.Table` metadata that includes a "schema" qualifier, +such as:: + + account_table = Table( + 'account', metadata, + Column('id', Integer, primary_key=True), + Column('info', String(100)), + schema="customer_schema" + ) + +The SQL Server dialect has a long-standing behavior that it will attempt +to turn a schema-qualified table name into an alias, such as:: + + >>> eng = create_engine("mssql+pymssql://mydsn") + >>> print(account_table.select().compile(eng)) + SELECT account_1.id, account_1.info + FROM customer_schema.account AS account_1 + +This behavior is legacy, does not function correctly for many forms +of SQL statements, and will be disabled by default in the 1.1 series +of SQLAlchemy. As of 1.0.5, the above statement will produce the following +warning:: + + SAWarning: legacy_schema_aliasing flag is defaulted to True; + some schema-qualified queries may not function correctly. + Consider setting this flag to False for modern SQL Server versions; + this flag will default to False in version 1.1 + +This warning encourages the :class:`.Engine` to be created as follows:: + + >>> eng = create_engine("mssql+pymssql://mydsn", legacy_schema_aliasing=False) + +Where the above SELECT statement will produce:: + + >>> print(account_table.select().compile(eng)) + SELECT customer_schema.account.id, customer_schema.account.info + FROM customer_schema.account + +The warning will not emit if the ``legacy_schema_aliasing`` flag is set +to either True or False. + +.. versionadded:: 1.0.5 - Added the ``legacy_schema_aliasing`` flag to disable + the SQL Server dialect's legacy behavior with schema-qualified table + names. This flag will default to False in version 1.1. 
Collation Support ----------------- @@ -951,6 +1000,15 @@ class MSSQLCompiler(compiler.SQLCompiler): self.tablealiases = {} super(MSSQLCompiler, self).__init__(*args, **kwargs) + def _with_legacy_schema_aliasing(fn): + def decorate(self, *arg, **kw): + if self.dialect.legacy_schema_aliasing: + return fn(self, *arg, **kw) + else: + super_ = getattr(super(MSSQLCompiler, self), fn.__name__) + return super_(*arg, **kw) + return decorate + def visit_now_func(self, fn, **kw): return "CURRENT_TIMESTAMP" @@ -979,7 +1037,7 @@ class MSSQLCompiler(compiler.SQLCompiler): self.process(binary.left, **kw), self.process(binary.right, **kw)) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """ MS-SQL puts TOP, it's version of LIMIT here """ s = "" @@ -995,7 +1053,8 @@ class MSSQLCompiler(compiler.SQLCompiler): if s: return s else: - return compiler.SQLCompiler.get_select_precolumns(self, select) + return compiler.SQLCompiler.get_select_precolumns( + self, select, **kw) def get_from_hint_text(self, table, text): return text @@ -1053,14 +1112,7 @@ class MSSQLCompiler(compiler.SQLCompiler): else: return compiler.SQLCompiler.visit_select(self, select, **kwargs) - def _schema_aliased_table(self, table): - if getattr(table, 'schema', None) is not None: - if table not in self.tablealiases: - self.tablealiases[table] = table.alias() - return self.tablealiases[table] - else: - return None - + @_with_legacy_schema_aliasing def visit_table(self, table, mssql_aliased=False, iscrud=False, **kwargs): if mssql_aliased is table or iscrud: return super(MSSQLCompiler, self).visit_table(table, **kwargs) @@ -1072,25 +1124,14 @@ class MSSQLCompiler(compiler.SQLCompiler): else: return super(MSSQLCompiler, self).visit_table(table, **kwargs) - def visit_alias(self, alias, **kwargs): + @_with_legacy_schema_aliasing + def visit_alias(self, alias, **kw): # translate for schema-qualified table aliases - kwargs['mssql_aliased'] = alias.original - return super(MSSQLCompiler, self).visit_alias(alias, **kwargs) + kw['mssql_aliased'] = alias.original + return super(MSSQLCompiler, self).visit_alias(alias, **kw) - def visit_extract(self, extract, **kw): - field = self.extract_map.get(extract.field, extract.field) - return 'DATEPART("%s", %s)' % \ - (field, self.process(extract.expr, **kw)) - - def visit_savepoint(self, savepoint_stmt): - return "SAVE TRANSACTION %s" % \ - self.preparer.format_savepoint(savepoint_stmt) - - def visit_rollback_to_savepoint(self, savepoint_stmt): - return ("ROLLBACK TRANSACTION %s" - % self.preparer.format_savepoint(savepoint_stmt)) - - def visit_column(self, column, add_to_result_map=None, **kwargs): + @_with_legacy_schema_aliasing + def visit_column(self, column, add_to_result_map=None, **kw): if column.table is not None and \ (not self.isupdate and not self.isdelete) or \ self.is_subquery(): @@ -1108,10 +1149,40 @@ class MSSQLCompiler(compiler.SQLCompiler): ) return super(MSSQLCompiler, self).\ - visit_column(converted, **kwargs) + visit_column(converted, **kw) return super(MSSQLCompiler, self).visit_column( - column, add_to_result_map=add_to_result_map, **kwargs) + column, add_to_result_map=add_to_result_map, **kw) + + def _schema_aliased_table(self, table): + if getattr(table, 'schema', None) is not None: + if self.dialect._warn_schema_aliasing and \ + table.schema.lower() != 'information_schema': + util.warn( + "legacy_schema_aliasing flag is defaulted to True; " + "some schema-qualified queries may not function " + "correctly. 
Consider setting this flag to False for " + "modern SQL Server versions; this flag will default to " + "False in version 1.1") + + if table not in self.tablealiases: + self.tablealiases[table] = table.alias() + return self.tablealiases[table] + else: + return None + + def visit_extract(self, extract, **kw): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART("%s", %s)' % \ + (field, self.process(extract.expr, **kw)) + + def visit_savepoint(self, savepoint_stmt): + return "SAVE TRANSACTION %s" % \ + self.preparer.format_savepoint(savepoint_stmt) + + def visit_rollback_to_savepoint(self, savepoint_stmt): + return ("ROLLBACK TRANSACTION %s" + % self.preparer.format_savepoint(savepoint_stmt)) def visit_binary(self, binary, **kwargs): """Move bind parameters to the right-hand side of an operator, where @@ -1416,7 +1487,6 @@ class MSDialect(default.DefaultDialect): use_scope_identity = True max_identifier_length = 128 schema_name = "dbo" - supports_simple_order_by_label = False colspecs = { sqltypes.DateTime: _MSDateTime, @@ -1424,6 +1494,10 @@ class MSDialect(default.DefaultDialect): sqltypes.Time: TIME, } + engine_config_types = default.DefaultDialect.engine_config_types.union([ + ('legacy_schema_aliasing', util.asbool), + ]) + ischema_names = ischema_names supports_native_boolean = False @@ -1455,7 +1529,8 @@ class MSDialect(default.DefaultDialect): use_scope_identity=True, max_identifier_length=None, schema_name="dbo", - deprecate_large_types=None, **opts): + deprecate_large_types=None, + legacy_schema_aliasing=None, **opts): self.query_timeout = int(query_timeout or 0) self.schema_name = schema_name @@ -1463,6 +1538,14 @@ class MSDialect(default.DefaultDialect): self.max_identifier_length = int(max_identifier_length or 0) or \ self.max_identifier_length self.deprecate_large_types = deprecate_large_types + + if legacy_schema_aliasing is None: + self.legacy_schema_aliasing = True + self._warn_schema_aliasing = True + else: + self.legacy_schema_aliasing = legacy_schema_aliasing + self._warn_schema_aliasing = False + super(MSDialect, self).__init__(**opts) def do_savepoint(self, connection, name): @@ -1483,11 +1566,15 @@ class MSDialect(default.DefaultDialect): # FreeTDS with version 4.2 seems to report here # a number like "95.10.255". Don't know what # that is. So emit warning. + # Use TDS Version 7.0 through 7.3, per the MS information here: + # https://msdn.microsoft.com/en-us/library/dd339982.aspx + # and FreeTDS information here (7.3 highest supported version): + # http://www.freetds.org/userguide/choosingtdsprotocol.htm util.warn( "Unrecognized server version info '%s'. Version specific " "behaviors may not function properly. If using ODBC " - "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, " - "is configured in the FreeTDS configuration." % + "with FreeTDS, ensure TDS_VERSION 7.0 through 7.3, not " + "4.2, is configured in the FreeTDS configuration." % ".".join(str(x) for x in self.server_version_info)) if self.server_version_info >= MS_2005_VERSION and \ 'implicit_returning' not in self.__dict__: diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 2214d18d1..324b3770c 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -46,11 +46,12 @@ class MSDialect_pymssql(MSDialect): @classmethod def dbapi(cls): module = __import__('pymssql') - # pymmsql doesn't have a Binary method. 
we use string - # TODO: monkeypatching here is less than ideal - module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) - + # pymmsql < 2.1.1 doesn't have a Binary method. we use string client_ver = tuple(int(x) for x in module.__version__.split(".")) + if client_ver < (2, 1, 1): + # TODO: monkeypatching here is less than ideal + module.Binary = lambda x: x if hasattr(x, 'decode') else str(x) + if client_ver < (1, ): util.warn("The pymssql dialect expects at least " "the 1.0 series of the pymssql DBAPI.") diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index ad1e7ae37..7ec8cbaa7 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -95,7 +95,7 @@ for unix + PyODBC. """ -from .base import MSExecutionContext, MSDialect +from .base import MSExecutionContext, MSDialect, VARBINARY from ...connectors.pyodbc import PyODBCConnector from ... import types as sqltypes, util import decimal @@ -174,6 +174,22 @@ class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float): pass +class _VARBINARY_pyodbc(VARBINARY): + def bind_processor(self, dialect): + if dialect.dbapi is None: + return None + + DBAPIBinary = dialect.dbapi.Binary + + def process(value): + if value is not None: + return DBAPIBinary(value) + else: + # pyodbc-specific + return dialect.dbapi.BinaryNull + return process + + class MSExecutionContext_pyodbc(MSExecutionContext): _embedded_scope_identity = False @@ -230,7 +246,9 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect): MSDialect.colspecs, { sqltypes.Numeric: _MSNumeric_pyodbc, - sqltypes.Float: _MSFloat_pyodbc + sqltypes.Float: _MSFloat_pyodbc, + VARBINARY: _VARBINARY_pyodbc, + sqltypes.LargeBinary: _VARBINARY_pyodbc, } ) diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 8460ff92a..fee05fd2d 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1829,7 +1829,7 @@ class MySQLCompiler(compiler.SQLCompiler): def visit_false(self, element, **kw): return "false" - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """Add special MySQL keywords in place of DISTINCT. .. note:: diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index c1c0ab08e..22c66dbbb 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -401,6 +401,19 @@ The value passed to the keyword argument will be simply passed through to the underlying CREATE INDEX command, so it *must* be a valid index type for your version of PostgreSQL. +.. _postgresql_index_storage: + +Index Storage Parameters +^^^^^^^^^^^^^^^^^^^^^^^^ + +PostgreSQL allows storage parameters to be set on indexes. The storage +parameters available depend on the index method used by the index. Storage +parameters can be specified on :class:`.Index` using the ``postgresql_with`` +keyword argument:: + + Index('my_index', my_table.c.data, postgresql_with={"fillfactor": 50}) + +.. versionadded:: 1.0.6 .. 
_postgresql_index_concurrently: @@ -1446,7 +1459,7 @@ class PGCompiler(compiler.SQLCompiler): raise exc.CompileError("Unrecognized hint: %r" % hint) return "ONLY " + sqltext - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): if select._distinct is not False: if select._distinct is True: return "DISTINCT " @@ -1455,7 +1468,8 @@ class PGCompiler(compiler.SQLCompiler): [self.process(col) for col in select._distinct] ) + ") " else: - return "DISTINCT ON (" + self.process(select._distinct) + ") " + return "DISTINCT ON (" + \ + self.process(select._distinct, **kw) + ") " else: return "" @@ -1591,6 +1605,13 @@ class PGDDLCompiler(compiler.DDLCompiler): ]) ) + withclause = index.dialect_options['postgresql']['with'] + + if withclause: + text += " WITH (%s)" % (', '.join( + ['%s = %s' % storage_parameter + for storage_parameter in withclause.items()])) + whereclause = index.dialect_options["postgresql"]["where"] if whereclause is not None: @@ -1600,15 +1621,17 @@ class PGDDLCompiler(compiler.DDLCompiler): text += " WHERE " + where_compiled return text - def visit_exclude_constraint(self, constraint): + def visit_exclude_constraint(self, constraint, **kw): text = "" if constraint.name is not None: text += "CONSTRAINT %s " % \ self.preparer.format_constraint(constraint) elements = [] - for c in constraint.columns: - op = constraint.operators[c.name] - elements.append(self.preparer.quote(c.name) + ' WITH ' + op) + for expr, name, op in constraint._render_exprs: + kw['include_table'] = False + elements.append( + "%s WITH %s" % (self.sql_compiler.process(expr, **kw), op) + ) text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements)) if constraint.where is not None: @@ -1918,6 +1941,7 @@ class PGDialect(default.DefaultDialect): "where": None, "ops": {}, "concurrently": False, + "with": {} }), (schema.Table, { "ignore_search_path": False, @@ -2606,7 +2630,8 @@ class PGDialect(default.DefaultDialect): SELECT i.relname as relname, ix.indisunique, ix.indexprs, ix.indpred, - a.attname, a.attnum, NULL, ix.indkey%s + a.attname, a.attnum, NULL, ix.indkey%s, + i.reloptions, am.amname FROM pg_class t join pg_index ix on t.oid = ix.indrelid @@ -2614,6 +2639,9 @@ class PGDialect(default.DefaultDialect): left outer join pg_attribute a on t.oid = a.attrelid and %s + left outer join + pg_am am + on i.relam = am.oid WHERE t.relkind IN ('r', 'v', 'f', 'm') and t.oid = :table_oid @@ -2633,7 +2661,8 @@ class PGDialect(default.DefaultDialect): SELECT i.relname as relname, ix.indisunique, ix.indexprs, ix.indpred, - a.attname, a.attnum, c.conrelid, ix.indkey::varchar + a.attname, a.attnum, c.conrelid, ix.indkey::varchar, + i.reloptions, am.amname FROM pg_class t join pg_index ix on t.oid = ix.indrelid @@ -2646,6 +2675,9 @@ class PGDialect(default.DefaultDialect): on (ix.indrelid = c.conrelid and ix.indexrelid = c.conindid and c.contype in ('p', 'u', 'x')) + left outer join + pg_am am + on i.relam = am.oid WHERE t.relkind IN ('r', 'v', 'f', 'm') and t.oid = :table_oid @@ -2662,7 +2694,8 @@ class PGDialect(default.DefaultDialect): sv_idx_name = None for row in c.fetchall(): - idx_name, unique, expr, prd, col, col_num, conrelid, idx_key = row + (idx_name, unique, expr, prd, col, + col_num, conrelid, idx_key, options, amname) = row if expr: if idx_name != sv_idx_name: @@ -2688,6 +2721,16 @@ class PGDialect(default.DefaultDialect): index['unique'] = unique if conrelid is not None: index['duplicates_constraint'] = idx_name + if options: + index['options'] = dict( + 
[option.split("=") for option in options]) + + # it *might* be nice to include that this is 'btree' in the + # reflection info. But we don't want an Index object + # to have a ``postgresql_using`` in it that is just the + # default, so for the moment leaving this out. + if amname and amname != 'btree': + index['amname'] = amname result = [] for name, idx in indexes.items(): @@ -2698,6 +2741,12 @@ class PGDialect(default.DefaultDialect): } if 'duplicates_constraint' in idx: entry['duplicates_constraint'] = idx['duplicates_constraint'] + if 'options' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_with"] = idx['options'] + if 'amname' in idx: + entry.setdefault( + 'dialect_options', {})["postgresql_using"] = idx['amname'] result.append(entry) return result diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/constraints.py index 0371daf3d..4cfc050de 100644 --- a/lib/sqlalchemy/dialects/postgresql/constraints.py +++ b/lib/sqlalchemy/dialects/postgresql/constraints.py @@ -3,8 +3,9 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php -from sqlalchemy.schema import ColumnCollectionConstraint -from sqlalchemy.sql import expression +from ...sql.schema import ColumnCollectionConstraint +from ...sql import expression +from ... import util class ExcludeConstraint(ColumnCollectionConstraint): @@ -48,17 +49,39 @@ static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE for this constraint. """ + columns = [] + render_exprs = [] + self.operators = {} + + expressions, operators = zip(*elements) + + for (expr, column, strname, add_element), operator in zip( + self._extract_col_expression_collection(expressions), + operators + ): + if add_element is not None: + columns.append(add_element) + + name = column.name if column is not None else strname + + if name is not None: + # backwards compat + self.operators[name] = operator + + expr = expression._literal_as_text(expr) + + render_exprs.append( + (expr, name, operator) + ) + + self._render_exprs = render_exprs ColumnCollectionConstraint.__init__( self, - *[col for col, op in elements], + *columns, name=kw.get('name'), deferrable=kw.get('deferrable'), initially=kw.get('initially') ) - self.operators = {} - for col_or_string, op in elements: - name = getattr(col_or_string, 'name', col_or_string) - self.operators[name] = op self.using = kw.get('using', 'gist') where = kw.get('where') if where: diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 46228ac15..36a9d7bf7 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -74,6 +74,8 @@ See also: `PQconnectdbParams <http://www.postgresql.org/docs/9.1/static/\ libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS>`_ +.. _psycopg2_execution_options: + Per-Statement/Connection Execution Options ------------------------------------------- @@ -81,16 +83,23 @@ The following DBAPI-specific options are respected when used with :meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`, :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: -* isolation_level - Set the transaction isolation level for the lifespan of a +* ``isolation_level`` - Set the transaction isolation level for the lifespan of a :class:`.Connection` (can only be set on a connection, not a statement or query). See :ref:`psycopg2_isolation_level`. 
-* stream_results - Enable or disable usage of psycopg2 server side cursors - +* ``stream_results`` - Enable or disable usage of psycopg2 server side cursors - this feature makes use of "named" cursors in combination with special result handling methods so that result rows are not fully buffered. If ``None`` or not set, the ``server_side_cursors`` option of the :class:`.Engine` is used. +* ``max_row_buffer`` - when using ``stream_results``, an integer value that + specifies the maximum number of rows to buffer at a time. This is + interpreted by the :class:`.BufferedRowResultProxy`, and if omitted the + buffer will grow to ultimately store 1000 rows at a time. + + .. versionadded:: 1.0.6 + .. _psycopg2_unicode: Unicode with Psycopg2 @@ -354,7 +363,7 @@ class _PGNumeric(sqltypes.Numeric): class _PGEnum(ENUM): def result_processor(self, dialect, coltype): - if util.py2k and self.convert_unicode is True: + if self.native_enum and util.py2k and self.convert_unicode is True: # we can't easily use PG's extensions here because # the OID is on the fly, and we need to give it a python # function anyway - not really worth it. @@ -501,10 +510,22 @@ class PGDialect_psycopg2(PGDialect): preparer = PGIdentifierPreparer_psycopg2 psycopg2_version = (0, 0) + FEATURE_VERSION_MAP = dict( + native_json=(2, 5), + native_jsonb=(2, 5, 4), + sane_multi_rowcount=(2, 0, 9), + array_oid=(2, 4, 3), + hstore_adapter=(2, 4) + ) + _has_native_hstore = False _has_native_json = False _has_native_jsonb = False + engine_config_types = PGDialect.engine_config_types.union([ + ('use_native_unicode', util.asbool), + ]) + colspecs = util.update_copy( PGDialect.colspecs, { @@ -543,11 +564,15 @@ class PGDialect_psycopg2(PGDialect): self._has_native_hstore = self.use_native_hstore and \ self._hstore_oids(connection.connection) \ is not None - self._has_native_json = self.psycopg2_version >= (2, 5) - self._has_native_jsonb = self.psycopg2_version >= (2, 5, 4) + self._has_native_json = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_json'] + self._has_native_jsonb = \ + self.psycopg2_version >= self.FEATURE_VERSION_MAP['native_jsonb'] # http://initd.org/psycopg/docs/news.html#what-s-new-in-psycopg-2-0-9 - self.supports_sane_multi_rowcount = self.psycopg2_version >= (2, 0, 9) + self.supports_sane_multi_rowcount = \ + self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['sane_multi_rowcount'] @classmethod def dbapi(cls): @@ -621,7 +646,8 @@ class PGDialect_psycopg2(PGDialect): kw = {'oid': oid} if util.py2k: kw['unicode'] = True - if self.psycopg2_version >= (2, 4, 3): + if self.psycopg2_version >= \ + self.FEATURE_VERSION_MAP['array_oid']: kw['array_oid'] = array_oid extras.register_hstore(conn, **kw) fns.append(on_connect) @@ -646,7 +672,7 @@ class PGDialect_psycopg2(PGDialect): @util.memoized_instancemethod def _hstore_oids(self, conn): - if self.psycopg2_version >= (2, 4): + if self.psycopg2_version >= self.FEATURE_VERSION_MAP['hstore_adapter']: extras = self._psycopg2_extras() oids = extras.HstoreAdapter.get_oids(conn) if oids is not None and oids[0]: diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py index f5c475d90..97f241d2e 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py @@ -31,6 +31,18 @@ class PGDialect_psycopg2cffi(PGDialect_psycopg2): driver = 'psycopg2cffi' supports_unicode_statements = True + # psycopg2cffi's first release is 2.5.0, but reports + # __version__ as 2.4.4. 
Subsequent releases seem to have
+    # fixed this.
+
+    FEATURE_VERSION_MAP = dict(
+        native_json=(2, 4, 4),
+        native_jsonb=(2, 7, 1),
+        sane_multi_rowcount=(2, 4, 4),
+        array_oid=(2, 4, 4),
+        hstore_adapter=(2, 4, 4)
+    )
+
    @classmethod
    def dbapi(cls):
        return __import__('psycopg2cffi')
diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
index 00c67d170..db6d5e16c 100644
--- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
@@ -65,6 +65,23 @@ class PGDialect_pypostgresql(PGDialect):
        from postgresql.driver import dbapi20
        return dbapi20

+    _DBAPI_ERROR_NAMES = [
+        "Error",
+        "InterfaceError", "DatabaseError", "DataError",
+        "OperationalError", "IntegrityError", "InternalError",
+        "ProgrammingError", "NotSupportedError"
+    ]
+
+    @util.memoized_property
+    def dbapi_exception_translation_map(self):
+        if self.dbapi is None:
+            return {}
+
+        return dict(
+            (getattr(self.dbapi, name).__name__, name)
+            for name in self._DBAPI_ERROR_NAMES
+        )
+
    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index 0254690b4..d9da46f4c 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -45,14 +45,20 @@ SQLite Auto Incrementing Behavior

Background on SQLite's autoincrement is at: http://sqlite.org/autoinc.html

-Two things to note:
+Key concepts:

-* The AUTOINCREMENT keyword is **not** required for SQLite tables to
-  generate primary key values automatically. AUTOINCREMENT only means that the
-  algorithm used to generate ROWID values should be slightly different.
-* SQLite does **not** generate primary key (i.e. ROWID) values, even for
-  one column, if the table has a composite (i.e. multi-column) primary key.
-  This is regardless of the AUTOINCREMENT keyword being present or not.
+* SQLite has an implicit "auto increment" feature that takes place for any
+  non-composite primary-key column that is specifically created using
+  "INTEGER PRIMARY KEY" for the type + primary key.
+
+* SQLite also has an explicit "AUTOINCREMENT" keyword, that is **not**
+  equivalent to the implicit autoincrement feature; this keyword is not
+  recommended for general use. SQLAlchemy does not render this keyword
+  unless a special SQLite-specific directive is used (see below). However,
+  it still requires that the column's type is named "INTEGER".
+
+Using the AUTOINCREMENT Keyword
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To specifically render the AUTOINCREMENT keyword on the primary key column
when rendering DDL, add the flag ``sqlite_autoincrement=True`` to the Table
construct::

    Column('id', Integer, primary_key=True),
    sqlite_autoincrement=True)

+Allowing autoincrement behavior for SQLAlchemy types other than Integer/INTEGER
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+SQLite's typing model is based on naming conventions. Among
+other things, this means that any type name which contains the
+substring ``"INT"`` will be determined to be of "integer affinity". A
+type named ``"BIGINT"``, ``"SPECIAL_INT"`` or even ``"XYZINTQPR"`` will be considered by
+SQLite to be of "integer" affinity. However, **the SQLite
+autoincrement feature, whether implicitly or explicitly enabled,
+requires that the name of the column's type
+is exactly the string "INTEGER"**.
Therefore, if an +application uses a type like :class:`.BigInteger` for a primary key, on +SQLite this type will need to be rendered as the name ``"INTEGER"`` when +emitting the initial ``CREATE TABLE`` statement in order for the autoincrement +behavior to be available. + +One approach to achieve this is to use :class:`.Integer` on SQLite +only using :meth:`.TypeEngine.with_variant`:: + + table = Table( + "my_table", metadata, + Column("id", BigInteger().with_variant(Integer, "sqlite"), primary_key=True) + ) + +Another is to use a subclass of :class:`.BigInteger` that overrides its DDL name +to be ``INTEGER`` when compiled against SQLite:: + + from sqlalchemy import BigInteger + from sqlalchemy.ext.compiler import compiles + + class SLBigInteger(BigInteger): + pass + + @compiles(SLBigInteger, 'sqlite') + def bi_c(element, compiler, **kw): + return "INTEGER" + + @compiles(SLBigInteger) + def bi_c(element, compiler, **kw): + return compiler.visit_BIGINT(element, **kw) + + + table = Table( + "my_table", metadata, + Column("id", SLBigInteger(), primary_key=True) + ) + +.. seealso:: + + :meth:`.TypeEngine.with_variant` + + :ref:`sqlalchemy.ext.compiler_toplevel` + + `Datatypes In SQLite Version 3 <http://sqlite.org/datatype3.html>`_ .. _sqlite_concurrency: @@ -292,6 +352,107 @@ The index will be rendered at create time as:: .. versionadded:: 0.9.9 +Dotted Column Names +------------------- + +Using table or column names that explicitly have periods in them is +**not recommended**. While this is generally a bad idea for relational +databases in general, as the dot is a syntactically significant character, +the SQLite driver has a bug which requires that SQLAlchemy filter out these +dots in result sets. + +The bug, entirely outside of SQLAlchemy, can be illustrated thusly:: + + import sqlite3 + + conn = sqlite3.connect(":memory:") + cursor = conn.cursor() + + cursor.execute("create table x (a integer, b integer)") + cursor.execute("insert into x (a, b) values (1, 1)") + cursor.execute("insert into x (a, b) values (2, 2)") + + cursor.execute("select x.a, x.b from x") + assert [c[0] for c in cursor.description] == ['a', 'b'] + + cursor.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert [c[0] for c in cursor.description] == ['a', 'b'], \\ + [c[0] for c in cursor.description] + +The second assertion fails:: + + Traceback (most recent call last): + File "test.py", line 19, in <module> + [c[0] for c in cursor.description] + AssertionError: ['x.a', 'x.b'] + +Where above, the driver incorrectly reports the names of the columns +including the name of the table, which is entirely inconsistent vs. +when the UNION is not present. 
+ +SQLAlchemy relies upon column names being predictable in how they match +to the original statement, so the SQLAlchemy dialect has no choice but +to filter these out:: + + + from sqlalchemy import create_engine + + eng = create_engine("sqlite://") + conn = eng.connect() + + conn.execute("create table x (a integer, b integer)") + conn.execute("insert into x (a, b) values (1, 1)") + conn.execute("insert into x (a, b) values (2, 2)") + + result = conn.execute("select x.a, x.b from x") + assert result.keys() == ["a", "b"] + + result = conn.execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["a", "b"] + +Note that above, even though SQLAlchemy filters out the dots, *both +names are still addressable*:: + + >>> row = result.first() + >>> row["a"] + 1 + >>> row["x.a"] + 1 + >>> row["b"] + 1 + >>> row["x.b"] + 1 + +Therefore, the workaround applied by SQLAlchemy only impacts +:meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` in the public API. +In the very specific case where +an application is forced to use column names that contain dots, and the +functionality of :meth:`.ResultProxy.keys` and :meth:`.RowProxy.keys()` +is required to return these dotted names unmodified, the ``sqlite_raw_colnames`` +execution option may be provided, either on a per-:class:`.Connection` basis:: + + result = conn.execution_options(sqlite_raw_colnames=True).execute(''' + select x.a, x.b from x where a=1 + union + select x.a, x.b from x where a=2 + ''') + assert result.keys() == ["x.a", "x.b"] + +or on a per-:class:`.Engine` basis:: + + engine = create_engine("sqlite://", execution_options={"sqlite_raw_colnames": True}) + +When using the per-:class:`.Engine` execution option, note that +**Core and ORM queries that use UNION may not function properly**. + """ import datetime diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py index 57213382e..ae0473a3e 100644 --- a/lib/sqlalchemy/dialects/sybase/base.py +++ b/lib/sqlalchemy/dialects/sybase/base.py @@ -323,7 +323,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler): 'milliseconds': 'millisecond' }) - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): s = select._distinct and "DISTINCT " or "" # TODO: don't think Sybase supports # bind params for FIRST / TOP @@ -435,7 +435,6 @@ class SybaseDialect(default.DefaultDialect): supports_native_boolean = False supports_unicode_binds = False postfetch_lastrowid = True - supports_simple_order_by_label = False colspecs = {} ischema_names = ischema_names diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index 0678dd201..f1eacf6a6 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -394,9 +394,9 @@ def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): 'prefix' argument indicates the prefix to be searched for. A select set of keyword arguments will be "coerced" to their - expected type based on string values. In a future release, this - functionality will be expanded and include dialect-specific - arguments. + expected type based on string values. The set of arguments + is extensible per-dialect using the ``engine_config_types`` accessor. 
+ """ options = dict((key[len(prefix):], configuration[key]) diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 5921ab9ba..c5eabac0d 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -1254,12 +1254,15 @@ class Connection(Connectable): if context: context.is_disconnect = self._is_disconnect + invalidate_pool_on_disconnect = True + if self._reentrant_error: util.raise_from_cause( exc.DBAPIError.instance(statement, parameters, e, - self.dialect.dbapi.Error), + self.dialect.dbapi.Error, + dialect=self.dialect), exc_info ) self._reentrant_error = True @@ -1275,7 +1278,8 @@ class Connection(Connectable): parameters, e, self.dialect.dbapi.Error, - connection_invalidated=self._is_disconnect) + connection_invalidated=self._is_disconnect, + dialect=self.dialect) else: sqlalchemy_exception = None @@ -1316,6 +1320,11 @@ class Connection(Connectable): sqlalchemy_exception.connection_invalidated = \ self._is_disconnect = ctx.is_disconnect + # set up potentially user-defined value for + # invalidate pool. + invalidate_pool_on_disconnect = \ + ctx.invalidate_pool_on_disconnect + if should_wrap and context: context.handle_dbapi_exception(e) @@ -1340,7 +1349,8 @@ class Connection(Connectable): del self._is_disconnect if not self.invalidated: dbapi_conn_wrapper = self.__connection - self.engine.pool._invalidate(dbapi_conn_wrapper, e) + if invalidate_pool_on_disconnect: + self.engine.pool._invalidate(dbapi_conn_wrapper, e) self.invalidate(e) if self.should_close_with_result: self.close() @@ -1801,29 +1811,28 @@ class Engine(Connectable, log.Identified): def dispose(self): """Dispose of the connection pool used by this :class:`.Engine`. + This has the effect of fully closing all **currently checked in** + database connections. Connections that are still checked out + will **not** be closed, however they will no longer be associated + with this :class:`.Engine`, so when they are closed individually, + eventually the :class:`.Pool` which they are associated with will + be garbage collected and they will be closed out fully, if + not already closed on checkin. + A new connection pool is created immediately after the old one has been disposed. This new pool, like all SQLAlchemy connection pools, does not make any actual connections to the database until one is - first requested. + first requested, so as long as the :class:`.Engine` isn't used again, + no new connections will be made. - This method has two general use cases: - - * When a dropped connection is detected, it is assumed that all - connections held by the pool are potentially dropped, and - the entire pool is replaced. - - * An application may want to use :meth:`dispose` within a test - suite that is creating multiple engines. + .. seealso:: - It is critical to note that :meth:`dispose` does **not** guarantee - that the application will release all open database connections - only - those connections that are checked into the pool are closed. - Connections which remain checked out or have been detached from - the engine are not affected. 
+ :ref:`engine_disposal` """ self.pool.dispose() self.pool = self.pool.recreate() + self.dispatch.engine_disposed(self) def _execute_default(self, default): with self.contextual_connect() as conn: diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index 3eebc6c06..9a7b80bfd 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -61,14 +61,13 @@ class DefaultDialect(interfaces.Dialect): engine_config_types = util.immutabledict([ ('convert_unicode', util.bool_or_str('force')), - ('pool_timeout', int), + ('pool_timeout', util.asint), ('echo', util.bool_or_str('debug')), ('echo_pool', util.bool_or_str('debug')), - ('pool_recycle', int), - ('pool_size', int), - ('max_overflow', int), - ('pool_threadlocal', bool), - ('use_native_unicode', bool), + ('pool_recycle', util.asint), + ('pool_size', util.asint), + ('max_overflow', util.asint), + ('pool_threadlocal', util.asbool), ]) # if the NUMERIC type @@ -157,6 +156,15 @@ class DefaultDialect(interfaces.Dialect): reflection_options = () + dbapi_exception_translation_map = util.immutabledict() + """mapping used in the extremely unusual case that a DBAPI's + published exceptions don't actually have the __name__ that they + are linked towards. + + .. versionadded:: 1.0.5 + + """ + def __init__(self, convert_unicode=False, encoding='utf-8', paramstyle=None, dbapi=None, implicit_returning=None, @@ -840,18 +848,26 @@ class DefaultExecutionContext(interfaces.ExecutionContext): compiled_params = self.compiled_parameters[0] lastrowid = self.get_lastrowid() - autoinc_col = table._autoincrement_column - if autoinc_col is not None: - # apply type post processors to the lastrowid - proc = autoinc_col.type._cached_result_processor( - self.dialect, None) - if proc is not None: - lastrowid = proc(lastrowid) - self.inserted_primary_key = [ - lastrowid if c is autoinc_col else - compiled_params.get(key_getter(c), None) - for c in table.primary_key - ] + if lastrowid is not None: + autoinc_col = table._autoincrement_column + if autoinc_col is not None: + # apply type post processors to the lastrowid + proc = autoinc_col.type._cached_result_processor( + self.dialect, None) + if proc is not None: + lastrowid = proc(lastrowid) + self.inserted_primary_key = [ + lastrowid if c is autoinc_col else + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] + else: + # don't have a usable lastrowid, so + # do the same as _setup_ins_pk_from_empty + self.inserted_primary_key = [ + compiled_params.get(key_getter(c), None) + for c in table.primary_key + ] def _setup_ins_pk_from_empty(self): key_getter = self.compiled._key_getters_for_crud_column[2] diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index da8fa81eb..73a8b4635 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -150,6 +150,16 @@ class Dialect(object): This will prevent types.Boolean from generating a CHECK constraint when that type is used. + dbapi_exception_translation_map + A dictionary of names that will contain as values the names of + pep-249 exceptions ("IntegrityError", "OperationalError", etc) + keyed to alternate class names, to support the case where a + DBAPI has exception classes that aren't named as they are + referred to (e.g. IntegrityError = MyException). In the vast + majority of cases this dictionary is empty. + + .. 
versionadded:: 1.0.5 + """ _has_events = False @@ -733,6 +743,41 @@ class Dialect(object): raise NotImplementedError() + @classmethod + def get_dialect_cls(cls, url): + """Given a URL, return the :class:`.Dialect` that will be used. + + This is a hook that allows an external plugin to provide functionality + around an existing dialect, by allowing the plugin to be loaded + from the url based on an entrypoint, and then the plugin returns + the actual dialect to be used. + + By default this just returns the cls. + + .. versionadded:: 1.0.3 + + """ + return cls + + @classmethod + def engine_created(cls, engine): + """A convenience hook called before returning the final :class:`.Engine`. + + If the dialect returned a different class from the + :meth:`.get_dialect_cls` + method, then the hook is called on both classes, first on + the dialect class returned by the :meth:`.get_dialect_cls` method and + then on the class on which the method was called. + + The hook should be used by dialects and/or wrappers to apply special + events to the engine or its components. In particular, it allows + a dialect-wrapping class to apply dialect-level events. + + .. versionadded:: 1.0.3 + + """ + pass + class ExecutionContext(object): """A messenger object for a Dialect that corresponds to a single @@ -1085,3 +1130,21 @@ class ExceptionContext(object): changing this flag. """ + + invalidate_pool_on_disconnect = True + """Represent whether all connections in the pool should be invalidated + when a "disconnect" condition is in effect. + + Setting this flag to False within the scope of the + :meth:`.ConnectionEvents.handle_error` event will have the effect such + that the full collection of connections in the pool will not be + invalidated during a disconnect; only the current connection that is the + subject of the error will actually be invalidated. + + The purpose of this flag is for custom disconnect-handling schemes where + the invalidation of other connections in the pool is to be performed + based on other conditions, or even on a per-connection basis. + + .. versionadded:: 1.0.3 + + """
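A sketch of consuming this flag from within the
:meth:`.ConnectionEvents.handle_error` event; the URL is a placeholder and
assumes the relevant DBAPI is installed::

    from sqlalchemy import create_engine, event

    engine = create_engine("postgresql://scott:tiger@localhost/test")

    @event.listens_for(engine, "handle_error")
    def receive_handle_error(context):
        if context.is_disconnect:
            # invalidate only the connection that raised the error,
            # leaving the rest of the pool intact
            context.invalidate_pool_on_disconnect = False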
\ No newline at end of file diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 6d19cb6d0..b2b78dee8 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -1067,10 +1067,27 @@ class BufferedRowResultProxy(ResultProxy): The pre-fetching behavior fetches only one row initially, and then grows its buffer size by a fixed amount with each successive need - for additional rows up to a size of 100. + for additional rows up to a size of 1000. + + The size argument is configurable using the ``max_row_buffer`` + execution option:: + + with psycopg2_engine.connect() as conn: + + result = conn.execution_options( + stream_results=True, max_row_buffer=50 + ).execute("select * from table") + + .. versionadded:: 1.0.6 Added the ``max_row_buffer`` option. + + .. seealso:: + + :ref:`psycopg2_execution_options` """ def _init_metadata(self): + self._max_row_buffer = self.context.execution_options.get( + 'max_row_buffer', None) self.__buffer_rows() super(BufferedRowResultProxy, self)._init_metadata() @@ -1095,6 +1112,8 @@ class BufferedRowResultProxy(ResultProxy): size = getattr(self, '_bufsize', 1) self.__rowbuffer = collections.deque(self.cursor.fetchmany(size)) self._bufsize = self.size_growth.get(size, size) + if self._max_row_buffer is not None: + self._bufsize = min(self._max_row_buffer, self._bufsize) def _soft_close(self, **kw): self.__rowbuffer.clear() diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index 1fd105d67..a539ee9f7 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -48,7 +48,8 @@ class DefaultEngineStrategy(EngineStrategy): # create url.URL object u = url.make_url(name_or_url) - dialect_cls = u.get_dialect() + entrypoint = u._get_entrypoint() + dialect_cls = entrypoint.get_dialect_cls(u) if kwargs.pop('_coerce_config', False): def pop_kwarg(key, default=None): @@ -81,11 +82,18 @@ class DefaultEngineStrategy(EngineStrategy): # assemble connection arguments (cargs, cparams) = dialect.create_connect_args(u) cparams.update(pop_kwarg('connect_args', {})) + cargs = list(cargs) # allow mutability # look for existing pool or create pool = pop_kwarg('pool', None) if pool is None: - def connect(): + def connect(connection_record=None): + if dialect._has_events: + for fn in dialect.dispatch.do_connect: + connection = fn( + dialect, connection_record, cargs, cparams) + if connection is not None: + return connection return dialect.connect(*cargs, **cparams) creator = pop_kwarg('creator', connect) @@ -157,6 +165,10 @@ class DefaultEngineStrategy(EngineStrategy): dialect.initialize(c) event.listen(pool, 'first_connect', first_connect, once=True) + dialect_cls.engine_created(engine) + if entrypoint is not dialect_cls: + entrypoint.engine_created(engine) + return engine diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index d045961dd..32e3f8a6b 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -117,11 +117,13 @@ class URL(object): else: return self.drivername.split('+')[1] - def get_dialect(self): - """Return the SQLAlchemy database dialect class corresponding - to this URL's driver name. - """ + def _get_entrypoint(self): + """Return the "entry point" dialect class. + + This is normally the dialect itself except in the case when the + returned class implements the get_dialect_cls() method. 
+        """
        if '+' not in self.drivername:
            name = self.drivername
        else:
            name = self.drivername.split('+')[0]
@@ -137,6 +139,14 @@
         else:
             return cls

+    def get_dialect(self):
+        """Return the SQLAlchemy database dialect class corresponding
+        to this URL's driver name.
+        """
+        entrypoint = self._get_entrypoint()
+        dialect_cls = entrypoint.get_dialect_cls(self)
+        return dialect_cls
+
     def translate_connect_args(self, names=[], **kw):
         """Translate url attributes into a dictionary of connection
         arguments.
diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py
index 22e066c88..f439d554f 100644
--- a/lib/sqlalchemy/events.py
+++ b/lib/sqlalchemy/events.py
@@ -371,7 +371,9 @@ class PoolEvents(event.Events):
         """Called when a DBAPI connection is to be "invalidated".

         This event is called any time the :meth:`._ConnectionRecord.invalidate`
-        method is invoked, either from API usage or via "auto-invalidation".
+        method is invoked, either from API usage or via "auto-invalidation",
+        without the ``soft`` flag.
+
         The event occurs before a final attempt to call ``.close()`` on the
         connection occurs.

@@ -392,6 +394,21 @@

         """

+    def soft_invalidate(self, dbapi_connection, connection_record, exception):
+        """Called when a DBAPI connection is to be "soft invalidated".
+
+        This event is called any time the :meth:`._ConnectionRecord.invalidate`
+        method is invoked with the ``soft`` flag.
+
+        Soft invalidation refers to when the connection record that tracks
+        this connection will force a reconnect after the current connection
+        is checked in.  It does not actively close the dbapi_connection
+        at the point at which it is called.
+
+        .. versionadded:: 1.0.3
+
+        """
+

 class ConnectionEvents(event.Events):
     """Available events for :class:`.Connectable`, which includes
@@ -707,6 +724,16 @@
                     "failed" in str(context.original_exception):
                 raise MySpecialException("failed operation")

+        .. warning:: Because the :meth:`.ConnectionEvents.handle_error`
+           event specifically provides for exceptions to be re-thrown as
+           the ultimate exception raised by the failed statement,
+           **stack traces will be misleading** if the user-defined event
+           handler itself fails and throws an unexpected exception;
+           the stack trace may not illustrate the actual code line that
+           failed!  It is advised to code carefully here and use
+           logging and/or inline debugging if unexpected exceptions are
+           occurring.
+
         Alternatively, a "chained" style of event handling can be used,
         by configuring the handler with the ``retval=True``
         modifier and returning the new exception instance from the
@@ -855,6 +882,23 @@

         """

+    def engine_disposed(self, engine):
+        """Intercept when the :meth:`.Engine.dispose` method is called.
+
+        The :meth:`.Engine.dispose` method instructs the engine to
+        "dispose" of its connection pool (e.g. :class:`.Pool`), and
+        replaces it with a new one.  Disposing of the old pool has the
+        effect that existing checked-in connections are closed.  The new
+        pool does not establish any new connections until it is first used.
+
+        This event can be used to indicate that resources related to the
+        :class:`.Engine` should also be cleaned up, keeping in mind that the
+        :class:`.Engine` can still be used for new requests in which case
+        it re-acquires connection resources.
+
+        .. versionadded:: 1.0.5
+
+        """

     def begin(self, conn):
         """Intercept begin() events.
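A minimal sketch of listening for the ``engine_disposed`` event documented
above; the cleanup action shown is hypothetical::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "engine_disposed")
    def receive_engine_disposed(engine):
        # release application-level resources tied to this engine; the
        # engine itself remains usable and would re-acquire connections
        print("disposed:", engine)

    engine.dispose()   # the listener fires here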
@@ -1007,6 +1051,23 @@ class DialectEvents(event.Events): else: return target + def do_connect(self, dialect, conn_rec, cargs, cparams): + """Receive connection arguments before a connection is made. + + Return a DBAPI connection to halt further events from invoking; + the returned connection will be used. + + Alternatively, the event can manipulate the cargs and/or cparams + collections; cargs will always be a Python list that can be mutated + in-place and cparams a Python dictionary. Return None to + allow control to pass to the next event handler and ultimately + to allow the dialect to connect normally, given the updated + arguments. + + .. versionadded:: 1.0.3 + + """ + def do_executemany(self, cursor, statement, parameters, context): """Receive a cursor to have executemany() called. diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 9b27436b3..3a4f346e0 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -13,8 +13,6 @@ raised as a result of DBAPI exceptions are all subclasses of """ -import traceback - class SQLAlchemyError(Exception): """Generic error class.""" @@ -278,7 +276,8 @@ class DBAPIError(StatementError): @classmethod def instance(cls, statement, params, orig, dbapi_base_err, - connection_invalidated=False): + connection_invalidated=False, + dialect=None): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. if (isinstance(orig, BaseException) and @@ -300,6 +299,9 @@ class DBAPIError(StatementError): glob = globals() for super_ in orig.__class__.__mro__: name = super_.__name__ + if dialect: + name = dialect.dbapi_exception_translation_map.get( + name, name) if name in glob and issubclass(glob[name], DBAPIError): cls = glob[name] break diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index a74141973..d837aab52 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -365,13 +365,17 @@ class AssociationProxy(interfaces.InspectionAttrInfo): operators of the underlying proxied attributes. """ - - if self._value_is_scalar: - value_expr = getattr( - self.target_class, self.value_attr).has(criterion, **kwargs) + if self._target_is_object: + if self._value_is_scalar: + value_expr = getattr( + self.target_class, self.value_attr).has( + criterion, **kwargs) + else: + value_expr = getattr( + self.target_class, self.value_attr).any( + criterion, **kwargs) else: - value_expr = getattr( - self.target_class, self.value_attr).any(criterion, **kwargs) + value_expr = criterion # check _value_is_scalar here, otherwise # we're scalar->scalar - call .any() so that diff --git a/lib/sqlalchemy/ext/automap.py b/lib/sqlalchemy/ext/automap.py index ca550ded6..330992e56 100644 --- a/lib/sqlalchemy/ext/automap.py +++ b/lib/sqlalchemy/ext/automap.py @@ -11,12 +11,6 @@ schema, typically though not necessarily one which is reflected. .. versionadded:: 0.9.1 Added :mod:`sqlalchemy.ext.automap`. -.. note:: - - The :mod:`sqlalchemy.ext.automap` extension should be considered - **experimental** as of 0.9.1. Featureset and API stability is - not guaranteed at this time. 
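As a sketch of the :meth:`.DialectEvents.do_connect` hook added above,
mutating ``cparams`` in place; ``timeout`` is the stdlib ``sqlite3.connect()``
argument, used here only as an example::

    from sqlalchemy import create_engine, event

    engine = create_engine("sqlite://")

    @event.listens_for(engine, "do_connect")
    def receive_do_connect(dialect, conn_rec, cargs, cparams):
        # returning None allows the dialect to connect normally,
        # using the updated arguments
        cparams["timeout"] = 10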
- It is hoped that the :class:`.AutomapBase` system provides a quick and modernized solution to the problem that the very famous `SQLSoup <https://sqlsoup.readthedocs.org/en/latest/>`_ @@ -67,7 +61,7 @@ asking it to reflect the schema and produce mappings:: Above, calling :meth:`.AutomapBase.prepare` while passing along the :paramref:`.AutomapBase.prepare.reflect` parameter indicates that the :meth:`.MetaData.reflect` method will be called on this declarative base -classes' :class:`.MetaData` collection; then, each viable +classes' :class:`.MetaData` collection; then, each **viable** :class:`.Table` within the :class:`.MetaData` will get a new mapped class generated automatically. The :class:`.ForeignKeyConstraint` objects which link the various tables together will be used to produce new, bidirectional @@ -76,6 +70,12 @@ follow along a default naming scheme that we can customize. At this point, our basic mapping consisting of related ``User`` and ``Address`` classes is ready to use in the traditional way. +.. note:: By **viable**, we mean that for a table to be mapped, it must + specify a primary key. Additionally, if the table is detected as being + a pure association table between two other tables, it will not be directly + mapped and will instead be configured as a many-to-many table between + the mappings for the two referring tables. + Generating Mappings from an Existing MetaData ============================================= @@ -188,7 +188,7 @@ scheme for class names and a "pluralizer" for collection names using the "'words_and_underscores' -> 'WordsAndUnderscores'" return str(tablename[0].upper() + \\ - re.sub(r'_(\w)', lambda m: m.group(1).upper(), tablename[1:])) + re.sub(r'_([a-z])', lambda m: m.group(1).upper(), tablename[1:])) _pluralizer = inflect.engine() def pluralize_collection(base, local_cls, referred_cls, constraint): @@ -196,10 +196,9 @@ scheme for class names and a "pluralizer" for collection names using the "'SomeTerm' -> 'some_terms'" referred_name = referred_cls.__name__ - uncamelized = referred_name[0].lower() + \\ - re.sub(r'\W', - lambda m: "_%s" % m.group(0).lower(), - referred_name[1:]) + uncamelized = re.sub(r'[A-Z]', + lambda m: "_%s" % m.group(0).lower(), + referred_name)[1:] pluralized = _pluralizer.plural(uncamelized) return pluralized @@ -625,7 +624,7 @@ def generate_relationship( :param base: the :class:`.AutomapBase` class doing the prepare. :param direction: indicate the "direction" of the relationship; this will - be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOONE`. + be one of :data:`.ONETOMANY`, :data:`.MANYTOONE`, :data:`.MANYTOMANY`. :param return_fn: the function that is used by default to create the relationship. 
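To make the corrected ``pluralize_collection`` substitution above concrete, a
standalone sketch of the un-camelizing step (sample names are illustrative)::

    import re

    def uncamelize(referred_name):
        # prefix each capital letter with an underscore and lower-case
        # it, then drop the leading underscore, as in the recipe above
        return re.sub(r'[A-Z]',
                      lambda m: "_%s" % m.group(0).lower(),
                      referred_name)[1:]

    assert uncamelize("SomeTerm") == "some_term"
    assert uncamelize("WordsAndUnderscores") == "words_and_underscores"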
This will be either :func:`.relationship` or diff --git a/lib/sqlalchemy/ext/baked.py b/lib/sqlalchemy/ext/baked.py index 65d6a8603..f01e0b348 100644 --- a/lib/sqlalchemy/ext/baked.py +++ b/lib/sqlalchemy/ext/baked.py @@ -34,11 +34,8 @@ class BakedQuery(object): __slots__ = 'steps', '_bakery', '_cache_key', '_spoiled' def __init__(self, bakery, initial_fn, args=()): - if args: - self._cache_key = tuple(args) - else: - self._cache_key = () - self._update_cache_key(initial_fn) + self._cache_key = () + self._update_cache_key(initial_fn, args) self.steps = [initial_fn] self._spoiled = False self._bakery = bakery @@ -49,8 +46,8 @@ class BakedQuery(object): _bakery = util.LRUCache(size) - def call(initial_fn): - return cls(_bakery, initial_fn) + def call(initial_fn, *args): + return cls(_bakery, initial_fn, args) return call diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py index 713ea0aba..3d46bd4cb 100644 --- a/lib/sqlalchemy/ext/declarative/api.py +++ b/lib/sqlalchemy/ext/declarative/api.py @@ -163,21 +163,16 @@ class declared_attr(interfaces._MappedAttribute, property): self._cascading = cascading def __get__(desc, self, cls): - # use the ClassManager for memoization of values. This is better than - # adding yet another attribute onto the class, or using weakrefs - # here which are slow and take up memory. It also allows us to - # warn for non-mapped use of declared_attr. - - manager = attributes.manager_of_class(cls) - if manager is None: - util.warn( - "Unmanaged access of declarative attribute %s from " - "non-mapped class %s" % - (desc.fget.__name__, cls.__name__)) + reg = cls.__dict__.get('_sa_declared_attr_reg', None) + if reg is None: + manager = attributes.manager_of_class(cls) + if manager is None: + util.warn( + "Unmanaged access of declarative attribute %s from " + "non-mapped class %s" % + (desc.fget.__name__, cls.__name__)) return desc.fget(cls) - reg = manager.info.get('declared_attr_reg', None) - if reg is None: return desc.fget(cls) elif desc in reg: diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py index 7d4020b24..57eb54f63 100644 --- a/lib/sqlalchemy/ext/declarative/base.py +++ b/lib/sqlalchemy/ext/declarative/base.py @@ -39,7 +39,7 @@ def _resolve_for_abstract(cls): if cls is object: return None - if _get_immediate_cls_attr(cls, '__abstract__'): + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): for sup in cls.__bases__: sup = _resolve_for_abstract(sup) if sup is not None: @@ -50,7 +50,7 @@ def _resolve_for_abstract(cls): return cls -def _get_immediate_cls_attr(cls, attrname): +def _get_immediate_cls_attr(cls, attrname, strict=False): """return an attribute of the class that is either present directly on the class, e.g. 
not on a superclass, or is from a superclass but this superclass is a mixin, that is, not a descendant of @@ -66,11 +66,12 @@ def _get_immediate_cls_attr(cls, attrname): for base in cls.__mro__: _is_declarative_inherits = hasattr(base, '_decl_class_registry') - if attrname in base.__dict__: - value = getattr(base, attrname) - if (base is cls or - (base in cls.__bases__ and not _is_declarative_inherits)): - return value + if attrname in base.__dict__ and ( + base is cls or + ((base in cls.__bases__ if strict else True) + and not _is_declarative_inherits) + ): + return getattr(base, attrname) else: return None @@ -81,7 +82,7 @@ def _as_declarative(cls, classname, dict_): from .api import declared_attr declarative_props = (declared_attr, util.classproperty) - if _get_immediate_cls_attr(cls, '__abstract__'): + if _get_immediate_cls_attr(cls, '__abstract__', strict=True): return _MapperConfig.setup_mapping(cls, classname, dict_) @@ -92,7 +93,7 @@ class _MapperConfig(object): @classmethod def setup_mapping(cls, cls_, classname, dict_): defer_map = _get_immediate_cls_attr( - cls_, '_sa_decl_prepare_nocascade') or \ + cls_, '_sa_decl_prepare_nocascade', strict=True) or \ hasattr(cls_, '_sa_decl_prepare') if defer_map: @@ -114,10 +115,10 @@ class _MapperConfig(object): self.column_copies = {} self._setup_declared_events() - # register up front, so that @declared_attr can memoize - # function evaluations in .info - manager = instrumentation.register_class(self.cls) - manager.info['declared_attr_reg'] = {} + # temporary registry. While early 1.0 versions + # set up the ClassManager here, by API contract + # we can't do that until there's a mapper. + self.cls._sa_declared_attr_reg = {} self._scan_attributes() @@ -158,7 +159,8 @@ class _MapperConfig(object): for base in cls.__mro__: class_mapped = base is not cls and \ _declared_mapping_info(base) is not None and \ - not _get_immediate_cls_attr(base, '_sa_decl_prepare_nocascade') + not _get_immediate_cls_attr( + base, '_sa_decl_prepare_nocascade', strict=True) if not class_mapped and base is not cls: self._produce_column_copies(base) @@ -412,7 +414,7 @@ class _MapperConfig(object): continue if _declared_mapping_info(c) is not None and \ not _get_immediate_cls_attr( - c, '_sa_decl_prepare_nocascade'): + c, '_sa_decl_prepare_nocascade', strict=True): self.inherits = c break else: @@ -527,7 +529,7 @@ class _MapperConfig(object): self.local_table, **self.mapper_args ) - del mp_.class_manager.info['declared_attr_reg'] + del self.cls._sa_declared_attr_reg return mp_ diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index f94c2079e..9c6178264 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -45,7 +45,7 @@ as the class itself:: return self.end - self.start @hybrid_method - def contains(self,point): + def contains(self, point): return (self.start <= point) & (point < self.end) @hybrid_method diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py index 024136661..30a0ab7d7 100644 --- a/lib/sqlalchemy/ext/instrumentation.py +++ b/lib/sqlalchemy/ext/instrumentation.py @@ -166,7 +166,13 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory): def manager_of_class(self, cls): if cls is None: return None - return self._manager_finders.get(cls, _default_manager_getter)(cls) + try: + finder = self._manager_finders.get(cls, _default_manager_getter) + except TypeError: + # due to weakref lookup on invalid object + return None + else: + return finder(cls) def state_of(self, 
instance): if instance is None: @@ -392,6 +398,7 @@ def _reinstall_default_lookups(): manager_of_class=_default_manager_getter ) ) + _instrumentation_factory._extended = False def _install_lookups(lookups): diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index 24fc37a42..501b18f39 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -403,6 +403,27 @@ class MutableBase(object): raise ValueError(msg % (key, type(value))) @classmethod + def _get_listen_keys(cls, attribute): + """Given a descriptor attribute, return a ``set()`` of the attribute + keys which indicate a change in the state of this attribute. + + This is normally just ``set([attribute.key])``, but can be overridden + to provide for additional keys. E.g. a :class:`.MutableComposite` + augments this set with the attribute keys associated with the columns + that comprise the composite value. + + This collection is consulted in the case of intercepting the + :meth:`.InstanceEvents.refresh` and + :meth:`.InstanceEvents.refresh_flush` events, which pass along a list + of attribute names that have been refreshed; the list is compared + against this set to determine if action needs to be taken. + + .. versionadded:: 1.0.5 + + """ + return set([attribute.key]) + + @classmethod def _listen_on_attribute(cls, attribute, coerce, parent_cls): """Establish this type as a mutation listener for the given mapped descriptor. @@ -415,6 +436,8 @@ class MutableBase(object): # rely on "propagate" here parent_cls = attribute.class_ + listen_keys = cls._get_listen_keys(attribute) + def load(state, *args): """Listen for objects loaded or refreshed. @@ -429,6 +452,10 @@ class MutableBase(object): state.dict[key] = val val._parents[state.obj()] = key + def load_attrs(state, ctx, attrs): + if not attrs or listen_keys.intersection(attrs): + load(state) + def set(target, value, oldvalue, initiator): """Listen for set/replace events on the target data member. 
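For context, the listener changes above are aimed at types following the
standard :class:`.MutableComposite` pattern, sketched minimally here::

    from sqlalchemy.ext.mutable import MutableComposite

    class Point(MutableComposite):
        def __init__(self, x, y):
            self.x = x
            self.y = y

        def __setattr__(self, key, value):
            object.__setattr__(self, key, value)
            self.changed()   # alert parents of the mutation

        def __composite_values__(self):
            return self.x, self.y

        def __eq__(self, other):
            return isinstance(other, Point) and \
                other.x == self.x and other.y == self.y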
@@ -463,7 +490,9 @@ class MutableBase(object): event.listen(parent_cls, 'load', load, raw=True, propagate=True) - event.listen(parent_cls, 'refresh', load, + event.listen(parent_cls, 'refresh', load_attrs, + raw=True, propagate=True) + event.listen(parent_cls, 'refresh_flush', load_attrs, raw=True, propagate=True) event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True) @@ -574,6 +603,10 @@ class MutableComposite(MutableBase): """ + @classmethod + def _get_listen_keys(cls, attribute): + return set([attribute.key]).union(attribute.property._attribute_keys) + def changed(self): """Subclasses should call this method whenever change events occur.""" diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 41803c8bf..a45c22394 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -619,7 +619,7 @@ class AttributeImpl(object): if self.key in state.committed_state: value = state.committed_state[self.key] - if value is NO_VALUE: + if value in (NO_VALUE, NEVER_SET): return None else: return value diff --git a/lib/sqlalchemy/orm/base.py b/lib/sqlalchemy/orm/base.py index c259878f0..785bd09dd 100644 --- a/lib/sqlalchemy/orm/base.py +++ b/lib/sqlalchemy/orm/base.py @@ -181,6 +181,8 @@ NOT_EXTENSION = util.symbol( """) +_never_set = frozenset([NEVER_SET]) + _none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT]) _SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED") diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 233cd66a6..801701be9 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -272,12 +272,35 @@ class InstanceEvents(event.Events): object associated with the instance. :param context: the :class:`.QueryContext` corresponding to the current :class:`.Query` in progress. - :param attrs: iterable collection of attribute names which + :param attrs: sequence of attribute names which were populated, or None if all column-mapped, non-deferred attributes were populated. """ + def refresh_flush(self, target, flush_context, attrs): + """Receive an object instance after one or more attributes have + been refreshed within the persistence of the object. + + This event is the same as :meth:`.InstanceEvents.refresh` except + it is invoked within the unit of work flush process, and the values + here typically come from the process of handling an INSERT or + UPDATE, such as via the RETURNING clause or from Python-side default + values. + + .. versionadded:: 1.0.5 + + :param target: the mapped instance. If + the event is configured with ``raw=True``, this will + instead be the :class:`.InstanceState` state-management + object associated with the instance. + :param flush_context: Internal :class:`.UOWTransaction` object + which handles the details of the flush. + :param attrs: sequence of attribute names which + were populated. + + """ + def expire(self, target, attrs): """Receive an object instance after its attributes or some subset have been expired. @@ -289,7 +312,7 @@ class InstanceEvents(event.Events): the event is configured with ``raw=True``, this will instead be the :class:`.InstanceState` state-management object associated with the instance. - :param attrs: iterable collection of attribute + :param attrs: sequence of attribute names which were expired, or None if all attributes were expired. 
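A sketch of hooking the new ``refresh_flush`` event; a version counter is
used here since version id columns are among the values pre-fetched during
flush, though exactly which attributes are reported depends on the statement
and backend (the model and names are illustrative)::

    from sqlalchemy import Column, Integer, String, create_engine, event
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class Widget(Base):
        __tablename__ = "widget"
        id = Column(Integer, primary_key=True)
        name = Column(String)
        version_id = Column(Integer, nullable=False)
        __mapper_args__ = {"version_id_col": version_id}

    @event.listens_for(Widget, "refresh_flush")
    def receive_refresh_flush(target, flush_context, attrs):
        # "attrs" names the attributes populated during the flush,
        # e.g. ["version_id"] here
        print("refreshed during flush:", attrs)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    session = Session(engine)
    session.add(Widget(name="w1"))
    session.commit()   # flush emits INSERT; the listener fires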
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 6cc613baa..cd4a0116d 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -338,7 +338,7 @@ class PropComparator(operators.ColumnOperators):

     def __init__(self, prop, parentmapper, adapt_to_entity=None):
         self.prop = self.property = prop
-        self._parententity = parentmapper
+        self._parententity = adapt_to_entity or parentmapper
         self._adapt_to_entity = adapt_to_entity

     def __clause_element__(self):
diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py
index 50afaf601..b81e98a58 100644
--- a/lib/sqlalchemy/orm/loading.py
+++ b/lib/sqlalchemy/orm/loading.py
@@ -17,6 +17,8 @@ from __future__ import absolute_import
 from .. import util
 from . import attributes, exc as orm_exc
 from ..sql import util as sql_util
+from . import strategy_options
+
 from .util import _none_set, state_str
 from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
 from .. import exc as sa_exc
@@ -612,10 +614,17 @@ def load_scalar_attributes(mapper, state, attribute_names):
         result = False

         if mapper.inherits and not mapper.concrete:
+            # because we are using Core to produce a select() that we
+            # pass to the Query, we aren't calling setup() for mapped
+            # attributes; in 1.0 this means deferred attrs won't get loaded
+            # by default
             statement = mapper._optimized_get_statement(state, attribute_names)
             if statement is not None:
                 result = load_on_ident(
-                    session.query(mapper).from_statement(statement),
+                    session.query(mapper).
+                    options(
+                        strategy_options.Load(mapper).undefer("*")
+                    ).from_statement(statement),
                     None,
                     only_load_props=attribute_names,
                     refresh_state=state
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index 4554f78f9..48fbaae32 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -1096,8 +1096,6 @@ class Mapper(InspectionAttr):

         """

-        # when using declarative as of 1.0, the register_class has
-        # already happened from within declarative.
         manager = attributes.manager_of_class(self.class_)

         if self.non_primary:
@@ -1120,14 +1118,20 @@ class Mapper(InspectionAttr):
                     "create a non primary Mapper.  clear_mappers() will "
                     "remove *all* current mappers from all classes." %
                     self.class_)
-
-        if manager is None:
-            manager = instrumentation.register_class(self.class_)
+        # else:
+        #     a ClassManager may already exist as
+        #     ClassManager.instrument_attribute() creates
+        #     new managers for each subclass if they don't yet exist.

         _mapper_registry[self] = True

+        # note: this *must be called before instrumentation.register_class*
+        # to maintain the documented behavior of instrument_class
         self.dispatch.instrument_class(self, self.class_)

+        if manager is None:
+            manager = instrumentation.register_class(self.class_)
+
         self.class_manager = manager

         manager.mapper = self
@@ -2034,6 +2038,17 @@ class Mapper(InspectionAttr):
         returned, including :attr:`.synonyms`, :attr:`.column_attrs`,
         :attr:`.relationships`, and :attr:`.composites`.

+        .. warning::
+
+            The :attr:`.Mapper.attrs` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.attrs[somename]`` over
+            ``getattr(mapper.attrs, somename)`` to avoid name collisions.
+
        .. seealso::
            :attr:`.Mapper.all_orm_descriptors`

@@ -2069,6 +2084,17 @@ class Mapper(InspectionAttr):
         referring to the collection of mapped properties via
         :attr:`.Mapper.attrs`.

+        .. warning::
+
+            The :attr:`.Mapper.all_orm_descriptors` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.all_orm_descriptors[somename]`` over
+            ``getattr(mapper.all_orm_descriptors, somename)`` to avoid name
+            collisions.
+
         .. versionadded:: 0.8.0

         .. seealso::

@@ -2110,6 +2136,17 @@ class Mapper(InspectionAttr):
         """Return a namespace of all :class:`.RelationshipProperty`
         properties maintained by this :class:`.Mapper`.

+        .. warning::
+
+            The :attr:`.Mapper.relationships` accessor namespace is an
+            instance of :class:`.OrderedProperties`.  This is
+            a dictionary-like object which includes a small number of
+            named methods such as :meth:`.OrderedProperties.items`
+            and :meth:`.OrderedProperties.values`.  When
+            accessing attributes dynamically, favor using the dict-access
+            scheme, e.g. ``mapper.relationships[somename]`` over
+            ``getattr(mapper.relationships, somename)`` to avoid name
+            collisions.
+
         .. seealso::

             :attr:`.Mapper.attrs` - namespace of all :class:`.MapperProperty`
@@ -2377,15 +2414,15 @@ class Mapper(InspectionAttr):

         """
         state = attributes.instance_state(instance)
-        return self._primary_key_from_state(state)
+        return self._primary_key_from_state(state, attributes.PASSIVE_OFF)

-    def _primary_key_from_state(self, state):
+    def _primary_key_from_state(
+            self, state, passive=attributes.PASSIVE_RETURN_NEVER_SET):
         dict_ = state.dict
         manager = state.manager
         return [
             manager[prop.key].
- impl.get(state, dict_, - attributes.PASSIVE_RETURN_NEVER_SET) + impl.get(state, dict_, passive) for prop in self._identity_key_props ] @@ -2428,7 +2465,8 @@ class Mapper(InspectionAttr): def _get_committed_attr_by_column(self, obj, column): state = attributes.instance_state(obj) dict_ = attributes.instance_dict(obj) - return self._get_committed_state_attr_by_column(state, dict_, column) + return self._get_committed_state_attr_by_column( + state, dict_, column, passive=attributes.PASSIVE_OFF) def _get_committed_state_attr_by_column( self, state, dict_, column, diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index ff5dda7b3..0bfee2ece 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -443,7 +443,8 @@ def _collect_update_commands( params = dict( (propkey_to_col[propkey].key, state_dict[propkey]) for propkey in - set(propkey_to_col).intersection(state_dict) + set(propkey_to_col).intersection(state_dict).difference( + mapper._pk_keys_by_table[table]) ) else: params = {} @@ -452,15 +453,33 @@ def _collect_update_commands( value = state_dict[propkey] col = propkey_to_col[propkey] - if not state.manager[propkey].impl.is_equal( - value, state.committed_state[propkey]): - if isinstance(value, sql.ClauseElement): - value_params[col] = value - else: - params[col.key] = value + if isinstance(value, sql.ClauseElement): + value_params[col] = value + # guard against values that generate non-__nonzero__ + # objects for __eq__() + elif state.manager[propkey].impl.is_equal( + value, state.committed_state[propkey]) is not True: + params[col.key] = value if update_version_id is not None and \ mapper.version_id_col in mapper._cols_by_table[table]: + + if not bulk and not (params or value_params): + # HACK: check for history in other tables, in case the + # history is only in a different table than the one + # where the version_id_col is. This logic was lost + # from 0.9 -> 1.0.0 and restored in 1.0.6. 
+ for prop in mapper._columntoproperty.values(): + history = ( + state.manager[prop.key].impl.get_history( + state, state_dict, + attributes.PASSIVE_NO_INITIALIZE)) + if history.added: + break + else: + # no net change, break + continue + col = mapper.version_id_col params[col._label] = update_version_id @@ -469,7 +488,7 @@ def _collect_update_commands( val = mapper.version_id_generator(update_version_id) params[col.key] = val - if not (params or value_params): + elif not (params or value_params): continue if bulk: @@ -531,7 +550,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, params[col._label] = \ mapper._get_state_attr_by_column( state, - state_dict, col) + state_dict, col, passive=attributes.PASSIVE_OFF) elif col in post_update_cols: prop = mapper._columntoproperty[col] @@ -951,6 +970,10 @@ def _postfetch(mapper, uowtransaction, table, mapper.version_id_col in mapper._cols_by_table[table]: prefetch_cols = list(prefetch_cols) + [mapper.version_id_col] + refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush) + if refresh_flush: + load_evt_attrs = [] + if returning_cols: row = result.context.returned_defaults if row is not None: @@ -958,10 +981,18 @@ def _postfetch(mapper, uowtransaction, table, if col.primary_key: continue dict_[mapper._columntoproperty[col].key] = row[col] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[col].key) for c in prefetch_cols: if c.key in params and c in mapper._columntoproperty: dict_[mapper._columntoproperty[c].key] = params[c.key] + if refresh_flush: + load_evt_attrs.append(mapper._columntoproperty[c].key) + + if refresh_flush and load_evt_attrs: + mapper.class_manager.dispatch.refresh_flush( + state, uowtransaction, load_evt_attrs) if postfetch_cols: state._expire_attributes(state.dict, @@ -1035,18 +1066,18 @@ class BulkUD(object): self._validate_query_state() def _validate_query_state(self): - for attr, methname, notset in ( - ('_limit', 'limit()', None), - ('_offset', 'offset()', None), - ('_order_by', 'order_by()', False), - ('_group_by', 'group_by()', False), - ('_distinct', 'distinct()', False), + for attr, methname, notset, op in ( + ('_limit', 'limit()', None, operator.is_), + ('_offset', 'offset()', None, operator.is_), + ('_order_by', 'order_by()', False, operator.is_), + ('_group_by', 'group_by()', False, operator.is_), + ('_distinct', 'distinct()', False, operator.is_), ( '_from_obj', 'join(), outerjoin(), select_from(), or from_self()', - ()) + (), operator.eq) ): - if getattr(self.query, attr) is not notset: + if not op(getattr(self.query, attr), notset): raise sa_exc.InvalidRequestError( "Can't call Query.update() or Query.delete() " "when %s has been called" % @@ -1170,17 +1201,18 @@ class BulkFetch(BulkUD): class BulkUpdate(BulkUD): """BulkUD which handles UPDATEs.""" - def __init__(self, query, values): + def __init__(self, query, values, update_kwargs): super(BulkUpdate, self).__init__(query) self.values = values + self.update_kwargs = update_kwargs @classmethod - def factory(cls, query, synchronize_session, values): + def factory(cls, query, synchronize_session, values, update_kwargs): return BulkUD._factory({ "evaluate": BulkUpdateEvaluate, "fetch": BulkUpdateFetch, False: BulkUpdate - }, synchronize_session, query, values) + }, synchronize_session, query, values, update_kwargs) def _resolve_string_to_expr(self, key): if self.mapper and isinstance(key, util.string_types): @@ -1215,7 +1247,8 @@ class BulkUpdate(BulkUD): for k, v in self.values.items() ) update_stmt = 
sql.update(self.primary_table, - self.context.whereclause, values) + self.context.whereclause, values, + **self.update_kwargs) self.result = self.query.session.execute( update_stmt, params=self.query._params, diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 5694f7255..55e02984b 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -245,6 +245,8 @@ class ColumnProperty(StrategizedProperty): if self.adapter: return self.adapter(self.prop.columns[0]) else: + # no adapter, so we aren't aliased + # assert self._parententity is self._parentmapper return self.prop.columns[0]._annotate({ "parententity": self._parententity, "parentmapper": self._parententity}) diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 9aa2e3d99..8b3df08e7 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -756,7 +756,8 @@ class Query(object): """ self._yield_per = count self._execution_options = self._execution_options.union( - {"stream_results": True}) + {"stream_results": True, + "max_row_buffer": count}) def get(self, ident): """Return an instance based on the given primary key identifier, @@ -1815,7 +1816,8 @@ class Query(object): # convert to a tuple. keys = (keys,) - for arg1 in util.to_list(keys): + keylist = util.to_list(keys) + for idx, arg1 in enumerate(keylist): if isinstance(arg1, tuple): # "tuple" form of join, multiple # tuples are accepted as well. The simpler @@ -1894,6 +1896,11 @@ class Query(object): jp = self._joinpoint[edge].copy() jp['prev'] = (edge, self._joinpoint) self._update_joinpoint(jp) + + if idx == len(keylist) - 1: + util.warn( + "Pathed join target %s has already " + "been joined to; skipping" % prop) continue elif onclause is not None and right_entity is None: @@ -2522,7 +2529,7 @@ class Query(object): close_with_result=True) result = conn.execute(querycontext.statement, self._params) - return loading.instances(self, result, querycontext) + return loading.instances(querycontext.query, result, querycontext) @property def column_descriptions(self): @@ -2563,17 +2570,27 @@ class Query(object): ] """ + return [ { 'name': ent._label_name, 'type': ent.type, - 'aliased': getattr(ent, 'is_aliased_class', False), + 'aliased': getattr(insp_ent, 'is_aliased_class', False), 'expr': ent.expr, 'entity': - ent.entity_zero.entity if ent.entity_zero is not None + getattr(insp_ent, "entity", None) + if ent.entity_zero is not None + and not insp_ent.is_clause_element else None } - for ent in self._entities + for ent, insp_ent in [ + ( + _ent, + (inspect(_ent.entity_zero) + if _ent.entity_zero is not None else None) + ) + for _ent in self._entities + ] ] def instances(self, cursor, __context=None): @@ -2811,7 +2828,7 @@ class Query(object): delete_op.exec_() return delete_op.rowcount - def update(self, values, synchronize_session='evaluate'): + def update(self, values, synchronize_session='evaluate', update_args=None): """Perform a bulk update query. Updates rows matched by this query in the database. @@ -2860,6 +2877,13 @@ class Query(object): The expression evaluator currently doesn't account for differing string collations between the database and Python. + :param update_args: Optional dictionary, if present will be passed + to the underlying :func:`.update` construct as the ``**kw`` for + the object. May be used to pass dialect-specific arguments such + as ``mysql_limit``. + + .. 
versionadded:: 1.0.0 + :return: the count of rows matched as returned by the database's "row count" feature. @@ -2920,8 +2944,9 @@ class Query(object): """ + update_args = update_args or {} update_op = persistence.BulkUpdate.factory( - self, synchronize_session, values) + self, synchronize_session, values, update_args) update_op.exec_() return update_op.rowcount @@ -3515,11 +3540,13 @@ class _ColumnEntity(_QueryEntity): self.expr = column self.namespace = namespace search_entities = True + check_column = False if isinstance(column, util.string_types): column = sql.literal_column(column) self._label_name = column.name search_entities = False + check_column = True _entity = None elif isinstance(column, ( attributes.QueryableAttribute, @@ -3530,10 +3557,12 @@ class _ColumnEntity(_QueryEntity): search_entities = False self._label_name = column.key column = column._query_clause_element() + check_column = True if isinstance(column, Bundle): _BundleEntity(query, column) return - elif not isinstance(column, sql.ColumnElement): + + if not isinstance(column, sql.ColumnElement): if hasattr(column, '_select_iterable'): # break out an object like Table into # individual columns @@ -3548,7 +3577,7 @@ class _ColumnEntity(_QueryEntity): "SQL expression, column, or mapped entity " "expected - got '%r'" % (column, ) ) - else: + elif not check_column: self._label_name = getattr(column, 'key', None) search_entities = True @@ -3606,7 +3635,6 @@ class _ColumnEntity(_QueryEntity): if 'parententity' in elem._annotations and actual_froms.intersection(elem._from_objects) ]) - if self.entities: self.entity_zero = self.entities[0] elif self.namespace is not None: diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index e36a644da..da0730f46 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -195,7 +195,7 @@ class RelationshipProperty(StrategizedProperty): The :paramref:`~.relationship.secondary` keyword argument is typically applied in the case where the intermediary :class:`.Table` - is not otherwise exprssed in any direct class mapping. If the + is not otherwise expressed in any direct class mapping. If the "secondary" table is also explicitly mapped elsewhere (e.g. as in :ref:`association_pattern`), one should consider applying the :paramref:`~.relationship.viewonly` flag so that this @@ -1233,11 +1233,15 @@ class RelationshipProperty(StrategizedProperty): state = attributes.instance_state(other) def state_bindparam(x, state, col): - o = state.obj() # strong ref + dict_ = state.dict return sql.bindparam( - x, unique=True, callable_=lambda: - self.property.mapper. - _get_committed_attr_by_column(o, col)) + x, unique=True, + callable_=self.property._get_attr_w_warn_on_none( + col, + self.property.mapper._get_state_attr_by_column, + state, dict_, col, passive=attributes.PASSIVE_OFF + ) + ) def adapt(col): if self.adapter: @@ -1252,13 +1256,14 @@ class RelationshipProperty(StrategizedProperty): adapt(x) == None) for (x, y) in self.property.local_remote_pairs]) - criterion = sql.and_(*[x == y for (x, y) in - zip( - self.property.mapper.primary_key, - self.property. - mapper. 
- primary_key_from_instance(other)) + criterion = sql.and_(*[ + x == y for (x, y) in + zip( + self.property.mapper.primary_key, + self.property.mapper.primary_key_from_instance(other) + ) ]) + return ~self._criterion_exists(criterion) def __ne__(self, other): @@ -1357,10 +1362,12 @@ class RelationshipProperty(StrategizedProperty): def visit_bindparam(bindparam): if bindparam._identifying_key in bind_to_col: - bindparam.callable = \ - lambda: mapper._get_state_attr_by_column( - state, dict_, - bind_to_col[bindparam._identifying_key]) + bindparam.callable = self._get_attr_w_warn_on_none( + bind_to_col[bindparam._identifying_key], + mapper._get_state_attr_by_column, + state, dict_, + bind_to_col[bindparam._identifying_key], + passive=attributes.PASSIVE_OFF) if self.secondary is not None and alias_secondary: criterion = ClauseAdapter( @@ -1374,6 +1381,18 @@ class RelationshipProperty(StrategizedProperty): criterion = adapt_source(criterion) return criterion + def _get_attr_w_warn_on_none(self, column, fn, *arg, **kw): + def _go(): + value = fn(*arg, **kw) + if value is None: + util.warn( + "Got None for value of column %s; this is unsupported " + "for a relationship comparison and will not " + "currently produce an IS comparison " + "(but may in a future release)" % column) + return value + return _go + def _lazy_none_clause(self, reverse_direction=False, adapt_source=None): if not reverse_direction: criterion, bind_to_col = \ @@ -2329,12 +2348,21 @@ class JoinCondition(object): binary.right, binary.left = proc_left_right(binary.right, binary.left) + check_entities = self.prop is not None and \ + self.prop.mapper is not self.prop.parent + def proc_left_right(left, right): if isinstance(left, expression.ColumnClause) and \ isinstance(right, expression.ColumnClause): if self.child_selectable.c.contains_column(right) and \ self.parent_selectable.c.contains_column(left): right = right._annotate({"remote": True}) + elif check_entities and \ + right._annotations.get('parentmapper') is self.prop.mapper: + right = right._annotate({"remote": True}) + elif check_entities and \ + left._annotations.get('parentmapper') is self.prop.mapper: + left = left._annotate({"remote": True}) else: self._warn_non_column_elements() diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index c03e133de..78e929345 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -456,15 +456,18 @@ class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): o = state.obj() # strong ref dict_ = attributes.instance_dict(o) + if passive & attributes.INIT_OK: + passive ^= attributes.INIT_OK + params = {} for key, ident, value in param_keys: if ident is not None: if passive and passive & attributes.LOAD_AGAINST_COMMITTED: value = mapper._get_committed_state_attr_by_column( - state, dict_, ident) + state, dict_, ident, passive) else: value = mapper._get_state_attr_by_column( - state, dict_, ident) + state, dict_, ident, passive) params[key] = value @@ -583,7 +586,11 @@ class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): lazy_clause, params = self._generate_lazy_clause( state, passive=passive) - if pending and orm_util._none_set.intersection(params.values()): + if pending: + if util.has_intersection( + orm_util._none_set, params.values()): + return None + elif util.has_intersection(orm_util._never_set, params.values()): return None q = q.filter(lazy_clause).params(params) diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index 
e9a745cc0..e8e273a86 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -85,7 +85,7 @@ def update(source, source_mapper, dest, old_prefix, synchronize_pairs): oldvalue = source_mapper._get_committed_attr_by_column( source.obj(), l) value = source_mapper._get_state_attr_by_column( - source, source.dict, l) + source, source.dict, l, passive=attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) dest[r.key] = value @@ -96,7 +96,7 @@ def populate_dict(source, source_mapper, dict_, synchronize_pairs): for l, r in synchronize_pairs: try: value = source_mapper._get_state_attr_by_column( - source, source.dict, l) + source, source.dict, l, passive=attributes.PASSIVE_OFF) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index b3f3bc5fa..6d3869679 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -13,7 +13,7 @@ from . import attributes import re from .base import instance_str, state_str, state_class_str, attribute_str, \ - state_attribute_str, object_mapper, object_state, _none_set + state_attribute_str, object_mapper, object_state, _none_set, _never_set from .base import class_mapper, _class_to_mapper from .base import InspectionAttr from .path_registry import PathRegistry @@ -530,7 +530,7 @@ class AliasedInsp(InspectionAttr): def _adapt_element(self, elem): return self._adapter.traverse(elem).\ _annotate({ - 'parententity': self.entity, + 'parententity': self, 'parentmapper': self.mapper} ) @@ -839,9 +839,10 @@ class _ORMJoin(expression.Join): # or implicit ON clause, augment it the same way we'd augment the # WHERE. single_crit = right_info.mapper._single_table_criterion - if right_info.is_aliased_class: - single_crit = right_info._adapter.traverse(single_crit) - self.onclause = self.onclause & single_crit + if single_crit is not None: + if right_info.is_aliased_class: + single_crit = right_info._adapter.traverse(single_crit) + self.onclause = self.onclause & single_crit def _splice_into_center(self, other): """Splice a join into the center. @@ -849,7 +850,11 @@ class _ORMJoin(expression.Join): Given join(a, b) and join(b, c), return join(a, b).join(c) """ - assert self.right is other.left + leftmost = other + while isinstance(leftmost, sql.Join): + leftmost = leftmost.left + + assert self.right is leftmost left = _ORMJoin( self.left, other.left, diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py index ccb4f1e6a..b38aefb3d 100644 --- a/lib/sqlalchemy/pool.py +++ b/lib/sqlalchemy/pool.py @@ -186,6 +186,10 @@ class Pool(log.Identified): database that supports transactions, as it will lead to deadlocks and stale state. + * ``"none"`` - same as ``None`` + + .. versionadded:: 0.9.10 + * ``False`` - same as None, this is here for backwards compatibility. 
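With the string synonym in place, the setting can be expressed through
string-only configuration sources; a minimal sketch::

    from sqlalchemy import create_engine

    # equivalent to reset_on_return=None; the string spelling keeps
    # utilities like engine_from_config workable
    engine = create_engine("sqlite://", pool_reset_on_return="none")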
@@ -220,7 +224,7 @@ class Pool(log.Identified): self._use_threadlocal = use_threadlocal if reset_on_return in ('rollback', True, reset_rollback): self._reset_on_return = reset_rollback - elif reset_on_return in (None, False, reset_none): + elif reset_on_return in ('none', None, False, reset_none): self._reset_on_return = reset_none elif reset_on_return in ('commit', reset_commit): self._reset_on_return = reset_commit @@ -245,6 +249,41 @@ class Pool(log.Identified): for l in listeners: self.add_listener(l) + @property + def _creator(self): + return self.__dict__['_creator'] + + @_creator.setter + def _creator(self, creator): + self.__dict__['_creator'] = creator + self._invoke_creator = self._should_wrap_creator(creator) + + def _should_wrap_creator(self, creator): + """Detect if creator accepts a single argument, or is sent + as a legacy style no-arg function. + + """ + + try: + argspec = util.get_callable_argspec(self._creator, no_self=True) + except TypeError: + return lambda crec: creator() + + defaulted = argspec[3] is not None and len(argspec[3]) or 0 + positionals = len(argspec[0]) - defaulted + + # look for the exact arg signature that DefaultStrategy + # sends us + if (argspec[0], argspec[3]) == (['connection_record'], (None,)): + return creator + # or just a single positional + elif positionals == 1: + return creator + # all other cases, just wrap and assume legacy "creator" callable + # thing + else: + return lambda crec: creator() + def _close_connection(self, connection): self.logger.debug("Closing connection %r", connection) try: @@ -424,6 +463,8 @@ class _ConnectionRecord(object): """ + _soft_invalidate_time = 0 + @util.memoized_property def info(self): """The ``.info`` dictionary associated with the DBAPI connection. @@ -472,7 +513,7 @@ class _ConnectionRecord(object): if self.connection is not None: self.__close() - def invalidate(self, e=None): + def invalidate(self, e=None, soft=False): """Invalidate the DBAPI connection held by this :class:`._ConnectionRecord`. This method is called for all connection invalidations, including @@ -480,6 +521,13 @@ class _ConnectionRecord(object): :meth:`.Connection.invalidate` methods are called, as well as when any so-called "automatic invalidation" condition occurs. + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 + .. 
seealso:: :ref:`pool_connection_invalidation` @@ -488,22 +536,31 @@ class _ConnectionRecord(object): # already invalidated if self.connection is None: return - self.__pool.dispatch.invalidate(self.connection, self, e) + if soft: + self.__pool.dispatch.soft_invalidate(self.connection, self, e) + else: + self.__pool.dispatch.invalidate(self.connection, self, e) if e is not None: self.__pool.logger.info( - "Invalidate connection %r (reason: %s:%s)", + "%sInvalidate connection %r (reason: %s:%s)", + "Soft " if soft else "", self.connection, e.__class__.__name__, e) else: self.__pool.logger.info( - "Invalidate connection %r", self.connection) - self.__close() - self.connection = None + "%sInvalidate connection %r", + "Soft " if soft else "", + self.connection) + if soft: + self._soft_invalidate_time = time.time() + else: + self.__close() + self.connection = None def get_connection(self): recycle = False if self.connection is None: - self.connection = self.__connect() self.info.clear() + self.connection = self.__connect() if self.__pool.dispatch.connect: self.__pool.dispatch.connect(self.connection, self) elif self.__pool._recycle > -1 and \ @@ -519,11 +576,18 @@ class _ConnectionRecord(object): self.connection ) recycle = True + elif self._soft_invalidate_time > self.starttime: + self.__pool.logger.info( + "Connection %r invalidated due to local soft invalidation; " + + "recycling", + self.connection + ) + recycle = True if recycle: self.__close() - self.connection = self.__connect() self.info.clear() + self.connection = self.__connect() if self.__pool.dispatch.connect: self.__pool.dispatch.connect(self.connection, self) return self.connection @@ -535,7 +599,7 @@ class _ConnectionRecord(object): def __connect(self): try: self.starttime = time.time() - connection = self.__pool._creator() + connection = self.__pool._invoke_creator(self) self.__pool.logger.debug("Created new connection %r", connection) return connection except Exception as e: @@ -668,7 +732,13 @@ class _ConnectionFairy(object): pool.logger.info( "Disconnection detected on checkout: %s", e) fairy._connection_record.invalidate(e) - fairy.connection = fairy._connection_record.get_connection() + try: + fairy.connection = \ + fairy._connection_record.get_connection() + except: + with util.safe_reraise(): + fairy._connection_record.checkin() + attempts -= 1 pool.logger.info("Reconnection attempts exhausted on checkout") @@ -736,7 +806,7 @@ class _ConnectionFairy(object): """ return self._connection_record.info - def invalidate(self, e=None): + def invalidate(self, e=None, soft=False): """Mark this connection as invalidated. This method can be called directly, and is also called as a result @@ -745,6 +815,13 @@ class _ConnectionFairy(object): further use by the pool. The invalidation mechanism proceeds via the :meth:`._ConnectionRecord.invalidate` internal method. + :param e: an exception object indicating a reason for the invalidation. + + :param soft: if True, the connection isn't closed; instead, this + connection will be recycled on next checkout. + + .. versionadded:: 1.0.3 + .. 
seealso:: :ref:`pool_connection_invalidation` @@ -755,9 +832,10 @@ class _ConnectionFairy(object): util.warn("Can't invalidate an already-closed connection.") return if self._connection_record: - self._connection_record.invalidate(e=e) - self.connection = None - self._checkin() + self._connection_record.invalidate(e=e, soft=soft) + if not soft: + self.connection = None + self._checkin() def cursor(self, *args, **kwargs): """Return a new DBAPI cursor for the underlying connection. @@ -806,6 +884,19 @@ class SingletonThreadPool(Pool): Maintains one connection per each thread, never moving a connection to a thread other than the one which it was created in. + .. warning:: the :class:`.SingletonThreadPool` will call ``.close()`` + on arbitrary connections that exist beyond the size setting of + ``pool_size``, e.g. if more unique **thread identities** + than what ``pool_size`` states are used. This cleanup is + non-deterministic and not sensitive to whether or not the connections + linked to those thread identities are currently in use. + + :class:`.SingletonThreadPool` may be improved in a future release, + however in its current status it is generally used only for test + scenarios using a SQLite ``:memory:`` database and is not recommended + for production use. + + Options are the same as those of :class:`.Pool`, as well as: :param pool_size: The number of threads in which to maintain connections diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 755193552..e9c3d0efa 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -1133,7 +1133,7 @@ class SQLCompiler(Compiled): anonname = name.apply_map(self.anon_map) - if len(anonname) > self.label_length: + if len(anonname) > self.label_length - 6: counter = self.truncated_names.get(ident_class, 1) truncname = anonname[0:max(self.label_length - 6, 0)] + \ "_" + hex(counter)[2:] @@ -1324,10 +1324,17 @@ class SQLCompiler(Compiled): result_expr = _CompileLabel(col_expr, elements._as_truncated(column.name), alt_names=(column.key,)) - elif not isinstance(column, - (elements.UnaryExpression, elements.TextClause)) \ - and (not hasattr(column, 'name') or - isinstance(column, functions.Function)): + elif ( + not isinstance(column, elements.TextClause) and + ( + not isinstance(column, elements.UnaryExpression) or + column.wraps_column_expression + ) and + ( + not hasattr(column, 'name') or + isinstance(column, functions.Function) + ) + ): result_expr = _CompileLabel(col_expr, column.anon_label) elif col_expr is not column: # TODO: are we sure "column" has a .name and .key here ? @@ -1528,6 +1535,12 @@ class SQLCompiler(Compiled): 'need_result_map_for_compound', False) ) or entry.get('need_result_map_for_nested', False) + # this was first proposed as part of #3372; however, it is not + # reached in current tests and could possibly be an assertion + # instead. + if not populate_result_map and 'add_to_result_map' in kwargs: + del kwargs['add_to_result_map'] + if needs_nested_translation: if populate_result_map: self._transform_result_map_for_nested_joins( @@ -1555,7 +1568,7 @@ class SQLCompiler(Compiled): text += self._generate_prefixes( select, select._prefixes, **kwargs) - text += self.get_select_precolumns(select) + text += self.get_select_precolumns(select, **kwargs) # the actual list of columns to print in the SELECT column list. 
inner_columns = [ @@ -1600,7 +1613,7 @@ class SQLCompiler(Compiled): if per_dialect: text += " " + self.get_statement_hint_text(per_dialect) - if self.ctes and toplevel: + if self.ctes and self._is_toplevel_select(select): text = self._render_cte_clause() + text if select._suffixes: @@ -1614,6 +1627,20 @@ class SQLCompiler(Compiled): else: return text + def _is_toplevel_select(self, select): + """Return True if the stack is placed at the given select, and + is also the outermost SELECT, meaning there is either no stack + before this one, or the enclosing stack is a topmost INSERT. + + """ + return ( + self.stack[-1]['selectable'] is select and + ( + len(self.stack) == 1 or self.isinsert and len(self.stack) == 2 + and self.statement is self.stack[0]['selectable'] + ) + ) + def _setup_select_hints(self, select): byfrom = dict([ (from_, hinttext % { @@ -1729,7 +1756,7 @@ class SQLCompiler(Compiled): else: return "WITH" - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): """Called when building a ``SELECT`` statement, position is just before column list. diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py index 3834f25f4..71018f132 100644 --- a/lib/sqlalchemy/sql/ddl.py +++ b/lib/sqlalchemy/sql/ddl.py @@ -711,8 +711,11 @@ class SchemaGenerator(DDLBase): seq_coll = [s for s in metadata._sequences.values() if s.column is None and self._can_create_sequence(s)] + event_collection = [ + t for (t, fks) in collection if t is not None + ] metadata.dispatch.before_create(metadata, self.connection, - tables=collection, + tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) @@ -730,7 +733,7 @@ class SchemaGenerator(DDLBase): self.traverse_single(fkc) metadata.dispatch.after_create(metadata, self.connection, - tables=collection, + tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) @@ -803,32 +806,50 @@ class SchemaDropper(DDLBase): tables = list(metadata.tables.values()) try: - collection = reversed( + unsorted_tables = [t for t in tables if self._can_drop_table(t)] + collection = list(reversed( sort_tables_and_constraints( - [t for t in tables if self._can_drop_table(t)], - filter_fn= - lambda constraint: True if not self.dialect.supports_alter - else False if constraint.name is None + unsorted_tables, + filter_fn=lambda constraint: False + if not self.dialect.supports_alter + or constraint.name is None else None ) - ) + )) except exc.CircularDependencyError as err2: - util.raise_from_cause( - exc.CircularDependencyError( - err2.args[0], - err2.cycles, err2.edges, - msg="Can't sort tables for DROP; an " + if not self.dialect.supports_alter: + util.warn( + "Can't sort tables for DROP; an " "unresolvable foreign key " - "dependency exists between tables: %s. Please ensure " - "that the ForeignKey and ForeignKeyConstraint objects " - "involved in the cycle have " - "names so that they can be dropped using DROP CONSTRAINT." + "dependency exists between tables: %s, and backend does " + "not support ALTER. To restore at least a partial sort, " + "apply use_alter=True to ForeignKey and " + "ForeignKeyConstraint " + "objects involved in the cycle to mark these as known " + "cycles that will be ignored." 
% ( ", ".join(sorted([t.fullname for t in err2.cycles])) ) + ) + collection = [(t, ()) for t in unsorted_tables] + else: + util.raise_from_cause( + exc.CircularDependencyError( + err2.args[0], + err2.cycles, err2.edges, + msg="Can't sort tables for DROP; an " + "unresolvable foreign key " + "dependency exists between tables: %s. Please ensure " + "that the ForeignKey and ForeignKeyConstraint objects " + "involved in the cycle have " + "names so that they can be dropped using " + "DROP CONSTRAINT." + % ( + ", ".join(sorted([t.fullname for t in err2.cycles])) + ) + ) ) - ) seq_coll = [ s @@ -836,8 +857,12 @@ class SchemaDropper(DDLBase): if s.column is None and self._can_drop_sequence(s) ] + event_collection = [ + t for (t, fks) in collection if t is not None + ] + metadata.dispatch.before_drop( - metadata, self.connection, tables=collection, + metadata, self.connection, tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) for table, fkcs in collection: @@ -852,7 +877,7 @@ class SchemaDropper(DDLBase): self.traverse_single(seq, drop_ok=True) metadata.dispatch.after_drop( - metadata, self.connection, tables=collection, + metadata, self.connection, tables=event_collection, checkfirst=self.checkfirst, _ddl_runner=self) def _can_drop_table(self, table): @@ -1041,7 +1066,8 @@ def sort_tables_and_constraints( try: candidate_sort = list( topological.sort( - fixed_dependencies.union(mutable_dependencies), tables + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True ) ) except exc.CircularDependencyError as err: @@ -1058,7 +1084,8 @@ def sort_tables_and_constraints( mutable_dependencies.discard((dependent_on, table)) candidate_sort = list( topological.sort( - fixed_dependencies.union(mutable_dependencies), tables + fixed_dependencies.union(mutable_dependencies), tables, + deterministic_order=True ) ) diff --git a/lib/sqlalchemy/sql/dml.py b/lib/sqlalchemy/sql/dml.py index 6a4768fa1..6756f1554 100644 --- a/lib/sqlalchemy/sql/dml.py +++ b/lib/sqlalchemy/sql/dml.py @@ -10,7 +10,8 @@ Provide :class:`.Insert`, :class:`.Update` and :class:`.Delete`. """ from .base import Executable, _generative, _from_objects, DialectKWArgs -from .elements import ClauseElement, _literal_as_text, Null, and_, _clone +from .elements import ClauseElement, _literal_as_text, Null, and_, _clone, \ + _column_as_key from .selectable import _interpret_as_from, _interpret_as_select, HasPrefixes from .. import util from .. import exc @@ -261,10 +262,14 @@ class ValuesBase(UpdateBase): has the effect of using the DBAPI `executemany() <http://www.python.org/dev/peps/pep-0249/#id18>`_ method, which provides a high-performance system of invoking - a single-row INSERT statement many times against a series + a single-row INSERT or single-criteria UPDATE or DELETE statement + many times against a series of parameter sets. The "executemany" style is supported by - all database backends, as it does not depend on a special SQL - syntax. + all database backends, and works equally well for INSERT, + UPDATE, and DELETE, as it does not depend on a special SQL + syntax. See :ref:`execute_multiple` for an introduction to + the traditional Core method of multiple parameter set invocation + using this system. .. versionadded:: 0.8 Support for multiple-VALUES INSERT statements. 
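A minimal sketch of the executemany-style invocation described in the docstring above, assuming a hypothetical ``users`` table and a throwaway in-memory SQLite engine (neither is part of this patch)::

    from sqlalchemy import MetaData, Table, Column, Integer, String, \
        create_engine

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    with engine.begin() as conn:
        # a list of parameter dicts routes to the DBAPI executemany() path
        conn.execute(users.insert(),
                     [{'id': 1, 'name': 'u1'}, {'id': 2, 'name': 'u2'}])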
@@ -544,7 +549,8 @@ class Insert(ValuesBase): "This construct already inserts value expressions") self.parameters, self._has_multi_parameters = \ - self._process_colparams(dict((n, Null()) for n in names)) + self._process_colparams( + dict((_column_as_key(n), Null()) for n in names)) self.select_names = names self.inline = True diff --git a/lib/sqlalchemy/sql/elements.py b/lib/sqlalchemy/sql/elements.py index ca8ec1f55..27ecce2b0 100644 --- a/lib/sqlalchemy/sql/elements.py +++ b/lib/sqlalchemy/sql/elements.py @@ -2407,13 +2407,14 @@ class UnaryExpression(ColumnElement): __visit_name__ = 'unary' def __init__(self, element, operator=None, modifier=None, - type_=None, negate=None): + type_=None, negate=None, wraps_column_expression=False): self.operator = operator self.modifier = modifier self.element = element.self_group( against=self.operator or self.modifier) self.type = type_api.to_instance(type_) self.negate = negate + self.wraps_column_expression = wraps_column_expression @classmethod def _create_nullsfirst(cls, column): @@ -2455,7 +2456,8 @@ class UnaryExpression(ColumnElement): """ return UnaryExpression( _literal_as_label_reference(column), - modifier=operators.nullsfirst_op) + modifier=operators.nullsfirst_op, + wraps_column_expression=False) @classmethod def _create_nullslast(cls, column): @@ -2496,7 +2498,8 @@ class UnaryExpression(ColumnElement): """ return UnaryExpression( _literal_as_label_reference(column), - modifier=operators.nullslast_op) + modifier=operators.nullslast_op, + wraps_column_expression=False) @classmethod def _create_desc(cls, column): @@ -2534,7 +2537,9 @@ class UnaryExpression(ColumnElement): """ return UnaryExpression( - _literal_as_label_reference(column), modifier=operators.desc_op) + _literal_as_label_reference(column), + modifier=operators.desc_op, + wraps_column_expression=False) @classmethod def _create_asc(cls, column): @@ -2571,7 +2576,9 @@ class UnaryExpression(ColumnElement): """ return UnaryExpression( - _literal_as_label_reference(column), modifier=operators.asc_op) + _literal_as_label_reference(column), + modifier=operators.asc_op, + wraps_column_expression=False) @classmethod def _create_distinct(cls, expr): @@ -2611,7 +2618,8 @@ class UnaryExpression(ColumnElement): """ expr = _literal_as_binds(expr) return UnaryExpression( - expr, operator=operators.distinct_op, type_=expr.type) + expr, operator=operators.distinct_op, + type_=expr.type, wraps_column_expression=False) @property def _order_by_label_element(self): @@ -2648,7 +2656,8 @@ class UnaryExpression(ColumnElement): operator=self.negate, negate=self.operator, modifier=self.modifier, - type_=self.type) + type_=self.type, + wraps_column_expression=self.wraps_column_expression) else: return ClauseElement._negate(self) @@ -2667,6 +2676,7 @@ class AsBoolean(UnaryExpression): self.operator = operator self.negate = negate self.modifier = None + self.wraps_column_expression = True def self_group(self, against=None): return self @@ -3093,7 +3103,8 @@ class Label(ColumnElement): return self.element, def _copy_internals(self, clone=_clone, anonymize_labels=False, **kw): - self.element = clone(self.element, **kw) + self._element = clone(self._element, **kw) + self.__dict__.pop('element', None) self.__dict__.pop('_allow_label_resolve', None) if anonymize_labels: self.name = self._resolve_label = _anonymous_label( @@ -3714,6 +3725,16 @@ def _literal_as_label_reference(element): elif hasattr(element, '__clause_element__'): element = element.__clause_element__() + return _literal_as_text(element) + + 
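# A minimal sketch (assumed names, not part of the patch) of what the
# wraps_column_expression flag threaded through above enables: a unary
# construct that wraps a column expression, such as EXISTS, receives an
# anonymous label when placed in the columns clause of a SELECT.
from sqlalchemy import select, exists
from sqlalchemy.sql import table, column

t = table('t', column('x'))
print(select([exists().where(t.c.x == 1)]))
# renders roughly:
# SELECT EXISTS (SELECT * FROM t WHERE t.x = :x_1) AS anon_1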
+def _literal_and_labels_as_label_reference(element): + if isinstance(element, util.string_types): + return _textual_label_reference(element) + + elif hasattr(element, '__clause_element__'): + element = element.__clause_element__() + if isinstance(element, ColumnElement) and \ element._order_by_label_element is not None: return _label_reference(element) diff --git a/lib/sqlalchemy/sql/schema.py b/lib/sqlalchemy/sql/schema.py index 3aeba9804..a8989627d 100644 --- a/lib/sqlalchemy/sql/schema.py +++ b/lib/sqlalchemy/sql/schema.py @@ -2392,27 +2392,51 @@ class ColumnCollectionMixin(object): if _autoattach and self._pending_colargs: self._check_attach() + @classmethod + def _extract_col_expression_collection(cls, expressions): + for expr in expressions: + strname = None + column = None + if not isinstance(expr, ClauseElement): + # this assumes a string + strname = expr + else: + cols = [] + visitors.traverse(expr, {}, {'column': cols.append}) + if cols: + column = cols[0] + add_element = column if column is not None else strname + yield expr, column, strname, add_element + def _check_attach(self, evt=False): col_objs = [ c for c in self._pending_colargs if isinstance(c, Column) ] + cols_w_table = [ c for c in col_objs if isinstance(c.table, Table) ] + cols_wo_table = set(col_objs).difference(cols_w_table) if cols_wo_table: + # feature #3341 - place event listeners for Column objects + # such that when all those cols are attached, we autoattach. assert not evt, "Should not reach here on event call" - def _col_attached(column, table): - cols_wo_table.discard(column) - if not cols_wo_table: - self._check_attach(evt=True) - self._cols_wo_table = cols_wo_table - for col in cols_wo_table: - col._on_table_attach(_col_attached) - return + # issue #3411 - don't do the per-column auto-attach if some of the + # columns are specified as strings. 
+ has_string_cols = set(self._pending_colargs).difference(col_objs) + if not has_string_cols: + def _col_attached(column, table): + cols_wo_table.discard(column) + if not cols_wo_table: + self._check_attach(evt=True) + self._cols_wo_table = cols_wo_table + for col in cols_wo_table: + col._on_table_attach(_col_attached) + return columns = cols_w_table @@ -3078,14 +3102,10 @@ class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): self.table = None columns = [] - for expr in expressions: - if not isinstance(expr, ClauseElement): - columns.append(expr) - else: - cols = [] - visitors.traverse(expr, {}, {'column': cols.append}) - if cols: - columns.append(cols[0]) + for expr, column, strname, add_element in self.\ + _extract_col_expression_collection(expressions): + if add_element is not None: + columns.append(add_element) self.expressions = expressions self.name = quoted_name(name, kw.pop("quote", None)) @@ -3359,11 +3379,14 @@ class MetaData(SchemaItem): 'schema': self.schema, 'schemas': self._schemas, 'sequences': self._sequences, - 'fk_memos': self._fk_memos} + 'fk_memos': self._fk_memos, + 'naming_convention': self.naming_convention + } def __setstate__(self, state): self.tables = state['tables'] self.schema = state['schema'] + self.naming_convention = state['naming_convention'] self._bind = None self._sequences = state['sequences'] self._schemas = state['schemas'] @@ -3450,7 +3473,7 @@ class MetaData(SchemaItem): """ - return ddl.sort_tables(self.tables.values()) + return ddl.sort_tables(sorted(self.tables.values(), key=lambda t: t.key)) def reflect(self, bind=None, schema=None, views=False, only=None, extend_existing=False, diff --git a/lib/sqlalchemy/sql/selectable.py b/lib/sqlalchemy/sql/selectable.py index f848ef6db..245c54817 100644 --- a/lib/sqlalchemy/sql/selectable.py +++ b/lib/sqlalchemy/sql/selectable.py @@ -16,7 +16,7 @@ from .elements import _clone, \ _literal_as_text, _interpret_as_column_or_from, _expand_cloned,\ _select_iterables, _anonymous_label, _clause_element_as_expr,\ _cloned_intersection, _cloned_difference, True_, \ - _literal_as_label_reference + _literal_as_label_reference, _literal_and_labels_as_label_reference from .base import Immutable, Executable, _generative, \ ColumnCollection, ColumnSet, _from_objects, Generative from . import type_api @@ -1723,7 +1723,7 @@ class GenerativeSelect(SelectBase): if order_by is not None: self._order_by_clause = ClauseList( *util.to_list(order_by), - _literal_as_text=_literal_as_label_reference) + _literal_as_text=_literal_and_labels_as_label_reference) if group_by is not None: self._group_by_clause = ClauseList( *util.to_list(group_by), @@ -1912,7 +1912,8 @@ class GenerativeSelect(SelectBase): if getattr(self, '_order_by_clause', None) is not None: clauses = list(self._order_by_clause) + list(clauses) self._order_by_clause = ClauseList( - *clauses, _literal_as_text=_literal_as_label_reference) + *clauses, + _literal_as_text=_literal_and_labels_as_label_reference) def append_group_by(self, *clauses): """Append the given GROUP BY criterion applied to this selectable. 
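A brief sketch of the label-aware ORDER BY coercion that ``_literal_and_labels_as_label_reference`` is wired into above; the table ``t`` and the label name ``cnt`` are illustrative only::

    from sqlalchemy import select, func
    from sqlalchemy.sql import table, column

    t = table('t', column('x'))
    stmt = select([t.c.x, func.count().label('cnt')]).\
        group_by(t.c.x).order_by('cnt')
    # the string 'cnt' resolves to the labeled count() expression,
    # rendering "ORDER BY cnt" rather than a new textual element
    print(stmt)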
@@ -3343,7 +3344,8 @@ class Exists(UnaryExpression): s = Select(*args, **kwargs).as_scalar().self_group() UnaryExpression.__init__(self, s, operator=operators.exists, - type_=type_api.BOOLEANTYPE) + type_=type_api.BOOLEANTYPE, + wraps_column_expression=True) def select(self, whereclause=None, **params): return Select([self], whereclause, **params) diff --git a/lib/sqlalchemy/sql/type_api.py b/lib/sqlalchemy/sql/type_api.py index 4660850bd..a55eed981 100644 --- a/lib/sqlalchemy/sql/type_api.py +++ b/lib/sqlalchemy/sql/type_api.py @@ -128,6 +128,33 @@ class TypeEngine(Visitable): """ + def compare_against_backend(self, dialect, conn_type): + """Compare this type against the given backend type. + + This function is currently not implemented for SQLAlchemy + types, and for all built-in types will return ``None``. However, + it can be implemented by a user-defined type + where it can be consumed by schema comparison tools such as + Alembic autogenerate. + + A future release of SQLAlchemy will potentially implement this method + for built-in types as well. + + The function should return True if this type is equivalent to the + given type; the type is typically reflected from the database + so should be database-specific. The dialect in use is also + passed. It can also return False to assert that the type is + not equivalent. + + :param dialect: a :class:`.Dialect` that is involved in the comparison. + + :param conn_type: the type object reflected from the backend. + + .. versionadded:: 1.0.3 + + """ + return None + def copy_value(self, value): return value diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index bec5b5824..8f502fc86 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -16,7 +16,8 @@ from itertools import chain from collections import deque from .elements import BindParameter, ColumnClause, ColumnElement, \ - Null, UnaryExpression, literal_column, Label, _label_reference + Null, UnaryExpression, literal_column, Label, _label_reference, \ + _textual_label_reference from .selectable import ScalarSelect, Join, FromClause, FromGrouping from .schema import Column @@ -163,6 +164,8 @@ def unwrap_order_by(clause): ): if isinstance(t, _label_reference): t = t.element + if isinstance(t, (_textual_label_reference)): + continue cols.add(t) else: for c in t.get_children(): diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py index bf83e9673..7482e32a1 100644 --- a/lib/sqlalchemy/testing/__init__.py +++ b/lib/sqlalchemy/testing/__init__.py @@ -19,12 +19,13 @@ def against(*queries): return _against(config._current, *queries) from .assertions import emits_warning, emits_warning_on, uses_deprecated, \ - eq_, ne_, is_, is_not_, startswith_, assert_raises, \ + eq_, ne_, le_, is_, is_not_, startswith_, assert_raises, \ assert_raises_message, AssertsCompiledSQL, ComparesTables, \ AssertsExecutionResults, expect_deprecated, expect_warnings from .util import run_as_contextmanager, rowset, fail, \ - provide_metadata, adict, force_drop_names + provide_metadata, adict, force_drop_names, \ + teardown_events crashes = skip diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index e5249c296..01fa0b8a9 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -50,8 +50,6 @@ def expect_warnings_on(db, *messages, **kw): if isinstance(db, util.string_types) and not spec(config._current): yield - elif not _is_excluded(*db): - yield else: with expect_warnings(*messages,
**kw): yield @@ -90,7 +88,7 @@ def emits_warning_on(db, *messages): """ @decorator def decorate(fn, *args, **kw): - with expect_warnings_on(db, *messages): + with expect_warnings_on(db, assert_=False, *messages): return fn(*args, **kw) return decorate @@ -216,6 +214,11 @@ def ne_(a, b, msg=None): assert a != b, msg or "%r == %r" % (a, b) +def le_(a, b, msg=None): + """Assert a <= b, with repr messaging on failure.""" + assert a <= b, msg or "%r > %r" % (a, b) + + def is_(a, b, msg=None): """Assert a is b, with repr messaging on failure.""" assert a is b, msg or "%r is not %r" % (a, b) diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index a596d9743..243493607 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -85,7 +85,7 @@ class CompiledSQL(SQLMatchRule): column_keys=context.compiled.column_keys, inline=context.compiled.inline) ) - _received_statement = re.sub(r'[\n\t]', '', str(compiled)) + _received_statement = re.sub(r'[\n\t]', '', util.text_type(compiled)) parameters = execute_observed.parameters if not parameters: @@ -188,21 +188,27 @@ class DialectSQL(CompiledSQL): def _compile_dialect(self, execute_observed): return execute_observed.context.dialect + def _compare_no_space(self, real_stmt, received_stmt): + stmt = re.sub(r'[\n\t]', '', real_stmt) + return received_stmt == stmt + def _received_statement(self, execute_observed): received_stmt, received_params = super(DialectSQL, self).\ _received_statement(execute_observed) + + # TODO: why do we need this part? for real_stmt in execute_observed.statements: - if real_stmt.statement == received_stmt: + if self._compare_no_space(real_stmt.statement, received_stmt): break else: raise AssertionError( "Can't locate compiled statement %r in list of " "statements actually invoked" % received_stmt) + return received_stmt, execute_observed.context.compiled_parameters def _compare_sql(self, execute_observed, received_statement): stmt = re.sub(r'[\n\t]', '', self.statement) - # convert our comparison statement to have the # paramstyle of the received paramstyle = execute_observed.context.dialect.paramstyle diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index d429c9f4e..a3d6e1690 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -15,6 +15,7 @@ file_config = None test_schema = None test_schema_2 = None _current = None +_skip_test_exception = None class Config(object): @@ -83,3 +84,9 @@ class Config(object): for cfg in cls.all_configs(): yield cfg.db + def skip_test(self, msg): + skip_test(msg) + + +def skip_test(msg): + raise _skip_test_exception(msg) diff --git a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 3a8303546..1eaf62960 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -98,7 +98,14 @@ def drop_all_tables(metadata, bind): testing_reaper.close_all() if hasattr(bind, 'close'): bind.close() - metadata.drop_all(bind) + + if not config.db.dialect.supports_alter: + from .
import assertions + with assertions.expect_warnings( + "Can't sort tables", assert_=False): + metadata.drop_all(bind) + else: + metadata.drop_all(bind) @decorator @@ -204,6 +211,7 @@ def testing_engine(url=None, options=None): """Produce an engine configured by --options with optional overrides.""" from sqlalchemy import create_engine + from sqlalchemy.engine.url import make_url if not options: use_reaper = True @@ -211,12 +219,16 @@ def testing_engine(url=None, options=None): use_reaper = options.pop('use_reaper', True) url = url or config.db.url + + url = make_url(url) if options is None: - options = config.db_opts + if config.db is None or url.drivername == config.db.url.drivername: + options = config.db_opts + else: + options = {} engine = create_engine(url, **options) - engine._has_events = True # enable event blocks, helps with - # profiling + engine._has_events = True # enable event blocks, helps with profiling if isinstance(engine.pool, pool.QueuePool): engine.pool._timeout = 0 diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 6aa4bf142..972dec3a9 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -7,7 +7,6 @@ import operator -from .plugin.plugin_base import SkipTest from ..util import decorator from . import config from .. import util @@ -116,7 +115,7 @@ class compound(object): fn.__name__, skip._as_string(config) ) - raise SkipTest(msg) + config.skip_test(msg) try: return_value = fn(*args, **kw) diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py index 7b421952f..e16bc77c0 100644 --- a/lib/sqlalchemy/testing/fixtures.py +++ b/lib/sqlalchemy/testing/fixtures.py @@ -134,13 +134,14 @@ class TablesTest(TestBase): def _teardown_each_tables(self): # no need to run deletes if tables are recreated on setup if self.run_define_tables != 'each' and self.run_deletes == 'each': - for table in reversed(self.metadata.sorted_tables): - try: - table.delete().execute().close() - except sa.exc.DBAPIError as ex: - util.print_( - ("Error emptying table %s: %r" % (table, ex)), - file=sys.stderr) + with self.bind.connect() as conn: + for table in reversed(self.metadata.sorted_tables): + try: + conn.execute(table.delete()) + except sa.exc.DBAPIError as ex: + util.print_( + ("Error emptying table %s: %r" % (table, ex)), + file=sys.stderr) def setup(self): self._setup_each_tables() diff --git a/lib/sqlalchemy/testing/mock.py b/lib/sqlalchemy/testing/mock.py index be83693cc..c836bb407 100644 --- a/lib/sqlalchemy/testing/mock.py +++ b/lib/sqlalchemy/testing/mock.py @@ -11,10 +11,10 @@ from __future__ import absolute_import from ..util import py33 if py33: - from unittest.mock import MagicMock, Mock, call, patch + from unittest.mock import MagicMock, Mock, call, patch, ANY else: try: - from mock import MagicMock, Mock, call, patch + from mock import MagicMock, Mock, call, patch, ANY except ImportError: raise ImportError( "SQLAlchemy's test suite requires the " diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py index 1ae6e28f5..4c390d409 100644 --- a/lib/sqlalchemy/testing/plugin/noseplugin.py +++ b/lib/sqlalchemy/testing/plugin/noseplugin.py @@ -24,6 +24,7 @@ import os import sys from nose.plugins import Plugin +import nose fixtures = None py3k = sys.version_info >= (3, 0) @@ -56,6 +57,8 @@ class NoseSQLAlchemy(Plugin): plugin_base.set_coverage_flag(options.enable_plugin_coverage) + plugin_base.set_skip_test(nose.SkipTest) + def 
begin(self): global fixtures from sqlalchemy.testing import fixtures # noqa diff --git a/lib/sqlalchemy/testing/plugin/plugin_base.py b/lib/sqlalchemy/testing/plugin/plugin_base.py index 14cf1eb31..ef304afa6 100644 --- a/lib/sqlalchemy/testing/plugin/plugin_base.py +++ b/lib/sqlalchemy/testing/plugin/plugin_base.py @@ -14,12 +14,6 @@ functionality via py.test. """ from __future__ import absolute_import -try: - # unitttest has a SkipTest also but pytest doesn't - # honor it unless nose is imported too... - from nose import SkipTest -except ImportError: - from _pytest.runner import Skipped as SkipTest import sys import re @@ -156,6 +150,13 @@ def pre_begin(opt): def set_coverage_flag(value): options.has_coverage = value +_skip_test_exception = None + + +def set_skip_test(exc): + global _skip_test_exception + _skip_test_exception = exc + def post_begin(): """things to set up later, once we know coverage is running.""" @@ -234,6 +235,13 @@ def _monkeypatch_cdecimal(options, file_config): @post +def _init_skiptest(options, file_config): + from sqlalchemy.testing import config + + config._skip_test_exception = _skip_test_exception + + +@post def _engine_uri(options, file_config): from sqlalchemy.testing import config from sqlalchemy import testing @@ -506,7 +514,7 @@ def _do_skips(cls): if getattr(cls, '__skip_if__', False): for c in getattr(cls, '__skip_if__'): if c(): - raise SkipTest("'%s' skipped by %s" % ( + config.skip_test("'%s' skipped by %s" % ( cls.__name__, c.__name__) ) @@ -529,7 +537,7 @@ def _do_skips(cls): ), ", ".join(reasons) ) - raise SkipTest(msg) + config.skip_test(msg) elif hasattr(cls, '__prefer_backends__'): non_preferred = set() spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__)) diff --git a/lib/sqlalchemy/testing/plugin/pytestplugin.py b/lib/sqlalchemy/testing/plugin/pytestplugin.py index fbab4966c..30d7aa73a 100644 --- a/lib/sqlalchemy/testing/plugin/pytestplugin.py +++ b/lib/sqlalchemy/testing/plugin/pytestplugin.py @@ -12,7 +12,7 @@ import collections import itertools try: - import xdist + import xdist # noqa has_xdist = True except ImportError: has_xdist = False @@ -48,6 +48,8 @@ def pytest_configure(config): plugin_base.set_coverage_flag(bool(getattr(config.option, "cov_source", False))) + plugin_base.set_skip_test(pytest.skip.Exception) + def pytest_sessionstart(session): plugin_base.post_begin() @@ -127,6 +129,7 @@ def pytest_pycollect_makeitem(collector, name, obj): _current_class = None + def pytest_runtest_setup(item): # here we seem to get called only based on what we collected # in pytest_collection_modifyitems. So to do class-based stuff diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py index 65fe165cd..357735656 100644 --- a/lib/sqlalchemy/testing/profiling.py +++ b/lib/sqlalchemy/testing/profiling.py @@ -16,7 +16,6 @@ import os import sys from .util import gc_collect from . import config -from .plugin.plugin_base import SkipTest import pstats import collections import contextlib @@ -205,10 +204,11 @@ def count_functions(variance=0.05): raise SkipTest("cProfile is not installed") if not _profile_stats.has_stats() and not _profile_stats.write: - raise SkipTest("No profiling stats available on this " - "platform for this function. Run tests with " - "--write-profiles to add statistics to %s for " - "this platform." % _profile_stats.short_fname) + config.skip_test( + "No profiling stats available on this " + "platform for this function. 
Run tests with " + "--write-profiles to add statistics to %s for " + "this platform." % _profile_stats.short_fname) gc_collect() diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py index c8f7fdf30..8469a0658 100644 --- a/lib/sqlalchemy/testing/provision.py +++ b/lib/sqlalchemy/testing/provision.py @@ -49,6 +49,7 @@ def configure_follower(follower_ident): def setup_config(db_url, db_opts, options, file_config, follower_ident): if follower_ident: db_url = _follower_url_from_main(db_url, follower_ident) + _update_db_opts(db_url, db_opts) eng = engines.testing_engine(db_url, db_opts) eng.connect().close() cfg = config.Config.register(eng, db_opts, options, file_config) @@ -94,6 +95,11 @@ def _drop_db(cfg, eng, ident): @register.init +def _update_db_opts(db_url, db_opts): + pass + + +@register.init def _configure_follower(cfg, ident): pass @@ -105,6 +111,11 @@ def _follower_url_from_main(url, ident): return url +@_update_db_opts.for_db("mssql") +def _mssql_update_db_opts(db_url, db_opts): + db_opts['legacy_schema_aliasing'] = False + + @_follower_url_from_main.for_db("sqlite") def _sqlite_follower_url_from_main(url, ident): url = sa_url.make_url(url) diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index 32465c47d..e8b3a995f 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -342,6 +342,14 @@ class SuiteRequirements(Requirements): return exclusions.open() @property + def duplicate_key_raises_integrity_error(self): + """target dialect raises IntegrityError when reporting an INSERT + with a primary key violation. (hint: it should) + + """ + return exclusions.open() + + @property def unbounded_varchar(self): """Target database must support VARCHAR with no length""" diff --git a/lib/sqlalchemy/testing/suite/__init__.py b/lib/sqlalchemy/testing/suite/__init__.py index 780aa40aa..9eeffd4cb 100644 --- a/lib/sqlalchemy/testing/suite/__init__.py +++ b/lib/sqlalchemy/testing/suite/__init__.py @@ -1,4 +1,5 @@ +from sqlalchemy.testing.suite.test_dialect import * from sqlalchemy.testing.suite.test_ddl import * from sqlalchemy.testing.suite.test_insert import * from sqlalchemy.testing.suite.test_sequence import * diff --git a/lib/sqlalchemy/testing/suite/test_dialect.py b/lib/sqlalchemy/testing/suite/test_dialect.py new file mode 100644 index 000000000..00884a212 --- /dev/null +++ b/lib/sqlalchemy/testing/suite/test_dialect.py @@ -0,0 +1,41 @@ +from .. import fixtures, config +from ..config import requirements +from sqlalchemy import exc +from sqlalchemy import Integer, String +from .. import assert_raises +from ..schema import Table, Column + + +class ExceptionTest(fixtures.TablesTest): + """Test basic exception wrapping. + + DBAPIs vary a lot in exception behavior so to actually anticipate + specific exceptions from real round trips, we need to be conservative. 
+ + """ + run_deletes = 'each' + + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table('manual_pk', metadata, + Column('id', Integer, primary_key=True, autoincrement=False), + Column('data', String(50)) + ) + + @requirements.duplicate_key_raises_integrity_error + def test_integrity_error(self): + + with config.db.begin() as conn: + conn.execute( + self.tables.manual_pk.insert(), + {'id': 1, 'data': 'd1'} + ) + + assert_raises( + exc.IntegrityError, + conn.execute, + self.tables.manual_pk.insert(), + {'id': 1, 'data': 'd1'} + ) diff --git a/lib/sqlalchemy/testing/suite/test_select.py b/lib/sqlalchemy/testing/suite/test_select.py index eaf3f03c2..d4bf63b55 100644 --- a/lib/sqlalchemy/testing/suite/test_select.py +++ b/lib/sqlalchemy/testing/suite/test_select.py @@ -131,6 +131,7 @@ class LimitOffsetTest(fixtures.TablesTest): [(1, 1, 2), (2, 2, 3)] ) + @testing.requires.offset def test_simple_offset(self): table = self.tables.some_table self._assert_result( @@ -138,13 +139,15 @@ class LimitOffsetTest(fixtures.TablesTest): [(3, 3, 4), (4, 4, 5)] ) + @testing.requires.offset def test_simple_limit_offset(self): table = self.tables.some_table self._assert_result( select([table]).order_by(table.c.id).limit(2).offset(1), [(2, 2, 3), (3, 3, 4)] ) - + + @testing.requires.offset def test_limit_offset_nobinds(self): """test that 'literal binds' mode works - no bound params.""" diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index 6d6fa094e..e9437948a 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -185,6 +185,7 @@ def provide_metadata(fn, *args, **kw): """Provide bound MetaData for a single test, dropping afterwards.""" from . import config + from . import engines from sqlalchemy import schema metadata = schema.MetaData(config.db) @@ -194,7 +195,7 @@ def provide_metadata(fn, *args, **kw): try: return fn(*args, **kw) finally: - metadata.drop_all() + engines.drop_all_tables(metadata, config.db) self.metadata = prev_meta @@ -266,3 +267,14 @@ def drop_all_tables(engine, inspector, schema=None, include_names=None): ForeignKeyConstraint( [tb.c.x], [tb.c.y], name=fkc) )) + + +def teardown_events(event_cls): + @decorator + def decorate(fn, *arg, **kw): + try: + return fn(*arg, **kw) + finally: + event_cls._clear() + return decorate + diff --git a/lib/sqlalchemy/util/__init__.py b/lib/sqlalchemy/util/__init__.py index d777d2e06..ed968f168 100644 --- a/lib/sqlalchemy/util/__init__.py +++ b/lib/sqlalchemy/util/__init__.py @@ -19,7 +19,7 @@ from ._collections import KeyedTuple, ImmutableContainer, immutabledict, \ OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \ column_dict, ordered_column_set, populate_column_dict, unique_list, \ UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \ - to_column_set, update_copy, flatten_iterator, \ + to_column_set, update_copy, flatten_iterator, has_intersection, \ LRUCache, ScopedRegistry, ThreadLocalRegistry, WeakSequence, \ coerce_generator_arg, lightweight_named_tuple diff --git a/lib/sqlalchemy/util/_collections.py b/lib/sqlalchemy/util/_collections.py index 4fb12d71b..3869775cf 100644 --- a/lib/sqlalchemy/util/_collections.py +++ b/lib/sqlalchemy/util/_collections.py @@ -19,6 +19,8 @@ EMPTY_SET = frozenset() class AbstractKeyedTuple(tuple): + __slots__ = () + def keys(self): """Return a list of string key names for this :class:`.KeyedTuple`. 
@@ -743,15 +745,16 @@ _property_getters = PopulateDict( def unique_list(seq, hashfunc=None): - seen = {} + seen = set() + seen_add = seen.add if not hashfunc: return [x for x in seq if x not in seen - and not seen.__setitem__(x, True)] + and not seen_add(x)] else: return [x for x in seq if hashfunc(x) not in seen - and not seen.__setitem__(hashfunc(x), True)] + and not seen_add(hashfunc(x))] class UniqueAppender(object): @@ -799,6 +802,19 @@ def to_list(x, default=None): return list(x) +def has_intersection(set_, iterable): + """return True if any items of set_ are present in iterable. + + Goes through special effort to ensure __hash__ is not called + on items in iterable that don't support it. + + """ + # TODO: optimize, write in C, etc. + return bool( + set_.intersection([i for i in iterable if i.__hash__]) + ) + + def to_set(x): if x is None: return set() diff --git a/lib/sqlalchemy/util/langhelpers.py b/lib/sqlalchemy/util/langhelpers.py index 3d7bfad0a..499515142 100644 --- a/lib/sqlalchemy/util/langhelpers.py +++ b/lib/sqlalchemy/util/langhelpers.py @@ -755,7 +755,7 @@ class memoized_property(object): obj.__dict__.pop(name, None) -class memoized_instancemethod(object): +def memoized_instancemethod(fn): """Decorate a method to memoize its return value. Best applied to no-arg methods: memoization is not sensitive to @@ -764,26 +764,14 @@ """ - def __init__(self, fget, doc=None): - self.fget = fget - self.__doc__ = doc or fget.__doc__ - self.__name__ = fget.__name__ - - def __get__(self, obj, cls): - if obj is None: - return self - - def oneshot(*args, **kw): - result = self.fget(obj, *args, **kw) - memo = lambda *a, **kw: result - memo.__name__ = self.__name__ - memo.__doc__ = self.__doc__ - obj.__dict__[self.__name__] = memo - return result - - oneshot.__name__ = self.__name__ - oneshot.__doc__ = self.__doc__ - return oneshot + def oneshot(self, *args, **kw): + result = fn(self, *args, **kw) + memo = lambda *a, **kw: result + memo.__name__ = fn.__name__ + memo.__doc__ = fn.__doc__ + self.__dict__[fn.__name__] = memo + return result + return update_wrapper(oneshot, fn) class group_expirable_memoized_property(object): diff --git a/lib/sqlalchemy/util/topological.py b/lib/sqlalchemy/util/topological.py index 80735c4df..0cd2bae29 100644 --- a/lib/sqlalchemy/util/topological.py +++ b/lib/sqlalchemy/util/topological.py @@ -13,18 +13,20 @@ from .. import util __all__ = ['sort', 'sort_as_subsets', 'find_cycles'] -def sort_as_subsets(tuples, allitems): +def sort_as_subsets(tuples, allitems, deterministic_order=False): edges = util.defaultdict(set) for parent, child in tuples: edges[child].add(parent) - todo = set(allitems) + Set = util.OrderedSet if deterministic_order else set + + todo = Set(allitems) while todo: - output = set() - for node in list(todo): - if not todo.intersection(edges[node]): + output = Set() + for node in todo: + if todo.isdisjoint(edges[node]): output.add(node) if not output: @@ -38,13 +40,14 @@ yield output -def sort(tuples, allitems): +def sort(tuples, allitems, deterministic_order=False): """sort the given list of items by dependency. 'tuples' is a list of tuples representing a partial ordering. + 'deterministic_order' keeps items within a dependency tier in list order.
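For example (an illustrative sketch, not part of the patch itself), same-tier items come out in input order when the flag is set::

    from sqlalchemy.util import topological

    tuples = [('a', 'b')]  # 'b' depends on 'a'
    print(list(topological.sort(
        tuples, ['c', 'a', 'b'], deterministic_order=True)))
    # ['c', 'a', 'b'] - 'c' and 'a' share a tier; input order is kept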
""" - for set_ in sort_as_subsets(tuples, allitems): + for set_ in sort_as_subsets(tuples, allitems, deterministic_order): for s in set_: yield s diff --git a/regen_callcounts.tox.ini b/regen_callcounts.tox.ini index 056208ca6..e74ceef36 100644 --- a/regen_callcounts.tox.ini +++ b/regen_callcounts.tox.ini @@ -12,8 +12,6 @@ deps=pytest py{27}-sqla_{cext,nocext}-db_{mysql}: mysql-python py{33,34}-sqla_{cext,nocext}-db_{mysql}: pymysql -usedevelop=False -sitepackages=True commands= @@ -22,7 +20,11 @@ commands= db_{postgresql}: {[base]basecommand} --db postgresql {posargs} db_{sqlite}: {[base]basecommand} --db sqlite {posargs} +# -E : ignore PYTHON* environment variables (such as PYTHONPATH) +# -s : don't add user site directory to sys.path; also PYTHONNOUSERSITE setenv= - sqla_nocext: DISABLE_SQLALCHEMY_CEXT=1 + PYTHONPATH= + PYTHONNOUSERSITE=1 + sqla_nocext: DISABLE_SQLALCHEMY_CEXT=1 diff --git a/sqla_nose.py b/sqla_nose.py index fc55f34f7..fe5c4d00b 100755 --- a/sqla_nose.py +++ b/sqla_nose.py @@ -10,10 +10,11 @@ import sys import nose import os - -for pth in ['./lib']: - sys.path.append( - os.path.join(os.path.dirname(os.path.abspath(__file__)), pth)) +if not sys.flags.no_user_site: + sys.path.insert( + 0, + os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib') + ) # use bootstrapping so that test plugins are loaded # without touching the main library before coverage starts diff --git a/test/aaa_profiling/test_orm.py b/test/aaa_profiling/test_orm.py index 71e7b32f4..aeb069d90 100644 --- a/test/aaa_profiling/test_orm.py +++ b/test/aaa_profiling/test_orm.py @@ -462,6 +462,7 @@ class SessionTest(fixtures.MappedTest): sess.expire_all() go() + class QueryTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py index 5627ef8e0..3d97caa6a 100644 --- a/test/aaa_profiling/test_zoomark.py +++ b/test/aaa_profiling/test_zoomark.py @@ -408,4 +408,3 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest): def _baseline_8_drop(self): self.metadata.drop_all() - diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py index dece30d65..bfc7c114c 100644 --- a/test/aaa_profiling/test_zoomark_orm.py +++ b/test/aaa_profiling/test_zoomark_orm.py @@ -174,10 +174,14 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest): # Animals - list(self.session.query(Animal).filter(Animal.Species == 'Leopard')) - list(self.session.query(Animal).filter(Animal.Species == 'Ostrich')) - list(self.session.query(Animal).filter(Animal.Legs == 1000000)) - list(self.session.query(Animal).filter(Animal.Species == 'Tick')) + list(self.session.query(Animal).filter( + Animal.Species == 'Leopard')) + list(self.session.query(Animal).filter( + Animal.Species == 'Ostrich')) + list(self.session.query(Animal).filter( + Animal.Legs == 1000000)) + list(self.session.query(Animal).filter( + Animal.Species == 'Tick')) def _baseline_4_expressions(self): for x in range(ITERATIONS): @@ -342,4 +346,3 @@ class ZooMarkTest(replay_fixture.ReplayFixtureTest): def _baseline_7_drop(self): self.session.rollback() self.metadata.drop_all() - diff --git a/test/base/test_dependency.py b/test/base/test_dependency.py index b16516f15..a06bd5a51 100644 --- a/test/base/test_dependency.py +++ b/test/base/test_dependency.py @@ -14,6 +14,11 @@ class DependencySortTest(fixtures.TestBase): result = list(topological.sort(tuples, allitems)) assert conforms_partial_ordering(tuples, result) + def assert_sort_deterministic(self, 
tuples, allitems, expected): + result = list(topological.sort(tuples, allitems, deterministic_order=True)) + assert conforms_partial_ordering(tuples, result) + assert result == expected + def _nodes_from_tuples(self, tups): s = set() for tup in tups: @@ -63,6 +68,30 @@ class DependencySortTest(fixtures.TestBase): (node3, node2)] self.assert_sort(tuples) + def test_sort_deterministic_one(self): + node1 = 'node1' + node2 = 'node2' + node3 = 'node3' + node4 = 'node4' + node5 = 'node5' + node6 = 'node6' + allitems = [node6, node5, node4, node3, node2, node1] + tuples = [(node6, node5), (node2, node1)] + expected = [node6, node4, node3, node2, node5, node1] + self.assert_sort_deterministic(tuples, allitems, expected) + + def test_sort_deterministic_two(self): + node1 = 1 + node2 = 2 + node3 = 3 + node4 = 4 + node5 = 5 + node6 = 6 + allitems = [node6, node5, node4, node3, node2, node1] + tuples = [(node6, node5), (node4, node3), (node2, node1)] + expected = [node6, node4, node2, node5, node3, node1] + self.assert_sort_deterministic(tuples, allitems, expected) + def test_raise_on_cycle_one(self): node1 = 'node1' node2 = 'node2' diff --git a/test/base/test_except.py b/test/base/test_except.py index 918e7a042..9e8dd4760 100644 --- a/test/base/test_except.py +++ b/test/base/test_except.py @@ -4,6 +4,7 @@ from sqlalchemy import exc as sa_exceptions from sqlalchemy.testing import fixtures from sqlalchemy.testing import eq_ +from sqlalchemy.engine import default class Error(Exception): @@ -28,8 +29,28 @@ class OutOfSpec(DatabaseError): pass +# exception with a totally different name... +class WrongNameError(DatabaseError): + pass + +# but they're going to call it their "IntegrityError" +IntegrityError = WrongNameError + + +# and they're going to subclass it! +class SpecificIntegrityError(WrongNameError): + pass + + class WrapTest(fixtures.TestBase): + def _translating_dialect_fixture(self): + d = default.DefaultDialect() + d.dbapi_exception_translation_map = { + "WrongNameError": "IntegrityError" + } + return d + def test_db_error_normal(self): try: raise sa_exceptions.DBAPIError.instance( @@ -160,6 +181,42 @@ class WrapTest(fixtures.TestBase): except sa_exceptions.ArgumentError: self.assert_(False) + dialect = self._translating_dialect_fixture() + try: + raise sa_exceptions.DBAPIError.instance( + '', [], + sa_exceptions.ArgumentError(), DatabaseError, + dialect=dialect) + except sa_exceptions.DBAPIError as e: + self.assert_(e.__class__ is sa_exceptions.DBAPIError) + except sa_exceptions.ArgumentError: + self.assert_(False) + + def test_db_error_dbapi_uses_wrong_names(self): + dialect = self._translating_dialect_fixture() + + try: + raise sa_exceptions.DBAPIError.instance( + '', [], IntegrityError(), + DatabaseError, dialect=dialect) + except sa_exceptions.DBAPIError as e: + self.assert_(e.__class__ is sa_exceptions.IntegrityError) + + try: + raise sa_exceptions.DBAPIError.instance( + '', [], SpecificIntegrityError(), + DatabaseError, dialect=dialect) + except sa_exceptions.DBAPIError as e: + self.assert_(e.__class__ is sa_exceptions.IntegrityError) + + try: + raise sa_exceptions.DBAPIError.instance( + '', [], SpecificIntegrityError(), + DatabaseError) + except sa_exceptions.DBAPIError as e: + # doesn't work without a dialect + self.assert_(e.__class__ is not sa_exceptions.IntegrityError) + def test_db_error_keyboard_interrupt(self): try: raise sa_exceptions.DBAPIError.instance( diff --git a/test/base/test_utils.py b/test/base/test_utils.py index df61d7874..256f52850 100644 --- 
a/test/base/test_utils.py +++ b/test/base/test_utils.py @@ -7,7 +7,7 @@ from sqlalchemy.testing.util import picklers, gc_collect from sqlalchemy.util import classproperty, WeakSequence, get_callable_argspec from sqlalchemy.sql import column from sqlalchemy.util import langhelpers - +import inspect class _KeyedTupleTest(object): @@ -276,6 +276,7 @@ class MemoizedAttrTest(fixtures.TestBase): val[0] += 1 return v + assert inspect.ismethod(Foo().bar) ne_(Foo.bar, None) f1 = Foo() assert 'bar' not in f1.__dict__ diff --git a/test/conftest.py b/test/conftest.py index 590b35700..36dfaa792 100755 --- a/test/conftest.py +++ b/test/conftest.py @@ -9,10 +9,12 @@ installs SQLAlchemy's testing plugin into the local environment. import sys import os -for pth in ['../lib']: - sys.path.append( - os.path.join(os.path.dirname(os.path.abspath(__file__)), pth)) - +if not sys.flags.no_user_site: + sys.path.insert( + 0, + os.path.join( + os.path.dirname(os.path.abspath(__file__)), '..', 'lib') + ) # use bootstrapping so that test plugins are loaded # without touching the main library before coverage starts diff --git a/test/dialect/mssql/test_compiler.py b/test/dialect/mssql/test_compiler.py index 0eb369ed9..9d89f040b 100644 --- a/test/dialect/mssql/test_compiler.py +++ b/test/dialect/mssql/test_compiler.py @@ -1,16 +1,18 @@ # -*- encoding: utf-8 from sqlalchemy.testing import eq_ -from sqlalchemy import * from sqlalchemy import schema from sqlalchemy.sql import table, column from sqlalchemy.databases import mssql from sqlalchemy.dialects.mssql import mxodbc from sqlalchemy.testing import fixtures, AssertsCompiledSQL from sqlalchemy import sql +from sqlalchemy import Integer, String, Table, Column, select, MetaData,\ + update, delete, insert, extract, union, func, PrimaryKeyConstraint, \ + UniqueConstraint, Index, Sequence, literal class CompileTest(fixtures.TestBase, AssertsCompiledSQL): - __dialect__ = mssql.dialect() + __dialect__ = mssql.dialect(legacy_schema_aliasing=False) def test_true_false(self): self.assert_compile( @@ -28,22 +30,32 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_select_with_nolock(self): t = table('sometable', column('somecolumn')) - self.assert_compile(t.select().with_hint(t, 'WITH (NOLOCK)'), - 'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)') + self.assert_compile( + t.select().with_hint(t, 'WITH (NOLOCK)'), + 'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)') + + def test_select_with_nolock_schema(self): + m = MetaData() + t = Table('sometable', m, Column('somecolumn', Integer), + schema='test_schema') + self.assert_compile( + t.select().with_hint(t, 'WITH (NOLOCK)'), + 'SELECT test_schema.sometable.somecolumn ' + 'FROM test_schema.sometable WITH (NOLOCK)') def test_join_with_hint(self): t1 = table('t1', - column('a', Integer), - column('b', String), - column('c', String), - ) + column('a', Integer), + column('b', String), + column('c', String), + ) t2 = table('t2', - column("a", Integer), - column("b", Integer), - column("c", Integer), - ) - join = t1.join(t2, t1.c.a==t2.c.a).\ - select().with_hint(t1, 'WITH (NOLOCK)') + column("a", Integer), + column("b", Integer), + column("c", Integer), + ) + join = t1.join(t2, t1.c.a == t2.c.a).\ + select().with_hint(t1, 'WITH (NOLOCK)') self.assert_compile( join, 'SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c ' @@ -69,10 +81,10 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for darg in ("*", "mssql"): self.assert_compile( t.insert(). - values(somecolumn="x"). 
- with_hint("WITH (PAGLOCK)", - selectable=targ, - dialect_name=darg), + values(somecolumn="x"). + with_hint("WITH (PAGLOCK)", + selectable=targ, + dialect_name=darg), "INSERT INTO sometable WITH (PAGLOCK) " "(somecolumn) VALUES (:somecolumn)" ) @@ -82,11 +94,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for targ in (None, t): for darg in ("*", "mssql"): self.assert_compile( - t.update().where(t.c.somecolumn=="q"). - values(somecolumn="x"). - with_hint("WITH (PAGLOCK)", - selectable=targ, - dialect_name=darg), + t.update().where(t.c.somecolumn == "q"). + values(somecolumn="x"). + with_hint("WITH (PAGLOCK)", + selectable=targ, + dialect_name=darg), "UPDATE sometable WITH (PAGLOCK) " "SET somecolumn=:somecolumn " "WHERE sometable.somecolumn = :somecolumn_1" @@ -95,9 +107,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_update_exclude_hint(self): t = table('sometable', column('somecolumn')) self.assert_compile( - t.update().where(t.c.somecolumn=="q"). - values(somecolumn="x"). - with_hint("XYZ", "mysql"), + t.update().where(t.c.somecolumn == "q"). + values(somecolumn="x"). + with_hint("XYZ", "mysql"), "UPDATE sometable SET somecolumn=:somecolumn " "WHERE sometable.somecolumn = :somecolumn_1" ) @@ -107,10 +119,10 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): for targ in (None, t): for darg in ("*", "mssql"): self.assert_compile( - t.delete().where(t.c.somecolumn=="q"). - with_hint("WITH (PAGLOCK)", - selectable=targ, - dialect_name=darg), + t.delete().where(t.c.somecolumn == "q"). + with_hint("WITH (PAGLOCK)", + selectable=targ, + dialect_name=darg), "DELETE FROM sometable WITH (PAGLOCK) " "WHERE sometable.somecolumn = :somecolumn_1" ) @@ -118,9 +130,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_delete_exclude_hint(self): t = table('sometable', column('somecolumn')) self.assert_compile( - t.delete().\ - where(t.c.somecolumn=="q").\ - with_hint("XYZ", dialect_name="mysql"), + t.delete(). + where(t.c.somecolumn == "q"). + with_hint("XYZ", dialect_name="mysql"), "DELETE FROM sometable WHERE " "sometable.somecolumn = :somecolumn_1" ) @@ -130,18 +142,51 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): t2 = table('othertable', column('somecolumn')) for darg in ("*", "mssql"): self.assert_compile( - t.update().where(t.c.somecolumn==t2.c.somecolumn). - values(somecolumn="x"). - with_hint("WITH (PAGLOCK)", - selectable=t2, - dialect_name=darg), + t.update().where(t.c.somecolumn == t2.c.somecolumn). + values(somecolumn="x"). + with_hint("WITH (PAGLOCK)", + selectable=t2, + dialect_name=darg), "UPDATE sometable SET somecolumn=:somecolumn " "FROM sometable, othertable WITH (PAGLOCK) " "WHERE sometable.somecolumn = othertable.somecolumn" ) + def test_update_to_select_schema(self): + meta = MetaData() + table = Table( + "sometable", meta, + Column("sym", String), + Column("val", Integer), + schema="schema" + ) + other = Table( + "#other", meta, + Column("sym", String), + Column("newval", Integer) + ) + stmt = table.update().values( + val=select([other.c.newval]). 
+ where(table.c.sym == other.c.sym).as_scalar()) + + self.assert_compile( + stmt, + "UPDATE [schema].sometable SET val=" + "(SELECT [#other].newval FROM [#other] " + "WHERE [schema].sometable.sym = [#other].sym)", + ) + + stmt = table.update().values(val=other.c.newval).\ + where(table.c.sym == other.c.sym) + self.assert_compile( + stmt, + "UPDATE [schema].sometable SET val=" + "[#other].newval FROM [schema].sometable, " + "[#other] WHERE [schema].sometable.sym = [#other].sym", + ) + # TODO: not supported yet. - #def test_delete_from_hint(self): + # def test_delete_from_hint(self): # t = table('sometable', column('somecolumn')) # t2 = table('othertable', column('somecolumn')) # for darg in ("*", "mssql"): @@ -173,8 +218,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): "IN ('x', 'y', 'z')", ), ( - t.c.foo.in_([None]), - "sometable.foo IN (NULL)" + t.c.foo.in_([None]), + "sometable.foo IN (NULL)" ) ]: self.assert_compile(expr, compile, dialect=mxodbc_dialect) @@ -187,13 +232,13 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): t = table('sometable', column('somecolumn')) self.assert_compile(t.select().where(t.c.somecolumn - == t.select()), + == t.select()), 'SELECT sometable.somecolumn FROM ' 'sometable WHERE sometable.somecolumn = ' '(SELECT sometable.somecolumn FROM ' 'sometable)') self.assert_compile(t.select().where(t.c.somecolumn - != t.select()), + != t.select()), 'SELECT sometable.somecolumn FROM ' 'sometable WHERE sometable.somecolumn != ' '(SELECT sometable.somecolumn FROM ' @@ -210,10 +255,10 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): subqueries""" table1 = table('mytable', - column('myid', Integer), - column('name', String), - column('description', String), - ) + column('myid', Integer), + column('name', String), + column('description', String), + ) q = select([table1.c.myid], order_by=[table1.c.myid]).alias('foo') @@ -223,74 +268,83 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): "myid FROM mytable) AS foo, mytable WHERE " "foo.myid = mytable.myid") - - def test_delete_schema(self): metadata = MetaData() tbl = Table('test', metadata, Column('id', Integer, - primary_key=True), schema='paj') + primary_key=True), schema='paj') self.assert_compile(tbl.delete(tbl.c.id == 1), 'DELETE FROM paj.test WHERE paj.test.id = ' ':id_1') s = select([tbl.c.id]).where(tbl.c.id == 1) self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)), 'DELETE FROM paj.test WHERE paj.test.id IN ' - '(SELECT test_1.id FROM paj.test AS test_1 ' - 'WHERE test_1.id = :id_1)') + '(SELECT paj.test.id FROM paj.test ' + 'WHERE paj.test.id = :id_1)') def test_delete_schema_multipart(self): metadata = MetaData() - tbl = Table('test', metadata, Column('id', Integer, - primary_key=True), schema='banana.paj') + tbl = Table( + 'test', metadata, + Column('id', Integer, + primary_key=True), + schema='banana.paj') self.assert_compile(tbl.delete(tbl.c.id == 1), 'DELETE FROM banana.paj.test WHERE ' 'banana.paj.test.id = :id_1') s = select([tbl.c.id]).where(tbl.c.id == 1) self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)), 'DELETE FROM banana.paj.test WHERE ' - 'banana.paj.test.id IN (SELECT test_1.id ' - 'FROM banana.paj.test AS test_1 WHERE ' - 'test_1.id = :id_1)') + 'banana.paj.test.id IN (SELECT banana.paj.test.id ' + 'FROM banana.paj.test WHERE ' + 'banana.paj.test.id = :id_1)') def test_delete_schema_multipart_needs_quoting(self): metadata = MetaData() - tbl = Table('test', metadata, Column('id', Integer, - primary_key=True), schema='banana split.paj') + tbl = 
Table( + 'test', metadata, + Column('id', Integer, primary_key=True), + schema='banana split.paj') self.assert_compile(tbl.delete(tbl.c.id == 1), 'DELETE FROM [banana split].paj.test WHERE ' '[banana split].paj.test.id = :id_1') s = select([tbl.c.id]).where(tbl.c.id == 1) self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)), 'DELETE FROM [banana split].paj.test WHERE ' - '[banana split].paj.test.id IN (SELECT ' - 'test_1.id FROM [banana split].paj.test AS ' - 'test_1 WHERE test_1.id = :id_1)') + '[banana split].paj.test.id IN (' + + 'SELECT [banana split].paj.test.id FROM ' + '[banana split].paj.test WHERE ' + '[banana split].paj.test.id = :id_1)') def test_delete_schema_multipart_both_need_quoting(self): metadata = MetaData() tbl = Table('test', metadata, Column('id', Integer, - primary_key=True), + primary_key=True), schema='banana split.paj with a space') self.assert_compile(tbl.delete(tbl.c.id == 1), 'DELETE FROM [banana split].[paj with a ' 'space].test WHERE [banana split].[paj ' 'with a space].test.id = :id_1') s = select([tbl.c.id]).where(tbl.c.id == 1) - self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)), - 'DELETE FROM [banana split].[paj with a ' - 'space].test WHERE [banana split].[paj ' - 'with a space].test.id IN (SELECT ' - 'test_1.id FROM [banana split].[paj with a ' - 'space].test AS test_1 WHERE test_1.id = ' - ':id_1)') + self.assert_compile( + tbl.delete().where(tbl.c.id.in_(s)), + "DELETE FROM [banana split].[paj with a space].test " + "WHERE [banana split].[paj with a space].test.id IN " + "(SELECT [banana split].[paj with a space].test.id " + "FROM [banana split].[paj with a space].test " + "WHERE [banana split].[paj with a space].test.id = :id_1)" + ) def test_union(self): - t1 = table('t1', column('col1'), column('col2'), column('col3' - ), column('col4')) - t2 = table('t2', column('col1'), column('col2'), column('col3' - ), column('col4')) - s1, s2 = select([t1.c.col3.label('col3'), t1.c.col4.label('col4' - )], t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \ + t1 = table( + 't1', column('col1'), column('col2'), + column('col3'), column('col4')) + t2 = table( + 't2', column('col1'), column('col2'), + column('col3'), column('col4')) + s1, s2 = select( + [t1.c.col3.label('col3'), t1.c.col4.label('col4')], + t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \ select([t2.c.col3.label('col3'), t2.c.col4.label('col4')], t2.c.col2.in_(['t2col2r2', 't2col2r3'])) u = union(s1, s2, order_by=['col3', 'col4']) @@ -313,8 +367,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile(func.current_time(), 'CURRENT_TIME') self.assert_compile(func.foo(), 'foo()') m = MetaData() - t = Table('sometable', m, Column('col1', Integer), Column('col2' - , Integer)) + t = Table( + 'sometable', m, Column('col1', Integer), Column('col2', Integer)) self.assert_compile(select([func.max(t.c.col1)]), 'SELECT max(sometable.col1) AS max_1 FROM ' 'sometable') @@ -332,11 +386,14 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field) def test_update_returning(self): - table1 = table('mytable', column('myid', Integer), column('name' - , String(128)), column('description', - String(128))) - u = update(table1, values=dict(name='foo' - )).returning(table1.c.myid, table1.c.name) + table1 = table( + 'mytable', + column('myid', Integer), + column('name', String(128)), + column('description', String(128))) + u = update( + table1, + values=dict(name='foo')).returning(table1.c.myid, table1.c.name) self.assert_compile(u, 'UPDATE 
mytable SET name=:name OUTPUT ' 'inserted.myid, inserted.name') @@ -345,40 +402,43 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'UPDATE mytable SET name=:name OUTPUT ' 'inserted.myid, inserted.name, ' 'inserted.description') - u = update(table1, values=dict(name='foo' - )).returning(table1).where(table1.c.name == 'bar') + u = update( + table1, + values=dict( + name='foo')).returning(table1).where(table1.c.name == 'bar') self.assert_compile(u, 'UPDATE mytable SET name=:name OUTPUT ' 'inserted.myid, inserted.name, ' 'inserted.description WHERE mytable.name = ' ':name_1') u = update(table1, values=dict(name='foo' - )).returning(func.length(table1.c.name)) + )).returning(func.length(table1.c.name)) self.assert_compile(u, 'UPDATE mytable SET name=:name OUTPUT ' 'LEN(inserted.name) AS length_1') def test_delete_returning(self): - table1 = table('mytable', column('myid', Integer), column('name' - , String(128)), column('description', - String(128))) + table1 = table( + 'mytable', column('myid', Integer), + column('name', String(128)), column('description', String(128))) d = delete(table1).returning(table1.c.myid, table1.c.name) self.assert_compile(d, 'DELETE FROM mytable OUTPUT deleted.myid, ' 'deleted.name') d = delete(table1).where(table1.c.name == 'bar' ).returning(table1.c.myid, - table1.c.name) + table1.c.name) self.assert_compile(d, 'DELETE FROM mytable OUTPUT deleted.myid, ' 'deleted.name WHERE mytable.name = :name_1') def test_insert_returning(self): - table1 = table('mytable', column('myid', Integer), column('name' - , String(128)), column('description', - String(128))) - i = insert(table1, values=dict(name='foo' - )).returning(table1.c.myid, table1.c.name) + table1 = table( + 'mytable', column('myid', Integer), + column('name', String(128)), column('description', String(128))) + i = insert( + table1, + values=dict(name='foo')).returning(table1.c.myid, table1.c.name) self.assert_compile(i, 'INSERT INTO mytable (name) OUTPUT ' 'inserted.myid, inserted.name VALUES ' @@ -389,7 +449,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'inserted.myid, inserted.name, ' 'inserted.description VALUES (:name)') i = insert(table1, values=dict(name='foo' - )).returning(func.length(table1.c.name)) + )).returning(func.length(table1.c.name)) self.assert_compile(i, 'INSERT INTO mytable (name) OUTPUT ' 'LEN(inserted.name) AS length_1 VALUES ' @@ -398,7 +458,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_limit_using_top(self): t = table('t', column('x', Integer), column('y', Integer)) - s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10) + s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10) self.assert_compile( s, @@ -409,7 +469,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_limit_zero_using_top(self): t = table('t', column('x', Integer), column('y', Integer)) - s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0) + s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0) self.assert_compile( s, @@ -444,7 +504,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_limit_offset_using_window(self): t = table('t', column('x', Integer), column('y', Integer)) - s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(10).offset(20) + s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20) self.assert_compile( s, @@ -490,7 +550,7 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_limit_zero_offset_using_window(self): t = table('t', column('x', Integer), column('y', 
Integer)) - s = select([t]).where(t.c.x==5).order_by(t.c.y).limit(0).offset(0) + s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0).offset(0) # render the LIMIT of zero, but not the OFFSET # of zero, so produces TOP 0 @@ -505,26 +565,29 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): metadata = MetaData() tbl = Table('test', metadata, Column('id', Integer, Sequence('', 0), primary_key=True)) - self.assert_compile(schema.CreateTable(tbl), - "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), " - "PRIMARY KEY (id))" - ) + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), " + "PRIMARY KEY (id))" + ) def test_sequence_non_primary_key(self): metadata = MetaData() tbl = Table('test', metadata, Column('id', Integer, Sequence(''), primary_key=False)) - self.assert_compile(schema.CreateTable(tbl), - "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))" - ) + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))" + ) def test_sequence_ignore_nullability(self): metadata = MetaData() tbl = Table('test', metadata, Column('id', Integer, Sequence(''), nullable=True)) - self.assert_compile(schema.CreateTable(tbl), - "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))" - ) + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))" + ) def test_table_pkc_clustering(self): metadata = MetaData() @@ -532,10 +595,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): Column('x', Integer, autoincrement=False), Column('y', Integer, autoincrement=False), PrimaryKeyConstraint("x", "y", mssql_clustered=True)) - self.assert_compile(schema.CreateTable(tbl), - "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, " - "PRIMARY KEY CLUSTERED (x, y))" - ) + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, " + "PRIMARY KEY CLUSTERED (x, y))" + ) def test_table_uc_clustering(self): metadata = MetaData() @@ -544,10 +608,11 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): Column('y', Integer, autoincrement=False), PrimaryKeyConstraint("x"), UniqueConstraint("y", mssql_clustered=True)) - self.assert_compile(schema.CreateTable(tbl), - "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, " - "PRIMARY KEY (x), UNIQUE CLUSTERED (y))" - ) + self.assert_compile( + schema.CreateTable(tbl), + "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, " + "PRIMARY KEY (x), UNIQUE CLUSTERED (y))" + ) def test_index_clustering(self): metadata = MetaData() @@ -560,8 +625,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_index_ordering(self): metadata = MetaData() - tbl = Table('test', metadata, - Column('x', Integer), Column('y', Integer), Column('z', Integer)) + tbl = Table( + 'test', metadata, + Column('x', Integer), Column('y', Integer), Column('z', Integer)) idx = Index("foo", tbl.c.x.desc(), "y") self.assert_compile(schema.CreateIndex(idx), "CREATE INDEX foo ON test (x DESC, y)" @@ -570,8 +636,8 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_create_index_expr(self): m = MetaData() t1 = Table('foo', m, - Column('x', Integer) - ) + Column('x', Integer) + ) self.assert_compile( schema.CreateIndex(Index("bar", t1.c.x > 5)), "CREATE INDEX bar ON foo (x > 5)" @@ -580,9 +646,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_drop_index_w_schema(self): m = MetaData() t1 = Table('foo', m, - Column('x', 
Integer), - schema='bar' - ) + Column('x', Integer), + schema='bar' + ) self.assert_compile( schema.DropIndex(Index("idx_foo", t1.c.x)), "DROP INDEX idx_foo ON bar.foo" @@ -590,8 +656,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_index_extra_include_1(self): metadata = MetaData() - tbl = Table('test', metadata, - Column('x', Integer), Column('y', Integer), Column('z', Integer)) + tbl = Table( + 'test', metadata, + Column('x', Integer), Column('y', Integer), Column('z', Integer)) idx = Index("foo", tbl.c.x, mssql_include=['y']) self.assert_compile(schema.CreateIndex(idx), "CREATE INDEX foo ON test (x) INCLUDE (y)" @@ -599,8 +666,9 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): def test_index_extra_include_2(self): metadata = MetaData() - tbl = Table('test', metadata, - Column('x', Integer), Column('y', Integer), Column('z', Integer)) + tbl = Table( + 'test', metadata, + Column('x', Integer), Column('y', Integer), Column('z', Integer)) idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y]) self.assert_compile(schema.CreateIndex(idx), "CREATE INDEX foo ON test (x) INCLUDE (y)" @@ -611,14 +679,14 @@ class SchemaTest(fixtures.TestBase): def setup(self): t = Table('sometable', MetaData(), - Column('pk_column', Integer), - Column('test_column', String) - ) + Column('pk_column', Integer), + Column('test_column', String) + ) self.column = t.c.test_column dialect = mssql.dialect() self.ddl_compiler = dialect.ddl_compiler(dialect, - schema.CreateTable(t)) + schema.CreateTable(t)) def _column_spec(self): return self.ddl_compiler.get_column_specification(self.column) diff --git a/test/dialect/mssql/test_engine.py b/test/dialect/mssql/test_engine.py index a994b1787..929afc8f9 100644 --- a/test/dialect/mssql/test_engine.py +++ b/test/dialect/mssql/test_engine.py @@ -170,6 +170,17 @@ class ParseConnectTest(fixtures.TestBase): engine.connect) +class EngineFromConfigTest(fixtures.TestBase): + def test_legacy_schema_flag(self): + cfg = { + "sqlalchemy.url": "mssql://foodsn", + "sqlalchemy.legacy_schema_aliasing": "false" + } + e = engine_from_config( + cfg, module=Mock(version="MS SQL Server 11.0.92")) + eq_(e.dialect.legacy_schema_aliasing, False) + + class VersionDetectionTest(fixtures.TestBase): def test_pymssql_version(self): dialect = pymssql.MSDialect_pymssql() diff --git a/test/dialect/mssql/test_query.py b/test/dialect/mssql/test_query.py index 3fce5e6fc..61ae32ef4 100644 --- a/test/dialect/mssql/test_query.py +++ b/test/dialect/mssql/test_query.py @@ -1,25 +1,33 @@ # -*- encoding: utf-8 from sqlalchemy.testing import eq_, engines -from sqlalchemy import * from sqlalchemy.sql import table, column from sqlalchemy.databases import mssql -from sqlalchemy.testing import fixtures, AssertsCompiledSQL +from sqlalchemy.testing import fixtures, AssertsCompiledSQL, assertions from sqlalchemy import testing from sqlalchemy.util import ue from sqlalchemy import util from sqlalchemy.testing.assertsql import CursorSQL +from sqlalchemy import Integer, String, Table, Column, select, MetaData,\ + func, PrimaryKeyConstraint, desc, Sequence, DDL, ForeignKey, or_, and_ +from sqlalchemy import event +metadata = None +cattable = None +matchtable = None -class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL): - """SQL server cannot reference schema-qualified tables in a SELECT statement, they - must be aliased. 
+class LegacySchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL): + """Legacy behavior tried to prevent schema-qualified tables + from being rendered as dotted names, aliasing them instead. + + This behavior no longer seems to be required. + + """ - __dialect__ = mssql.dialect() def setup(self): metadata = MetaData() - self.t1 = table('t1', + self.t1 = table( + 't1', column('a', Integer), column('b', String), column('c', String), @@ -29,64 +37,102 @@ class SchemaAliasingTest(fixtures.TestBase, AssertsCompiledSQL): Column("a", Integer), Column("b", Integer), Column("c", Integer), - schema = 'schema' + schema='schema' ) + def _assert_sql(self, element, legacy_sql, modern_sql=None): + dialect = mssql.dialect() + + with assertions.expect_warnings( + "legacy_schema_aliasing flag is defaulted to True.*"): + self.assert_compile( + element, + legacy_sql, + dialect=dialect + ) + + dialect = mssql.dialect(legacy_schema_aliasing=False) + self.assert_compile( + element, + modern_sql or "foob", + dialect=dialect + ) + + def _legacy_dialect(self): + return mssql.dialect(legacy_schema_aliasing=True) + def test_result_map(self): s = self.t2.select() - c = s.compile(dialect=self.__dialect__) + c = s.compile(dialect=self._legacy_dialect()) assert self.t2.c.a in set(c._create_result_map()['a'][1]) def test_result_map_use_labels(self): s = self.t2.select(use_labels=True) - c = s.compile(dialect=self.__dialect__) + c = s.compile(dialect=self._legacy_dialect()) assert self.t2.c.a in set(c._create_result_map()['schema_t2_a'][1]) def test_straight_select(self): - self.assert_compile(self.t2.select(), - "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1" + self._assert_sql( + self.t2.select(), + "SELECT t2_1.a, t2_1.b, t2_1.c FROM [schema].t2 AS t2_1", + "SELECT [schema].t2.a, [schema].t2.b, " + "[schema].t2.c FROM [schema].t2" ) def test_straight_select_use_labels(self): - self.assert_compile( + self._assert_sql( self.t2.select(use_labels=True), "SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b, " - "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1" + "t2_1.c AS schema_t2_c FROM [schema].t2 AS t2_1", + "SELECT [schema].t2.a AS schema_t2_a, " + "[schema].t2.b AS schema_t2_b, " + "[schema].t2.c AS schema_t2_c FROM [schema].t2" ) def test_join_to_schema(self): t1, t2 = self.t1, self.t2 - self.assert_compile( - t1.join(t2, t1.c.a==t2.c.a).select(), + self._assert_sql( + t1.join(t2, t1.c.a == t2.c.a).select(), "SELECT t1.a, t1.b, t1.c, t2_1.a, t2_1.b, t2_1.c FROM t1 " - "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a" + "JOIN [schema].t2 AS t2_1 ON t2_1.a = t1.a", + + "SELECT t1.a, t1.b, t1.c, [schema].t2.a, [schema].t2.b, " + "[schema].t2.c FROM t1 JOIN [schema].t2 ON [schema].t2.a = t1.a" ) def test_union_schema_to_non(self): t1, t2 = self.t1, self.t2 s = select([t2.c.a, t2.c.b]).apply_labels().\ - union( - select([t1.c.a, t1.c.b]).apply_labels() - ).alias().select() - self.assert_compile( + union( + select([t1.c.a, t1.c.b]).apply_labels()).alias().select() + self._assert_sql( s, "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM " "(SELECT t2_1.a AS schema_t2_a, t2_1.b AS schema_t2_b " "FROM [schema].t2 AS t2_1 UNION SELECT t1.a AS t1_a, " + "t1.b AS t1_b FROM t1) AS anon_1", + + "SELECT anon_1.schema_t2_a, anon_1.schema_t2_b FROM " + "(SELECT [schema].t2.a AS schema_t2_a, [schema].t2.b AS " + "schema_t2_b FROM [schema].t2 UNION SELECT t1.a AS t1_a, " "t1.b AS t1_b FROM t1) AS anon_1" ) def test_column_subquery_to_alias(self): a1 = self.t2.alias('a1') s = select([self.t2, select([a1.c.a]).as_scalar()]) -
self.assert_compile( + self._assert_sql( s, "SELECT t2_1.a, t2_1.b, t2_1.c, " "(SELECT a1.a FROM [schema].t2 AS a1) " - "AS anon_1 FROM [schema].t2 AS t2_1" + "AS anon_1 FROM [schema].t2 AS t2_1", + + "SELECT [schema].t2.a, [schema].t2.b, [schema].t2.c, " + "(SELECT a1.a FROM [schema].t2 AS a1) AS anon_1 FROM [schema].t2" ) + class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL): __only_on__ = 'mssql' __dialect__ = mssql.MSDialect() @@ -97,10 +143,10 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL): metadata = MetaData(testing.db) cattable = Table('cattable', metadata, - Column('id', Integer), - Column('description', String(50)), - PrimaryKeyConstraint('id', name='PK_cattable'), - ) + Column('id', Integer), + Column('description', String(50)), + PrimaryKeyConstraint('id', name='PK_cattable'), + ) def setup(self): metadata.create_all() @@ -110,7 +156,7 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL): def test_compiled(self): self.assert_compile(cattable.insert().values(id=9, - description='Python'), + description='Python'), 'INSERT INTO cattable (id, description) ' 'VALUES (:id, :description)') @@ -127,27 +173,29 @@ class IdentityInsertTest(fixtures.TestBase, AssertsCompiledSQL): def test_executemany(self): cattable.insert().execute([{'id': 89, 'description': 'Python'}, - {'id': 8, 'description': 'Ruby'}, - {'id': 3, 'description': 'Perl'}, - {'id': 1, 'description': 'Java'}]) + {'id': 8, 'description': 'Ruby'}, + {'id': 3, 'description': 'Perl'}, + {'id': 1, 'description': 'Java'}]) cats = cattable.select().order_by(cattable.c.id).execute() eq_([(1, 'Java'), (3, 'Perl'), (8, 'Ruby'), (89, 'Python')], list(cats)) cattable.insert().execute([{'description': 'PHP'}, - {'description': 'Smalltalk'}]) + {'description': 'Smalltalk'}]) lastcats = \ cattable.select().order_by(desc(cattable.c.id)).limit(2).execute() eq_([(91, 'Smalltalk'), (90, 'PHP')], list(lastcats)) + class QueryUnicodeTest(fixtures.TestBase): __only_on__ = 'mssql' def test_convert_unicode(self): meta = MetaData(testing.db) - t1 = Table('unitest_table', meta, Column('id', Integer, - primary_key=True), Column('descr', - mssql.MSText(convert_unicode=True))) + t1 = Table( + 'unitest_table', meta, + Column('id', Integer, primary_key=True), + Column('descr', mssql.MSText(convert_unicode=True))) meta.create_all() con = testing.db.connect() @@ -159,11 +207,13 @@ class QueryUnicodeTest(fixtures.TestBase): try: r = t1.select().execute().first() assert isinstance(r[1], util.text_type), \ - '%s is %s instead of unicode, working on %s' % (r[1], - type(r[1]), meta.bind) + '%s is %s instead of unicode, working on %s' % ( + r[1], + type(r[1]), meta.bind) finally: meta.drop_all() + class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase): __only_on__ = 'mssql' @@ -194,27 +244,27 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase): with the init parameter 'implicit_returning = False'. """ - #todo: this same test needs to be tried in a multithreaded context + # todo: this same test needs to be tried in a multithreaded context # with multiple threads inserting to the same table. - #todo: check whether this error also occurs with clients other + # todo: check whether this error also occurs with clients other # than the SQL Server Native Client. Maybe an assert_raises # test should be written. 
meta = MetaData(testing.db) t1 = Table('t1', meta, - Column('id', Integer, Sequence('fred', 100, 1), - primary_key=True), - Column('descr', String(200)), - # the following flag will prevent the - # MSSQLCompiler.returning_clause from getting called, - # though the ExecutionContext will still have a - # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will - # hopefully be called instead. - implicit_returning = False - ) + Column('id', Integer, Sequence('fred', 100, 1), + primary_key=True), + Column('descr', String(200)), + # the following flag will prevent the + # MSSQLCompiler.returning_clause from getting called, + # though the ExecutionContext will still have a + # _select_lastrowid, so the SELECT SCOPE_IDENTITY() will + # hopefully be called instead. + implicit_returning=False + ) t2 = Table('t2', meta, - Column('id', Integer, Sequence('fred', 200, 1), - primary_key=True), - Column('descr', String(200))) + Column('id', Integer, Sequence('fred', 200, 1), + primary_key=True), + Column('descr', String(200))) meta.create_all() con = testing.db.connect() con.execute("""create trigger paj on t1 for insert as @@ -300,66 +350,117 @@ class QueryTest(testing.AssertsExecutionResults, fixtures.TestBase): ), ) + @testing.provide_metadata def test_insertid_schema(self): - meta = MetaData(testing.db) - con = testing.db.connect() + meta = self.metadata + eng = engines.testing_engine( + options=dict(legacy_schema_aliasing=False)) + meta.bind = eng + con = eng.connect() con.execute('create schema paj') + + @event.listens_for(meta, "after_drop") + def cleanup(target, connection, **kw): + connection.execute('drop schema paj') + tbl = Table('test', meta, Column('id', Integer, primary_key=True), schema='paj') tbl.create() - try: - tbl.insert().execute({'id':1}) - finally: - tbl.drop() - con.execute('drop schema paj') + tbl.insert().execute({'id': 1}) + eq_(tbl.select().scalar(), 1) + + @testing.provide_metadata + def test_insertid_schema_legacy(self): + meta = self.metadata + eng = engines.testing_engine( + options=dict(legacy_schema_aliasing=True)) + meta.bind = eng + con = eng.connect() + con.execute('create schema paj') + + @event.listens_for(meta, "after_drop") + def cleanup(target, connection, **kw): + connection.execute('drop schema paj') + tbl = Table('test', meta, + Column('id', Integer, primary_key=True), schema='paj') + tbl.create() + tbl.insert().execute({'id': 1}) + eq_(tbl.select().scalar(), 1) + + @testing.provide_metadata def test_returning_no_autoinc(self): - meta = MetaData(testing.db) - table = Table('t1', meta, Column('id', Integer, - primary_key=True), Column('data', String(50))) + meta = self.metadata + table = Table( + 't1', meta, + Column('id', Integer, primary_key=True), + Column('data', String(50))) table.create() - try: - result = table.insert().values(id=1, - data=func.lower('SomeString' - )).returning(table.c.id, table.c.data).execute() - eq_(result.fetchall(), [(1, 'somestring')]) - finally: + result = table.insert().values( + id=1, + data=func.lower('SomeString')).\ + returning(table.c.id, table.c.data).execute() + eq_(result.fetchall(), [(1, 'somestring')]) - # this will hang if the "SET IDENTITY_INSERT t1 OFF" occurs - # before the result is fetched + @testing.provide_metadata + def test_delete_schema(self): + meta = self.metadata + eng = engines.testing_engine( + options=dict(legacy_schema_aliasing=False)) + meta.bind = eng + con = eng.connect() + con.execute('create schema paj') - table.drop() + @event.listens_for(meta, "after_drop") + def cleanup(target, connection, 
**kw): + connection.execute('drop schema paj') - def test_delete_schema(self): - meta = MetaData(testing.db) - con = testing.db.connect() + tbl = Table( + 'test', meta, + Column('id', Integer, primary_key=True), schema='paj') + tbl.create() + tbl.insert().execute({'id': 1}) + eq_(tbl.select().scalar(), 1) + tbl.delete(tbl.c.id == 1).execute() + eq_(tbl.select().scalar(), None) + + @testing.provide_metadata + def test_delete_schema_legacy(self): + meta = self.metadata + eng = engines.testing_engine( + options=dict(legacy_schema_aliasing=True)) + meta.bind = eng + con = eng.connect() con.execute('create schema paj') - tbl = Table('test', meta, Column('id', Integer, - primary_key=True), schema='paj') + + @event.listens_for(meta, "after_drop") + def cleanup(target, connection, **kw): + connection.execute('drop schema paj') + + tbl = Table( + 'test', meta, + Column('id', Integer, primary_key=True), schema='paj') tbl.create() - try: - tbl.insert().execute({'id': 1}) - tbl.delete(tbl.c.id == 1).execute() - finally: - tbl.drop() - con.execute('drop schema paj') + tbl.insert().execute({'id': 1}) + eq_(tbl.select().scalar(), 1) + tbl.delete(tbl.c.id == 1).execute() + eq_(tbl.select().scalar(), None) + @testing.provide_metadata def test_insertid_reserved(self): - meta = MetaData(testing.db) + meta = self.metadata table = Table( 'select', meta, Column('col', Integer, primary_key=True) ) table.create() - meta2 = MetaData(testing.db) - try: - table.insert().execute(col=7) - finally: - table.drop() + table.insert().execute(col=7) + eq_(table.select().scalar(), 7) class Foo(object): + def __init__(self, **kw): for k in kw: setattr(self, k, kw[k]) @@ -380,6 +481,7 @@ def full_text_search_missing(): finally: connection.close() + class MatchTest(fixtures.TestBase, AssertsCompiledSQL): __only_on__ = 'mssql' @@ -399,29 +501,24 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL): Column('title', String(200)), Column('category_id', Integer, ForeignKey('cattable.id')), PrimaryKeyConstraint('id', name='PK_matchtable'), - ) + ) DDL("""CREATE FULLTEXT INDEX ON cattable (description) - KEY INDEX PK_cattable""").execute_at('after-create' - , matchtable) + KEY INDEX PK_cattable""").\ + execute_at('after-create', matchtable) DDL("""CREATE FULLTEXT INDEX ON matchtable (title) - KEY INDEX PK_matchtable""").execute_at('after-create' - , matchtable) + KEY INDEX PK_matchtable""").\ + execute_at('after-create', matchtable) metadata.create_all() cattable.insert().execute([{'id': 1, 'description': 'Python'}, - {'id': 2, 'description': 'Ruby'}]) - matchtable.insert().execute([{'id': 1, 'title' - : 'Agile Web Development with Rails' - , 'category_id': 2}, {'id': 2, - 'title': 'Dive Into Python', - 'category_id': 1}, {'id': 3, 'title' - : "Programming Matz's Ruby", - 'category_id': 2}, {'id': 4, 'title' - : 'The Definitive Guide to Django', - 'category_id': 1}, {'id': 5, 'title' - : 'Python in a Nutshell', - 'category_id': 1}]) + {'id': 2, 'description': 'Ruby'}]) + matchtable.insert().execute([ + {'id': 1, 'title': 'Web Development with Rails', 'category_id': 2}, + {'id': 2, 'title': 'Dive Into Python', 'category_id': 1}, + {'id': 3, 'title': "Programming Matz's Ruby", 'category_id': 2}, + {'id': 4, 'title': 'Guide to Django', 'category_id': 1}, + {'id': 5, 'title': 'Python in a Nutshell', 'category_id': 1}]) DDL("WAITFOR DELAY '00:00:05'" ).execute(bind=engines.testing_engine()) @@ -438,59 +535,60 @@ class MatchTest(fixtures.TestBase, AssertsCompiledSQL): def test_simple_match(self): results = \ - 
matchtable.select().where(matchtable.c.title.match('python' - )).order_by(matchtable.c.id).execute().fetchall() + matchtable.select().where( + matchtable.c.title.match('python')).\ + order_by(matchtable.c.id).execute().fetchall() eq_([2, 5], [r.id for r in results]) def test_simple_match_with_apostrophe(self): results = \ - matchtable.select().where(matchtable.c.title.match("Matz's" - )).execute().fetchall() + matchtable.select().where( + matchtable.c.title.match("Matz's")).execute().fetchall() eq_([3], [r.id for r in results]) def test_simple_prefix_match(self): results = \ - matchtable.select().where(matchtable.c.title.match('"nut*"' - )).execute().fetchall() + matchtable.select().where( + matchtable.c.title.match('"nut*"')).execute().fetchall() eq_([5], [r.id for r in results]) def test_simple_inflectional_match(self): results = \ matchtable.select().where( matchtable.c.title.match('FORMSOF(INFLECTIONAL, "dives")' - )).execute().fetchall() + )).execute().fetchall() eq_([2], [r.id for r in results]) def test_or_match(self): results1 = \ - matchtable.select().where(or_(matchtable.c.title.match('nutshell' - ), matchtable.c.title.match('ruby' - ))).order_by(matchtable.c.id).execute().fetchall() + matchtable.select().where(or_( + matchtable.c.title.match('nutshell'), + matchtable.c.title.match('ruby'))).\ + order_by(matchtable.c.id).execute().fetchall() eq_([3, 5], [r.id for r in results1]) results2 = \ matchtable.select().where( - matchtable.c.title.match('nutshell OR ruby' - )).order_by(matchtable.c.id).execute().fetchall() + matchtable.c.title.match( + 'nutshell OR ruby')).\ + order_by(matchtable.c.id).execute().fetchall() eq_([3, 5], [r.id for r in results2]) def test_and_match(self): results1 = \ - matchtable.select().where(and_(matchtable.c.title.match('python' - ), matchtable.c.title.match('nutshell' - ))).execute().fetchall() + matchtable.select().where(and_( + matchtable.c.title.match('python'), + matchtable.c.title.match('nutshell'))).execute().fetchall() eq_([5], [r.id for r in results1]) results2 = \ matchtable.select().where( matchtable.c.title.match('python AND nutshell' - )).execute().fetchall() + )).execute().fetchall() eq_([5], [r.id for r in results2]) def test_match_across_joins(self): - results = matchtable.select().where(and_(cattable.c.id - == matchtable.c.category_id, - or_(cattable.c.description.match('Ruby'), - matchtable.c.title.match('nutshell' - )))).order_by(matchtable.c.id).execute().fetchall() + results = matchtable.select().where( + and_(cattable.c.id == matchtable.c.category_id, + or_(cattable.c.description.match('Ruby'), + matchtable.c.title.match('nutshell')))).\ + order_by(matchtable.c.id).execute().fetchall() eq_([1, 3, 5], [r.id for r in results]) - - diff --git a/test/dialect/mssql/test_types.py b/test/dialect/mssql/test_types.py index 5c9157379..17ceb6b61 100644 --- a/test/dialect/mssql/test_types.py +++ b/test/dialect/mssql/test_types.py @@ -712,7 +712,7 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): @classmethod def setup_class(cls): - global binary_table, MyPickleType + global MyPickleType class MyPickleType(types.TypeDecorator): impl = PickleType @@ -727,9 +727,13 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): value.stuff = 'this is the right stuff' return value - binary_table = Table( + def teardown(self): + self.binary_table.drop(testing.db) + + def _fixture(self, engine): + self.binary_table = binary_table = Table( 'binary_table', - MetaData(testing.db), + MetaData(), Column('primary_id', Integer, 
Sequence('binary_id_seq', optional=True), primary_key=True), Column('data', mssql.MSVarBinary(8000)), @@ -739,51 +743,55 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): Column('pickled', PickleType), Column('mypickle', MyPickleType), ) - binary_table.create() + binary_table.create(engine) + return binary_table - def teardown(self): - binary_table.delete().execute() + def test_binary_legacy_types(self): + self._test_binary(False) - @classmethod - def teardown_class(cls): - binary_table.drop() + @testing.only_on('mssql >= 11') + def test_binary_updated_types(self): + self._test_binary(True) - def test_binary(self): + def test_binary_none_legacy_types(self): + self._test_binary_none(False) + + @testing.only_on('mssql >= 11') + def test_binary_none_updated_types(self): + self._test_binary_none(True) + + def _test_binary(self, deprecate_large_types): testobj1 = pickleable.Foo('im foo 1') testobj2 = pickleable.Foo('im foo 2') testobj3 = pickleable.Foo('im foo 3') - stream1 = self.load_stream('binary_data_one.dat') - stream2 = self.load_stream('binary_data_two.dat') - binary_table.insert().execute( - primary_id=1, - misc='binary_data_one.dat', - data=stream1, - data_image=stream1, - data_slice=stream1[0:100], - pickled=testobj1, - mypickle=testobj3, - ) - binary_table.insert().execute( - primary_id=2, - misc='binary_data_two.dat', - data=stream2, - data_image=stream2, - data_slice=stream2[0:99], - pickled=testobj2, - ) + stream1 = self._load_stream('binary_data_one.dat') + stream2 = self._load_stream('binary_data_two.dat') + engine = engines.testing_engine( + options={"deprecate_large_types": deprecate_large_types}) + + binary_table = self._fixture(engine) + + with engine.connect() as conn: + conn.execute( + binary_table.insert(), + primary_id=1, + misc='binary_data_one.dat', + data=stream1, + data_image=stream1, + data_slice=stream1[0:100], + pickled=testobj1, + mypickle=testobj3, + ) + conn.execute( + binary_table.insert(), + primary_id=2, + misc='binary_data_two.dat', + data=stream2, + data_image=stream2, + data_slice=stream2[0:99], + pickled=testobj2, + ) - # TODO: pyodbc does not seem to accept "None" for a VARBINARY - # column (data=None). error: [Microsoft][ODBC SQL Server - # Driver][SQL Server]Implicit conversion from data type varchar - # to varbinary is not allowed. Use the CONVERT function to run - # this query. 
(257) binary_table.insert().execute(primary_id=3, - # misc='binary_data_two.dat', data=None, data_image=None, - # data_slice=stream2[0:99], pickled=None) - - binary_table.insert().execute( - primary_id=3, - misc='binary_data_two.dat', data_image=None, - data_slice=stream2[0:99], pickled=None) for stmt in \ binary_table.select(order_by=binary_table.c.primary_id), \ text( @@ -795,7 +803,8 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): data_slice=types.BINARY(100), pickled=PickleType, mypickle=MyPickleType), bind=testing.db): - l = stmt.execute().fetchall() + with engine.connect() as conn: + l = conn.execute(stmt).fetchall() eq_(list(stream1), list(l[0]['data'])) paddedstream = list(stream1[0:100]) paddedstream.extend(['\x00'] * (100 - len(paddedstream))) @@ -807,7 +816,48 @@ class BinaryTest(fixtures.TestBase, AssertsExecutionResults): eq_(testobj3.moredata, l[0]['mypickle'].moredata) eq_(l[0]['mypickle'].stuff, 'this is the right stuff') - def load_stream(self, name, len=3000): + def _test_binary_none(self, deprecate_large_types): + engine = engines.testing_engine( + options={"deprecate_large_types": deprecate_large_types}) + + binary_table = self._fixture(engine) + + stream2 = self._load_stream('binary_data_two.dat') + + with engine.connect() as conn: + conn.execute( + binary_table.insert(), + primary_id=3, + misc='binary_data_two.dat', data_image=None, + data_slice=stream2[0:99], pickled=None) + for stmt in \ + binary_table.select(), \ + text( + 'select * from binary_table', + typemap=dict( + data=mssql.MSVarBinary(8000), + data_image=mssql.MSImage, + data_slice=types.BINARY(100), + pickled=PickleType, + mypickle=MyPickleType), + bind=testing.db): + row = conn.execute(stmt).first() + eq_( + row['pickled'], None + ) + eq_( + row['data_image'], None + ) + + # the type we used here is 100 bytes + # so we will get 100 bytes zero-padded + paddedstream = list(stream2[0:99]) + paddedstream.extend(['\x00'] * (100 - len(paddedstream))) + eq_( + list(row['data_slice']), paddedstream + ) + + def _load_stream(self, name, len=3000): fp = open( os.path.join(os.path.dirname(__file__), "..", "..", name), 'rb') stream = fp.read(len) diff --git a/test/dialect/postgresql/test_compiler.py b/test/dialect/postgresql/test_compiler.py index aa3f80fdc..9fa5c9804 100644 --- a/test/dialect/postgresql/test_compiler.py +++ b/test/dialect/postgresql/test_compiler.py @@ -5,7 +5,8 @@ from sqlalchemy.testing.assertions import AssertsCompiledSQL, is_, \ from sqlalchemy.testing import engines, fixtures from sqlalchemy import testing from sqlalchemy import Sequence, Table, Column, Integer, update, String,\ - insert, func, MetaData, Enum, Index, and_, delete, select, cast, text + insert, func, MetaData, Enum, Index, and_, delete, select, cast, text, \ + Text from sqlalchemy.dialects.postgresql import ExcludeConstraint, array from sqlalchemy import exc, schema from sqlalchemy.dialects.postgresql import base as postgresql @@ -369,6 +370,28 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): 'USING hash (data)', dialect=postgresql.dialect()) + def test_create_index_with_with(self): + m = MetaData() + tbl = Table('testtbl', m, Column('data', String)) + + idx1 = Index('test_idx1', tbl.c.data) + idx2 = Index( + 'test_idx2', tbl.c.data, postgresql_with={"fillfactor": 50}) + idx3 = Index('test_idx3', tbl.c.data, postgresql_using="gist", + postgresql_with={"buffering": "off"}) + + self.assert_compile(schema.CreateIndex(idx1), + 'CREATE INDEX test_idx1 ON testtbl ' + '(data)') + 
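An aside on the new postgresql_with option this test introduces: the same WITH clause can be produced from the public Index API outside the test harness. A minimal sketch, assuming an illustrative accounts table that is not part of this patch:

    from sqlalchemy import Column, Index, Integer, MetaData, Table
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.schema import CreateIndex

    m = MetaData()
    # illustrative table; any indexed column works the same way
    accounts = Table('accounts', m, Column('balance', Integer))

    # storage parameters are passed through verbatim as WITH (key = value, ...)
    idx = Index('ix_balance', accounts.c.balance,
                postgresql_with={'fillfactor': 70})

    print(CreateIndex(idx).compile(dialect=postgresql.dialect()))
    # CREATE INDEX ix_balance ON accounts (balance) WITH (fillfactor = 70)

Keys and values are emitted verbatim, so validation of the storage parameters is left to PostgreSQL itself.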
self.assert_compile(schema.CreateIndex(idx2), + 'CREATE INDEX test_idx2 ON testtbl ' + '(data) ' + 'WITH (fillfactor = 50)') + self.assert_compile(schema.CreateIndex(idx3), + 'CREATE INDEX test_idx3 ON testtbl ' + 'USING gist (data) ' + 'WITH (buffering = off)') + def test_create_index_expr_gets_parens(self): m = MetaData() tbl = Table('testtbl', m, Column('x', Integer), Column('y', Integer)) @@ -443,8 +466,47 @@ class CompileTest(fixtures.TestBase, AssertsCompiledSQL): tbl.append_constraint(cons_copy) self.assert_compile(schema.AddConstraint(cons_copy), 'ALTER TABLE testtbl ADD EXCLUDE USING gist ' - '(room WITH =)', - dialect=postgresql.dialect()) + '(room WITH =)') + + def test_exclude_constraint_text(self): + m = MetaData() + cons = ExcludeConstraint((text('room::TEXT'), '=')) + Table( + 'testtbl', m, + Column('room', String), + cons) + self.assert_compile( + schema.AddConstraint(cons), + 'ALTER TABLE testtbl ADD EXCLUDE USING gist ' + '(room::TEXT WITH =)') + + def test_exclude_constraint_cast(self): + m = MetaData() + tbl = Table( + 'testtbl', m, + Column('room', String) + ) + cons = ExcludeConstraint((cast(tbl.c.room, Text), '=')) + tbl.append_constraint(cons) + self.assert_compile( + schema.AddConstraint(cons), + 'ALTER TABLE testtbl ADD EXCLUDE USING gist ' + '(CAST(room AS TEXT) WITH =)' + ) + + def test_exclude_constraint_cast_quote(self): + m = MetaData() + tbl = Table( + 'testtbl', m, + Column('Room', String) + ) + cons = ExcludeConstraint((cast(tbl.c.Room, Text), '=')) + tbl.append_constraint(cons) + self.assert_compile( + schema.AddConstraint(cons), + 'ALTER TABLE testtbl ADD EXCLUDE USING gist ' + '(CAST("Room" AS TEXT) WITH =)' + ) def test_substring(self): self.assert_compile(func.substring('abc', 1, 2), diff --git a/test/dialect/postgresql/test_dialect.py b/test/dialect/postgresql/test_dialect.py index bdd292fff..52620bb78 100644 --- a/test/dialect/postgresql/test_dialect.py +++ b/test/dialect/postgresql/test_dialect.py @@ -14,6 +14,7 @@ from sqlalchemy.dialects.postgresql import base as postgresql import logging import logging.handlers from sqlalchemy.testing.mock import Mock +from sqlalchemy.engine import engine_from_config class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): @@ -59,16 +60,19 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): eq_(testing.db.dialect._get_server_version_info(mock_conn(string)), version) - @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature') + @testing.requires.psycopg2_compatibility def test_psycopg2_version(self): v = testing.db.dialect.psycopg2_version assert testing.db.dialect.dbapi.__version__.\ startswith(".".join(str(x) for x in v)) - @testing.only_on('postgresql+psycopg2', 'psycopg2-specific feature') + @testing.requires.psycopg2_compatibility def test_psycopg2_non_standard_err(self): - from psycopg2.extensions import TransactionRollbackError - import psycopg2 + # under pypy the name here is psycopg2cffi + psycopg2 = testing.db.dialect.dbapi + TransactionRollbackError = __import__( + "%s.extensions" % psycopg2.__name__ + ).extensions.TransactionRollbackError exception = exc.DBAPIError.instance( "some statement", {}, TransactionRollbackError("foo"), @@ -78,7 +82,7 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): # currently not passing with pg 9.3 that does not seem to generate # any notices here, would rather find a way to mock this @testing.requires.no_coverage - @testing.only_on('postgresql+psycopg2', 'psycopg2-specific 
feature') + @testing.requires.psycopg2_compatibility def _test_notice_logging(self): log = logging.getLogger('sqlalchemy.dialects.postgresql') buf = logging.handlers.BufferingHandler(100) @@ -99,9 +103,7 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): assert 'will create implicit sequence' in msgs assert 'will create implicit index' in msgs - @testing.only_on( - ['postgresql+psycopg2', 'postgresql+pg8000'], - 'psycopg2/pg8000-specific feature') + @testing.requires.psycopg2_or_pg8000_compatibility @engines.close_open_connections def test_client_encoding(self): c = testing.db.connect() @@ -120,10 +122,23 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): new_encoding = c.execute("show client_encoding").fetchone()[0] eq_(new_encoding, test_encoding) - @testing.only_on( - ['postgresql+psycopg2', 'postgresql+pg8000', - 'postgresql+psycopg2cffi'], - 'psycopg2 / pg8000 - specific feature') + @testing.requires.psycopg2_compatibility + def test_pg_dialect_use_native_unicode_from_config(self): + config = { + 'sqlalchemy.url': testing.db.url, + 'sqlalchemy.use_native_unicode': "false"} + + e = engine_from_config(config, _initialize=False) + eq_(e.dialect.use_native_unicode, False) + + config = { + 'sqlalchemy.url': testing.db.url, + 'sqlalchemy.use_native_unicode': "true"} + + e = engine_from_config(config, _initialize=False) + eq_(e.dialect.use_native_unicode, True) + + @testing.requires.psycopg2_or_pg8000_compatibility @engines.close_open_connections def test_autocommit_isolation_level(self): c = testing.db.connect().execution_options( @@ -217,8 +232,7 @@ class MiscTest(fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL): testing.db.execute('drop table speedy_users') @testing.fails_on('+zxjdbc', 'psycopg2/pg8000 specific assertion') - @testing.fails_on('pypostgresql', - 'psycopg2/pg8000 specific assertion') + @testing.requires.psycopg2_or_pg8000_compatibility def test_numeric_raise(self): stmt = text( "select cast('hi' as char) as hi", typemap={'hi': Numeric}) diff --git a/test/dialect/postgresql/test_query.py b/test/dialect/postgresql/test_query.py index 27cb958fd..4a33644e0 100644 --- a/test/dialect/postgresql/test_query.py +++ b/test/dialect/postgresql/test_query.py @@ -549,7 +549,7 @@ class InsertTest(fixtures.TestBase, AssertsExecutionResults): class ServerSideCursorsTest(fixtures.TestBase, AssertsExecutionResults): - __only_on__ = 'postgresql+psycopg2' + __requires__ = 'psycopg2_compatibility', def _fixture(self, server_side_cursors): self.engine = engines.testing_engine( diff --git a/test/dialect/postgresql/test_reflection.py b/test/dialect/postgresql/test_reflection.py index 0ebe68cba..0354fa436 100644 --- a/test/dialect/postgresql/test_reflection.py +++ b/test/dialect/postgresql/test_reflection.py @@ -12,6 +12,7 @@ from sqlalchemy import Table, Column, MetaData, Integer, String, \ from sqlalchemy import exc import sqlalchemy as sa from sqlalchemy.dialects.postgresql import base as postgresql +from sqlalchemy.dialects.postgresql import ARRAY class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): @@ -70,7 +71,7 @@ class ForeignTableReflectionTest(fixtures.TablesTest, AssertsExecutionResults): eq_(names, ['testtable']) -class MaterialiedViewReflectionTest( +class MaterializedViewReflectionTest( fixtures.TablesTest, AssertsExecutionResults): """Test reflection on materialized views""" @@ -673,6 +674,59 @@ class ReflectionTest(fixtures.TestBase): conn.close() @testing.provide_metadata + def 
test_index_reflection_with_storage_options(self): + """reflect indexes with storage options set""" + + metadata = self.metadata + + Table( + 't', metadata, + Column('id', Integer, primary_key=True), + Column('x', Integer) + ) + metadata.create_all() + + with testing.db.connect().execution_options(autocommit=True) as conn: + conn.execute("CREATE INDEX idx1 ON t (x) WITH (fillfactor = 50)") + + ind = testing.db.dialect.get_indexes(conn, "t", None) + eq_(ind, [{'unique': False, 'column_names': ['x'], 'name': 'idx1', + 'dialect_options': + {"postgresql_with": {"fillfactor": "50"}}}]) + + m = MetaData() + t1 = Table('t', m, autoload_with=conn) + eq_( + list(t1.indexes)[0].dialect_options['postgresql']['with'], + {"fillfactor": "50"} + ) + + @testing.provide_metadata + def test_index_reflection_with_access_method(self): + """reflect indexes with access method set""" + + metadata = self.metadata + + Table( + 't', metadata, + Column('id', Integer, primary_key=True), + Column('x', ARRAY(Integer)) + ) + metadata.create_all() + with testing.db.connect().execution_options(autocommit=True) as conn: + conn.execute("CREATE INDEX idx1 ON t USING gin (x)") + + ind = testing.db.dialect.get_indexes(conn, "t", None) + eq_(ind, [{'unique': False, 'column_names': ['x'], 'name': 'idx1', + 'dialect_options': {'postgresql_using': 'gin'}}]) + m = MetaData() + t1 = Table('t', m, autoload_with=conn) + eq_( + list(t1.indexes)[0].dialect_options['postgresql']['using'], + 'gin' + ) + + @testing.provide_metadata def test_foreign_key_option_inspection(self): metadata = self.metadata Table( @@ -817,7 +871,7 @@ class ReflectionTest(fixtures.TestBase): }]) @testing.provide_metadata - @testing.only_on("postgresql>=8.5") + @testing.only_on("postgresql >= 8.5") def test_reflection_with_unique_constraint(self): insp = inspect(testing.db) diff --git a/test/dialect/postgresql/test_types.py b/test/dialect/postgresql/test_types.py index 393ef43de..fac0f2df8 100644 --- a/test/dialect/postgresql/test_types.py +++ b/test/dialect/postgresql/test_types.py @@ -171,8 +171,9 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults): (util.u('réveillé'), util.u('drôle'), util.u('S’il')) ) - def test_non_native_type(self): - metadata = MetaData() + @testing.provide_metadata + def test_non_native_enum(self): + metadata = self.metadata t1 = Table( 'foo', metadata, @@ -188,14 +189,53 @@ class EnumTest(fixtures.TestBase, AssertsExecutionResults): def go(): t1.create(testing.db) - try: - self.assert_sql( - testing.db, go, [ - ("CREATE TABLE foo (\tbar " - "VARCHAR(5), \tCONSTRAINT myenum CHECK " - "(bar IN ('one', 'two', 'three')))", {})]) - finally: - metadata.drop_all(testing.db) + self.assert_sql( + testing.db, go, [ + ("CREATE TABLE foo (\tbar " + "VARCHAR(5), \tCONSTRAINT myenum CHECK " + "(bar IN ('one', 'two', 'three')))", {})]) + with testing.db.begin() as conn: + conn.execute( + t1.insert(), {'bar': 'two'} + ) + eq_( + conn.scalar(select([t1.c.bar])), 'two' + ) + + @testing.provide_metadata + def test_non_native_enum_w_unicode(self): + metadata = self.metadata + t1 = Table( + 'foo', + metadata, + Column( + 'bar', + Enum('B', util.u('Ü'), name='myenum', native_enum=False))) + + def go(): + t1.create(testing.db) + + self.assert_sql( + testing.db, + go, + [ + ( + util.u( + "CREATE TABLE foo (\tbar " + "VARCHAR(1), \tCONSTRAINT myenum CHECK " + "(bar IN ('B', 'Ü')))" + ), + {} + ) + ]) + + with testing.db.begin() as conn: + conn.execute( + t1.insert(), {'bar': util.u('Ü')} + ) + eq_( + conn.scalar(select([t1.c.bar])), util.u('Ü') + )
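As the two enum tests above exercise, native_enum=False makes Enum fall back to VARCHAR plus a named CHECK constraint rather than a real PostgreSQL ENUM type, which is what lets the non-ascii value round-trip without any CREATE TYPE. A minimal sketch, assuming an illustrative shirts table whose names are not from this patch:

    from sqlalchemy import Column, Enum, MetaData, Table
    from sqlalchemy.dialects import postgresql
    from sqlalchemy.schema import CreateTable

    m = MetaData()
    # native_enum=False: no CREATE TYPE is emitted; values are policed
    # by a CHECK constraint named after the enum instead
    shirts = Table('shirts', m,
                   Column('size', Enum('S', 'M', 'L', name='size_enum',
                                       native_enum=False)))

    print(CreateTable(shirts).compile(dialect=postgresql.dialect()))
    # roughly: CREATE TABLE shirts (
    #     size VARCHAR(1),
    #     CONSTRAINT size_enum CHECK (size IN ('S', 'M', 'L'))
    # )

The VARCHAR length is derived from the longest label, which is why the test above expects VARCHAR(1) for the 'B' / 'Ü' enum.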
@testing.provide_metadata def test_disable_create(self): @@ -1527,7 +1567,7 @@ class HStoreRoundTripTest(fixtures.TablesTest): self._assert_data([{"k1": "r1v1", "k2": "r1v2"}]) def _non_native_engine(self): - if testing.against("postgresql+psycopg2"): + if testing.requires.psycopg2_native_hstore.enabled: engine = engines.testing_engine( options=dict( use_native_hstore=False)) @@ -1541,7 +1581,7 @@ class HStoreRoundTripTest(fixtures.TablesTest): cols = insp.get_columns('data_table') assert isinstance(cols[2]['type'], HSTORE) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_hstore def test_insert_native(self): engine = testing.db self._test_insert(engine) @@ -1550,7 +1590,7 @@ class HStoreRoundTripTest(fixtures.TablesTest): engine = self._non_native_engine() self._test_insert(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_hstore def test_criterion_native(self): engine = testing.db self._fixture_data(engine) @@ -1584,7 +1624,7 @@ class HStoreRoundTripTest(fixtures.TablesTest): engine = self._non_native_engine() self._test_fixed_round_trip(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_hstore def test_fixed_round_trip_native(self): engine = testing.db self._test_fixed_round_trip(engine) @@ -1605,12 +1645,12 @@ class HStoreRoundTripTest(fixtures.TablesTest): } ) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_hstore def test_unicode_round_trip_python(self): engine = self._non_native_engine() self._test_unicode_round_trip(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_hstore def test_unicode_round_trip_native(self): engine = testing.db self._test_unicode_round_trip(engine) @@ -1619,7 +1659,7 @@ class HStoreRoundTripTest(fixtures.TablesTest): engine = self._non_native_engine() self._test_escaped_quotes_round_trip(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_hstore def test_escaped_quotes_round_trip_native(self): engine = testing.db self._test_escaped_quotes_round_trip(engine) @@ -1651,14 +1691,16 @@ class HStoreRoundTripTest(fixtures.TablesTest): class _RangeTypeMixin(object): - __requires__ = 'range_types', - __dialect__ = 'postgresql+psycopg2' + __requires__ = 'range_types', 'psycopg2_compatibility' __backend__ = True def extras(self): # done this way so we don't get ImportErrors with # older psycopg2 versions. 
- from psycopg2 import extras + if testing.against("postgresql+psycopg2cffi"): + from psycopg2cffi import extras + else: + from psycopg2 import extras return extras @classmethod @@ -1926,7 +1968,7 @@ class DateTimeTZRangeTests(_RangeTypeMixin, fixtures.TablesTest): def tstzs(self): if self._tstzs is None: - lower = testing.db.connect().scalar( + lower = testing.db.scalar( func.current_timestamp().select() ) upper = lower + datetime.timedelta(1) @@ -2176,17 +2218,17 @@ class JSONRoundTripTest(fixtures.TablesTest): cols = insp.get_columns('data_table') assert isinstance(cols[2]['type'], self.test_type) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_insert_native(self): engine = testing.db self._test_insert(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_insert_native_nulls(self): engine = testing.db self._test_insert_nulls(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_insert_native_none_as_null(self): engine = testing.db self._test_insert_none_as_null(engine) @@ -2244,15 +2286,15 @@ class JSONRoundTripTest(fixtures.TablesTest): }, ) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_custom_native(self): self._test_custom_serialize_deserialize(True) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_custom_python(self): self._test_custom_serialize_deserialize(False) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_criterion_native(self): engine = testing.db self._fixture_data(engine) @@ -2324,7 +2366,7 @@ class JSONRoundTripTest(fixtures.TablesTest): engine = self._non_native_engine() self._test_fixed_round_trip(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_fixed_round_trip_native(self): engine = testing.db self._test_fixed_round_trip(engine) @@ -2351,7 +2393,7 @@ class JSONRoundTripTest(fixtures.TablesTest): engine = self._non_native_engine() self._test_unicode_round_trip(engine) - @testing.only_on("postgresql+psycopg2") + @testing.requires.psycopg2_native_json def test_unicode_round_trip_native(self): engine = testing.db self._test_unicode_round_trip(engine) diff --git a/test/engine/test_bind.py b/test/engine/test_bind.py index 8f6c547f1..69ab721c1 100644 --- a/test/engine/test_bind.py +++ b/test/engine/test_bind.py @@ -11,6 +11,7 @@ import sqlalchemy as sa from sqlalchemy import testing from sqlalchemy.testing import fixtures + class BindTest(fixtures.TestBase): def test_bind_close_engine(self): e = testing.db @@ -76,7 +77,8 @@ class BindTest(fixtures.TestBase): ]: assert_raises_message( exc.UnboundExecutionError, - "Table object 'test_table' is not bound to an Engine or Connection.", + ("Table object 'test_table' is not bound to an Engine or " + "Connection."), meth ) @@ -163,7 +165,6 @@ class BindTest(fixtures.TestBase): finally: metadata.drop_all(bind=conn) - def test_clauseelement(self): metadata = MetaData() table = Table('test_table', metadata, @@ -198,5 +199,3 @@ class BindTest(fixtures.TestBase): if isinstance(bind, engine.Connection): bind.close() metadata.drop_all(bind=testing.db) - - diff --git a/test/engine/test_ddlevents.py b/test/engine/test_ddlevents.py index 0d828b340..8beb255eb 100644 --- a/test/engine/test_ddlevents.py +++ b/test/engine/test_ddlevents.py @@ -11,38 +11,10 @@ from sqlalchemy import testing from sqlalchemy.testing import engines from 
sqlalchemy.testing import AssertsCompiledSQL, eq_ from sqlalchemy.testing import fixtures +from sqlalchemy.testing import mock class DDLEventTest(fixtures.TestBase): - class Canary(object): - def __init__(self, schema_item, bind): - self.state = None - self.schema_item = schema_item - self.bind = bind - - def before_create(self, schema_item, bind, **kw): - assert self.state is None - assert schema_item is self.schema_item - assert bind is self.bind - self.state = 'before-create' - - def after_create(self, schema_item, bind, **kw): - assert self.state in ('before-create', 'skipped') - assert schema_item is self.schema_item - assert bind is self.bind - self.state = 'after-create' - - def before_drop(self, schema_item, bind, **kw): - assert self.state is None - assert schema_item is self.schema_item - assert bind is self.bind - self.state = 'before-drop' - - def after_drop(self, schema_item, bind, **kw): - assert self.state in ('before-drop', 'skipped') - assert schema_item is self.schema_item - assert bind is self.bind - self.state = 'after-drop' def setup(self): self.bind = engines.mock_engine() @@ -51,128 +23,276 @@ class DDLEventTest(fixtures.TestBase): def test_table_create_before(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'before_create', canary.before_create) table.create(bind) - assert canary.state == 'before-create' table.drop(bind) - assert canary.state == 'before-create' + eq_( + canary.mock_calls, + [ + mock.call.before_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY) + ] + ) def test_table_create_after(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'after_create', canary.after_create) - canary.state = 'skipped' table.create(bind) - assert canary.state == 'after-create' table.drop(bind) - assert canary.state == 'after-create' + eq_( + canary.mock_calls, + [ + mock.call.after_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY) + ] + ) def test_table_create_both(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'before_create', canary.before_create) event.listen(table, 'after_create', canary.after_create) table.create(bind) - assert canary.state == 'after-create' table.drop(bind) - assert canary.state == 'after-create' + eq_( + canary.mock_calls, + [ + mock.call.before_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + mock.call.after_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY) + ] + ) def test_table_drop_before(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'before_drop', canary.before_drop) table.create(bind) - assert canary.state is None table.drop(bind) - assert canary.state == 'before-drop' + eq_( + canary.mock_calls, + [ + mock.call.before_drop( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + ] + ) def test_table_drop_after(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'after_drop', canary.after_drop) table.create(bind) - assert canary.state is None canary.state = 'skipped' table.drop(bind) - assert canary.state == 'after-drop' + eq_( + 
canary.mock_calls, + [ + mock.call.after_drop( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + ] + ) def test_table_drop_both(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'before_drop', canary.before_drop) event.listen(table, 'after_drop', canary.after_drop) table.create(bind) - assert canary.state is None table.drop(bind) - assert canary.state == 'after-drop' + eq_( + canary.mock_calls, + [ + mock.call.before_drop( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + mock.call.after_drop( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + ] + ) def test_table_all(self): table, bind = self.table, self.bind - canary = self.Canary(table, bind) + canary = mock.Mock() event.listen(table, 'before_create', canary.before_create) event.listen(table, 'after_create', canary.after_create) event.listen(table, 'before_drop', canary.before_drop) event.listen(table, 'after_drop', canary.after_drop) - assert canary.state is None table.create(bind) - assert canary.state == 'after-create' - canary.state = None table.drop(bind) - assert canary.state == 'after-drop' + eq_( + canary.mock_calls, + [ + mock.call.before_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + mock.call.after_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + mock.call.before_drop( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + mock.call.after_drop( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + ] + ) - def test_table_create_before(self): + def test_metadata_create_before(self): metadata, bind = self.metadata, self.bind - canary = self.Canary(metadata, bind) + canary = mock.Mock() event.listen(metadata, 'before_create', canary.before_create) metadata.create_all(bind) - assert canary.state == 'before-create' metadata.drop_all(bind) - assert canary.state == 'before-create' + eq_( + canary.mock_calls, + [ + mock.call.before_create( + # checkfirst is False because of the MockConnection + # used in the current testing strategy. 
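+ # (with a real Engine this would typically be checkfirst=True,
+ # as MetaData.create_all() defaults to checkfirst=True)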
+ metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + ] + ) def test_metadata_create_after(self): metadata, bind = self.metadata, self.bind - canary = self.Canary(metadata, bind) + canary = mock.Mock() event.listen(metadata, 'after_create', canary.after_create) - canary.state = 'skipped' metadata.create_all(bind) - assert canary.state == 'after-create' metadata.drop_all(bind) - assert canary.state == 'after-create' + eq_( + canary.mock_calls, + [ + mock.call.after_create( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + ] + ) def test_metadata_create_both(self): metadata, bind = self.metadata, self.bind - canary = self.Canary(metadata, bind) + canary = mock.Mock() event.listen(metadata, 'before_create', canary.before_create) event.listen(metadata, 'after_create', canary.after_create) metadata.create_all(bind) - assert canary.state == 'after-create' metadata.drop_all(bind) - assert canary.state == 'after-create' + eq_( + canary.mock_calls, + [ + mock.call.before_create( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + mock.call.after_create( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + ] + ) + + def test_metadata_drop_before(self): + metadata, bind = self.metadata, self.bind + canary = mock.Mock() + event.listen(metadata, 'before_drop', canary.before_drop) + + metadata.create_all(bind) + metadata.drop_all(bind) + eq_( + canary.mock_calls, + [ + mock.call.before_drop( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + ] + ) + + def test_metadata_drop_after(self): + metadata, bind = self.metadata, self.bind + canary = mock.Mock() + event.listen(metadata, 'after_drop', canary.after_drop) + + metadata.create_all(bind) + metadata.drop_all(bind) + eq_( + canary.mock_calls, + [ + mock.call.after_drop( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + ] + ) + + def test_metadata_drop_both(self): + metadata, bind = self.metadata, self.bind + canary = mock.Mock() + + event.listen(metadata, 'before_drop', canary.before_drop) + event.listen(metadata, 'after_drop', canary.after_drop) + + metadata.create_all(bind) + metadata.drop_all(bind) + eq_( + canary.mock_calls, + [ + mock.call.before_drop( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + mock.call.after_drop( + metadata, self.bind, checkfirst=False, + tables=list(metadata.tables.values()), + _ddl_runner=mock.ANY), + ] + ) def test_metadata_table_isolation(self): - metadata, table, bind = self.metadata, self.table, self.bind - table_canary = self.Canary(table, bind) + metadata, table = self.metadata, self.table + table_canary = mock.Mock() + metadata_canary = mock.Mock() event.listen(table, 'before_create', table_canary.before_create) - metadata_canary = self.Canary(metadata, bind) event.listen(metadata, 'before_create', metadata_canary.before_create) self.table.create(self.bind) - assert metadata_canary.state == None + eq_( + table_canary.mock_calls, + [ + mock.call.before_create( + table, self.bind, checkfirst=False, + _ddl_runner=mock.ANY, _is_metadata_operation=mock.ANY), + ] + ) + eq_( + metadata_canary.mock_calls, + [] + ) def test_append_listener(self): metadata, table, bind = self.metadata, self.table, self.bind @@ -266,16 +386,16 @@ 
class DDLExecutionTest(fixtures.TestBase): metadata, users, engine = self.metadata, self.users, self.engine canary = [] users.append_ddl_listener('before-create', - lambda e, t, b:canary.append('mxyzptlk') + lambda e, t, b: canary.append('mxyzptlk') ) users.append_ddl_listener('after-create', - lambda e, t, b:canary.append('klptzyxm') + lambda e, t, b: canary.append('klptzyxm') ) users.append_ddl_listener('before-drop', - lambda e, t, b:canary.append('xyzzy') + lambda e, t, b: canary.append('xyzzy') ) users.append_ddl_listener('after-drop', - lambda e, t, b:canary.append('fnord') + lambda e, t, b: canary.append('fnord') ) metadata.create_all() @@ -295,16 +415,16 @@ class DDLExecutionTest(fixtures.TestBase): metadata, users, engine = self.metadata, self.users, self.engine canary = [] metadata.append_ddl_listener('before-create', - lambda e, t, b, tables=None:canary.append('mxyzptlk') + lambda e, t, b, tables=None: canary.append('mxyzptlk') ) metadata.append_ddl_listener('after-create', - lambda e, t, b, tables=None:canary.append('klptzyxm') + lambda e, t, b, tables=None: canary.append('klptzyxm') ) metadata.append_ddl_listener('before-drop', - lambda e, t, b, tables=None:canary.append('xyzzy') + lambda e, t, b, tables=None: canary.append('xyzzy') ) metadata.append_ddl_listener('after-drop', - lambda e, t, b, tables=None:canary.append('fnord') + lambda e, t, b, tables=None: canary.append('fnord') ) metadata.create_all() @@ -369,8 +489,8 @@ class DDLExecutionTest(fixtures.TestBase): metadata, users, engine = self.metadata, self.users, self.engine nonpg_mock = engines.mock_engine(dialect_name='sqlite') pg_mock = engines.mock_engine(dialect_name='postgresql') - constraint = CheckConstraint('a < b', name='my_test_constraint' - , table=users) + constraint = CheckConstraint('a < b', name='my_test_constraint', + table=users) # by placing the constraint in an Add/Drop construct, the # 'inline_ddl' flag is set to False @@ -405,8 +525,8 @@ class DDLExecutionTest(fixtures.TestBase): metadata, users, engine = self.metadata, self.users, self.engine nonpg_mock = engines.mock_engine(dialect_name='sqlite') pg_mock = engines.mock_engine(dialect_name='postgresql') - constraint = CheckConstraint('a < b', name='my_test_constraint' - , table=users) + constraint = CheckConstraint('a < b', name='my_test_constraint', + table=users) # by placing the constraint in an Add/Drop construct, the # 'inline_ddl' flag is set to False @@ -489,8 +609,6 @@ class DDLExecutionTest(fixtures.TestBase): ) - - class DDLTest(fixtures.TestBase, AssertsCompiledSQL): def mock_engine(self): executor = lambda *a, **kw: None @@ -527,12 +645,11 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL): dialect=dialect) self.assert_compile(ddl.against(sane_schema), 'S S-T T-s.t-b', dialect=dialect) - self.assert_compile(ddl.against(insane_alone), 'S S-T T-"t t"-b' - , dialect=dialect) + self.assert_compile(ddl.against(insane_alone), 'S S-T T-"t t"-b', + dialect=dialect) self.assert_compile(ddl.against(insane_schema), 'S S-T T-"s s"."t t"-b', dialect=dialect) - def test_filter(self): cx = self.mock_engine() @@ -543,10 +660,10 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL): assert DDL('').execute_if(dialect=target)._should_execute(tbl, cx) assert not DDL('').execute_if(dialect='bogus').\ _should_execute(tbl, cx) - assert DDL('').execute_if(callable_=lambda d, y,z, **kw: True).\ + assert DDL('').execute_if(callable_=lambda d, y, z, **kw: True).\ _should_execute(tbl, cx) assert(DDL('').execute_if( - callable_=lambda d, y,z, **kw: 
z.engine.name + callable_=lambda d, y, z, **kw: z.engine.name != 'bogus'). _should_execute(tbl, cx)) @@ -561,16 +678,14 @@ class DDLTest(fixtures.TestBase, AssertsCompiledSQL): assert DDL('', on=target)._should_execute_deprecated('x', tbl, cx) assert not DDL('', on='bogus').\ _should_execute_deprecated('x', tbl, cx) - assert DDL('', on=lambda d, x,y,z: True).\ + assert DDL('', on=lambda d, x, y, z: True).\ _should_execute_deprecated('x', tbl, cx) - assert(DDL('', on=lambda d, x,y,z: z.engine.name != 'bogus'). + assert(DDL('', on=lambda d, x, y, z: z.engine.name != 'bogus'). _should_execute_deprecated('x', tbl, cx)) def test_repr(self): assert repr(DDL('s')) assert repr(DDL('s', on='engine')) assert repr(DDL('s', on=lambda x: 1)) - assert repr(DDL('s', context={'a':1})) - assert repr(DDL('s', on='engine', context={'a':1})) - - + assert repr(DDL('s', context={'a': 1})) + assert repr(DDL('s', on='engine', context={'a': 1})) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index b0256d325..fbb1878dc 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -1,7 +1,7 @@ # coding: utf-8 from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \ - config, is_ + config, is_, is_not_, le_ import re from sqlalchemy.testing.util import picklers from sqlalchemy.interfaces import ConnectionProxy @@ -484,6 +484,32 @@ class ExecuteTest(fixtures.TestBase): eq_(canary, ["l1", "l2", "l3", "l1", "l2"]) @testing.requires.ad_hoc_engines + def test_dispose_event(self): + canary = Mock() + eng = create_engine(testing.db.url) + event.listen(eng, "engine_disposed", canary) + + conn = eng.connect() + conn.close() + eng.dispose() + + + conn = eng.connect() + conn.close() + + eq_( + canary.mock_calls, + [call(eng)] + ) + + eng.dispose() + + eq_( + canary.mock_calls, + [call(eng), call(eng)] + ) + + @testing.requires.ad_hoc_engines def test_autocommit_option_no_issue_first_connect(self): eng = create_engine(testing.db.url) eng.update_execution_options(autocommit=True) @@ -1021,76 +1047,91 @@ class ExecutionOptionsTest(fixtures.TestBase): ) -class AlternateResultProxyTest(fixtures.TestBase): +class AlternateResultProxyTest(fixtures.TablesTest): __requires__ = ('sqlite', ) @classmethod - def setup_class(cls): + def setup_bind(cls): cls.engine = engine = testing_engine('sqlite://') - m = MetaData() - cls.table = t = Table('test', m, - Column('x', Integer, primary_key=True), - Column('y', String(50, convert_unicode='force')) - ) - m.create_all(engine) - engine.execute(t.insert(), [ + return engine + + @classmethod + def define_tables(cls, metadata): + Table( + 'test', metadata, + Column('x', Integer, primary_key=True), + Column('y', String(50, convert_unicode='force')) + ) + + @classmethod + def insert_data(cls): + cls.engine.execute(cls.tables.test.insert(), [ {'x': i, 'y': "t_%d" % i} for i in range(1, 12) ]) - def _test_proxy(self, cls): + @contextmanager + def _proxy_fixture(self, cls): + self.table = self.tables.test + class ExcCtx(default.DefaultExecutionContext): def get_result_proxy(self): return cls(self) - self.engine.dialect.execution_ctx_cls = ExcCtx - rows = [] - r = self.engine.execute(select([self.table])) - assert isinstance(r, cls) - for i in range(5): - rows.append(r.fetchone()) - eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) + self.patcher = patch.object( + self.engine.dialect, "execution_ctx_cls", ExcCtx) + with self.patcher: + yield - rows = r.fetchmany(3) - eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)]) + def _test_proxy(self, cls): + 
with self._proxy_fixture(cls): + rows = [] + r = self.engine.execute(select([self.table])) + assert isinstance(r, cls) + for i in range(5): + rows.append(r.fetchone()) + eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) + + rows = r.fetchmany(3) + eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)]) - rows = r.fetchall() - eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)]) + rows = r.fetchall() + eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)]) - r = self.engine.execute(select([self.table])) - rows = r.fetchmany(None) - eq_(rows[0], (1, "t_1")) - # number of rows here could be one, or the whole thing - assert len(rows) == 1 or len(rows) == 11 + r = self.engine.execute(select([self.table])) + rows = r.fetchmany(None) + eq_(rows[0], (1, "t_1")) + # number of rows here could be one, or the whole thing + assert len(rows) == 1 or len(rows) == 11 - r = self.engine.execute(select([self.table]).limit(1)) - r.fetchone() - eq_(r.fetchone(), None) + r = self.engine.execute(select([self.table]).limit(1)) + r.fetchone() + eq_(r.fetchone(), None) - r = self.engine.execute(select([self.table]).limit(5)) - rows = r.fetchmany(6) - eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) + r = self.engine.execute(select([self.table]).limit(5)) + rows = r.fetchmany(6) + eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)]) - # result keeps going just fine with blank results... - eq_(r.fetchmany(2), []) + # result keeps going just fine with blank results... + eq_(r.fetchmany(2), []) - eq_(r.fetchmany(2), []) + eq_(r.fetchmany(2), []) - eq_(r.fetchall(), []) + eq_(r.fetchall(), []) - eq_(r.fetchone(), None) + eq_(r.fetchone(), None) - # until we close - r.close() + # until we close + r.close() - self._assert_result_closed(r) + self._assert_result_closed(r) - r = self.engine.execute(select([self.table]).limit(5)) - eq_(r.first(), (1, "t_1")) - self._assert_result_closed(r) + r = self.engine.execute(select([self.table]).limit(5)) + eq_(r.first(), (1, "t_1")) + self._assert_result_closed(r) - r = self.engine.execute(select([self.table]).limit(5)) - eq_(r.scalar(), 1) - self._assert_result_closed(r) + r = self.engine.execute(select([self.table]).limit(5)) + eq_(r.scalar(), 1) + self._assert_result_closed(r) def _assert_result_closed(self, r): assert_raises_message( @@ -1123,6 +1164,42 @@ class AlternateResultProxyTest(fixtures.TestBase): def test_buffered_column_result_proxy(self): self._test_proxy(_result.BufferedColumnResultProxy) + def test_buffered_row_growth(self): + with self._proxy_fixture(_result.BufferedRowResultProxy): + with self.engine.connect() as conn: + conn.execute(self.table.insert(), [ + {'x': i, 'y': "t_%d" % i} for i in range(15, 1200) + ]) + result = conn.execute(self.table.select()) + checks = { + 0: 5, 1: 10, 9: 20, 135: 250, 274: 500, + 1351: 1000 + } + for idx, row in enumerate(result, 0): + if idx in checks: + eq_(result._bufsize, checks[idx]) + le_( + len(result._BufferedRowResultProxy__rowbuffer), + 1000 + ) + + def test_max_row_buffer_option(self): + with self._proxy_fixture(_result.BufferedRowResultProxy): + with self.engine.connect() as conn: + conn.execute(self.table.insert(), [ + {'x': i, 'y': "t_%d" % i} for i in range(15, 1200) + ]) + result = conn.execution_options(max_row_buffer=27).execute( + self.table.select() + ) + for idx, row in enumerate(result, 0): + if idx in (16, 70, 150, 250): + eq_(result._bufsize, 27) + le_( + len(result._BufferedRowResultProxy__rowbuffer), + 27 + ) + class EngineEventsTest(fixtures.TestBase): __requires__ = 'ad_hoc_engines', @@ -1943,6 +2020,47 @@ class 
HandleErrorTest(fixtures.TestBase): self._test_alter_disconnect(True, False) self._test_alter_disconnect(False, False) + @testing.requires.independent_connections + def _test_alter_invalidate_pool_to_false(self, set_to_false): + orig_error = True + + engine = engines.testing_engine() + + @event.listens_for(engine, "handle_error") + def evt(ctx): + if set_to_false: + ctx.invalidate_pool_on_disconnect = False + + c1, c2, c3 = engine.pool.connect(), \ + engine.pool.connect(), engine.pool.connect() + crecs = [conn._connection_record for conn in (c1, c2, c3)] + c1.close() + c2.close() + c3.close() + + with patch.object(engine.dialect, "is_disconnect", + Mock(return_value=orig_error)): + + with engine.connect() as c: + target_crec = c.connection._connection_record + try: + c.execute("SELECT x FROM nonexistent") + assert False + except tsa.exc.StatementError as st: + eq_(st.connection_invalidated, True) + + for crec in crecs: + if crec is target_crec or not set_to_false: + is_not_(crec.connection, crec.get_connection()) + else: + is_(crec.connection, crec.get_connection()) + + def test_alter_invalidate_pool_to_false(self): + self._test_alter_invalidate_pool_to_false(True) + + def test_alter_invalidate_pool_stays_true(self): + self._test_alter_invalidate_pool_to_false(False) + def test_handle_error_event_connect_isolation_level(self): engine = engines.testing_engine() @@ -2532,3 +2650,87 @@ class DialectEventTest(fixtures.TestBase): def test_cursor_execute_wo_replace(self): self._test_cursor_execute(False) + + def test_connect_replace_params(self): + e = engines.testing_engine(options={"_initialize": False}) + + @event.listens_for(e, "do_connect") + def evt(dialect, conn_rec, cargs, cparams): + cargs[:] = ['foo', 'hoho'] + cparams.clear() + cparams['bar'] = 'bat' + conn_rec.info['boom'] = "bap" + + m1 = Mock() + e.dialect.connect = m1.real_connect + + with e.connect() as conn: + eq_(m1.mock_calls, [call.real_connect('foo', 'hoho', bar='bat')]) + eq_(conn.info['boom'], 'bap') + + def test_connect_do_connect(self): + e = engines.testing_engine(options={"_initialize": False}) + + m1 = Mock() + + @event.listens_for(e, "do_connect") + def evt1(dialect, conn_rec, cargs, cparams): + cargs[:] = ['foo', 'hoho'] + cparams.clear() + cparams['bar'] = 'bat' + conn_rec.info['boom'] = "one" + + @event.listens_for(e, "do_connect") + def evt2(dialect, conn_rec, cargs, cparams): + conn_rec.info['bap'] = "two" + return m1.our_connect(cargs, cparams) + + with e.connect() as conn: + # called with args + eq_( + m1.mock_calls, + [call.our_connect(['foo', 'hoho'], {'bar': 'bat'})]) + + eq_(conn.info['boom'], "one") + eq_(conn.info['bap'], "two") + + # returned our mock connection + is_(conn.connection.connection, m1.our_connect()) + + def test_connect_do_connect_info_there_after_recycle(self): + # test that info is maintained after the do_connect() + # event for a soft invalidation. + + e = engines.testing_engine(options={"_initialize": False}) + + @event.listens_for(e, "do_connect") + def evt1(dialect, conn_rec, cargs, cparams): + conn_rec.info['boom'] = "one" + + conn = e.connect() + eq_(conn.info['boom'], "one") + + conn.connection.invalidate(soft=True) + conn.close() + conn = e.connect() + eq_(conn.info['boom'], "one") + + def test_connect_do_connect_info_there_after_invalidate(self): + # test that info is maintained after the do_connect() + # event for a hard invalidation. 
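+ # (a hard invalidate discards the DBAPI connection outright, so
+ # do_connect runs again on the next checkout and repopulates the
+ # .info dictionary, as the assertions below verify)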
+ + e = engines.testing_engine(options={"_initialize": False}) + + @event.listens_for(e, "do_connect") + def evt1(dialect, conn_rec, cargs, cparams): + assert not conn_rec.info + conn_rec.info['boom'] = "one" + + conn = e.connect() + eq_(conn.info['boom'], "one") + + conn.connection.invalidate() + conn = e.connect() + eq_(conn.info['boom'], "one") + + diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index e53a99e15..4601a6bda 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -5,7 +5,7 @@ from sqlalchemy.engine.default import DefaultDialect import sqlalchemy as tsa from sqlalchemy.testing import fixtures from sqlalchemy import testing -from sqlalchemy.testing.mock import Mock, MagicMock +from sqlalchemy.testing.mock import Mock, MagicMock, call from sqlalchemy import event from sqlalchemy import select @@ -138,6 +138,38 @@ class CreateEngineTest(fixtures.TestBase): 'z=somevalue') assert e.echo is True + def test_pool_threadlocal_from_config(self): + dbapi = mock_dbapi + + config = { + 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test', + 'sqlalchemy.pool_threadlocal': "false"} + + e = engine_from_config(config, module=dbapi, _initialize=False) + eq_(e.pool._use_threadlocal, False) + + config = { + 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test', + 'sqlalchemy.pool_threadlocal': "true"} + + e = engine_from_config(config, module=dbapi, _initialize=False) + eq_(e.pool._use_threadlocal, True) + + def test_pool_reset_on_return_from_config(self): + dbapi = mock_dbapi + + for value, expected in [ + ("rollback", pool.reset_rollback), + ("commit", pool.reset_commit), + ("none", pool.reset_none) + ]: + config = { + 'sqlalchemy.url': 'postgresql://scott:tiger@somehost/test', + 'sqlalchemy.pool_reset_on_return': value} + + e = engine_from_config(config, module=dbapi, _initialize=False) + eq_(e.pool._reset_on_return, expected) + def test_engine_from_config_custom(self): from sqlalchemy import util from sqlalchemy.dialects import registry @@ -325,6 +357,33 @@ class TestRegNewDBAPI(fixtures.TestBase): e = create_engine("mysql+my_mock_dialect://") assert isinstance(e.dialect, MockDialect) + @testing.requires.sqlite + def test_wrapper_hooks(self): + def get_dialect_cls(url): + url.drivername = "sqlite" + return url.get_dialect() + + global WrapperFactory + WrapperFactory = Mock() + WrapperFactory.get_dialect_cls.side_effect = get_dialect_cls + + from sqlalchemy.dialects import registry + registry.register("wrapperdialect", __name__, "WrapperFactory") + + from sqlalchemy.dialects import sqlite + e = create_engine("wrapperdialect://") + + eq_(e.dialect.name, "sqlite") + assert isinstance(e.dialect, sqlite.dialect) + + eq_( + WrapperFactory.mock_calls, + [ + call.get_dialect_cls(url.make_url("sqlite://")), + call.engine_created(e) + ] + ) + class MockDialect(DefaultDialect): @classmethod diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py index 0c4557d49..451cb8b0e 100644 --- a/test/engine/test_pool.py +++ b/test/engine/test_pool.py @@ -4,11 +4,12 @@ from sqlalchemy import pool, select, event import sqlalchemy as tsa from sqlalchemy import testing from sqlalchemy.testing.util import gc_collect, lazy_gc -from sqlalchemy.testing import eq_, assert_raises, is_not_ +from sqlalchemy.testing import eq_, assert_raises, is_not_, is_ from sqlalchemy.testing.engines import testing_engine from sqlalchemy.testing import fixtures import random -from sqlalchemy.testing.mock import Mock, call +from sqlalchemy.testing.mock 
import Mock, call, patch +import weakref join_timeout = 10 @@ -25,21 +26,34 @@ def MockDBAPI(): db.connect = Mock(side_effect=Exception("connect failed")) else: db.connect = Mock(side_effect=connect) + db.is_shutdown = value db = Mock( connect=Mock(side_effect=connect), - shutdown=shutdown, _shutdown=False) + shutdown=shutdown, + is_shutdown=False) return db class PoolTestBase(fixtures.TestBase): def setup(self): pool.clear_managers() + self._teardown_conns = [] + + def teardown(self): + for ref in self._teardown_conns: + conn = ref() + if conn: + conn.close() @classmethod def teardown_class(cls): pool.clear_managers() + def _with_teardown(self, connection): + self._teardown_conns.append(weakref.ref(connection)) + return connection + def _queuepool_fixture(self, **kw): dbapi, pool = self._queuepool_dbapi_fixture(**kw) return pool @@ -49,6 +63,7 @@ class PoolTestBase(fixtures.TestBase): return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'), **kw) + class PoolTest(PoolTestBase): def test_manager(self): manager = pool.manage(MockDBAPI(), use_threadlocal=True) @@ -86,7 +101,6 @@ class PoolTest(PoolTestBase): ] ) - def test_bad_args(self): manager = pool.manage(MockDBAPI()) manager.connect(None) @@ -218,6 +232,7 @@ class PoolTest(PoolTestBase): class PoolDialectTest(PoolTestBase): def _dialect(self): canary = [] + class PoolDialect(object): def do_rollback(self, dbapi_connection): canary.append('R') @@ -266,6 +281,7 @@ class PoolEventsTest(PoolTestBase): def _first_connect_event_fixture(self): p = self._queuepool_fixture() canary = [] + def first_connect(*arg, **kw): canary.append('first_connect') @@ -276,8 +292,10 @@ class PoolEventsTest(PoolTestBase): def _connect_event_fixture(self): p = self._queuepool_fixture() canary = [] + def connect(*arg, **kw): canary.append('connect') + event.listen(p, 'connect', connect) return p, canary @@ -285,6 +303,7 @@ class PoolEventsTest(PoolTestBase): def _checkout_event_fixture(self): p = self._queuepool_fixture() canary = [] + def checkout(*arg, **kw): canary.append('checkout') event.listen(p, 'checkout', checkout) @@ -294,6 +313,7 @@ class PoolEventsTest(PoolTestBase): def _checkin_event_fixture(self): p = self._queuepool_fixture() canary = [] + def checkin(*arg, **kw): canary.append('checkin') event.listen(p, 'checkin', checkin) @@ -303,6 +323,7 @@ class PoolEventsTest(PoolTestBase): def _reset_event_fixture(self): p = self._queuepool_fixture() canary = [] + def reset(*arg, **kw): canary.append('reset') event.listen(p, 'reset', reset) @@ -316,6 +337,13 @@ class PoolEventsTest(PoolTestBase): return p, canary + def _soft_invalidate_event_fixture(self): + p = self._queuepool_fixture() + canary = Mock() + event.listen(p, 'soft_invalidate', canary) + + return p, canary + def test_first_connect_event(self): p, canary = self._first_connect_event_fixture() @@ -419,6 +447,31 @@ class PoolEventsTest(PoolTestBase): c1.close() eq_(canary, ['reset']) + def test_soft_invalidate_event_no_exception(self): + p, canary = self._soft_invalidate_event_fixture() + + c1 = p.connect() + c1.close() + assert not canary.called + c1 = p.connect() + dbapi_con = c1.connection + c1.invalidate(soft=True) + assert canary.call_args_list[0][0][0] is dbapi_con + assert canary.call_args_list[0][0][2] is None + + def test_soft_invalidate_event_exception(self): + p, canary = self._soft_invalidate_event_fixture() + + c1 = p.connect() + c1.close() + assert not canary.called + c1 = p.connect() + dbapi_con = c1.connection + exc = Exception("hi") + c1.invalidate(exc, soft=True) + 
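# the soft_invalidate event is delivered as
+ # (dbapi_connection, connection_record, exception); indexes [0]
+ # and [2] below are the connection and the exception arguments
+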
assert canary.call_args_list[0][0][0] is dbapi_con + assert canary.call_args_list[0][0][2] is exc + def test_invalidate_event_no_exception(self): p, canary = self._invalidate_event_fixture() @@ -470,12 +523,16 @@ class PoolEventsTest(PoolTestBase): def test_listen_targets_scope(self): canary = [] + def listen_one(*args): canary.append("listen_one") + def listen_two(*args): canary.append("listen_two") + def listen_three(*args): canary.append("listen_three") + def listen_four(*args): canary.append("listen_four") @@ -492,13 +549,17 @@ class PoolEventsTest(PoolTestBase): ) def test_listen_targets_per_subclass(self): - """test that listen() called on a subclass remains specific to that subclass.""" + """test that listen() called on a subclass remains specific to + that subclass.""" canary = [] + def listen_one(*args): canary.append("listen_one") + def listen_two(*args): canary.append("listen_two") + def listen_three(*args): canary.append("listen_three") @@ -526,6 +587,7 @@ class PoolEventsTest(PoolTestBase): # going pool.Pool.dispatch._clear() + class PoolFirstConnectSyncTest(PoolTestBase): # test [ticket:2964] @@ -560,11 +622,14 @@ class PoolFirstConnectSyncTest(PoolTestBase): th.join(join_timeout) eq_(evt.mock_calls, - [call.first_connect(), call.connect(), call.connect(), call.connect()] + [ + call.first_connect(), + call.connect(), + call.connect(), + call.connect()] ) - class DeprecatedPoolListenerTest(PoolTestBase): @testing.requires.predictable_gc @testing.uses_deprecated(r".*Use event.listen") @@ -580,38 +645,45 @@ class DeprecatedPoolListenerTest(PoolTestBase): if hasattr(self, 'checkin'): self.checkin = self.inst_checkin self.clear() + def clear(self): self.connected = [] self.first_connected = [] self.checked_out = [] self.checked_in = [] + def assert_total(innerself, conn, fconn, cout, cin): eq_(len(innerself.connected), conn) eq_(len(innerself.first_connected), fconn) eq_(len(innerself.checked_out), cout) eq_(len(innerself.checked_in), cin) + def assert_in(innerself, item, in_conn, in_fconn, in_cout, in_cin): self.assert_((item in innerself.connected) == in_conn) self.assert_((item in innerself.first_connected) == in_fconn) self.assert_((item in innerself.checked_out) == in_cout) self.assert_((item in innerself.checked_in) == in_cin) + def inst_connect(self, con, record): print("connect(%s, %s)" % (con, record)) assert con is not None assert record is not None self.connected.append(con) + def inst_first_connect(self, con, record): print("first_connect(%s, %s)" % (con, record)) assert con is not None assert record is not None self.first_connected.append(con) + def inst_checkout(self, con, record, proxy): print("checkout(%s, %s, %s)" % (con, record, proxy)) assert con is not None assert record is not None assert proxy is not None self.checked_out.append(con) + def inst_checkin(self, con, record): print("checkin(%s, %s)" % (con, record)) # con can be None if invalidated @@ -620,15 +692,19 @@ class DeprecatedPoolListenerTest(PoolTestBase): class ListenAll(tsa.interfaces.PoolListener, InstrumentingListener): pass + class ListenConnect(InstrumentingListener): def connect(self, con, record): pass + class ListenFirstConnect(InstrumentingListener): def first_connect(self, con, record): pass + class ListenCheckOut(InstrumentingListener): def checkout(self, con, record, proxy, num): pass + class ListenCheckIn(InstrumentingListener): def checkin(self, con, record): pass @@ -746,8 +822,10 @@ class DeprecatedPoolListenerTest(PoolTestBase): def test_listeners_callables(self): def 
connect(dbapi_con, con_record): counts[0] += 1 + def checkout(dbapi_con, con_record, con_proxy): counts[1] += 1 + def checkin(dbapi_con, con_record): counts[2] += 1 @@ -884,6 +962,7 @@ class QueuePoolTest(PoolTestBase): pool_size=2, max_overflow=1, use_threadlocal=False, timeout=3) timeouts = [] + def checkout(): for x in range(1): now = time.time() @@ -915,6 +994,7 @@ class QueuePoolTest(PoolTestBase): dbapi = MockDBAPI() mutex = threading.Lock() + def creator(): time.sleep(.05) with mutex: @@ -924,6 +1004,7 @@ class QueuePoolTest(PoolTestBase): pool_size=3, timeout=2, max_overflow=max_overflow) peaks = [] + def whammy(): for i in range(10): try: @@ -947,7 +1028,6 @@ class QueuePoolTest(PoolTestBase): lazy_gc() assert not pool._refs - def test_overflow_reset_on_failed_connect(self): dbapi = Mock() @@ -956,13 +1036,14 @@ class QueuePoolTest(PoolTestBase): raise Exception("connection failed") creator = dbapi.connect + def create(): return creator() p = pool.QueuePool(creator=create, pool_size=2, max_overflow=3) - c1 = p.connect() - c2 = p.connect() - c3 = p.connect() + c1 = self._with_teardown(p.connect()) + c2 = self._with_teardown(p.connect()) + c3 = self._with_teardown(p.connect()) eq_(p._overflow, 1) creator = failing_dbapi assert_raises(Exception, p.connect) @@ -1029,7 +1110,6 @@ class QueuePoolTest(PoolTestBase): call("overflow_one")] ) - @testing.requires.threading_with_mock @testing.requires.timing_intensive def test_waiters_handled(self): @@ -1039,6 +1119,7 @@ class QueuePoolTest(PoolTestBase): """ mutex = threading.Lock() dbapi = MockDBAPI() + def creator(): mutex.acquire() try: @@ -1052,6 +1133,7 @@ class QueuePoolTest(PoolTestBase): p = pool.QueuePool(creator=creator, pool_size=2, timeout=timeout, max_overflow=max_overflow) + def waiter(p, timeout, max_overflow): success_key = (timeout, max_overflow) conn = p.connect() @@ -1082,18 +1164,58 @@ class QueuePoolTest(PoolTestBase): eq_(len(success), 12, "successes: %s" % success) + def test_connrec_invalidated_within_checkout_no_race(self): + """Test that a concurrent ConnectionRecord.invalidate() which + occurs after the ConnectionFairy has called _ConnectionRecord.checkout() + but before the ConnectionFairy tests "fairy.connection is None" + will not result in an InvalidRequestError. + + This use case assumes that a listener on the checkout() event + will be raising DisconnectionError so that a reconnect attempt + may occur. 
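+
+ An illustrative sketch of such a listener (the names here are
+ hypothetical, not part of this change)::
+
+     @event.listens_for(p, "checkout")
+     def ping_connection(dbapi_connection, connection_record,
+                         connection_proxy):
+         cursor = dbapi_connection.cursor()
+         try:
+             cursor.execute("SELECT 1")
+         except Exception:
+             raise tsa.exc.DisconnectionError()
+         cursor.close()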
+ + """ + dbapi = MockDBAPI() + + def creator(): + return dbapi.connect() + + p = pool.QueuePool(creator=creator, pool_size=1, max_overflow=0) + + conn = p.connect() + conn.close() + + _existing_checkout = pool._ConnectionRecord.checkout + + @classmethod + def _decorate_existing_checkout(cls, *arg, **kw): + fairy = _existing_checkout(*arg, **kw) + connrec = fairy._connection_record + connrec.invalidate() + return fairy + + with patch( + "sqlalchemy.pool._ConnectionRecord.checkout", + _decorate_existing_checkout): + conn = p.connect() + is_(conn._connection_record.connection, None) + conn.close() + + @testing.requires.threading_with_mock @testing.requires.timing_intensive def test_notify_waiters(self): dbapi = MockDBAPI() canary = [] + def creator(): canary.append(1) return dbapi.connect() p1 = pool.QueuePool(creator=creator, pool_size=1, timeout=None, max_overflow=0) + def waiter(p): conn = p.connect() canary.append(2) @@ -1165,7 +1287,8 @@ class QueuePoolTest(PoolTestBase): def test_mixed_close(self): pool._refs.clear() - p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True) + p = self._queuepool_fixture(pool_size=3, max_overflow=-1, + use_threadlocal=True) c1 = p.connect() c2 = p.connect() assert c1 is c2 @@ -1191,6 +1314,7 @@ class QueuePoolTest(PoolTestBase): # disable weakref collection of the # underlying connections strong_refs = set() + def _conn(): c = p.connect() strong_refs.add(c.connection) @@ -1271,35 +1395,74 @@ class QueuePoolTest(PoolTestBase): c2 = p.connect() assert id(c2.connection) == c_id + c2_rec = c2._connection_record p._invalidate(c2) + assert c2_rec.connection is None + c2.close() + time.sleep(.5) + c3 = p.connect() + assert id(c3.connection) != c_id + + @testing.requires.timing_intensive + def test_recycle_on_soft_invalidate(self): + p = self._queuepool_fixture(pool_size=1, + max_overflow=0) + c1 = p.connect() + c_id = id(c1.connection) + c1.close() + c2 = p.connect() + assert id(c2.connection) == c_id + + c2_rec = c2._connection_record + c2.invalidate(soft=True) + assert c2_rec.connection is c2.connection + c2.close() time.sleep(.5) c3 = p.connect() assert id(c3.connection) != c_id + assert c3._connection_record is c2_rec + assert c2_rec.connection is c3.connection + + def _no_wr_finalize(self): + finalize_fairy = pool._finalize_fairy + + def assert_no_wr_callback( + connection, connection_record, + pool, ref, echo, fairy=None): + if fairy is None: + raise AssertionError( + "finalize fairy was called as a weakref callback") + return finalize_fairy( + connection, connection_record, pool, ref, echo, fairy) + return patch.object( + pool, '_finalize_fairy', assert_no_wr_callback) def _assert_cleanup_on_pooled_reconnect(self, dbapi, p): # p is QueuePool with size=1, max_overflow=2, # and one connection in the pool that will need to # reconnect when next used (either due to recycle or invalidate) - eq_(p.checkedout(), 0) - eq_(p._overflow, 0) - dbapi.shutdown(True) - assert_raises( - Exception, - p.connect - ) - eq_(p._overflow, 0) - eq_(p.checkedout(), 0) # and not 1 - dbapi.shutdown(False) + with self._no_wr_finalize(): + eq_(p.checkedout(), 0) + eq_(p._overflow, 0) + dbapi.shutdown(True) + assert_raises( + Exception, + p.connect + ) + eq_(p._overflow, 0) + eq_(p.checkedout(), 0) # and not 1 - c1 = p.connect() - assert p._pool.empty() # poolsize is one, so we're empty OK - c2 = p.connect() - eq_(p._overflow, 1) # and not 2 + dbapi.shutdown(False) - # this hangs if p._overflow is 2 - c3 = p.connect() + c1 = self._with_teardown(p.connect()) + 
assert p._pool.empty() # poolsize is one, so we're empty OK + c2 = self._with_teardown(p.connect()) + eq_(p._overflow, 1) # and not 2 + + # this hangs if p._overflow is 2 + c3 = self._with_teardown(p.connect()) def test_error_on_pooled_reconnect_cleanup_invalidate(self): dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=2) @@ -1317,6 +1480,20 @@ class QueuePoolTest(PoolTestBase): time.sleep(1.5) self._assert_cleanup_on_pooled_reconnect(dbapi, p) + def test_error_on_pooled_reconnect_cleanup_wcheckout_event(self): + dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, + max_overflow=2) + + c1 = p.connect() + c1.close() + + @event.listens_for(p, "checkout") + def handle_checkout_event(dbapi_con, con_record, con_proxy): + if dbapi.is_shutdown: + raise tsa.exc.DisconnectionError() + + self._assert_cleanup_on_pooled_reconnect(dbapi, p) + @testing.requires.timing_intensive def test_recycle_pool_no_race(self): def slow_close(): @@ -1334,6 +1511,7 @@ class QueuePoolTest(PoolTestBase): dialect.dbapi.Error = Error pools = [] + class TrackQueuePool(pool.QueuePool): def __init__(self, *arg, **kw): pools.append(self) @@ -1357,11 +1535,13 @@ class QueuePoolTest(PoolTestBase): def attempt(conn): time.sleep(random.random()) try: - conn._handle_dbapi_exception(Error(), "statement", {}, Mock(), Mock()) + conn._handle_dbapi_exception(Error(), "statement", {}, + Mock(), Mock()) except tsa.exc.DBAPIError: pass - # run an error + invalidate operation on the remaining 7 open connections + # run an error + invalidate operation on the remaining 7 open + #connections threads = [] for conn in conns: t = threading.Thread(target=attempt, args=(conn, )) @@ -1399,7 +1579,8 @@ class QueuePoolTest(PoolTestBase): assert c1.connection.id != c_id def test_recreate(self): - p = self._queuepool_fixture(reset_on_return=None, pool_size=1, max_overflow=0) + p = self._queuepool_fixture(reset_on_return=None, pool_size=1, + max_overflow=0) p2 = p.recreate() assert p2.size() == 1 assert p2._reset_on_return is pool.reset_none @@ -1454,16 +1635,19 @@ class QueuePoolTest(PoolTestBase): eq_(c2_con.close.call_count, 0) def test_threadfairy(self): - p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True) + p = self._queuepool_fixture(pool_size=3, max_overflow=-1, + use_threadlocal=True) c1 = p.connect() c1.close() c2 = p.connect() assert c2.connection is not None + class ResetOnReturnTest(PoolTestBase): def _fixture(self, **kw): dbapi = Mock() - return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'), **kw) + return dbapi, pool.QueuePool(creator=lambda: dbapi.connect('foo.db'), + **kw) def test_plain_rollback(self): dbapi, p = self._fixture(reset_on_return='rollback') @@ -1550,6 +1734,7 @@ class ResetOnReturnTest(PoolTestBase): assert not dbapi.connect().rollback.called assert dbapi.connect().commit.called + class SingletonThreadPoolTest(PoolTestBase): @testing.requires.threading_with_mock @@ -1567,6 +1752,7 @@ class SingletonThreadPoolTest(PoolTestBase): dbapi = MockDBAPI() lock = threading.Lock() + def creator(): # the mock iterator isn't threadsafe... 
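# (its side_effect state is shared between threads, so the Lock
# serializes concurrent dbapi.connect() calls)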
with lock: @@ -1575,6 +1761,7 @@ class SingletonThreadPoolTest(PoolTestBase): if strong_refs: sr = set() + def _conn(): c = p.connect() sr.add(c.connection) @@ -1604,6 +1791,7 @@ class SingletonThreadPoolTest(PoolTestBase): still_opened = len([c for c in sr if not c.close.call_count]) eq_(still_opened, 3) + class AssertionPoolTest(PoolTestBase): def test_connect_error(self): dbapi = MockDBAPI() @@ -1622,6 +1810,7 @@ class AssertionPoolTest(PoolTestBase): c3 = p.connect() assert_raises(AssertionError, p.connect) + class NullPoolTest(PoolTestBase): def test_reconnect(self): dbapi = MockDBAPI() @@ -1649,3 +1838,53 @@ class StaticPoolTest(PoolTestBase): p = pool.StaticPool(creator) p2 = p.recreate() assert p._creator is p2._creator + + +class CreatorCompatibilityTest(PoolTestBase): + def test_creator_callable_outside_noarg(self): + e = testing_engine() + + creator = e.pool._creator + try: + conn = creator() + finally: + conn.close() + + def test_creator_callable_outside_witharg(self): + e = testing_engine() + + creator = e.pool._creator + try: + conn = creator(Mock()) + finally: + conn.close() + + def test_creator_patching_arg_to_noarg(self): + e = testing_engine() + creator = e.pool._creator + try: + # the creator is the two-arg form + conn = creator(Mock()) + finally: + conn.close() + + def mock_create(): + return creator() + + conn = e.connect() + conn.invalidate() + conn.close() + + # test that the 'should_wrap_creator' status + # will dynamically switch if the _creator is monkeypatched. + + # patch it with a zero-arg form + with patch.object(e.pool, "_creator", mock_create): + conn = e.connect() + conn.invalidate() + conn.close() + + conn = e.connect() + conn.close() + + diff --git a/test/engine/test_processors.py b/test/engine/test_processors.py index b1c482f09..f4df7827c 100644 --- a/test/engine/test_processors.py +++ b/test/engine/test_processors.py @@ -57,8 +57,10 @@ class PyDateProcessorTest(_DateProcessorTest): ) ) + class CDateProcessorTest(_DateProcessorTest): __requires__ = ('cextensions',) + @classmethod def setup_class(cls): from sqlalchemy import cprocessors @@ -104,7 +106,8 @@ class _DistillArgsTest(fixtures.TestBase): def test_distill_single_list_tuples(self): eq_( - self.module._distill_params(([("foo", "bar"), ("bat", "hoho")],), {}), + self.module._distill_params( + ([("foo", "bar"), ("bat", "hoho")],), {}), [('foo', 'bar'), ('bat', 'hoho')] ) @@ -117,9 +120,7 @@ class _DistillArgsTest(fixtures.TestBase): def test_distill_multi_list_tuple(self): eq_( self.module._distill_params( - ([("foo", "bar")], [("bar", "bat")]), - {} - ), + ([("foo", "bar")], [("bar", "bat")]), {}), ([('foo', 'bar')], [('bar', 'bat')]) ) @@ -131,7 +132,8 @@ class _DistillArgsTest(fixtures.TestBase): def test_distill_single_list_dicts(self): eq_( - self.module._distill_params(([{"foo": "bar"}, {"foo": "hoho"}],), {}), + self.module._distill_params( + ([{"foo": "bar"}, {"foo": "hoho"}],), {}), [{'foo': 'bar'}, {'foo': 'hoho'}] ) @@ -148,7 +150,6 @@ class _DistillArgsTest(fixtures.TestBase): ) - class PyDistillArgsTest(_DistillArgsTest): @classmethod def setup_class(cls): @@ -160,8 +161,10 @@ class PyDistillArgsTest(_DistillArgsTest): ) ) + class CDistillArgsTest(_DistillArgsTest): __requires__ = ('cextensions', ) + @classmethod def setup_class(cls): from sqlalchemy import cutils as util diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index 619319693..39ebcc91b 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -370,6 +370,9 @@ class 
MockReconnectTest(fixtures.TestBase): mock_dialect = Mock() class MyURL(URL): + def _get_entrypoint(self): + return Dialect + def get_dialect(self): return Dialect @@ -420,6 +423,7 @@ class CursorErrTest(fixtures.TestBase): from sqlalchemy.engine import default url = Mock( get_dialect=lambda: default.DefaultDialect, + _get_entrypoint=lambda: default.DefaultDialect, translate_connect_args=lambda: {}, query={},) eng = testing_engine( url, options=dict(module=dbapi, _initialize=initialize)) diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index 087610333..83650609d 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -14,6 +14,7 @@ from sqlalchemy.util import ue metadata, users = None, None + class ReflectionTest(fixtures.TestBase, ComparesTables): __backend__ = True @@ -253,7 +254,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): """ Table('a', self.metadata, Column('id', Integer, primary_key=True)) Table('b', self.metadata, Column('id', Integer, primary_key=True), - Column('a_id', Integer, sa.ForeignKey('a.id'))) + Column('a_id', Integer, sa.ForeignKey('a.id'))) self.metadata.create_all() m2 = MetaData() @@ -275,7 +276,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): """ Table('a', self.metadata, Column('id', Integer, primary_key=True)) Table('b', self.metadata, Column('id', Integer, primary_key=True), - Column('a_id', Integer, sa.ForeignKey('a.id'))) + Column('a_id', Integer, sa.ForeignKey('a.id'))) self.metadata.create_all() m2 = MetaData() @@ -404,7 +405,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): eq_(list(table.primary_key), [table.c.col1]) eq_(table.c.col1.primary_key, True) - @testing.provide_metadata def test_override_pkfk(self): """test that you can override columns which contain foreign keys @@ -419,7 +419,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): Column('id', sa.Integer, primary_key=True), Column('street', sa.String(30))) - meta.create_all() meta2 = MetaData(testing.db) a2 = Table('addresses', meta2, @@ -541,8 +540,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): assert f1 in b1.constraints assert len(b1.constraints) == 2 - - @testing.provide_metadata def test_override_keys(self): """test that columns can be overridden with a 'key', @@ -654,12 +651,13 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): backends with {dialect}.get_foreign_keys() support)""" if testing.against('postgresql'): - test_attrs = ('match', 'onupdate', 'ondelete', 'deferrable', 'initially') + test_attrs = ('match', 'onupdate', 'ondelete', + 'deferrable', 'initially') addresses_user_id_fkey = sa.ForeignKey( # Each option is specifically not a Postgres default, or # it won't be returned by PG's inspection 'users.id', - name = 'addresses_user_id_fkey', + name='addresses_user_id_fkey', match='FULL', onupdate='RESTRICT', ondelete='RESTRICT', @@ -672,7 +670,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): # elided by MySQL's inspection addresses_user_id_fkey = sa.ForeignKey( 'users.id', - name = 'addresses_user_id_fkey', + name='addresses_user_id_fkey', onupdate='CASCADE', ondelete='CASCADE' ) @@ -726,11 +724,12 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): Column('slot', sa.String(128)), ) - assert_raises_message(sa.exc.InvalidRequestError, - "Foreign key associated with column 'slots.pkg_id' " - "could not find table 'pkgs' with which to generate " - "a foreign key to target column 'pkg_id'", - metadata.create_all) + assert_raises_message( + 
sa.exc.InvalidRequestError, + "Foreign key associated with column 'slots.pkg_id' " + "could not find table 'pkgs' with which to generate " + "a foreign key to target column 'pkg_id'", + metadata.create_all) def test_composite_pks(self): """test reflection of a composite primary key""" @@ -797,7 +796,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): table.c.multi_hoho == table2.c.lala).compare(j.onclause)) - @testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on') @testing.requires.check_constraints @testing.provide_metadata @@ -869,7 +867,6 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): def test_reflect_uses_bind_engine_reflect(self): self._test_reflect_uses_bind(lambda e: MetaData().reflect(e)) - @testing.provide_metadata def test_reflect_all(self): existing = testing.db.table_names() @@ -1053,6 +1050,7 @@ class ReflectionTest(fixtures.TestBase, ComparesTables): finally: _drop_views(metadata.bind) + class CreateDropTest(fixtures.TestBase): __backend__ = True @@ -1101,7 +1099,6 @@ class CreateDropTest(fixtures.TestBase): eq_(ua, ['users', 'email_addresses']) eq_(oi, ['orders', 'items']) - def test_checkfirst(self): try: assert not users.exists(testing.db) @@ -1141,6 +1138,7 @@ class CreateDropTest(fixtures.TestBase): - set(testing.db.table_names())) metadata.drop_all(bind=testing.db) + class SchemaManipulationTest(fixtures.TestBase): __backend__ = True @@ -1159,6 +1157,7 @@ class SchemaManipulationTest(fixtures.TestBase): assert len(addresses.c.user_id.foreign_keys) == 1 assert addresses.constraints == set([addresses.primary_key, fk]) + class UnicodeReflectionTest(fixtures.TestBase): __backend__ = True @@ -1170,16 +1169,40 @@ class UnicodeReflectionTest(fixtures.TestBase): ('plain', 'col_plain', 'ix_plain') ]) no_has_table = [ - ('no_has_table_1', ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble')), - ('no_has_table_2', ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')), + ( + 'no_has_table_1', + ue('col_Unit\u00e9ble'), + ue('ix_Unit\u00e9ble') + ), + ( + 'no_has_table_2', + ue('col_\u6e2c\u8a66'), + ue('ix_\u6e2c\u8a66') + ), ] no_case_sensitivity = [ - (ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')), - (ue('unit\u00e9ble'), ue('col_unit\u00e9ble'), ue('ix_unit\u00e9ble')), + ( + ue('\u6e2c\u8a66'), + ue('col_\u6e2c\u8a66'), + ue('ix_\u6e2c\u8a66') + ), + ( + ue('unit\u00e9ble'), + ue('col_unit\u00e9ble'), + ue('ix_unit\u00e9ble') + ), ] full = [ - (ue('Unit\u00e9ble'), ue('col_Unit\u00e9ble'), ue('ix_Unit\u00e9ble')), - (ue('\u6e2c\u8a66'), ue('col_\u6e2c\u8a66'), ue('ix_\u6e2c\u8a66')), + ( + ue('Unit\u00e9ble'), + ue('col_Unit\u00e9ble'), + ue('ix_Unit\u00e9ble') + ), + ( + ue('\u6e2c\u8a66'), + ue('col_\u6e2c\u8a66'), + ue('ix_\u6e2c\u8a66') + ), ] # as you can see, our options for this kind of thing @@ -1268,6 +1291,7 @@ class UnicodeReflectionTest(fixtures.TestBase): [(names[tname][1], names[tname][0])] ) + class SchemaTest(fixtures.TestBase): __backend__ = True @@ -1398,8 +1422,6 @@ class SchemaTest(fixtures.TestBase): ) - - # Tests related to engine.reflection @@ -1432,7 +1454,8 @@ def createTables(meta, schema=None): dingalings = Table("dingalings", meta, Column('dingaling_id', sa.Integer, primary_key=True), Column('address_id', sa.Integer, - sa.ForeignKey('%semail_addresses.address_id' % schema_prefix)), + sa.ForeignKey( + '%semail_addresses.address_id' % schema_prefix)), Column('data', sa.String(30)), schema=schema, test_needs_fk=True, ) @@ -1448,6 +1471,7 @@ def createTables(meta, schema=None): return (users, addresses, 
dingalings) + def createIndexes(con, schema=None): fullname = 'users' if schema: @@ -1455,6 +1479,7 @@ def createIndexes(con, schema=None): query = "CREATE INDEX users_t_idx ON %s (test1, test2)" % fullname con.execute(sa.sql.text(query)) + @testing.requires.views def _create_views(con, schema=None): for table_name in ('users', 'email_addresses'): @@ -1462,10 +1487,10 @@ def _create_views(con, schema=None): if schema: fullname = "%s.%s" % (schema, table_name) view_name = fullname + '_v' - query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name, - fullname) + query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name, fullname) con.execute(sa.sql.text(query)) + @testing.requires.views def _drop_views(con, schema=None): for table_name in ('email_addresses', 'users'): @@ -1504,6 +1529,7 @@ class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL): 'weird_casing."Col2", weird_casing."col3" ' 'FROM weird_casing') + class CaseSensitiveTest(fixtures.TablesTest): """Nail down case sensitive behaviors, mostly on MySQL.""" __backend__ = True @@ -1539,7 +1565,8 @@ class CaseSensitiveTest(fixtures.TablesTest): ) def test_reflect_via_fk(self): m = MetaData() - t2 = Table("SomeOtherTable", m, autoload=True, autoload_with=testing.db) + t2 = Table("SomeOtherTable", m, autoload=True, + autoload_with=testing.db) eq_(t2.name, "SomeOtherTable") assert "SomeTable" in m.tables @@ -1551,7 +1578,6 @@ class CaseSensitiveTest(fixtures.TablesTest): eq_(t2.name, "sOmEtAbLe") - class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase): __backend__ = True @@ -1584,6 +1610,7 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase): from sqlalchemy.schema import Table m = MetaData(testing.db) + def column_reflect(insp, table, column_info): if column_info['name'] == col: column_info.update(update) @@ -1620,6 +1647,7 @@ class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase): def test_override_key_fk(self): m = MetaData(testing.db) + def column_reflect(insp, table, column_info): if column_info['name'] == 'q': diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index b662c7fcd..7f8a7c97c 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -12,6 +12,8 @@ from sqlalchemy.testing import fixtures users, metadata = None, None + + class TransactionTest(fixtures.TestBase): __backend__ = True @@ -20,7 +22,7 @@ class TransactionTest(fixtures.TestBase): global users, metadata metadata = MetaData() users = Table('query_users', metadata, - Column('user_id', INT, primary_key = True), + Column('user_id', INT, primary_key=True), Column('user_name', VARCHAR(20)), test_needs_acid=True, ) @@ -497,6 +499,7 @@ class TransactionTest(fixtures.TestBase): order_by(users.c.user_id)) eq_(result.fetchall(), []) + class ResetAgentTest(fixtures.TestBase): __backend__ = True @@ -600,6 +603,7 @@ class ResetAgentTest(fixtures.TestBase): trans.rollback() assert connection.connection._reset_agent is None + class AutoRollbackTest(fixtures.TestBase): __backend__ = True @@ -633,6 +637,7 @@ class AutoRollbackTest(fixtures.TestBase): users.drop(conn2) conn2.close() + class ExplicitAutoCommitTest(fixtures.TestBase): """test the 'autocommit' flag on select() and text() objects. 
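(e.g. text("...").execution_options(autocommit=True), or the older
autocommit=True keyword argument, emits a COMMIT after connectionless
execution even with no explicit transaction in progress)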
@@ -1440,4 +1445,3 @@ class IsolationLevelTest(fixtures.TestBase): eq_(conn.get_isolation_level(), self._non_default_isolation_level()) eq_(c2.get_isolation_level(), self._non_default_isolation_level()) - diff --git a/test/ext/declarative/test_basic.py b/test/ext/declarative/test_basic.py index 3fac39cac..ab0de801c 100644 --- a/test/ext/declarative/test_basic.py +++ b/test/ext/declarative/test_basic.py @@ -13,7 +13,10 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \ column_property, composite, Session, properties from sqlalchemy.util import with_metaclass from sqlalchemy.ext.declarative import declared_attr, synonym_for -from sqlalchemy.testing import fixtures +from sqlalchemy.testing import fixtures, mock +from sqlalchemy.orm.events import MapperEvents +from sqlalchemy.orm import mapper +from sqlalchemy import event Base = None @@ -1671,6 +1674,32 @@ class DeclarativeTest(DeclarativeTestBase): )) ) + @testing.teardown_events(MapperEvents) + def test_instrument_class_before_instrumentation(self): + # test #3388 + + canary = mock.Mock() + + @event.listens_for(mapper, "instrument_class") + def instrument_class(mp, cls): + canary.instrument_class(mp, cls) + + @event.listens_for(object, "class_instrument") + def class_instrument(cls): + canary.class_instrument(cls) + + class Test(Base): + __tablename__ = 'test' + id = Column(Integer, primary_key=True) + # MARKMARK + eq_( + canary.mock_calls, + [ + mock.call.instrument_class(Test.__mapper__, Test), + mock.call.class_instrument(Test) + ] + ) + def _produce_test(inline, stringbased): diff --git a/test/ext/declarative/test_inheritance.py b/test/ext/declarative/test_inheritance.py index 2ecee99fd..3e6980190 100644 --- a/test/ext/declarative/test_inheritance.py +++ b/test/ext/declarative/test_inheritance.py @@ -1451,4 +1451,5 @@ class ConcreteExtensionConfigTest( "actual_documents.send_method AS send_method, " "actual_documents.id AS id, 'actual' AS type " "FROM actual_documents) AS pjoin" - )
\ No newline at end of file + ) + diff --git a/test/ext/declarative/test_mixin.py b/test/ext/declarative/test_mixin.py index 45b881671..b9e40421c 100644 --- a/test/ext/declarative/test_mixin.py +++ b/test/ext/declarative/test_mixin.py @@ -9,7 +9,8 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \ configure_mappers, clear_mappers, \ deferred, column_property, Session, base as orm_base from sqlalchemy.util import classproperty -from sqlalchemy.ext.declarative import declared_attr +from sqlalchemy.ext.declarative import declared_attr, declarative_base +from sqlalchemy.orm import events as orm_events from sqlalchemy.testing import fixtures, mock from sqlalchemy.testing.util import gc_collect @@ -438,6 +439,90 @@ class DeclarativeMixinTest(DeclarativeTestBase): eq_(MyModel.__table__.kwargs, {'mysql_engine': 'InnoDB'}) + @testing.teardown_events(orm_events.MapperEvents) + def test_declare_first_mixin(self): + canary = mock.Mock() + + class MyMixin(object): + @classmethod + def __declare_first__(cls): + canary.declare_first__(cls) + + @classmethod + def __declare_last__(cls): + canary.declare_last__(cls) + + class MyModel(Base, MyMixin): + __tablename__ = 'test' + id = Column(Integer, primary_key=True) + + configure_mappers() + + eq_( + canary.mock_calls, + [ + mock.call.declare_first__(MyModel), + mock.call.declare_last__(MyModel), + ] + ) + + @testing.teardown_events(orm_events.MapperEvents) + def test_declare_first_base(self): + canary = mock.Mock() + + class MyMixin(object): + @classmethod + def __declare_first__(cls): + canary.declare_first__(cls) + + @classmethod + def __declare_last__(cls): + canary.declare_last__(cls) + + class Base(MyMixin): + pass + Base = declarative_base(cls=Base) + + class MyModel(Base): + __tablename__ = 'test' + id = Column(Integer, primary_key=True) + + configure_mappers() + + eq_( + canary.mock_calls, + [ + mock.call.declare_first__(MyModel), + mock.call.declare_last__(MyModel), + ] + ) + + @testing.teardown_events(orm_events.MapperEvents) + def test_declare_first_direct(self): + canary = mock.Mock() + + class MyOtherModel(Base): + __tablename__ = 'test2' + id = Column(Integer, primary_key=True) + + @classmethod + def __declare_first__(cls): + canary.declare_first__(cls) + + @classmethod + def __declare_last__(cls): + canary.declare_last__(cls) + + configure_mappers() + + eq_( + canary.mock_calls, + [ + mock.call.declare_first__(MyOtherModel), + mock.call.declare_last__(MyOtherModel) + ] + ) + def test_mapper_args_declared_attr(self): class ComputedMapperArgs: diff --git a/test/ext/test_associationproxy.py b/test/ext/test_associationproxy.py index 9e328a35f..8fb335b06 100644 --- a/test/ext/test_associationproxy.py +++ b/test/ext/test_associationproxy.py @@ -13,6 +13,7 @@ from sqlalchemy.testing import fixtures, AssertsCompiledSQL from sqlalchemy import testing from sqlalchemy.testing.schema import Table, Column from sqlalchemy.testing.mock import Mock, call +from sqlalchemy.testing.assertions import expect_warnings class DictCollection(dict): @collection.appender @@ -1088,7 +1089,8 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): def define_tables(cls, metadata): Table('userkeywords', metadata, Column('keyword_id', Integer, ForeignKey('keywords.id'), primary_key=True), - Column('user_id', Integer, ForeignKey('users.id')) + Column('user_id', Integer, ForeignKey('users.id')), + Column('value', String(50)) ) Table('users', metadata, Column('id', Integer, @@ -1127,6 +1129,9 @@ class ComparatorTest(fixtures.MappedTest, 
AssertsCompiledSQL): # nonuselist singular_value = association_proxy('singular', 'value') + # o2m -> scalar + singular_collection = association_proxy('user_keywords', 'value') + class Keyword(cls.Comparable): def __init__(self, keyword): self.keyword = keyword @@ -1194,8 +1199,9 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): for jj in words[(ii % len(words)):((ii + 3) % len(words))]: k = Keyword(jj) user.keywords.append(k) - if ii % 3 == None: + if ii % 2 == 0: user.singular.keywords.append(k) + user.user_keywords[-1].value = "singular%d" % ii orphan = Keyword('orphan') orphan.user_keyword = UserKeyword(keyword=orphan, user=None) @@ -1212,6 +1218,27 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): def _equivalent(self, q_proxy, q_direct): eq_(q_proxy.all(), q_direct.all()) + def test_filter_any_criterion_ul_scalar(self): + UserKeyword, User = self.classes.UserKeyword, self.classes.User + + q1 = self.session.query(User).filter( + User.singular_collection.any(UserKeyword.value == 'singular8')) + self.assert_compile( + q1, + "SELECT users.id AS users_id, users.name AS users_name, " + "users.singular_id AS users_singular_id " + "FROM users " + "WHERE EXISTS (SELECT 1 " + "FROM userkeywords " + "WHERE users.id = userkeywords.user_id AND " + "userkeywords.value = :value_1)", + checkparams={'value_1': 'singular8'} + ) + + q2 = self.session.query(User).filter( + User.user_keywords.any(UserKeyword.value == 'singular8')) + self._equivalent(q1, q2) + def test_filter_any_kwarg_ul_nul(self): UserKeyword, User = self.classes.UserKeyword, self.classes.User @@ -1300,16 +1327,18 @@ class ComparatorTest(fixtures.MappedTest, AssertsCompiledSQL): def test_filter_contains_nul_ul(self): User, Singular = self.classes.User, self.classes.Singular - self._equivalent( - self.session.query(User).filter( - User.singular_keywords.contains(self.kw) - ), - self.session.query(User).filter( - User.singular.has( - Singular.keywords.contains(self.kw) - ) - ), - ) + with expect_warnings( + "Got None for value of column keywords.singular_id;"): + self._equivalent( + self.session.query(User).filter( + User.singular_keywords.contains(self.kw) + ), + self.session.query(User).filter( + User.singular.has( + Singular.keywords.contains(self.kw) + ) + ), + ) def test_filter_eq_nul_nul(self): Keyword = self.classes.Keyword diff --git a/test/ext/test_baked.py b/test/ext/test_baked.py index 61d0fe126..78c43fc7e 100644 --- a/test/ext/test_baked.py +++ b/test/ext/test_baked.py @@ -242,6 +242,26 @@ class ResultTest(BakedTest): }) mapper(Address, cls.tables.addresses) + def test_cachekeys_on_constructor(self): + User = self.classes.User + + queue = [7, 8] + fn = lambda s: s.query(User.id).filter_by(id=queue.pop(0)) + bq1 = self.bakery(fn, 7) + bq2 = self.bakery(fn, 8) + + for i in range(3): + session = Session(autocommit=True) + eq_( + bq1(session).all(), + [(7,)] + ) + + eq_( + bq2(session).all(), + [(8,)] + ) + def test_no_steps(self): User = self.classes.User @@ -249,7 +269,7 @@ class ResultTest(BakedTest): lambda s: s.query(User.id, User.name).order_by(User.id)) for i in range(3): - session = Session() + session = Session(autocommit=True) eq_( bq(session).all(), [(7, 'jack'), (8, 'ed'), (9, 'fred'), (10, 'chuck')] @@ -262,7 +282,7 @@ class ResultTest(BakedTest): lambda s: s.query(User.id, User.name).order_by(User.id)) bq += lambda q: q.limit(bindparam('limit')).offset(bindparam('offset')) - session = Session() + session = Session(autocommit=True) for i in range(4): for limit, offset, exp in [ @@ -297,7 
+317,7 @@ class ResultTest(BakedTest): bq += fn2 - sess = Session() + sess = Session(autocommit=True) eq_( bq.spoil(full=True).add_criteria(fn3)(sess).params(id=7).all(), [(7, 'jack')] @@ -336,7 +356,7 @@ class ResultTest(BakedTest): bq += fn2 - sess = Session() + sess = Session(autocommit=True) eq_( bq.spoil().add_criteria(fn3)(sess).params(id=7).all(), [(7, 'jack')] @@ -363,7 +383,7 @@ class ResultTest(BakedTest): func.count(User.id)) for i in range(3): - session = Session() + session = Session(autocommit=True) eq_( bq(session).all(), [(4, )] @@ -400,7 +420,7 @@ class ResultTest(BakedTest): if cond4: bq += lambda q: q.from_self().with_entities( func.count(User.id)) - sess = Session() + sess = Session(autocommit=True) result = bq(sess).all() if cond4: if cond1: @@ -448,7 +468,7 @@ class ResultTest(BakedTest): # we were using (filename, firstlineno) as cache key, # which fails for this kind of thing! bq += (lambda q: q.filter(User.name != 'jack')) if cond1 else (lambda q: q.filter(User.name == 'jack')) # noqa - sess = Session() + sess = Session(autocommit=True) result = bq(sess).all() if cond1: diff --git a/test/ext/test_extendedattr.py b/test/ext/test_extendedattr.py index c7627c8b2..c4147ed85 100644 --- a/test/ext/test_extendedattr.py +++ b/test/ext/test_extendedattr.py @@ -1,17 +1,20 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, ne_ from sqlalchemy import util +import sqlalchemy as sa +from sqlalchemy.orm import class_mapper from sqlalchemy.orm import attributes -from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute +from sqlalchemy.orm.attributes import set_attribute, \ + get_attribute, del_attribute from sqlalchemy.orm.instrumentation import is_instrumented from sqlalchemy.orm import clear_mappers -from sqlalchemy import testing from sqlalchemy.testing import fixtures from sqlalchemy.ext import instrumentation -from sqlalchemy.orm.instrumentation import register_class +from sqlalchemy.orm.instrumentation import register_class, manager_of_class from sqlalchemy.testing.util import decorator from sqlalchemy.orm import events from sqlalchemy import event + @decorator def modifies_instrumentation_finders(fn, *args, **kw): pristine = instrumentation.instrumentation_finders[:] @@ -21,15 +24,11 @@ def modifies_instrumentation_finders(fn, *args, **kw): del instrumentation.instrumentation_finders[:] instrumentation.instrumentation_finders.extend(pristine) -def with_lookup_strategy(strategy): - @decorator - def decorate(fn, *args, **kw): - try: - ext_instrumentation._install_instrumented_lookups() - return fn(*args, **kw) - finally: - ext_instrumentation._reinstall_default_lookups() - return decorate + +class _ExtBase(object): + @classmethod + def teardown_class(cls): + instrumentation._reinstall_default_lookups() class MyTypesManager(instrumentation.InstrumentationManager): @@ -58,16 +57,19 @@ class MyTypesManager(instrumentation.InstrumentationManager): def state_getter(self, class_): return lambda instance: instance.__dict__['_my_state'] + class MyListLike(list): # add @appender, @remover decorators as needed _sa_iterator = list.__iter__ _sa_linker = None _sa_converter = None + def _sa_appender(self, item, _sa_initiator=None): if _sa_initiator is not False: self._sa_adapter.fire_append_event(item, _sa_initiator) list.append(self, item) append = _sa_appender + def _sa_remover(self, item, _sa_initiator=None): self._sa_adapter.fire_pre_remove_event(_sa_initiator) if _sa_initiator is not False: @@ -75,57 +77,64 @@ class MyListLike(list): 
list.remove(self, item) remove = _sa_remover -class MyBaseClass(object): - __sa_instrumentation_manager__ = instrumentation.InstrumentationManager - -class MyClass(object): - - # This proves that a staticmethod will work here; don't - # flatten this back to a class assignment! - def __sa_instrumentation_manager__(cls): - return MyTypesManager(cls) - - __sa_instrumentation_manager__ = staticmethod(__sa_instrumentation_manager__) - - # This proves SA can handle a class with non-string dict keys - if not util.pypy and not util.jython: - locals()[42] = 99 # Don't remove this line! - - def __init__(self, **kwargs): - for k in kwargs: - setattr(self, k, kwargs[k]) - - def __getattr__(self, key): - if is_instrumented(self, key): - return get_attribute(self, key) - else: - try: - return self._goofy_dict[key] - except KeyError: - raise AttributeError(key) - - def __setattr__(self, key, value): - if is_instrumented(self, key): - set_attribute(self, key, value) - else: - self._goofy_dict[key] = value - - def __hasattr__(self, key): - if is_instrumented(self, key): - return True - else: - return key in self._goofy_dict - - def __delattr__(self, key): - if is_instrumented(self, key): - del_attribute(self, key) - else: - del self._goofy_dict[key] - -class UserDefinedExtensionTest(fixtures.ORMTest): + +MyBaseClass, MyClass = None, None + + +class UserDefinedExtensionTest(_ExtBase, fixtures.ORMTest): + @classmethod - def teardown_class(cls): - instrumentation._reinstall_default_lookups() + def setup_class(cls): + global MyBaseClass, MyClass + + class MyBaseClass(object): + __sa_instrumentation_manager__ = \ + instrumentation.InstrumentationManager + + class MyClass(object): + + # This proves that a staticmethod will work here; don't + # flatten this back to a class assignment! + def __sa_instrumentation_manager__(cls): + return MyTypesManager(cls) + + __sa_instrumentation_manager__ = staticmethod( + __sa_instrumentation_manager__) + + # This proves SA can handle a class with non-string dict keys + if not util.pypy and not util.jython: + locals()[42] = 99 # Don't remove this line! 
+ + def __init__(self, **kwargs): + for k in kwargs: + setattr(self, k, kwargs[k]) + + def __getattr__(self, key): + if is_instrumented(self, key): + return get_attribute(self, key) + else: + try: + return self._goofy_dict[key] + except KeyError: + raise AttributeError(key) + + def __setattr__(self, key, value): + if is_instrumented(self, key): + set_attribute(self, key, value) + else: + self._goofy_dict[key] = value + + def __hasattr__(self, key): + if is_instrumented(self, key): + return True + else: + return key in self._goofy_dict + + def __delattr__(self, key): + if is_instrumented(self, key): + del_attribute(self, key) + else: + del self._goofy_dict[key] def teardown(self): clear_mappers() @@ -135,15 +144,25 @@ class UserDefinedExtensionTest(fixtures.ORMTest): pass register_class(User) - attributes.register_attribute(User, 'user_id', uselist = False, useobject=False) - attributes.register_attribute(User, 'user_name', uselist = False, useobject=False) - attributes.register_attribute(User, 'email_address', uselist = False, useobject=False) + attributes.register_attribute( + User, 'user_id', uselist=False, useobject=False) + attributes.register_attribute( + User, 'user_name', uselist=False, useobject=False) + attributes.register_attribute( + User, 'email_address', uselist=False, useobject=False) u = User() u.user_id = 7 u.user_name = 'john' u.email_address = 'lala@123.com' - self.assert_(u.__dict__ == {'_my_state':u._my_state, '_goofy_dict':{'user_id':7, 'user_name':'john', 'email_address':'lala@123.com'}}, u.__dict__) + eq_( + u.__dict__, + { + '_my_state': u._my_state, + '_goofy_dict': { + 'user_id': 7, 'user_name': 'john', + 'email_address': 'lala@123.com'}} + ) def test_basic(self): for base in (object, MyBaseClass, MyClass): @@ -151,29 +170,40 @@ class UserDefinedExtensionTest(fixtures.ORMTest): pass register_class(User) - attributes.register_attribute(User, 'user_id', uselist = False, useobject=False) - attributes.register_attribute(User, 'user_name', uselist = False, useobject=False) - attributes.register_attribute(User, 'email_address', uselist = False, useobject=False) + attributes.register_attribute( + User, 'user_id', uselist=False, useobject=False) + attributes.register_attribute( + User, 'user_name', uselist=False, useobject=False) + attributes.register_attribute( + User, 'email_address', uselist=False, useobject=False) u = User() u.user_id = 7 u.user_name = 'john' u.email_address = 'lala@123.com' - self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com') - attributes.instance_state(u)._commit_all(attributes.instance_dict(u)) - self.assert_(u.user_id == 7 and u.user_name == 'john' and u.email_address == 'lala@123.com') + eq_(u.user_id, 7) + eq_(u.user_name, "john") + eq_(u.email_address, "lala@123.com") + attributes.instance_state(u)._commit_all( + attributes.instance_dict(u)) + eq_(u.user_id, 7) + eq_(u.user_name, "john") + eq_(u.email_address, "lala@123.com") u.user_name = 'heythere' u.email_address = 'foo@bar.com' - self.assert_(u.user_id == 7 and u.user_name == 'heythere' and u.email_address == 'foo@bar.com') + eq_(u.user_id, 7) + eq_(u.user_name, "heythere") + eq_(u.email_address, "foo@bar.com") def test_deferred(self): for base in (object, MyBaseClass, MyClass): class Foo(base): pass - data = {'a':'this is a', 'b':12} + data = {'a': 'this is a', 'b': 12} + def loader(state, keys): for k in keys: state.dict[k] = data[k] @@ -181,30 +211,38 @@ class UserDefinedExtensionTest(fixtures.ORMTest): manager = register_class(Foo) 
manager.deferred_scalar_loader = loader - attributes.register_attribute(Foo, 'a', uselist=False, useobject=False) - attributes.register_attribute(Foo, 'b', uselist=False, useobject=False) + attributes.register_attribute( + Foo, 'a', uselist=False, useobject=False) + attributes.register_attribute( + Foo, 'b', uselist=False, useobject=False) if base is object: - assert Foo not in instrumentation._instrumentation_factory._state_finders + assert Foo not in \ + instrumentation._instrumentation_factory._state_finders else: - assert Foo in instrumentation._instrumentation_factory._state_finders + assert Foo in \ + instrumentation._instrumentation_factory._state_finders f = Foo() - attributes.instance_state(f)._expire(attributes.instance_dict(f), set()) + attributes.instance_state(f)._expire( + attributes.instance_dict(f), set()) eq_(f.a, "this is a") eq_(f.b, 12) f.a = "this is some new a" - attributes.instance_state(f)._expire(attributes.instance_dict(f), set()) + attributes.instance_state(f)._expire( + attributes.instance_dict(f), set()) eq_(f.a, "this is a") eq_(f.b, 12) - attributes.instance_state(f)._expire(attributes.instance_dict(f), set()) + attributes.instance_state(f)._expire( + attributes.instance_dict(f), set()) f.a = "this is another new a" eq_(f.a, "this is another new a") eq_(f.b, 12) - attributes.instance_state(f)._expire(attributes.instance_dict(f), set()) + attributes.instance_state(f)._expire( + attributes.instance_dict(f), set()) eq_(f.a, "this is a") eq_(f.b, 12) @@ -212,7 +250,8 @@ class UserDefinedExtensionTest(fixtures.ORMTest): eq_(f.a, None) eq_(f.b, 12) - attributes.instance_state(f)._commit_all(attributes.instance_dict(f)) + attributes.instance_state(f)._commit_all( + attributes.instance_dict(f)) eq_(f.a, None) eq_(f.b, 12) @@ -220,27 +259,32 @@ class UserDefinedExtensionTest(fixtures.ORMTest): """tests that attributes are polymorphic""" for base in (object, MyBaseClass, MyClass): - class Foo(base):pass - class Bar(Foo):pass + class Foo(base): + pass + + class Bar(Foo): + pass register_class(Foo) register_class(Bar) def func1(state, passive): return "this is the foo attr" + def func2(state, passive): return "this is the bar attr" + def func3(state, passive): return "this is the shared attr" attributes.register_attribute(Foo, 'element', - uselist=False, callable_=func1, - useobject=True) + uselist=False, callable_=func1, + useobject=True) attributes.register_attribute(Foo, 'element2', - uselist=False, callable_=func3, - useobject=True) + uselist=False, callable_=func3, + useobject=True) attributes.register_attribute(Bar, 'element', - uselist=False, callable_=func2, - useobject=True) + uselist=False, callable_=func2, + useobject=True) x = Foo() y = Bar() @@ -251,15 +295,20 @@ class UserDefinedExtensionTest(fixtures.ORMTest): def test_collection_with_backref(self): for base in (object, MyBaseClass, MyClass): - class Post(base):pass - class Blog(base):pass + class Post(base): + pass + + class Blog(base): + pass register_class(Post) register_class(Blog) - attributes.register_attribute(Post, 'blog', uselist=False, - backref='posts', trackparent=True, useobject=True) - attributes.register_attribute(Blog, 'posts', uselist=True, - backref='blog', trackparent=True, useobject=True) + attributes.register_attribute( + Post, 'blog', uselist=False, + backref='posts', trackparent=True, useobject=True) + attributes.register_attribute( + Blog, 'posts', uselist=True, + backref='blog', trackparent=True, useobject=True) b = Blog() (p1, p2, p3) = (Post(), Post(), Post()) b.posts.append(p1) 
@@ -287,47 +336,77 @@ class UserDefinedExtensionTest(fixtures.ORMTest): for base in (object, MyBaseClass, MyClass): class Foo(base): pass + class Bar(base): pass register_class(Foo) register_class(Bar) - attributes.register_attribute(Foo, "name", uselist=False, useobject=False) - attributes.register_attribute(Foo, "bars", uselist=True, trackparent=True, useobject=True) - attributes.register_attribute(Bar, "name", uselist=False, useobject=False) - + attributes.register_attribute( + Foo, "name", uselist=False, useobject=False) + attributes.register_attribute( + Foo, "bars", uselist=True, trackparent=True, useobject=True) + attributes.register_attribute( + Bar, "name", uselist=False, useobject=False) f1 = Foo() f1.name = 'f1' - eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), (['f1'], (), ())) + eq_( + attributes.get_state_history( + attributes.instance_state(f1), 'name'), + (['f1'], (), ())) b1 = Bar() b1.name = 'b1' f1.bars.append(b1) - eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b1], [], [])) - - attributes.instance_state(f1)._commit_all(attributes.instance_dict(f1)) - attributes.instance_state(b1)._commit_all(attributes.instance_dict(b1)) - - eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), ((), ['f1'], ())) - eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ((), [b1], ())) + eq_( + attributes.get_state_history( + attributes.instance_state(f1), 'bars'), + ([b1], [], [])) + + attributes.instance_state(f1)._commit_all( + attributes.instance_dict(f1)) + attributes.instance_state(b1)._commit_all( + attributes.instance_dict(b1)) + + eq_( + attributes.get_state_history( + attributes.instance_state(f1), + 'name'), + ((), ['f1'], ())) + eq_( + attributes.get_state_history( + attributes.instance_state(f1), + 'bars'), + ((), [b1], ())) f1.name = 'f1mod' b2 = Bar() b2.name = 'b2' f1.bars.append(b2) - eq_(attributes.get_state_history(attributes.instance_state(f1), 'name'), (['f1mod'], (), ['f1'])) - eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b2], [b1], [])) + eq_( + attributes.get_state_history( + attributes.instance_state(f1), 'name'), + (['f1mod'], (), ['f1'])) + eq_( + attributes.get_state_history( + attributes.instance_state(f1), 'bars'), + ([b2], [b1], [])) f1.bars.remove(b1) - eq_(attributes.get_state_history(attributes.instance_state(f1), 'bars'), ([b2], [], [b1])) + eq_( + attributes.get_state_history( + attributes.instance_state(f1), 'bars'), + ([b2], [], [b1])) def test_null_instrumentation(self): class Foo(MyBaseClass): pass register_class(Foo) - attributes.register_attribute(Foo, "name", uselist=False, useobject=False) - attributes.register_attribute(Foo, "bars", uselist=True, trackparent=True, useobject=True) + attributes.register_attribute( + Foo, "name", uselist=False, useobject=False) + attributes.register_attribute( + Foo, "bars", uselist=True, trackparent=True, useobject=True) assert Foo.name == attributes.manager_of_class(Foo)['name'] assert Foo.bars == attributes.manager_of_class(Foo)['bars'] @@ -335,8 +414,11 @@ class UserDefinedExtensionTest(fixtures.ORMTest): def test_alternate_finders(self): """Ensure the generic finder front-end deals with edge cases.""" - class Unknown(object): pass - class Known(MyBaseClass): pass + class Unknown(object): + pass + + class Known(MyBaseClass): + pass register_class(Known) k, u = Known(), Unknown() @@ -347,61 +429,105 @@ class UserDefinedExtensionTest(fixtures.ORMTest): assert attributes.instance_state(k) is 
not None assert_raises((AttributeError, KeyError), - attributes.instance_state, u) + attributes.instance_state, u) assert_raises((AttributeError, KeyError), - attributes.instance_state, None) + attributes.instance_state, None) + + def test_unmapped_not_type_error(self): + """extension version of the same test in test_mapper. + + fixes #3408 + """ + assert_raises_message( + sa.exc.ArgumentError, + "Class object expected, got '5'.", + class_mapper, 5 + ) + + def test_unmapped_not_type_error_iter_ok(self): + """extension version of the same test in test_mapper. + fixes #3408 + """ + assert_raises_message( + sa.exc.ArgumentError, + r"Class object expected, got '\(5, 6\)'.", + class_mapper, (5, 6) + ) + + +class FinderTest(_ExtBase, fixtures.ORMTest): -class FinderTest(fixtures.ORMTest): def test_standard(self): - class A(object): pass + class A(object): + pass register_class(A) - eq_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager) + eq_( + type(manager_of_class(A)), + instrumentation.ClassManager) def test_nativeext_interfaceexact(self): class A(object): - __sa_instrumentation_manager__ = instrumentation.InstrumentationManager + __sa_instrumentation_manager__ = \ + instrumentation.InstrumentationManager register_class(A) - ne_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager) + ne_( + type(manager_of_class(A)), + instrumentation.ClassManager) def test_nativeext_submanager(self): - class Mine(instrumentation.ClassManager): pass + class Mine(instrumentation.ClassManager): + pass + class A(object): __sa_instrumentation_manager__ = Mine register_class(A) - eq_(type(instrumentation.manager_of_class(A)), Mine) + eq_(type(manager_of_class(A)), Mine) @modifies_instrumentation_finders def test_customfinder_greedy(self): - class Mine(instrumentation.ClassManager): pass - class A(object): pass + class Mine(instrumentation.ClassManager): + pass + + class A(object): + pass + def find(cls): return Mine instrumentation.instrumentation_finders.insert(0, find) register_class(A) - eq_(type(instrumentation.manager_of_class(A)), Mine) + eq_(type(manager_of_class(A)), Mine) @modifies_instrumentation_finders def test_customfinder_pass(self): - class A(object): pass + class A(object): + pass + def find(cls): return None instrumentation.instrumentation_finders.insert(0, find) register_class(A) - eq_(type(instrumentation.manager_of_class(A)), instrumentation.ClassManager) -class InstrumentationCollisionTest(fixtures.ORMTest): + eq_( + type(manager_of_class(A)), + instrumentation.ClassManager) + + +class InstrumentationCollisionTest(_ExtBase, fixtures.ORMTest): + def test_none(self): - class A(object): pass + class A(object): + pass register_class(A) mgr_factory = lambda cls: instrumentation.ClassManager(cls) + class B(object): __sa_instrumentation_manager__ = staticmethod(mgr_factory) register_class(B) @@ -411,79 +537,114 @@ class InstrumentationCollisionTest(fixtures.ORMTest): register_class(C) def test_single_down(self): - class A(object): pass + class A(object): + pass register_class(A) mgr_factory = lambda cls: instrumentation.ClassManager(cls) + class B(A): __sa_instrumentation_manager__ = staticmethod(mgr_factory) - assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B) + assert_raises_message( + TypeError, "multiple instrumentation implementations", + register_class, B) def test_single_up(self): - class A(object): pass + class A(object): + pass # delay registration mgr_factory = lambda cls: instrumentation.ClassManager(cls) 
+ class B(A): __sa_instrumentation_manager__ = staticmethod(mgr_factory) register_class(B) - assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, A) + assert_raises_message( + TypeError, "multiple instrumentation implementations", + register_class, A) def test_diamond_b1(self): mgr_factory = lambda cls: instrumentation.ClassManager(cls) - class A(object): pass - class B1(A): pass + class A(object): + pass + + class B1(A): + pass + class B2(A): __sa_instrumentation_manager__ = staticmethod(mgr_factory) - class C(object): pass - assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B1) + class C(object): + pass + + assert_raises_message( + TypeError, "multiple instrumentation implementations", + register_class, B1) def test_diamond_b2(self): mgr_factory = lambda cls: instrumentation.ClassManager(cls) - class A(object): pass - class B1(A): pass + class A(object): + pass + + class B1(A): + pass + class B2(A): __sa_instrumentation_manager__ = staticmethod(mgr_factory) - class C(object): pass + + class C(object): + pass register_class(B2) - assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B1) + assert_raises_message( + TypeError, "multiple instrumentation implementations", + register_class, B1) def test_diamond_c_b(self): mgr_factory = lambda cls: instrumentation.ClassManager(cls) - class A(object): pass - class B1(A): pass + class A(object): + pass + + class B1(A): + pass + class B2(A): __sa_instrumentation_manager__ = staticmethod(mgr_factory) - class C(object): pass + + class C(object): + pass register_class(C) - assert_raises_message(TypeError, "multiple instrumentation implementations", register_class, B1) + assert_raises_message( + TypeError, "multiple instrumentation implementations", + register_class, B1) -class ExtendedEventsTest(fixtures.ORMTest): +class ExtendedEventsTest(_ExtBase, fixtures.ORMTest): + """Allow custom Events implementations.""" @modifies_instrumentation_finders def test_subclassed(self): class MyEvents(events.InstanceEvents): pass + class MyClassManager(instrumentation.ClassManager): dispatch = event.dispatcher(MyEvents) - instrumentation.instrumentation_finders.insert(0, lambda cls: MyClassManager) + instrumentation.instrumentation_finders.insert( + 0, lambda cls: MyClassManager) - class A(object): pass + class A(object): + pass register_class(A) manager = instrumentation.manager_of_class(A) assert issubclass(manager.dispatch._events, MyEvents) - diff --git a/test/ext/test_hybrid.py b/test/ext/test_hybrid.py index b895d2fb2..e36b8f7e9 100644 --- a/test/ext/test_hybrid.py +++ b/test/ext/test_hybrid.py @@ -7,6 +7,7 @@ from sqlalchemy.testing import eq_, AssertsCompiledSQL, assert_raises_message from sqlalchemy.testing import fixtures from sqlalchemy import inspect + class PropertyComparatorTest(fixtures.TestBase, AssertsCompiledSQL): __dialect__ = 'default' diff --git a/test/ext/test_mutable.py b/test/ext/test_mutable.py index f2d0123bd..a6bcdc47f 100644 --- a/test/ext/test_mutable.py +++ b/test/ext/test_mutable.py @@ -20,6 +20,7 @@ class SubFoo(Foo): class FooWithEq(object): + def __init__(self, **kw): for k in kw: setattr(self, k, kw[k]) @@ -32,6 +33,7 @@ class FooWithEq(object): class Point(MutableComposite): + def __init__(self, x, y): self.x = x self.y = y @@ -56,6 +58,7 @@ class Point(MutableComposite): class MyPoint(Point): + @classmethod def coerce(cls, key, value): if isinstance(value, tuple): @@ -63,23 +66,25 @@ class MyPoint(Point): 
return value -class _MutableDictTestBase(object): - run_define_tables = 'each' - +class _MutableDictTestFixture(object): @classmethod def _type_fixture(cls): return MutableDict - def setup_mappers(cls): - foo = cls.tables.foo - - mapper(Foo, foo) - def teardown(self): # clear out mapper events Mapper.dispatch._clear() ClassManager.dispatch._clear() - super(_MutableDictTestBase, self).teardown() + super(_MutableDictTestFixture, self).teardown() + + +class _MutableDictTestBase(_MutableDictTestFixture): + run_define_tables = 'each' + + def setup_mappers(cls): + foo = cls.tables.foo + + mapper(Foo, foo) def test_coerce_none(self): sess = Session() @@ -208,24 +213,63 @@ class _MutableDictTestBase(object): eq_(f1.non_mutable_data, {'a': 'b'}) + +class MutableColumnDefaultTest(_MutableDictTestFixture, fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + MutableDict = cls._type_fixture() + + mutable_pickle = MutableDict.as_mutable(PickleType) + Table( + 'foo', metadata, + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', mutable_pickle, default={}), + ) + + def setup_mappers(cls): + foo = cls.tables.foo + + mapper(Foo, foo) + + def test_evt_on_flush_refresh(self): + # test for #3427 + + sess = Session() + + f1 = Foo() + sess.add(f1) + sess.flush() + assert isinstance(f1.data, self._type_fixture()) + assert f1 not in sess.dirty + f1.data['foo'] = 'bar' + assert f1 in sess.dirty + + + class MutableWithScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): MutableDict = cls._type_fixture() mutable_pickle = MutableDict.as_mutable(PickleType) Table('foo', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('skip', mutable_pickle), - Column('data', mutable_pickle), - Column('non_mutable_data', PickleType), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('skip', mutable_pickle), + Column('data', mutable_pickle), + Column('non_mutable_data', PickleType), + Column('unrelated_data', String(50)) + ) def test_non_mutable(self): self._test_non_mutable() + class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): import json @@ -247,29 +291,34 @@ class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest): MutableDict = cls._type_fixture() Table('foo', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('data', MutableDict.as_mutable(JSONEncodedDict)), - Column('non_mutable_data', JSONEncodedDict), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', MutableDict.as_mutable(JSONEncodedDict)), + Column('non_mutable_data', JSONEncodedDict), + Column('unrelated_data', String(50)) + ) def test_non_mutable(self): self._test_non_mutable() -class MutableAssocWithAttrInheritTest(_MutableDictTestBase, fixtures.MappedTest): + +class MutableAssocWithAttrInheritTest(_MutableDictTestBase, + fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table('foo', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('data', PickleType), - Column('non_mutable_data', PickleType), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', PickleType), + 
Column('non_mutable_data', PickleType), + Column('unrelated_data', String(50)) + ) Table('subfoo', metadata, - Column('id', Integer, ForeignKey('foo.id'), primary_key=True), - ) + Column('id', Integer, ForeignKey('foo.id'), primary_key=True), + ) def setup_mappers(cls): foo = cls.tables.foo @@ -301,20 +350,27 @@ class MutableAssocWithAttrInheritTest(_MutableDictTestBase, fixtures.MappedTest) sess.commit() eq_(f1.data, {'b': 'c'}) -class MutableAssociationScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest): + +class MutableAssociationScalarPickleTest(_MutableDictTestBase, + fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): MutableDict = cls._type_fixture() MutableDict.associate_with(PickleType) Table('foo', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('skip', PickleType), - Column('data', PickleType), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('skip', PickleType), + Column('data', PickleType), + Column('unrelated_data', String(50)) + ) + + +class MutableAssociationScalarJSONTest(_MutableDictTestBase, + fixtures.MappedTest): -class MutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest): @classmethod def define_tables(cls, metadata): import json @@ -337,21 +393,24 @@ class MutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest MutableDict.associate_with(JSONEncodedDict) Table('foo', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', JSONEncodedDict), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', JSONEncodedDict), + Column('unrelated_data', String(50)) + ) -class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest): +class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, + fixtures.MappedTest): CustomMutableDict = None @classmethod def _type_fixture(cls): if not(getattr(cls, 'CustomMutableDict')): - MutableDict = super(CustomMutableAssociationScalarJSONTest, cls)._type_fixture() + MutableDict = super( + CustomMutableAssociationScalarJSONTest, cls)._type_fixture() + class CustomMutableDict(MutableDict): pass cls.CustomMutableDict = CustomMutableDict @@ -379,14 +438,15 @@ class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.Mapp CustomMutableDict.associate_with(JSONEncodedDict) Table('foo', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('data', JSONEncodedDict), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('data', JSONEncodedDict), + Column('unrelated_data', String(50)) + ) def test_pickle_parent(self): - # Picklers don't know how to pickle CustomMutableDict, but we aren't testing that here + # Picklers don't know how to pickle CustomMutableDict, + # but we aren't testing that here pass def test_coerce(self): @@ -398,22 +458,22 @@ class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase, fixtures.Mapp class _CompositeTestBase(object): + @classmethod def define_tables(cls, metadata): Table('foo', metadata, - Column('id', Integer, primary_key=True, - test_needs_autoincrement=True), - Column('x', Integer), - Column('y', Integer), - Column('unrelated_data', String(50)) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('x', 
Integer), + Column('y', Integer), + Column('unrelated_data', String(50)) + ) def setup(self): from sqlalchemy.ext import mutable mutable._setup_composite_listener() super(_CompositeTestBase, self).setup() - def teardown(self): # clear out mapper events Mapper.dispatch._clear() @@ -423,9 +483,46 @@ class _CompositeTestBase(object): @classmethod def _type_fixture(cls): - return Point + +class MutableCompositeColumnDefaultTest(_CompositeTestBase, + fixtures.MappedTest): + @classmethod + def define_tables(cls, metadata): + Table( + 'foo', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('x', Integer, default=5), + Column('y', Integer, default=9), + Column('unrelated_data', String(50)) + ) + + @classmethod + def setup_mappers(cls): + foo = cls.tables.foo + + cls.Point = cls._type_fixture() + + mapper(Foo, foo, properties={ + 'data': composite(cls.Point, foo.c.x, foo.c.y) + }) + + def test_evt_on_flush_refresh(self): + # this still worked prior to #3427 being fixed in any case + + sess = Session() + + f1 = Foo(data=self.Point(None, None)) + sess.add(f1) + sess.flush() + eq_(f1.data, self.Point(5, 9)) + assert f1 not in sess.dirty + f1.data.x = 10 + assert f1 in sess.dirty + + class MutableCompositesUnpickleTest(_CompositeTestBase, fixtures.MappedTest): @classmethod @@ -443,6 +540,7 @@ class MutableCompositesUnpickleTest(_CompositeTestBase, fixtures.MappedTest): for loads, dumps in picklers(): loads(dumps(u1)) + class MutableCompositesTest(_CompositeTestBase, fixtures.MappedTest): @classmethod @@ -516,6 +614,7 @@ class MutableCompositesTest(_CompositeTestBase, fixtures.MappedTest): eq_(f1.data.x, 5) + class MutableCompositeCallableTest(_CompositeTestBase, fixtures.MappedTest): @classmethod @@ -542,13 +641,14 @@ class MutableCompositeCallableTest(_CompositeTestBase, fixtures.MappedTest): eq_(f1.data.x, 3) -class MutableCompositeCustomCoerceTest(_CompositeTestBase, fixtures.MappedTest): +class MutableCompositeCustomCoerceTest(_CompositeTestBase, + fixtures.MappedTest): + @classmethod def _type_fixture(cls): return MyPoint - @classmethod def setup_mappers(cls): foo = cls.tables.foo @@ -576,16 +676,18 @@ class MutableCompositeCustomCoerceTest(_CompositeTestBase, fixtures.MappedTest): class MutableInheritedCompositesTest(_CompositeTestBase, fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table('foo', metadata, - Column('id', Integer, primary_key=True, test_needs_autoincrement=True), - Column('x', Integer), - Column('y', Integer) - ) + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('x', Integer), + Column('y', Integer) + ) Table('subfoo', metadata, - Column('id', Integer, ForeignKey('foo.id'), primary_key=True), - ) + Column('id', Integer, ForeignKey('foo.id'), primary_key=True), + ) @classmethod def setup_mappers(cls): @@ -628,4 +730,3 @@ class MutableInheritedCompositesTest(_CompositeTestBase, fixtures.MappedTest): sess.add(f2) f2.data.y = 12 assert f2 in sess.dirty - diff --git a/test/orm/inheritance/test_basic.py b/test/orm/inheritance/test_basic.py index d8b2a44af..911d4bc5c 100644 --- a/test/orm/inheritance/test_basic.py +++ b/test/orm/inheritance/test_basic.py @@ -1148,6 +1148,62 @@ class FlushTest(fixtures.MappedTest): sess.flush() assert user_roles.count().scalar() == 1 + +class OptimizedGetOnDeferredTest(fixtures.MappedTest): + """test that the 'optimized get' path accommodates deferred columns.""" + + @classmethod + def define_tables(cls, metadata): + Table( + "a", metadata, + 
Column('id', Integer, primary_key=True, + test_needs_autoincrement=True) + ) + Table( + "b", metadata, + Column('id', Integer, ForeignKey('a.id'), primary_key=True), + Column('data', String(10)) + ) + + @classmethod + def setup_classes(cls): + class A(cls.Basic): + pass + + class B(A): + pass + + @classmethod + def setup_mappers(cls): + A, B = cls.classes("A", "B") + a, b = cls.tables("a", "b") + + mapper(A, a) + mapper(B, b, inherits=A, properties={ + 'data': deferred(b.c.data), + 'expr': column_property(b.c.data + 'q', deferred=True) + }) + + def test_column_property(self): + A, B = self.classes("A", "B") + sess = Session() + b1 = B(data='x') + sess.add(b1) + sess.flush() + + eq_(b1.expr, 'xq') + + def test_expired_column(self): + A, B = self.classes("A", "B") + sess = Session() + b1 = B(data='x') + sess.add(b1) + sess.flush() + sess.expire(b1, ['data']) + + eq_(b1.data, 'x') + + class JoinedNoFKSortingTest(fixtures.MappedTest): @classmethod def define_tables(cls, metadata): diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py index dbbe4c435..9f5d21a43 100644 --- a/test/orm/inheritance/test_single.py +++ b/test/orm/inheritance/test_single.py @@ -410,6 +410,31 @@ class RelationshipToSingleTest(testing.AssertsCompiledSQL, fixtures.MappedTest): "AND employees_1.type IN (:type_1)" ) + def test_join_explicit_onclause_no_discriminator(self): + # test issue #3462 + Company, Employee, Engineer = ( + self.classes.Company, + self.classes.Employee, + self.classes.Engineer) + companies, employees = self.tables.companies, self.tables.employees + + mapper(Company, companies, properties={ + 'employees': relationship(Employee) + }) + mapper(Employee, employees) + mapper(Engineer, inherits=Employee) + + sess = create_session() + self.assert_compile( + sess.query(Company, Engineer.name).join( + Engineer, Company.company_id == Engineer.company_id), + "SELECT companies.company_id AS companies_company_id, " + "companies.name AS companies_name, " + "employees.name AS employees_name " + "FROM companies JOIN " + "employees ON companies.company_id = employees.company_id" + ) + def test_outer_join_prop(self): Company, Employee, Engineer = self.classes.Company,\ self.classes.Employee,\ diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py index b22fff1a9..80d8cdc04 100644 --- a/test/orm/test_attributes.py +++ b/test/orm/test_attributes.py @@ -123,7 +123,7 @@ class AttributeImplAPITest(fixtures.MappedTest): assert_raises_message( ValueError, - r"list.remove\(x\): x not in list", + r"list.remove\(.*?\): .* not in list", A.b.impl.remove, attributes.instance_state(a1), attributes.instance_dict(a1), b2, None @@ -1524,6 +1524,13 @@ class HistoryTest(fixtures.TestBase): f.someattr = 3 eq_(self._someattr_committed_state(f), None) + def test_committed_value_set_active_hist(self): + Foo = self._fixture(uselist=False, useobject=False, + active_history=True) + f = Foo() + f.someattr = 3 + eq_(self._someattr_committed_state(f), None) + def test_committed_value_set_commit(self): Foo = self._fixture(uselist=False, useobject=False, active_history=False) diff --git a/test/orm/test_bulk.py b/test/orm/test_bulk.py index e27d3b73c..e2a1464a6 100644 --- a/test/orm/test_bulk.py +++ b/test/orm/test_bulk.py @@ -96,11 +96,62 @@ class BulkInsertUpdateTest(BulkTest, _fixtures.FixtureTest): asserter.assert_( CompiledSQL( - "UPDATE users SET id=:id, name=:name WHERE " + "UPDATE users SET name=:name WHERE " "users.id = :users_id", - [{'users_id': 1, 'id': 1, 'name': 'u1new'}, - {'users_id': 
2, 'id': 2, 'name': 'u2'}, - {'users_id': 3, 'id': 3, 'name': 'u3new'}] + [{'users_id': 1, 'name': 'u1new'}, + {'users_id': 2, 'name': 'u2'}, + {'users_id': 3, 'name': 'u3new'}] + ) + ) + + def test_bulk_update(self): + User, = self.classes("User",) + + s = Session(expire_on_commit=False) + objects = [ + User(name="u1"), + User(name="u2"), + User(name="u3") + ] + s.add_all(objects) + s.commit() + + s = Session() + with self.sql_execution_asserter() as asserter: + s.bulk_update_mappings( + User, + [{'id': 1, 'name': 'u1new'}, + {'id': 2, 'name': 'u2'}, + {'id': 3, 'name': 'u3new'}] + ) + + asserter.assert_( + CompiledSQL( + "UPDATE users SET name=:name WHERE users.id = :users_id", + [{'users_id': 1, 'name': 'u1new'}, + {'users_id': 2, 'name': 'u2'}, + {'users_id': 3, 'name': 'u3new'}] + ) + ) + + def test_bulk_insert(self): + User, = self.classes("User",) + + s = Session() + with self.sql_execution_asserter() as asserter: + s.bulk_insert_mappings( + User, + [{'id': 1, 'name': 'u1new'}, + {'id': 2, 'name': 'u2'}, + {'id': 3, 'name': 'u3new'}] + ) + + asserter.assert_( + CompiledSQL( + "INSERT INTO users (id, name) VALUES (:id, :name)", + [{'id': 1, 'name': 'u1new'}, + {'id': 2, 'name': 'u2'}, + {'id': 3, 'name': 'u3new'}] ) ) diff --git a/test/orm/test_descriptor.py b/test/orm/test_descriptor.py index 2134d87b2..d9aca30e5 100644 --- a/test/orm/test_descriptor.py +++ b/test/orm/test_descriptor.py @@ -125,3 +125,4 @@ class DescriptorInstrumentationTest(fixtures.ORMTest): str(aliased(Foo).foo == 'ed'), "foobar(foo_1.name) = foobar(:foobar_1)" ) + diff --git a/test/orm/test_eager_relations.py b/test/orm/test_eager_relations.py index f532901f2..6d9d9ec4b 100644 --- a/test/orm/test_eager_relations.py +++ b/test/orm/test_eager_relations.py @@ -2301,6 +2301,142 @@ class InnerJoinSplicingTest(fixtures.MappedTest, testing.AssertsCompiledSQL): ) +class InnerJoinSplicingWSecondaryTest( + fixtures.MappedTest, testing.AssertsCompiledSQL): + __dialect__ = 'default' + __backend__ = True # exercise hardcore join nesting on backends + + @classmethod + def define_tables(cls, metadata): + Table( + 'a', metadata, + Column('id', Integer, primary_key=True), + Column('bid', ForeignKey('b.id')) + ) + + Table( + 'b', metadata, + Column('id', Integer, primary_key=True), + Column('cid', ForeignKey('c.id')) + ) + + Table( + 'c', metadata, + Column('id', Integer, primary_key=True), + ) + + Table('ctod', metadata, + Column('cid', ForeignKey('c.id'), primary_key=True), + Column('did', ForeignKey('d.id'), primary_key=True), + ) + Table('d', metadata, + Column('id', Integer, primary_key=True), + ) + + @classmethod + def setup_classes(cls): + + class A(cls.Comparable): + pass + + class B(cls.Comparable): + pass + + class C(cls.Comparable): + pass + + class D(cls.Comparable): + pass + + @classmethod + def setup_mappers(cls): + A, B, C, D = ( + cls.classes.A, cls.classes.B, cls.classes.C, + cls.classes.D) + mapper(A, cls.tables.a, properties={ + 'b': relationship(B) + }) + mapper(B, cls.tables.b, properties=odict([ + ('c', relationship(C)), + ])) + mapper(C, cls.tables.c, properties=odict([ + ('ds', relationship(D, secondary=cls.tables.ctod, + order_by=cls.tables.d.c.id)), + ])) + mapper(D, cls.tables.d) + + @classmethod + def _fixture_data(cls): + A, B, C, D = ( + cls.classes.A, cls.classes.B, cls.classes.C, + cls.classes.D) + + d1, d2, d3 = D(id=1), D(id=2), D(id=3) + return [ + A( + id=1, + b=B( + id=1, + c=C( + id=1, + ds=[d1, d2] + ) + ) + ), + A( + id=2, + b=B( + id=2, + c=C( + id=2, + ds=[d2, d3] + ) + ) + ) + ] + + 
@classmethod + def insert_data(cls): + s = Session(testing.db) + s.add_all(cls._fixture_data()) + s.commit() + + def _assert_result(self, query): + def go(): + eq_( + query.all(), + self._fixture_data() + ) + + self.assert_sql_count( + testing.db, + go, + 1 + ) + + def test_joined_across(self): + A = self.classes.A + + s = Session() + q = s.query(A) \ + .options( + joinedload('b'). + joinedload('c', innerjoin=True). + joinedload('ds', innerjoin=True)) + self.assert_compile( + q, + "SELECT a.id AS a_id, a.bid AS a_bid, d_1.id AS d_1_id, " + "c_1.id AS c_1_id, b_1.id AS b_1_id, b_1.cid AS b_1_cid " + "FROM a LEFT OUTER JOIN " + "(b AS b_1 JOIN " + "(c AS c_1 JOIN ctod AS ctod_1 ON c_1.id = ctod_1.cid) " + "ON c_1.id = b_1.cid " + "JOIN d AS d_1 ON d_1.id = ctod_1.did) ON b_1.id = a.bid " + "ORDER BY d_1.id" + ) + self._assert_result(q) + + class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL): """test #2188""" diff --git a/test/orm/test_events.py b/test/orm/test_events.py index 179f914fc..ae7ba98c1 100644 --- a/test/orm/test_events.py +++ b/test/orm/test_events.py @@ -15,9 +15,11 @@ from sqlalchemy.testing import AssertsCompiledSQL from sqlalchemy.testing.util import gc_collect from test.orm import _fixtures from sqlalchemy import event -from sqlalchemy.testing.mock import Mock, call +from sqlalchemy.testing.mock import Mock, call, ANY + class _RemoveListeners(object): + def teardown(self): events.MapperEvents._clear() events.InstanceEvents._clear() @@ -30,21 +32,29 @@ class _RemoveListeners(object): class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): run_inserts = None + @classmethod + def define_tables(cls, metadata): + super(MapperEventsTest, cls).define_tables(metadata) + metadata.tables['users'].append_column( + Column('extra', Integer, default=5, onupdate=10) + ) + def test_instance_event_listen(self): """test listen targets for instance events""" users, addresses = self.tables.users, self.tables.addresses - canary = [] + class A(object): pass + class B(A): pass mapper(A, users) mapper(B, addresses, inherits=A, - properties={'address_id': addresses.c.id}) + properties={'address_id': addresses.c.id}) def init_a(target, args, kwargs): canary.append(('init_a', target)) @@ -69,16 +79,16 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): a = A() eq_(canary, [('init_a', a), ('init_b', a), - ('init_c', a), ('init_d', a), ('init_e', a)]) + ('init_c', a), ('init_d', a), ('init_e', a)]) # test propagate flag canary[:] = [] b = B() eq_(canary, [('init_a', b), ('init_b', b), ('init_e', b)]) - def listen_all(self, mapper, **kw): canary = [] + def evt(meth): def go(*args, **kwargs): canary.append(meth) @@ -89,6 +99,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): 'init_failure', 'load', 'refresh', + 'refresh_flush', 'expire', 'before_insert', 'after_insert', @@ -111,7 +122,6 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): def test_basic(self): User, users = self.classes.User, self.tables.users - mapper(User, users) canary = self.listen_all(User) named_canary = self.listen_all(User, named=True) @@ -130,10 +140,11 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() expected = [ 'init', 'before_insert', + 'refresh_flush', 'after_insert', 'expire', 'refresh', 'load', - 'before_update', 'after_update', 'before_delete', + 'before_update', 'refresh_flush', 'after_update', 'before_delete', 'after_delete'] eq_(canary, expected) eq_(named_canary, expected) @@ -189,13 +200,13 @@ class 
MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): ] ) - def test_merge(self): users, User = self.tables.users, self.classes.User mapper(User, users) canary = [] + def load(obj, ctx): canary.append('load') event.listen(mapper, 'load', load) @@ -207,22 +218,22 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): s = Session() u2 = s.merge(u) s = Session() - u2 = s.merge(User(name='u2')) + u2 = s.merge(User(name='u2')) # noqa s.commit() s.query(User).order_by(User.id).first() eq_(canary, ['load', 'load', 'load']) def test_inheritance(self): users, addresses, User = (self.tables.users, - self.tables.addresses, - self.classes.User) + self.tables.addresses, + self.classes.User) class AdminUser(User): pass mapper(User, users) mapper(AdminUser, addresses, inherits=User, - properties={'address_id': addresses.c.id}) + properties={'address_id': addresses.c.id}) canary1 = self.listen_all(User, propagate=True) canary2 = self.listen_all(User) @@ -239,22 +250,23 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() sess.delete(am) sess.flush() - eq_(canary1, ['init', 'before_insert', 'after_insert', - 'refresh', 'load', - 'before_update', 'after_update', 'before_delete', - 'after_delete']) + eq_(canary1, ['init', 'before_insert', 'refresh_flush', 'after_insert', + 'refresh', 'load', + 'before_update', 'refresh_flush', + 'after_update', 'before_delete', + 'after_delete']) eq_(canary2, []) - eq_(canary3, ['init', 'before_insert', 'after_insert', - 'refresh', - 'load', - 'before_update', 'after_update', 'before_delete', - 'after_delete']) + eq_(canary3, ['init', 'before_insert', 'refresh_flush', 'after_insert', + 'refresh', + 'load', + 'before_update', 'refresh_flush', + 'after_update', 'before_delete', + 'after_delete']) def test_inheritance_subclass_deferred(self): users, addresses, User = (self.tables.users, - self.tables.addresses, - self.classes.User) - + self.tables.addresses, + self.classes.User) mapper(User, users) @@ -264,7 +276,7 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): class AdminUser(User): pass mapper(AdminUser, addresses, inherits=User, - properties={'address_id': addresses.c.id}) + properties={'address_id': addresses.c.id}) canary3 = self.listen_all(AdminUser) sess = create_session() @@ -278,16 +290,17 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() sess.delete(am) sess.flush() - eq_(canary1, ['init', 'before_insert', 'after_insert', - 'refresh', 'load', - 'before_update', 'after_update', 'before_delete', - 'after_delete']) + eq_(canary1, ['init', 'before_insert', 'refresh_flush', 'after_insert', + 'refresh', 'load', + 'before_update', 'refresh_flush', + 'after_update', 'before_delete', + 'after_delete']) eq_(canary2, []) - eq_(canary3, ['init', 'before_insert', 'after_insert', - 'refresh', 'load', - 'before_update', 'after_update', 'before_delete', - 'after_delete']) - + eq_(canary3, ['init', 'before_insert', 'refresh_flush', 'after_insert', + 'refresh', 'load', + 'before_update', 'refresh_flush', + 'after_update', 'before_delete', + 'after_delete']) def test_before_after_only_collection(self): """before_update is called on parent for collection modifications, @@ -296,12 +309,11 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ keywords, items, item_keywords, Keyword, Item = ( - self.tables.keywords, - self.tables.items, - self.tables.item_keywords, - self.classes.Keyword, - self.classes.Item) - + self.tables.keywords, + self.tables.items, + 
self.tables.item_keywords, + self.classes.Keyword, + self.classes.Item) mapper(Item, items, properties={ 'keywords': relationship(Keyword, secondary=item_keywords)}) @@ -318,13 +330,13 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() eq_(canary1, ['init', - 'before_insert', 'after_insert']) + 'before_insert', 'after_insert']) eq_(canary2, ['init', - 'before_insert', 'after_insert']) + 'before_insert', 'after_insert']) - canary1[:]= [] - canary2[:]= [] + canary1[:] = [] + canary2[:] = [] i1.keywords.append(k1) sess.flush() @@ -340,14 +352,16 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): assert_raises_message( sa.exc.SAWarning, "before_configured' and 'after_configured' ORM events only " - "invoke with the mapper\(\) function or Mapper class as the target.", + "invoke with the mapper\(\) function or Mapper class as " + "the target.", event.listen, User, 'before_configured', m1 ) assert_raises_message( sa.exc.SAWarning, "before_configured' and 'after_configured' ORM events only " - "invoke with the mapper\(\) function or Mapper class as the target.", + "invoke with the mapper\(\) function or Mapper class as " + "the target.", event.listen, User, 'after_configured', m1 ) @@ -370,11 +384,12 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): def test_instrument_event(self): Address, addresses, users, User = (self.classes.Address, - self.tables.addresses, - self.tables.users, - self.classes.User) + self.tables.addresses, + self.tables.users, + self.classes.User) canary = [] + def instrument_class(mapper, cls): canary.append(cls) @@ -385,7 +400,45 @@ class MapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): mapper(Address, addresses) eq_(canary, [User, Address]) -class DeclarativeEventListenTest(_RemoveListeners, fixtures.DeclarativeMappedTest): + def test_instrument_class_precedes_class_instrumentation(self): + users = self.tables.users + + class MyClass(object): + pass + + canary = Mock() + + def my_init(self): + canary.init() + + # mapper level event + @event.listens_for(mapper, "instrument_class") + def instrument_class(mp, class_): + canary.instrument_class(class_) + class_.__init__ = my_init + + # instrumentationmanager event + @event.listens_for(object, "class_instrument") + def class_instrument(class_): + canary.class_instrument(class_) + + mapper(MyClass, users) + + m1 = MyClass() + assert attributes.instance_state(m1) + + eq_( + [ + call.instrument_class(MyClass), + call.class_instrument(MyClass), + call.init() + ], + canary.mock_calls + ) + + +class DeclarativeEventListenTest(_RemoveListeners, + fixtures.DeclarativeMappedTest): run_setup_classes = "each" run_deletes = None @@ -421,6 +474,7 @@ class DeclarativeEventListenTest(_RemoveListeners, fixtures.DeclarativeMappedTes class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): + """"test event listeners against unmapped classes. This incurs special logic. 
Note if we ever do the "remove" case, @@ -437,9 +491,10 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) canary = [] + def evt(x, y, z): canary.append(x) event.listen(User, "before_insert", evt, raw=True) @@ -456,7 +511,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass @@ -465,6 +520,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): pass canary = Mock() + def evt(x, y, z): canary.append(x) event.listen(User, "before_insert", canary, propagate=True, raw=True) @@ -472,14 +528,13 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): m = mapper(SubUser, users) m.dispatch.before_insert(5, 6, 7) eq_(canary.mock_calls, - [call(5, 6, 7)]) + [call(5, 6, 7)]) m2 = mapper(SubSubUser, users) m2.dispatch.before_insert(8, 9, 10) eq_(canary.mock_calls, - [call(5, 6, 7), call(8, 9, 10)]) - + [call(5, 6, 7), call(8, 9, 10)]) def test_deferred_map_event_subclass_no_propagate(self): """ @@ -489,12 +544,13 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass canary = [] + def evt(x, y, z): canary.append(x) event.listen(User, "before_insert", evt, propagate=False) @@ -511,7 +567,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass @@ -519,6 +575,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): m = mapper(SubUser, users) canary = [] + def evt(x, y, z): canary.append(x) event.listen(User, "before_insert", evt, propagate=True, raw=True) @@ -534,7 +591,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass @@ -563,7 +620,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass @@ -571,6 +628,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): m = mapper(SubUser, users) canary = [] + def evt(x): canary.append(x) event.listen(User, "load", evt, propagate=True, raw=True) @@ -578,7 +636,6 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): m.class_manager.dispatch.load(5) eq_(canary, [5]) - def test_deferred_instance_event_plain(self): """ 1. 
instance event listen on class, w/o propagate @@ -587,9 +644,10 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) canary = [] + def evt(x): canary.append(x) event.listen(User, "load", evt, raw=True) @@ -606,7 +664,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass @@ -615,6 +673,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): pass canary = [] + def evt(x): canary.append(x) event.listen(User, "load", evt, propagate=True, raw=True) @@ -631,14 +690,15 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): def test_deferred_instance_event_subclass_propagate_baseclass(self): """ 1. instance event listen on class, w propagate - 2. map one subclass of class, map base class, leave 2nd subclass unmapped + 2. map one subclass of class, map base class, leave 2nd subclass + unmapped 3. event fire on sub should receive one and only one event 4. event fire on base should receive one and only one event 5. map 2nd subclass 6. event fire on 2nd subclass should receive one and only one event """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass @@ -664,7 +724,7 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): m3 = mapper(SubUser2, users) m3.class_manager.dispatch.load(instance) eq_(canary.mock_calls, [call(instance.obj()), - call(instance.obj()), call(instance.obj())]) + call(instance.obj()), call(instance.obj())]) def test_deferred_instance_event_subclass_no_propagate(self): """ @@ -673,12 +733,13 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): 3. 
event fire on subclass should not receive event """ users, User = (self.tables.users, - self.classes.User) + self.classes.User) class SubUser(User): pass canary = [] + def evt(x): canary.append(x) event.listen(User, "load", evt, propagate=False) @@ -688,29 +749,32 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): eq_(canary, []) def test_deferred_instrument_event(self): - users, User = (self.tables.users, - self.classes.User) + User = self.classes.User canary = [] + def evt(x): canary.append(x) event.listen(User, "attribute_instrument", evt) - instrumentation._instrumentation_factory.dispatch.attribute_instrument(User) + instrumentation._instrumentation_factory.\ + dispatch.attribute_instrument(User) eq_(canary, [User]) def test_isolation_instrument_event(self): - users, User = (self.tables.users, - self.classes.User) + User = self.classes.User + class Bar(object): pass canary = [] + def evt(x): canary.append(x) event.listen(Bar, "attribute_instrument", evt) - instrumentation._instrumentation_factory.dispatch.attribute_instrument(User) + instrumentation._instrumentation_factory.dispatch.\ + attribute_instrument(User) eq_(canary, []) @testing.requires.predictable_gc @@ -730,35 +794,38 @@ class DeferredMapperEventsTest(_RemoveListeners, _fixtures.FixtureTest): assert not dispatch.attribute_instrument - def test_deferred_instrument_event_subclass_propagate(self): - users, User = (self.tables.users, - self.classes.User) + User = self.classes.User + class SubUser(User): pass canary = [] + def evt(x): canary.append(x) event.listen(User, "attribute_instrument", evt, propagate=True) instrumentation._instrumentation_factory.dispatch.\ - attribute_instrument(SubUser) + attribute_instrument(SubUser) eq_(canary, [SubUser]) def test_deferred_instrument_event_subclass_no_propagate(self): users, User = (self.tables.users, - self.classes.User) + self.classes.User) + class SubUser(User): pass canary = [] + def evt(x): canary.append(x) event.listen(User, "attribute_instrument", evt, propagate=False) mapper(SubUser, users) - instrumentation._instrumentation_factory.dispatch.attribute_instrument(5) + instrumentation._instrumentation_factory.dispatch.\ + attribute_instrument(5) eq_(canary, []) @@ -775,8 +842,10 @@ class LoadTest(_fixtures.FixtureTest): User = self.classes.User canary = [] + def load(target, ctx): canary.append("load") + def refresh(target, ctx, attrs): canary.append(("refresh", attrs)) @@ -818,20 +887,19 @@ class LoadTest(_fixtures.FixtureTest): class RemovalTest(_fixtures.FixtureTest): run_inserts = None - def test_attr_propagated(self): User = self.classes.User users, addresses, User = (self.tables.users, - self.tables.addresses, - self.classes.User) + self.tables.addresses, + self.classes.User) class AdminUser(User): pass mapper(User, users) mapper(AdminUser, addresses, inherits=User, - properties={'address_id': addresses.c.id}) + properties={'address_id': addresses.c.id}) fn = Mock() event.listen(User.name, "set", fn, propagate=True) @@ -901,7 +969,6 @@ class RemovalTest(_fixtures.FixtureTest): eq_(fn.mock_calls, [call(u1, "u1")]) - class RefreshTest(_fixtures.FixtureTest): run_inserts = None @@ -915,8 +982,10 @@ class RefreshTest(_fixtures.FixtureTest): User = self.classes.User canary = [] + def load(target, ctx): canary.append("load") + def refresh(target, ctx, attrs): canary.append(("refresh", attrs)) @@ -979,9 +1048,6 @@ class RefreshTest(_fixtures.FixtureTest): assert "name" not in attributes.instance_state(u1).committed_state assert u1 in sess.dirty - - - 
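# A minimal standalone sketch of the hook these refresh tests
# exercise, assuming a mapped User class and a list "canary" in scope:
#
#     from sqlalchemy import event
#
#     @event.listens_for(User, "refresh")
#     def on_refresh(target, context, attrs):
#         # "attrs" is the collection of attribute names just loaded,
#         # e.g. set(['id', 'name']) when an expired row is re-selected
#         canary.append(('refresh', attrs))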
def test_repeated_rows(self): User = self.classes.User @@ -994,7 +1060,7 @@ class RefreshTest(_fixtures.FixtureTest): sess.commit() sess.query(User).union_all(sess.query(User)).all() - eq_(canary, [('refresh', set(['id','name']))]) + eq_(canary, [('refresh', set(['id', 'name']))]) def test_via_refresh_state(self): User = self.classes.User @@ -1008,7 +1074,7 @@ class RefreshTest(_fixtures.FixtureTest): sess.commit() u1.name - eq_(canary, [('refresh', set(['id','name']))]) + eq_(canary, [('refresh', set(['id', 'name']))]) def test_was_expired(self): User = self.classes.User @@ -1023,7 +1089,7 @@ class RefreshTest(_fixtures.FixtureTest): sess.expire(u1) sess.query(User).first() - eq_(canary, [('refresh', set(['id','name']))]) + eq_(canary, [('refresh', set(['id', 'name']))]) def test_was_expired_via_commit(self): User = self.classes.User @@ -1037,7 +1103,7 @@ class RefreshTest(_fixtures.FixtureTest): sess.commit() sess.query(User).first() - eq_(canary, [('refresh', set(['id','name']))]) + eq_(canary, [('refresh', set(['id', 'name']))]) def test_was_expired_attrs(self): User = self.classes.User @@ -1087,6 +1153,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): def my_listener_one(*arg, **kw): pass + def my_listener_two(*arg, **kw): pass @@ -1110,7 +1177,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): def my_listener_one(*arg, **kw): pass - scope = scoped_session(lambda:Session()) + scope = scoped_session(lambda: Session()) assert_raises_message( sa.exc.ArgumentError, @@ -1126,6 +1193,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): pass class NotASession(object): + def __call__(self): return Session() @@ -1151,6 +1219,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): def _listener_fixture(self, **kw): canary = [] + def listener(name): def go(*arg, **kw): canary.append(name) @@ -1181,24 +1250,23 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): def test_flush_autocommit_hook(self): User, users = self.classes.User, self.tables.users - mapper(User, users) - sess, canary = self._listener_fixture(autoflush=False, - autocommit=True, expire_on_commit=False) + sess, canary = self._listener_fixture( + autoflush=False, + autocommit=True, expire_on_commit=False) u = User(name='u1') sess.add(u) sess.flush() eq_( canary, - [ 'before_attach', 'after_attach', 'before_flush', - 'after_transaction_create', 'after_begin', - 'after_flush', 'after_flush_postexec', - 'before_commit', 'after_commit','after_transaction_end'] + ['before_attach', 'after_attach', 'before_flush', + 'after_transaction_create', 'after_begin', + 'after_flush', 'after_flush_postexec', + 'before_commit', 'after_commit', 'after_transaction_end'] ) - def test_rollback_hook(self): User, users = self.classes.User, self.tables.users sess, canary = self._listener_fixture() @@ -1217,15 +1285,17 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.rollback() eq_(canary, - ['before_attach', 'after_attach', 'before_commit', 'before_flush', - 'after_transaction_create', 'after_begin', 'after_flush', - 'after_flush_postexec', 'after_transaction_end', 'after_commit', - 'after_transaction_end', 'after_transaction_create', - 'before_attach', 'after_attach', 'before_commit', - 'before_flush', 'after_transaction_create', 'after_begin', 'after_rollback', - 'after_transaction_end', - 'after_soft_rollback', 'after_transaction_end','after_transaction_create', - 'after_soft_rollback']) + ['before_attach', 'after_attach', 'before_commit', 
'before_flush', + 'after_transaction_create', 'after_begin', 'after_flush', + 'after_flush_postexec', 'after_transaction_end', 'after_commit', + 'after_transaction_end', 'after_transaction_create', + 'before_attach', 'after_attach', 'before_commit', + 'before_flush', 'after_transaction_create', 'after_begin', + 'after_rollback', + 'after_transaction_end', + 'after_soft_rollback', 'after_transaction_end', + 'after_transaction_create', + 'after_soft_rollback']) def test_can_use_session_in_outer_rollback_hook(self): User, users = self.classes.User, self.tables.users @@ -1234,6 +1304,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess = Session() assertions = [] + @event.listens_for(sess, "after_soft_rollback") def do_something(session, previous_transaction): if session.is_active: @@ -1253,7 +1324,6 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.rollback() eq_(assertions, [True, True]) - def test_flush_noautocommit_hook(self): User, users = self.classes.User, self.tables.users @@ -1265,9 +1335,9 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.add(u) sess.flush() eq_(canary, ['before_attach', 'after_attach', 'before_flush', - 'after_transaction_create', 'after_begin', - 'after_flush', 'after_flush_postexec', - 'after_transaction_end']) + 'after_transaction_create', 'after_begin', + 'after_flush', 'after_flush_postexec', + 'after_transaction_end']) def test_flush_in_commit_hook(self): User, users = self.classes.User, self.tables.users @@ -1282,11 +1352,12 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): u.name = 'ed' sess.commit() - eq_(canary, ['before_commit', 'before_flush', 'after_transaction_create', 'after_flush', - 'after_flush_postexec', - 'after_transaction_end', - 'after_commit', - 'after_transaction_end', 'after_transaction_create',]) + eq_(canary, ['before_commit', 'before_flush', + 'after_transaction_create', 'after_flush', + 'after_flush_postexec', + 'after_transaction_end', + 'after_commit', + 'after_transaction_end', 'after_transaction_create', ]) def test_state_before_attach(self): User, users = self.classes.User, self.tables.users @@ -1301,7 +1372,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): assert inst not in session.new mapper(User, users) - u= User(name='u1') + u = User(name='u1') sess.add(u) sess.flush() sess.expunge(u) @@ -1320,7 +1391,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): assert inst in session.new mapper(User, users) - u= User(name='u1') + u = User(name='u1') sess.add(u) sess.flush() sess.expunge(u) @@ -1330,8 +1401,8 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess, canary = self._listener_fixture() sess.commit() eq_(canary, ['before_commit', 'after_commit', - 'after_transaction_end', - 'after_transaction_create']) + 'after_transaction_end', + 'after_transaction_create']) def test_on_bulk_update_hook(self): User, users = self.classes.User, self.tables.users @@ -1369,7 +1440,6 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): [call(sess, upd.query, upd.context, upd.result)] ) - def test_on_bulk_delete_hook(self): User, users = self.classes.User, self.tables.users @@ -1408,13 +1478,13 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): def test_connection_emits_after_begin(self): sess, canary = self._listener_fixture(bind=testing.db) - conn = sess.connection() + sess.connection() eq_(canary, ['after_begin']) + sess.close() def test_reentrant_flush(self): 
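# Both this reentrancy test and the flush-plan tests that follow
# exercise the "before_flush" session hook: a listener may add to or
# delete from the session and the flush in progress picks the changes
# up, but it may not call session.flush() itself. A minimal sketch of
# the additive pattern, assuming a mapped User class:
#
#     @event.listens_for(Session, "before_flush")
#     def before_flush(session, flush_context, instances):
#         for obj in session.new:
#             if isinstance(obj, User):
#                 # queue a companion row inside the same flush
#                 session.add(User(name='another %s' % obj.name))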
users, User = self.tables.users, self.classes.User - mapper(User, users) def before_flush(session, flush_context, objects): @@ -1429,7 +1499,6 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): def test_before_flush_affects_flush_plan(self): users, User = self.tables.users, self.classes.User - mapper(User, users) def before_flush(session, flush_context, objects): @@ -1438,8 +1507,8 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): session.add(User(name='another %s' % obj.name)) for obj in list(session.deleted): if isinstance(obj, User): - x = session.query(User).filter(User.name - == 'another %s' % obj.name).one() + x = session.query(User).filter( + User.name == 'another %s' % obj.name).one() session.delete(x) sess = Session() @@ -1452,7 +1521,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): [ User(name='another u1'), User(name='u1') - ] + ] ) sess.flush() @@ -1460,17 +1529,17 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): [ User(name='another u1'), User(name='u1') - ] + ] ) - u.name='u2' + u.name = 'u2' sess.flush() eq_(sess.query(User).order_by(User.name).all(), [ User(name='another u1'), User(name='another u2'), User(name='u2') - ] + ] ) sess.delete(u) @@ -1478,7 +1547,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): eq_(sess.query(User).order_by(User.name).all(), [ User(name='another u1'), - ] + ] ) def test_before_flush_affects_dirty(self): @@ -1498,7 +1567,7 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): sess.flush() eq_(sess.query(User).order_by(User.name).all(), [User(name='u1')] - ) + ) sess.add(User(name='u2')) sess.flush() @@ -1507,12 +1576,12 @@ class SessionEventsTest(_RemoveListeners, _fixtures.FixtureTest): [ User(name='u1 modified'), User(name='u2') - ] + ] ) - class MapperExtensionTest(_fixtures.FixtureTest): + """Superseded by MapperEventsTest - test backwards compatibility of MapperExtension.""" @@ -1522,15 +1591,18 @@ class MapperExtensionTest(_fixtures.FixtureTest): methods = [] class Ext(sa.orm.MapperExtension): + def instrument_class(self, mapper, cls): methods.append('instrument_class') return sa.orm.EXT_CONTINUE - def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): + def init_instance( + self, mapper, class_, oldinit, instance, args, kwargs): methods.append('init_instance') return sa.orm.EXT_CONTINUE - def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): + def init_failed( + self, mapper, class_, oldinit, instance, args, kwargs): methods.append('init_failed') return sa.orm.EXT_CONTINUE @@ -1591,8 +1663,8 @@ class MapperExtensionTest(_fixtures.FixtureTest): def test_inheritance(self): users, addresses, User = (self.tables.users, - self.tables.addresses, - self.classes.User) + self.tables.addresses, + self.classes.User) Ext, methods = self.extension() @@ -1601,7 +1673,7 @@ class MapperExtensionTest(_fixtures.FixtureTest): mapper(User, users, extension=Ext()) mapper(AdminUser, addresses, inherits=User, - properties={'address_id': addresses.c.id}) + properties={'address_id': addresses.c.id}) sess = create_session() am = AdminUser(name='au1', email_address='au1@e1') @@ -1627,17 +1699,17 @@ class MapperExtensionTest(_fixtures.FixtureTest): """ - keywords, items, item_keywords, Keyword, Item = (self.tables.keywords, - self.tables.items, - self.tables.item_keywords, - self.classes.Keyword, - self.classes.Item) - + keywords, items, item_keywords, Keyword, Item = ( + self.tables.keywords, + self.tables.items, + 
self.tables.item_keywords, + self.classes.Keyword, + self.classes.Item) Ext1, methods1 = self.extension() Ext2, methods2 = self.extension() - mapper(Item, items, extension=Ext1() , properties={ + mapper(Item, items, extension=Ext1(), properties={ 'keywords': relationship(Keyword, secondary=item_keywords)}) mapper(Keyword, keywords, extension=Ext2()) @@ -1649,10 +1721,10 @@ class MapperExtensionTest(_fixtures.FixtureTest): sess.flush() eq_(methods1, ['instrument_class', 'init_instance', - 'before_insert', 'after_insert']) + 'before_insert', 'after_insert']) eq_(methods2, ['instrument_class', 'init_instance', - 'before_insert', 'after_insert']) + 'before_insert', 'after_insert']) del methods1[:] del methods2[:] @@ -1661,13 +1733,12 @@ class MapperExtensionTest(_fixtures.FixtureTest): eq_(methods1, ['before_update', 'after_update']) eq_(methods2, []) - def test_inheritance_with_dupes(self): """Inheritance with the same extension instance on both mappers.""" users, addresses, User = (self.tables.users, - self.tables.addresses, - self.classes.User) + self.tables.addresses, + self.classes.User) Ext, methods = self.extension() @@ -1677,7 +1748,7 @@ class MapperExtensionTest(_fixtures.FixtureTest): ext = Ext() mapper(User, users, extension=ext) mapper(AdminUser, addresses, inherits=User, extension=ext, - properties={'address_id': addresses.c.id}) + properties={'address_id': addresses.c.id}) sess = create_session() am = AdminUser(name="au1", email_address="au1@e1") @@ -1697,11 +1768,11 @@ class MapperExtensionTest(_fixtures.FixtureTest): 'before_update', 'after_update', 'before_delete', 'after_delete']) - def test_unnecessary_methods_not_evented(self): users = self.tables.users class MyExtension(sa.orm.MapperExtension): + def before_insert(self, mapper, connection, instance): pass @@ -1714,15 +1785,16 @@ class MapperExtensionTest(_fixtures.FixtureTest): class AttributeExtensionTest(fixtures.MappedTest): + @classmethod def define_tables(cls, metadata): Table('t1', - metadata, - Column('id', Integer, primary_key=True), - Column('type', String(40)), - Column('data', String(50)) + metadata, + Column('id', Integer, primary_key=True), + Column('type', String(40)), + Column('data', String(50)) - ) + ) def test_cascading_extensions(self): t1 = self.tables.t1 @@ -1730,28 +1802,35 @@ class AttributeExtensionTest(fixtures.MappedTest): ext_msg = [] class Ex1(sa.orm.AttributeExtension): + def set(self, state, value, oldvalue, initiator): ext_msg.append("Ex1 %r" % value) return "ex1" + value class Ex2(sa.orm.AttributeExtension): + def set(self, state, value, oldvalue, initiator): ext_msg.append("Ex2 %r" % value) return "ex2" + value class A(fixtures.BasicEntity): pass + class B(A): pass + class C(B): pass - mapper(A, t1, polymorphic_on=t1.c.type, polymorphic_identity='a', properties={ - 'data':column_property(t1.c.data, extension=Ex1()) - }) + mapper( + A, t1, polymorphic_on=t1.c.type, polymorphic_identity='a', + properties={ + 'data': column_property(t1.c.data, extension=Ex1()) + } + ) mapper(B, polymorphic_identity='b', inherits=A) - mc = mapper(C, polymorphic_identity='c', inherits=B, properties={ - 'data':column_property(t1.c.data, extension=Ex2()) + mapper(C, polymorphic_identity='c', inherits=B, properties={ + 'data': column_property(t1.c.data, extension=Ex2()) }) a1 = A(data='a1') @@ -1763,15 +1842,14 @@ class AttributeExtensionTest(fixtures.MappedTest): eq_(c1.data, 'ex2c1') a1.data = 'a2' - b1.data='b2' + b1.data = 'b2' c1.data = 'c2' eq_(a1.data, 'ex1a2') eq_(b1.data, 'ex1b2') eq_(c1.data, 'ex2c2') 
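# AttributeExtension as used above is the legacy hook; the modern
# spelling of Ex1 is an attribute "set" event with retval=True, so
# that the returned value replaces the one being assigned. A sketch
# against the same mapped A class:
#
#     @event.listens_for(A.data, "set", retval=True)
#     def set_(target, value, oldvalue, initiator):
#         ext_msg.append("Ex1 %r" % value)
#         return "ex1" + value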
eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'", - "Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"]) - + "Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"]) class SessionExtensionTest(_fixtures.FixtureTest): @@ -1782,36 +1860,46 @@ class SessionExtensionTest(_fixtures.FixtureTest): mapper(User, users) log = [] + class MyExt(sa.orm.session.SessionExtension): + def before_commit(self, session): log.append('before_commit') + def after_commit(self, session): log.append('after_commit') + def after_rollback(self, session): log.append('after_rollback') + def before_flush(self, session, flush_context, objects): log.append('before_flush') + def after_flush(self, session, flush_context): log.append('after_flush') + def after_flush_postexec(self, session, flush_context): log.append('after_flush_postexec') + def after_begin(self, session, transaction, connection): log.append('after_begin') + def after_attach(self, session, instance): log.append('after_attach') + def after_bulk_update( self, session, query, query_context, result - ): + ): log.append('after_bulk_update') def after_bulk_delete( self, session, query, query_context, result - ): + ): log.append('after_bulk_delete') - sess = create_session(extension = MyExt()) + sess = create_session(extension=MyExt()) u = User(name='u1') sess.add(u) sess.flush() @@ -1823,7 +1911,7 @@ class SessionExtensionTest(_fixtures.FixtureTest): 'after_flush_postexec', 'before_commit', 'after_commit', - ] + ] log = [] sess = create_session(autocommit=False, extension=MyExt()) u = User(name='u1') @@ -1848,34 +1936,38 @@ class SessionExtensionTest(_fixtures.FixtureTest): log = [] sess = create_session(autocommit=False, extension=MyExt(), bind=testing.db) - conn = sess.connection() + sess.connection() assert log == ['after_begin'] + sess.close() def test_multiple_extensions(self): User, users = self.classes.User, self.tables.users log = [] + class MyExt1(sa.orm.session.SessionExtension): + def before_commit(self, session): log.append('before_commit_one') - class MyExt2(sa.orm.session.SessionExtension): + def before_commit(self, session): log.append('before_commit_two') mapper(User, users) - sess = create_session(extension = [MyExt1(), MyExt2()]) + sess = create_session(extension=[MyExt1(), MyExt2()]) u = User(name='u1') sess.add(u) sess.flush() assert log == [ 'before_commit_one', 'before_commit_two', - ] + ] def test_unnecessary_methods_not_evented(self): class MyExtension(sa.orm.session.SessionExtension): + def before_commit(self, session): pass @@ -1886,7 +1978,6 @@ class SessionExtensionTest(_fixtures.FixtureTest): class QueryEventsTest( _RemoveListeners, _fixtures.FixtureTest, AssertsCompiledSQL): - run_inserts = None __dialect__ = 'default' @classmethod @@ -1917,3 +2008,86 @@ class QueryEventsTest( checkparams={'id_2': 10, 'id_1': 7} ) + def test_alters_entities(self): + User = self.classes.User + + @event.listens_for(query.Query, "before_compile", retval=True) + def fn(query): + return query.add_columns(User.name) + + s = Session() + + q = s.query(User.id, ).filter_by(id=7) + self.assert_compile( + q, + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users " + "WHERE users.id = :id_1", + checkparams={'id_1': 7} + ) + eq_( + q.all(), + [(7, 'jack')] + ) + + +class RefreshFlushInReturningTest(fixtures.MappedTest): + """test [ticket:3427]. + + this is a rework of the test for [ticket:3167] stated + in test_unitofworkv2, which tests that returning doesn't trigger + attribute events; the test here is *reversed* so that we test that + it *does* trigger the new refresh_flush event. 
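+ Concretely, the hook is registered as
+ event.listen(Thing, "refresh_flush", fn), where
+ fn(target, flush_context, attrs) receives the names of the
+ attributes refreshed from RETURNING or from prefetched defaults
+ during the flush.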
+ + """ + + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table( + 'test', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('prefetch_val', Integer, default=5), + Column('returning_val', Integer, server_default="5") + ) + + @classmethod + def setup_classes(cls): + class Thing(cls.Basic): + pass + + @classmethod + def setup_mappers(cls): + Thing = cls.classes.Thing + + mapper(Thing, cls.tables.test, eager_defaults=True) + + def test_no_attr_events_flush(self): + Thing = self.classes.Thing + mock = Mock() + event.listen(Thing, "refresh_flush", mock) + t1 = Thing() + s = Session() + s.add(t1) + s.flush() + + if testing.requires.returning.enabled: + # ordering is deterministic in this test b.c. the routine + # appends the "returning" params before the "prefetch" + # ones. if there were more than one attribute in each category, + # then we'd have hash order issues. + eq_( + mock.mock_calls, + [call(t1, ANY, ['returning_val', 'prefetch_val'])] + ) + else: + eq_( + mock.mock_calls, + [call(t1, ANY, ['prefetch_val'])] + ) + + eq_(t1.id, 1) + eq_(t1.prefetch_val, 5) + eq_(t1.returning_val, 5) diff --git a/test/orm/test_joins.py b/test/orm/test_joins.py index 23d220dcc..540056dae 100644 --- a/test/orm/test_joins.py +++ b/test/orm/test_joins.py @@ -750,6 +750,17 @@ class JoinTest(QueryTest, AssertsCompiledSQL): filter_by(id=3).outerjoin('orders','address').filter_by(id=1).all() assert [User(id=7, name='jack')] == result + def test_raises_on_dupe_target_rel(self): + User = self.classes.User + + assert_raises_message( + sa.exc.SAWarning, + "Pathed join target Order.items has already been joined to; " + "skipping", + lambda: create_session().query(User).outerjoin('orders', 'items').\ + outerjoin('orders', 'items') + ) + def test_from_joinpoint(self): Item, User, Order = (self.classes.Item, self.classes.User, diff --git a/test/orm/test_lazy_relations.py b/test/orm/test_lazy_relations.py index e99e22725..ea39753b4 100644 --- a/test/orm/test_lazy_relations.py +++ b/test/orm/test_lazy_relations.py @@ -9,6 +9,7 @@ from sqlalchemy import Integer, String, ForeignKey, SmallInteger, Boolean from sqlalchemy.types import TypeDecorator from sqlalchemy.testing.schema import Table from sqlalchemy.testing.schema import Column +from sqlalchemy import orm from sqlalchemy.orm import mapper, relationship, create_session, Session from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures @@ -559,7 +560,60 @@ class GetterStateTest(_fixtures.FixtureTest): run_inserts = None - def _u_ad_fixture(self, populate_user): + def _unhashable_fixture(self, metadata, load_on_pending=False): + class MyHashType(sa.TypeDecorator): + impl = sa.String(100) + + def process_bind_param(self, value, dialect): + return ";".join( + "%s=%s" % (k, v) + for k, v in + sorted(value.items(), key=lambda key: key[0])) + + def process_result_value(self, value, dialect): + return dict(elem.split("=", 1) for elem in value.split(";")) + + category = Table( + 'category', metadata, + Column('id', Integer, primary_key=True), + Column('data', MyHashType()) + ) + article = Table( + 'article', metadata, + Column('id', Integer, primary_key=True), + Column('data', MyHashType()) + ) + + class Category(fixtures.ComparableEntity): + pass + + class Article(fixtures.ComparableEntity): + pass + + mapper(Category, category) + mapper(Article, article, properties={ + "category": relationship( + Category, + primaryjoin=orm.foreign(article.c.data) == category.c.data, + 
load_on_pending=load_on_pending + ) + }) + + metadata.create_all() + sess = Session(autoflush=False) + data = {"im": "unhashable"} + a1 = Article(id=1, data=data) + c1 = Category(id=1, data=data) + if load_on_pending: + sess.add(c1) + else: + sess.add_all([c1, a1]) + sess.flush() + if load_on_pending: + sess.add(a1) + return Category, Article, sess, a1, c1 + + def _u_ad_fixture(self, populate_user, dont_use_get=False): users, Address, addresses, User = ( self.tables.users, self.classes.Address, @@ -567,9 +621,17 @@ class GetterStateTest(_fixtures.FixtureTest): self.classes.User) mapper(User, users, properties={ - 'addresses': relationship(Address, backref='user') + 'addresses': relationship(Address, back_populates='user') + }) + mapper(Address, addresses, properties={ + 'user': relationship( + User, + primaryjoin=and_( + users.c.id == addresses.c.user_id, users.c.id != 27) + if dont_use_get else None, + back_populates='addresses' + ) }) - mapper(Address, addresses) sess = create_session() a1 = Address(email_address='a1') @@ -581,6 +643,47 @@ class GetterStateTest(_fixtures.FixtureTest): sess.expire_all() return User, Address, sess, a1 + def test_no_use_get_params_missing(self): + User, Address, sess, a1 = self._u_ad_fixture(False, True) + + def go(): + eq_(a1.user, None) + + # doesn't emit SQL + self.assert_sql_count( + testing.db, + go, + 0 + ) + + @testing.provide_metadata + def test_no_use_get_params_not_hashable(self): + Category, Article, sess, a1, c1 = \ + self._unhashable_fixture(self.metadata) + + def go(): + eq_(a1.category, c1) + + self.assert_sql_count( + testing.db, + go, + 1 + ) + + @testing.provide_metadata + def test_no_use_get_params_not_hashable_on_pending(self): + Category, Article, sess, a1, c1 = \ + self._unhashable_fixture(self.metadata, load_on_pending=True) + + def go(): + eq_(a1.category, c1) + + self.assert_sql_count( + testing.db, + go, + 1 + ) + def test_get_empty_passive_return_never_set(self): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( diff --git a/test/orm/test_query.py b/test/orm/test_query.py index 4c909d6aa..55af023b1 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -69,10 +69,12 @@ class RowTupleTest(QueryTest): mapper(Address, addresses) sess = create_session() user_alias = aliased(User) + user_alias_id_label = user_alias.id.label('foo') address_alias = aliased(Address, name='aalias') fn = func.count(User.id) name_label = User.name.label('uname') bundle = Bundle('b1', User.id, User.name) + cte = sess.query(User.id).cte() for q, asserted in [ ( sess.query(User), @@ -104,6 +106,24 @@ class RowTupleTest(QueryTest): ] ), ( + sess.query(user_alias.id), + [ + { + 'name': 'id', 'type': users.c.id.type, + 'aliased': True, 'expr': user_alias.id, + 'entity': user_alias}, + ] + ), + ( + sess.query(user_alias_id_label), + [ + { + 'name': 'foo', 'type': users.c.id.type, + 'aliased': True, 'expr': user_alias_id_label, + 'entity': user_alias}, + ] + ), + ( sess.query(address_alias), [ { @@ -123,6 +143,26 @@ class RowTupleTest(QueryTest): ] ), ( + sess.query(cte), + [ + { + 'aliased': False, + 'expr': cte.c.id, 'type': cte.c.id.type, + 'name': 'id', 'entity': None + }] + ), + ( + sess.query(users), + [ + {'aliased': False, + 'expr': users.c.id, 'type': users.c.id.type, + 'name': 'id', 'entity': None}, + {'aliased': False, + 'expr': users.c.name, 'type': users.c.name.type, + 'name': 'name', 'entity': None} + ] + ), + ( sess.query(users.c.name), [{ "name": "name", "type": users.c.name.type, @@ -742,7 +782,7 @@ class OperatorTest(QueryTest, 
AssertsCompiledSQL): __dialect__ = 'default' - def _test(self, clause, expected, entity=None): + def _test(self, clause, expected, entity=None, checkparams=None): dialect = default.DefaultDialect() if entity is not None: # specify a lead entity, so that when we are testing @@ -754,9 +794,11 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): lead = context.statement.compile(dialect=dialect) expected = (str(lead) + " WHERE " + expected).replace("\n", "") clause = sess.query(entity).filter(clause) - self.assert_compile(clause, expected) + self.assert_compile(clause, expected, checkparams=checkparams) - def _test_filter_aliases(self, clause, expected, from_, onclause): + def _test_filter_aliases( + self, + clause, expected, from_, onclause, checkparams=None): dialect = default.DefaultDialect() sess = Session() lead = sess.query(from_).join(onclause, aliased=True) @@ -766,7 +808,7 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): lead = context.statement.compile(dialect=dialect) expected = (str(lead) + " WHERE " + expected).replace("\n", "") - self.assert_compile(full, expected) + self.assert_compile(full, expected, checkparams=checkparams) def test_arithmetic(self): User = self.classes.User @@ -933,65 +975,126 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): def test_m2o_compare_instance(self): User, Address = self.classes.User, self.classes.Address - u7 = User(id=7) + u7 = User(id=5) attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7)) + u7.id = 7 self._test(Address.user == u7, ":param_1 = addresses.user_id") def test_m2o_compare_instance_negated(self): User, Address = self.classes.User, self.classes.Address - u7 = User(id=7) + u7 = User(id=5) attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7)) + u7.id = 7 self._test( Address.user != u7, - "addresses.user_id != :user_id_1 OR addresses.user_id IS NULL") + "addresses.user_id != :user_id_1 OR addresses.user_id IS NULL", + checkparams={'user_id_1': 7}) def test_m2o_compare_instance_orm_adapt(self): User, Address = self.classes.User, self.classes.Address - u7 = User(id=7) + u7 = User(id=5) attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7)) + u7.id = 7 self._test_filter_aliases( Address.user == u7, - ":param_1 = addresses_1.user_id", User, User.addresses + ":param_1 = addresses_1.user_id", User, User.addresses, + checkparams={'param_1': 7} ) + def test_m2o_compare_instance_negated_warn_on_none(self): + User, Address = self.classes.User, self.classes.Address + + u7_transient = User(id=None) + + with expect_warnings("Got None for value of column users.id; "): + self._test_filter_aliases( + Address.user != u7_transient, + "addresses_1.user_id != :user_id_1 " + "OR addresses_1.user_id IS NULL", + User, User.addresses, + checkparams={'user_id_1': None} + ) + def test_m2o_compare_instance_negated_orm_adapt(self): User, Address = self.classes.User, self.classes.Address - u7 = User(id=7) + u7 = User(id=5) attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7)) + u7.id = 7 + + u7_transient = User(id=7) self._test_filter_aliases( Address.user != u7, "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL", - User, User.addresses + User, User.addresses, + checkparams={'user_id_1': 7} ) self._test_filter_aliases( ~(Address.user == u7), ":param_1 != addresses_1.user_id", - User, User.addresses + User, User.addresses, + checkparams={'param_1': 7} ) self._test_filter_aliases( ~(Address.user != u7), "NOT (addresses_1.user_id != :user_id_1 " - "OR addresses_1.user_id 
IS NULL)", User, User.addresses + "OR addresses_1.user_id IS NULL)", User, User.addresses, + checkparams={'user_id_1': 7} + ) + + self._test_filter_aliases( + Address.user != u7_transient, + "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL", + User, User.addresses, + checkparams={'user_id_1': 7} + ) + + self._test_filter_aliases( + ~(Address.user == u7_transient), ":param_1 != addresses_1.user_id", + User, User.addresses, + checkparams={'param_1': 7} + ) + + self._test_filter_aliases( + ~(Address.user != u7_transient), + "NOT (addresses_1.user_id != :user_id_1 " + "OR addresses_1.user_id IS NULL)", User, User.addresses, + checkparams={'user_id_1': 7} ) def test_m2o_compare_instance_aliased(self): User, Address = self.classes.User, self.classes.Address - u7 = User(id=7) + u7 = User(id=5) attributes.instance_state(u7)._commit_all(attributes.instance_dict(u7)) + u7.id = 7 + + u7_transient = User(id=7) a1 = aliased(Address) self._test( a1.user == u7, - ":param_1 = addresses_1.user_id") + ":param_1 = addresses_1.user_id", + checkparams={'param_1': 7}) self._test( a1.user != u7, - "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL") + "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL", + checkparams={'user_id_1': 7}) + + a1 = aliased(Address) + self._test( + a1.user == u7_transient, + ":param_1 = addresses_1.user_id", + checkparams={'param_1': 7}) + + self._test( + a1.user != u7_transient, + "addresses_1.user_id != :user_id_1 OR addresses_1.user_id IS NULL", + checkparams={'user_id_1': 7}) def test_selfref_relationship(self): @@ -1004,7 +1107,8 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): Node.children.any(Node.data == 'n1'), "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE " "nodes.id = nodes_1.parent_id AND nodes_1.data = :data_1)", - entity=Node + entity=Node, + checkparams={'data_1': 'n1'} ) # needs autoaliasing @@ -1012,36 +1116,43 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): Node.children == None, "NOT (EXISTS (SELECT 1 FROM nodes AS nodes_1 " "WHERE nodes.id = nodes_1.parent_id))", - entity=Node + entity=Node, + checkparams={} ) self._test( Node.parent == None, - "nodes.parent_id IS NULL" + "nodes.parent_id IS NULL", + checkparams={} ) self._test( nalias.parent == None, - "nodes_1.parent_id IS NULL" + "nodes_1.parent_id IS NULL", + checkparams={} ) self._test( nalias.parent != None, - "nodes_1.parent_id IS NOT NULL" + "nodes_1.parent_id IS NOT NULL", + checkparams={} ) self._test( nalias.children == None, "NOT (EXISTS (" "SELECT 1 FROM nodes WHERE nodes_1.id = nodes.parent_id))", - entity=nalias + entity=nalias, + checkparams={} ) self._test( nalias.children.any(Node.data == 'some data'), "EXISTS (SELECT 1 FROM nodes WHERE " "nodes_1.id = nodes.parent_id AND nodes.data = :data_1)", - entity=nalias) + entity=nalias, + checkparams={'data_1': 'some data'} + ) # this fails because self-referential any() is auto-aliasing; # the fact that we use "nalias" here means we get two aliases. 
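# (expanding on the above) nalias.children.any(...) generates its own
# anonymous alias for the child Node inside the EXISTS subquery, so
# the statement ends up with two distinct aliases of "nodes" rather
# than correlating back to nalias itself.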
@@ -1056,33 +1167,48 @@ class OperatorTest(QueryTest, AssertsCompiledSQL): nalias.parent.has(Node.data == 'some data'), "EXISTS (SELECT 1 FROM nodes WHERE nodes.id = nodes_1.parent_id " "AND nodes.data = :data_1)", - entity=nalias + entity=nalias, + checkparams={'data_1': 'some data'} ) self._test( Node.parent.has(Node.data == 'some data'), "EXISTS (SELECT 1 FROM nodes AS nodes_1 WHERE " "nodes_1.id = nodes.parent_id AND nodes_1.data = :data_1)", - entity=Node + entity=Node, + checkparams={'data_1': 'some data'} ) self._test( Node.parent == Node(id=7), - ":param_1 = nodes.parent_id" + ":param_1 = nodes.parent_id", + checkparams={"param_1": 7} ) self._test( nalias.parent == Node(id=7), - ":param_1 = nodes_1.parent_id" + ":param_1 = nodes_1.parent_id", + checkparams={"param_1": 7} + ) + + self._test( + nalias.parent != Node(id=7), + 'nodes_1.parent_id != :parent_id_1 ' + 'OR nodes_1.parent_id IS NULL', + checkparams={"parent_id_1": 7} ) self._test( nalias.parent != Node(id=7), - 'nodes_1.parent_id != :parent_id_1 OR nodes_1.parent_id IS NULL' + 'nodes_1.parent_id != :parent_id_1 ' + 'OR nodes_1.parent_id IS NULL', + checkparams={"parent_id_1": 7} ) self._test( - nalias.children.contains(Node(id=7)), "nodes_1.id = :param_1" + nalias.children.contains(Node(id=7, parent_id=12)), + "nodes_1.id = :param_1", + checkparams={"param_1": 12} ) def test_multilevel_any(self): @@ -1592,6 +1718,25 @@ class ColumnPropertyTest(_fixtures.FixtureTest, AssertsCompiledSQL): ) +class ComparatorTest(QueryTest): + def test_clause_element_query_resolve(self): + from sqlalchemy.orm.properties import ColumnProperty + User = self.classes.User + + class Comparator(ColumnProperty.Comparator): + def __init__(self, expr): + self.expr = expr + + def __clause_element__(self): + return self.expr + + sess = Session() + eq_( + sess.query(Comparator(User.id)).order_by(Comparator(User.id)).all(), + [(7, ), (8, ), (9, ), (10, )] + ) + + # more slice tests are available in test/orm/generative.py class SliceTest(QueryTest): def test_first(self): @@ -2530,10 +2675,12 @@ class YieldTest(_fixtures.FixtureTest): User = self.classes.User sess = create_session() - q = sess.query(User).yield_per(1) + q = sess.query(User).yield_per(15) q = q.execution_options(foo='bar') assert q._yield_per - eq_(q._execution_options, {"stream_results": True, "foo": "bar"}) + eq_( + q._execution_options, + {"stream_results": True, "foo": "bar", "max_row_buffer": 15}) def test_no_joinedload_opt(self): self._eagerload_mappings() @@ -2774,44 +2921,143 @@ class TextTest(QueryTest, AssertsCompiledSQL): [User(id=7), User(id=8), User(id=9), User(id=10)] ) - def test_order_by_w_eager(self): + def test_order_by_w_eager_one(self): + User = self.classes.User + s = create_session() + + # from 1.0.0 thru 1.0.2, the "name" symbol here was considered + # to be part of the things we need to ORDER BY and it was being + # placed into the inner query's columns clause, as part of + # query._compound_eager_statement where we add unwrap_order_by() + # to the columns clause. However, as #3392 illustrates, unlocatable + # string expressions like "name desc" will only fail in this scenario, + # so in general the changing of the query structure with string labels + # is dangerous. + # + # the queries here are again "invalid" from a SQL perspective, as the + # "name" field isn't matched up to anything. + # + with expect_warnings("Can't resolve label reference 'name';"): + self.assert_compile( + s.query(User).options(joinedload("addresses")). 
+ order_by(desc("name")).limit(1), + "SELECT anon_1.users_id AS anon_1_users_id, " + "anon_1.users_name AS anon_1_users_name, " + "addresses_1.id AS addresses_1_id, " + "addresses_1.user_id AS addresses_1_user_id, " + "addresses_1.email_address AS addresses_1_email_address " + "FROM (SELECT users.id AS users_id, users.name AS users_name " + "FROM users ORDER BY users.name " + "DESC LIMIT :param_1) AS anon_1 " + "LEFT OUTER JOIN addresses AS addresses_1 " + "ON anon_1.users_id = addresses_1.user_id " + "ORDER BY name DESC, addresses_1.id" + ) + + def test_order_by_w_eager_two(self): + User = self.classes.User + s = create_session() + + with expect_warnings("Can't resolve label reference 'name';"): + self.assert_compile( + s.query(User).options(joinedload("addresses")). + order_by("name").limit(1), + "SELECT anon_1.users_id AS anon_1_users_id, " + "anon_1.users_name AS anon_1_users_name, " + "addresses_1.id AS addresses_1_id, " + "addresses_1.user_id AS addresses_1_user_id, " + "addresses_1.email_address AS addresses_1_email_address " + "FROM (SELECT users.id AS users_id, users.name AS users_name " + "FROM users ORDER BY users.name " + "LIMIT :param_1) AS anon_1 " + "LEFT OUTER JOIN addresses AS addresses_1 " + "ON anon_1.users_id = addresses_1.user_id " + "ORDER BY name, addresses_1.id" + ) + + def test_order_by_w_eager_three(self): + User = self.classes.User + s = create_session() + + self.assert_compile( + s.query(User).options(joinedload("addresses")). + order_by("users_name").limit(1), + "SELECT anon_1.users_id AS anon_1_users_id, " + "anon_1.users_name AS anon_1_users_name, " + "addresses_1.id AS addresses_1_id, " + "addresses_1.user_id AS addresses_1_user_id, " + "addresses_1.email_address AS addresses_1_email_address " + "FROM (SELECT users.id AS users_id, users.name AS users_name " + "FROM users ORDER BY users.name " + "LIMIT :param_1) AS anon_1 " + "LEFT OUTER JOIN addresses AS addresses_1 " + "ON anon_1.users_id = addresses_1.user_id " + "ORDER BY anon_1.users_name, addresses_1.id" + ) + + # however! this works (again?) + eq_( + s.query(User).options(joinedload("addresses")). + order_by("users_name").first(), + User(name='chuck', addresses=[]) + ) + + def test_order_by_w_eager_four(self): User = self.classes.User Address = self.classes.Address s = create_session() - # here, we are seeing how Query has to take the order by expressions - # of the query and then add them to the columns list, so that the - # outer subquery can order by that same label. With the anonymous - # label, our column gets sucked up and restated again in the - # inner columns list! - # we could try to play games with making this "smarter" but it - # would add permanent overhead to Select._columns_plus_names, - # since that's where references would need to be resolved. - # so as it is, this query takes the _label_reference and makes a - # full blown proxy and all the rest of it. self.assert_compile( s.query(User).options(joinedload("addresses")). 
- order_by(desc("name")).limit(1), + order_by(desc("users_name")).limit(1), "SELECT anon_1.users_id AS anon_1_users_id, " "anon_1.users_name AS anon_1_users_name, " - "anon_1.anon_2 AS anon_1_anon_2, " "addresses_1.id AS addresses_1_id, " "addresses_1.user_id AS addresses_1_user_id, " "addresses_1.email_address AS addresses_1_email_address " - "FROM (SELECT users.id AS users_id, users.name AS users_name, " - "users.name AS anon_2 FROM users ORDER BY users.name " - "DESC LIMIT :param_1) AS anon_1 " + "FROM (SELECT users.id AS users_id, users.name AS users_name " + "FROM users ORDER BY users.name DESC " + "LIMIT :param_1) AS anon_1 " "LEFT OUTER JOIN addresses AS addresses_1 " "ON anon_1.users_id = addresses_1.user_id " - "ORDER BY anon_1.anon_2 DESC, addresses_1.id" + "ORDER BY anon_1.users_name DESC, addresses_1.id" ) + # however! this works (again?) eq_( s.query(User).options(joinedload("addresses")). - order_by(desc("name")).first(), + order_by(desc("users_name")).first(), User(name='jack', addresses=[Address()]) ) + def test_order_by_w_eager_five(self): + """essentially the same as test_eager_relations -> test_limit_3, + but test for textual label elements that are freeform. + this is again #3392.""" + + User = self.classes.User + Address = self.classes.Address + Order = self.classes.Order + + sess = create_session() + + q = sess.query(User, Address.email_address.label('email_address')) + + l = q.join('addresses').options(joinedload(User.orders)).\ + order_by( + "email_address desc").limit(1).offset(0) + with expect_warnings( + "Can't resolve label reference 'email_address desc'"): + eq_( + [ + (User( + id=7, + orders=[Order(id=1), Order(id=3), Order(id=5)], + addresses=[Address(id=1)] + ), 'jack@bean.com') + ], + l.all()) + class TextWarningTest(QueryTest, AssertsCompiledSQL): def _test(self, fn, arg, offending_clause, expected): @@ -2944,6 +3190,7 @@ class ParentTest(QueryTest, AssertsCompiledSQL): o.all() ) + def test_with_pending_autoflush(self): Order, User = self.classes.Order, self.classes.User @@ -3018,7 +3265,133 @@ class ParentTest(QueryTest, AssertsCompiledSQL): ) -class SynonymTest(QueryTest): +class WithTransientOnNone(_fixtures.FixtureTest, AssertsCompiledSQL): + run_inserts = None + __dialect__ = 'default' + + def _fixture1(self): + User, Address = self.classes.User, self.classes.Address + users, addresses = self.tables.users, self.tables.addresses + + mapper(User, users) + mapper(Address, addresses, properties={ + 'user': relationship(User), + 'special_user': relationship( + User, primaryjoin=and_( + users.c.id == addresses.c.user_id, + users.c.name == addresses.c.email_address)) + }) + + def test_filter_with_transient_assume_pk(self): + self._fixture1() + User, Address = self.classes.User, self.classes.Address + + sess = Session() + + q = sess.query(Address).filter(Address.user == User()) + with expect_warnings("Got None for value of column "): + self.assert_compile( + q, + "SELECT addresses.id AS addresses_id, " + "addresses.user_id AS addresses_user_id, " + "addresses.email_address AS addresses_email_address " + "FROM addresses WHERE :param_1 = addresses.user_id", + checkparams={'param_1': None} + ) + + def test_filter_with_transient_warn_for_none_against_non_pk(self): + self._fixture1() + User, Address = self.classes.User, self.classes.Address + + s = Session() + q = s.query(Address).filter(Address.special_user == User()) + with expect_warnings("Got None for value of column"): + + self.assert_compile( + q, + "SELECT addresses.id AS addresses_id, " + 
"addresses.user_id AS addresses_user_id, " + "addresses.email_address AS addresses_email_address " + "FROM addresses WHERE :param_1 = addresses.user_id " + "AND :param_2 = addresses.email_address", + checkparams={"param_1": None, "param_2": None} + ) + + def test_with_parent_with_transient_assume_pk(self): + self._fixture1() + User, Address = self.classes.User, self.classes.Address + + sess = Session() + + q = sess.query(User).with_parent(Address(), "user") + with expect_warnings("Got None for value of column"): + self.assert_compile( + q, + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users WHERE users.id = :param_1", + checkparams={'param_1': None} + ) + + def test_with_parent_with_transient_warn_for_none_against_non_pk(self): + self._fixture1() + User, Address = self.classes.User, self.classes.Address + + s = Session() + q = s.query(User).with_parent(Address(), "special_user") + with expect_warnings("Got None for value of column"): + + self.assert_compile( + q, + "SELECT users.id AS users_id, users.name AS users_name " + "FROM users WHERE users.id = :param_1 " + "AND users.name = :param_2", + checkparams={"param_1": None, "param_2": None} + ) + + def test_negated_contains_or_equals_plain_m2o(self): + self._fixture1() + User, Address = self.classes.User, self.classes.Address + + s = Session() + q = s.query(Address).filter(Address.user != User()) + with expect_warnings("Got None for value of column"): + self.assert_compile( + q, + + "SELECT addresses.id AS addresses_id, " + "addresses.user_id AS addresses_user_id, " + "addresses.email_address AS addresses_email_address " + "FROM addresses " + "WHERE addresses.user_id != :user_id_1 " + "OR addresses.user_id IS NULL", + checkparams={'user_id_1': None} + ) + + def test_negated_contains_or_equals_complex_rel(self): + self._fixture1() + User, Address = self.classes.User, self.classes.Address + + s = Session() + + # this one does *not* warn because we do the criteria + # without deferral + q = s.query(Address).filter(Address.special_user != User()) + self.assert_compile( + q, + "SELECT addresses.id AS addresses_id, " + "addresses.user_id AS addresses_user_id, " + "addresses.email_address AS addresses_email_address " + "FROM addresses " + "WHERE NOT (EXISTS (SELECT 1 " + "FROM users " + "WHERE users.id = addresses.user_id AND " + "users.name = addresses.email_address AND users.id IS NULL))", + checkparams={} + ) + + +class SynonymTest(QueryTest, AssertsCompiledSQL): + __dialect__ = 'default' @classmethod def setup_mappers(cls): @@ -3138,6 +3511,20 @@ class SynonymTest(QueryTest): Order(description="order 1"), Order(description="order 3"), Order(description="order 5")] == o + def test_froms_aliased_col(self): + Address, User = self.classes.Address, self.classes.User + + sess = create_session() + ua = aliased(User) + + q = sess.query(ua.name_syn).join( + Address, ua.id == Address.user_id) + self.assert_compile( + q, + "SELECT users_1.name AS users_1_name FROM " + "users AS users_1 JOIN addresses ON users_1.id = addresses.user_id" + ) + class ImmediateTest(_fixtures.FixtureTest): run_inserts = 'once' diff --git a/test/orm/test_rel_fn.py b/test/orm/test_rel_fn.py index 230f3b18a..8f15c4c39 100644 --- a/test/orm/test_rel_fn.py +++ b/test/orm/test_rel_fn.py @@ -3,9 +3,9 @@ from sqlalchemy.testing import assert_raises_message, eq_, \ from sqlalchemy.testing import fixtures from sqlalchemy.orm import relationships, foreign, remote from sqlalchemy import MetaData, Table, Column, ForeignKey, Integer, \ - select, ForeignKeyConstraint, 
exc, func, and_, String + select, ForeignKeyConstraint, exc, func, and_, String, Boolean from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY - +from sqlalchemy.testing import mock class _JoinFixtures(object): @classmethod @@ -71,6 +71,7 @@ class _JoinFixtures(object): ) cls.base = Table('base', m, Column('id', Integer, primary_key=True), + Column('flag', Boolean) ) cls.sub = Table('sub', m, Column('id', Integer, ForeignKey('base.id'), @@ -504,6 +505,31 @@ class _JoinFixtures(object): foreign(remote(self.selfref.c.sid))) ) + def _join_fixture_inh_selfref_w_entity(self, **kw): + fake_logger = mock.Mock(info=lambda *arg, **kw: None) + prop = mock.Mock( + parent=mock.Mock(), + mapper=mock.Mock(), + logger=fake_logger + ) + local_selectable = self.base.join(self.sub) + remote_selectable = self.base.join(self.sub_w_sub_rel) + + sub_w_sub_rel__sub_id = self.sub_w_sub_rel.c.sub_id._annotate( + {'parentmapper': prop.mapper}) + sub__id = self.sub.c.id._annotate({'parentmapper': prop.parent}) + sub_w_sub_rel__flag = self.base.c.flag._annotate( + {"parentmapper": prop.mapper}) + return relationships.JoinCondition( + local_selectable, remote_selectable, + local_selectable, remote_selectable, + primaryjoin=and_( + sub_w_sub_rel__sub_id == sub__id, + sub_w_sub_rel__flag == True + ), + prop=prop + ) + def _assert_non_simple_warning(self, fn): assert_raises_message( exc.SAWarning, @@ -904,6 +930,17 @@ class ColumnCollectionsTest(_JoinFixtures, fixtures.TestBase, [(self.purely_single_col.c.path, self.purely_single_col.c.path)] ) + def test_determine_local_remote_pairs_inh_selfref_w_entities(self): + joincond = self._join_fixture_inh_selfref_w_entity() + eq_( + joincond.local_remote_pairs, + [(self.sub.c.id, self.sub_w_sub_rel.c.sub_id)] + ) + eq_( + joincond.remote_columns, + set([self.base.c.flag, self.sub_w_sub_rel.c.sub_id]) + ) + class DirectionTest(_JoinFixtures, fixtures.TestBase, AssertsCompiledSQL): def test_determine_direction_compound_2(self): joincond = self._join_fixture_compound_expression_2( diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index ae5a8ef60..5a47903f0 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -7,7 +7,8 @@ from sqlalchemy.orm import mapper as orm_mapper import sqlalchemy as sa from sqlalchemy.util import u, ue, b -from sqlalchemy import Integer, String, ForeignKey, literal_column, event +from sqlalchemy import Integer, String, ForeignKey, \ + literal_column, event, Boolean from sqlalchemy.testing import engines from sqlalchemy import testing from sqlalchemy.testing.schema import Table @@ -18,6 +19,7 @@ from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy.testing.assertsql import AllOf, CompiledSQL + class UnitOfWorkTest(object): pass @@ -383,16 +385,26 @@ class ClauseAttributesTest(fixtures.MappedTest): Column('name', String(30)), Column('counter', Integer, default=1)) + Table('boolean_t', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('value', Boolean), + ) + @classmethod def setup_classes(cls): class User(cls.Comparable): pass + class HasBoolean(cls.Comparable): + pass + @classmethod def setup_mappers(cls): User, users_t = cls.classes.User, cls.tables.users_t - + HasBoolean, boolean_t = cls.classes.HasBoolean, cls.tables.boolean_t mapper(User, users_t) + mapper(HasBoolean, boolean_t) def test_update(self): User = self.classes.User @@ -446,6 +458,30 @@ class ClauseAttributesTest(fixtures.MappedTest): assert (u.counter == 
5) is True + def test_update_special_comparator(self): + HasBoolean = self.classes.HasBoolean + + # make sure the comparison we're shooting + # for is invalid, otherwise we need to + # test something else here + assert_raises_message( + TypeError, + "Boolean value of this clause is not defined", + bool, None == sa.false() + ) + s = create_session() + hb = HasBoolean(value=None) + s.add(hb) + s.flush() + + hb.value = sa.false() + + s.flush() + + # needs to be refreshed + assert 'value' not in hb.__dict__ + eq_(hb.value, False) + class PassiveDeletesTest(fixtures.MappedTest): __requires__ = ('foreign_keys',) diff --git a/test/orm/test_unitofworkv2.py b/test/orm/test_unitofworkv2.py index cef71370d..9e9f400be 100644 --- a/test/orm/test_unitofworkv2.py +++ b/test/orm/test_unitofworkv2.py @@ -1800,7 +1800,13 @@ class LoadersUsingCommittedTest(UOWTest): class NoAttrEventInFlushTest(fixtures.MappedTest): - """test [ticket:3167]""" + """test [ticket:3167]. + + See also RefreshFlushInReturningTest in test/orm/test_events.py which + tests the positive case for the refresh_flush event, added in + [ticket:3427]. + + """ __backend__ = True @@ -1840,3 +1846,111 @@ class NoAttrEventInFlushTest(fixtures.MappedTest): eq_(t1.id, 1) eq_(t1.prefetch_val, 5) eq_(t1.returning_val, 5) + + +class TypeWoBoolTest(fixtures.MappedTest, testing.AssertsExecutionResults): + """test support for custom datatypes that return a non-__bool__ value + when compared via __eq__(), eg. ticket 3469""" + + @classmethod + def define_tables(cls, metadata): + from sqlalchemy import TypeDecorator + + class NoBool(object): + def __nonzero__(self): + raise NotImplementedError("not supported") + + class MyWidget(object): + def __init__(self, text): + self.text = text + + def __eq__(self, other): + return NoBool() + + cls.MyWidget = MyWidget + + class MyType(TypeDecorator): + impl = String(50) + + def process_bind_param(self, value, dialect): + if value is not None: + value = value.text + return value + + def process_result_value(self, value, dialect): + if value is not None: + value = MyWidget(value) + return value + + Table( + 'test', metadata, + Column('id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('value', MyType), + Column('unrelated', String(50)) + ) + + @classmethod + def setup_classes(cls): + class Thing(cls.Basic): + pass + + @classmethod + def setup_mappers(cls): + Thing = cls.classes.Thing + + mapper(Thing, cls.tables.test) + + def test_update_against_none(self): + Thing = self.classes.Thing + + s = Session() + s.add(Thing(value=self.MyWidget("foo"))) + s.commit() + + t1 = s.query(Thing).first() + t1.value = None + s.commit() + + eq_( + s.query(Thing.value).scalar(), None + ) + + def test_update_against_something_else(self): + Thing = self.classes.Thing + + s = Session() + s.add(Thing(value=self.MyWidget("foo"))) + s.commit() + + t1 = s.query(Thing).first() + t1.value = self.MyWidget("bar") + s.commit() + + eq_( + s.query(Thing.value).scalar().text, "bar" + ) + + def test_no_update_no_change(self): + Thing = self.classes.Thing + + s = Session() + s.add(Thing(value=self.MyWidget("foo"), unrelated='unrelated')) + s.commit() + + t1 = s.query(Thing).first() + t1.unrelated = 'something else' + + self.assert_sql_execution( + testing.db, + s.commit, + CompiledSQL( + "UPDATE test SET unrelated=:unrelated " + "WHERE test.id = :test_id", + [{'test_id': 1, 'unrelated': 'something else'}] + ), + ) + + eq_( + s.query(Thing.value).scalar().text, "foo" + ) diff --git a/test/orm/test_update_delete.py 
b/test/orm/test_update_delete.py index dedc2133b..973053947 100644 --- a/test/orm/test_update_delete.py +++ b/test/orm/test_update_delete.py @@ -3,7 +3,7 @@ from sqlalchemy.testing import fixtures from sqlalchemy import Integer, String, ForeignKey, or_, exc, \ select, func, Boolean, case, text, column from sqlalchemy.orm import mapper, relationship, backref, Session, \ - joinedload, synonym + joinedload, synonym, query from sqlalchemy import testing from sqlalchemy.testing.schema import Table, Column @@ -907,6 +907,18 @@ class ExpressionUpdateTest(fixtures.MappedTest): eq_(d1.cnt, 2) sess.close() + def test_update_args(self): + Data = self.classes.Data + session = testing.mock.Mock(wraps=Session()) + update_args = {"mysql_limit": 1} + query.Query(Data, session).update({Data.cnt: Data.cnt + 1}, + update_args=update_args) + eq_(session.execute.call_count, 1) + args, kwargs = session.execute.call_args + eq_(len(args), 1) + update_stmt = args[0] + eq_(update_stmt.dialect_kwargs, update_args) + class InheritTest(fixtures.DeclarativeMappedTest): diff --git a/test/orm/test_utils.py b/test/orm/test_utils.py index ae225ad92..168cee19c 100644 --- a/test/orm/test_utils.py +++ b/test/orm/test_utils.py @@ -222,6 +222,56 @@ class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL): "WHERE point_1.x > point.x" ) + def test_parententity_vs_parentmapper(self): + class Point(object): + pass + + self._fixture(Point, properties={ + 'x_syn': synonym("x") + }) + pa = aliased(Point) + + is_(Point.x_syn._parententity, inspect(Point)) + is_(Point.x._parententity, inspect(Point)) + is_(Point.x_syn._parentmapper, inspect(Point)) + is_(Point.x._parentmapper, inspect(Point)) + + is_( + Point.x_syn.__clause_element__()._annotations['parententity'], + inspect(Point)) + is_( + Point.x.__clause_element__()._annotations['parententity'], + inspect(Point)) + is_( + Point.x_syn.__clause_element__()._annotations['parentmapper'], + inspect(Point)) + is_( + Point.x.__clause_element__()._annotations['parentmapper'], + inspect(Point)) + + pa = aliased(Point) + + is_(pa.x_syn._parententity, inspect(pa)) + is_(pa.x._parententity, inspect(pa)) + is_(pa.x_syn._parentmapper, inspect(Point)) + is_(pa.x._parentmapper, inspect(Point)) + + is_( + pa.x_syn.__clause_element__()._annotations['parententity'], + inspect(pa) + ) + is_( + pa.x.__clause_element__()._annotations['parententity'], + inspect(pa) + ) + is_( + pa.x_syn.__clause_element__()._annotations['parentmapper'], + inspect(Point)) + is_( + pa.x.__clause_element__()._annotations['parentmapper'], + inspect(Point)) + + class IdentityKeyTest(_fixtures.FixtureTest): run_inserts = None diff --git a/test/orm/test_versioning.py b/test/orm/test_versioning.py index 8348cb588..d46799c5a 100644 --- a/test/orm/test_versioning.py +++ b/test/orm/test_versioning.py @@ -355,6 +355,97 @@ class VersioningTest(fixtures.MappedTest): ) +class NoBumpOnRelationshipTest(fixtures.MappedTest): + __backend__ = True + + @classmethod + def define_tables(cls, metadata): + Table( + 'a', metadata, + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('version_id', Integer) + ) + Table( + 'b', metadata, + Column( + 'id', Integer, primary_key=True, + test_needs_autoincrement=True), + Column('a_id', ForeignKey('a.id')) + ) + + @classmethod + def setup_classes(cls): + class A(cls.Basic): + pass + + class B(cls.Basic): + pass + + def _run_test(self, auto_version_counter=True): + A, B = self.classes('A', 'B') + s = Session() + if auto_version_counter: + a1 = A() + else: + a1 = 
A(version_id=1)
+        s.add(a1)
+        s.commit()
+        eq_(a1.version_id, 1)
+
+        b1 = B()
+        b1.a = a1
+        s.add(b1)
+        s.commit()
+
+        eq_(a1.version_id, 1)
+
+    def test_plain_counter(self):
+        A, B = self.classes('A', 'B')
+        a, b = self.tables('a', 'b')
+
+        mapper(
+            A, a, properties={
+                'bs': relationship(B, backref='a')
+            },
+            version_id_col=a.c.version_id,
+        )
+        mapper(B, b)
+
+        self._run_test()
+
+    def test_functional_counter(self):
+        A, B = self.classes('A', 'B')
+        a, b = self.tables('a', 'b')
+
+        mapper(
+            A, a, properties={
+                'bs': relationship(B, backref='a')
+            },
+            version_id_col=a.c.version_id,
+            version_id_generator=lambda num: (num or 0) + 1
+        )
+        mapper(B, b)
+
+        self._run_test()
+
+    def test_no_counter(self):
+        A, B = self.classes('A', 'B')
+        a, b = self.tables('a', 'b')
+
+        mapper(
+            A, a, properties={
+                'bs': relationship(B, backref='a')
+            },
+            version_id_col=a.c.version_id,
+            version_id_generator=False
+        )
+        mapper(B, b)
+
+        self._run_test(False)
+
+
 class ColumnTypeTest(fixtures.MappedTest):
     __backend__ = True
@@ -587,6 +678,53 @@ class AlternateGeneratorTest(fixtures.MappedTest):
         sess2.commit

+class PlainInheritanceTest(fixtures.MappedTest):
+    __backend__ = True
+
+    @classmethod
+    def define_tables(cls, metadata):
+        Table(
+            'base', metadata,
+            Column(
+                'id', Integer, primary_key=True,
+                test_needs_autoincrement=True),
+            Column('version_id', Integer, nullable=True),
+            Column('data', String(50))
+        )
+        Table(
+            'sub', metadata,
+            Column('id', Integer, ForeignKey('base.id'), primary_key=True),
+            Column('sub_data', String(50))
+        )
+
+    @classmethod
+    def setup_classes(cls):
+
+        class Base(cls.Basic):
+            pass
+
+        class Sub(Base):
+            pass
+
+    def test_update_child_table_only(self):
+        Base, sub, base, Sub = (
+            self.classes.Base, self.tables.sub, self.tables.base,
+            self.classes.Sub)
+
+        mapper(Base, base, version_id_col=base.c.version_id)
+        mapper(Sub, sub, inherits=Base)
+
+        s = Session()
+        s1 = Sub(data='b', sub_data='s')
+        s.add(s1)
+        s.commit()
+
+        s1.sub_data = 's2'
+        s.commit()
+
+        eq_(s1.version_id, 2)
+
+
 class InheritanceTwoVersionIdsTest(fixtures.MappedTest):
     """Test versioning where both parent/child table have a versioning column.
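
The versioning tests above all exercise the same mapper-level counter:
``version_id_col`` names the column, and ``version_id_generator`` may be left
at its default (plain integer increment), supplied as a callable, or disabled
with ``False``. The following is a minimal runnable sketch of the core
assertion in NoBumpOnRelationshipTest, written against the classical
``mapper()`` API these tests use; the Widget/Part names and the in-memory
SQLite engine are illustrative assumptions, not part of the patch.

    # Sketch: a relationship-only change must not bump the parent's
    # version counter, since no UPDATE is emitted against its row.
    from sqlalchemy import (
        Column, ForeignKey, Integer, MetaData, Table, create_engine)
    from sqlalchemy.orm import Session, mapper, relationship

    metadata = MetaData()
    widget = Table(
        'widget', metadata,
        Column('id', Integer, primary_key=True),
        Column('version_id', Integer))
    part = Table(
        'part', metadata,
        Column('id', Integer, primary_key=True),
        Column('widget_id', ForeignKey('widget.id')))

    class Widget(object):
        pass

    class Part(object):
        pass

    # Default generator: version_id is set to 1 on INSERT and
    # incremented on each UPDATE of the widget row itself.
    mapper(
        Widget, widget,
        properties={'parts': relationship(Part, backref='widget')},
        version_id_col=widget.c.version_id)
    mapper(Part, part)

    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    s = Session(bind=engine)
    w = Widget()
    s.add(w)
    s.commit()
    assert w.version_id == 1

    # Appending a Part only INSERTs into 'part'; the widget row is
    # untouched, so the counter stays at 1 (the "no bump" case).
    w.parts.append(Part())
    s.commit()
    assert w.version_id == 1
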
diff --git a/test/profiles.txt b/test/profiles.txt index 24d3b9f76..691d1a54d 100644 --- a/test/profiles.txt +++ b/test/profiles.txt @@ -13,6 +13,7 @@ # TEST: test.aaa_profiling.test_compiler.CompileTest.test_insert +test.aaa_profiling.test_compiler.CompileTest.test_insert 2.6_sqlite_pysqlite_nocextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_cextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_mysql_mysqldb_nocextensions 74 test.aaa_profiling.test_compiler.CompileTest.test_insert 2.7_postgresql_psycopg2_cextensions 74 @@ -34,6 +35,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_insert 3.4_sqlite_pysqlite_noc # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select +test.aaa_profiling.test_compiler.CompileTest.test_select 2.6_sqlite_pysqlite_nocextensions 157 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_cextensions 153 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_mysql_mysqldb_nocextensions 153 test.aaa_profiling.test_compiler.CompileTest.test_select 2.7_postgresql_psycopg2_cextensions 153 @@ -46,15 +48,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_postgresql_psycopg2_nocextensions 166 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_cextensions 166 test.aaa_profiling.test_compiler.CompileTest.test_select 3.3_sqlite_pysqlite_nocextensions 166 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_cextensions 166 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_nocextensions 166 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 166 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 166 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 166 -test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_nocextensions 166 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_cextensions 170 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_mysql_pymysql_nocextensions 170 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_cextensions 170 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_postgresql_psycopg2_nocextensions 170 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_cextensions 170 +test.aaa_profiling.test_compiler.CompileTest.test_select 3.4_sqlite_pysqlite_nocextensions 170 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_select_labels +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.6_sqlite_pysqlite_nocextensions 190 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_cextensions 188 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_mysql_mysqldb_nocextensions 188 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 2.7_postgresql_psycopg2_cextensions 188 @@ -67,15 +70,16 @@ test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_p test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_postgresql_psycopg2_nocextensions 201 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_cextensions 201 test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.3_sqlite_pysqlite_nocextensions 201 
-test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_cextensions 201 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_nocextensions 201 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 201 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 201 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 201 -test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_nocextensions 201 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_cextensions 203 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_mysql_pymysql_nocextensions 203 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_cextensions 203 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_postgresql_psycopg2_nocextensions 203 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_cextensions 203 +test.aaa_profiling.test_compiler.CompileTest.test_select_labels 3.4_sqlite_pysqlite_nocextensions 203 # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update +test.aaa_profiling.test_compiler.CompileTest.test_update 2.6_sqlite_pysqlite_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_cextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_mysql_mysqldb_nocextensions 77 test.aaa_profiling.test_compiler.CompileTest.test_update 2.7_postgresql_psycopg2_cextensions 77 @@ -97,6 +101,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update 3.4_sqlite_pysqlite_noc # TEST: test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause +test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.6_sqlite_pysqlite_nocextensions 146 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_cextensions 146 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_mysql_mysqldb_nocextensions 146 test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 2.7_postgresql_psycopg2_cextensions 146 @@ -118,6 +123,7 @@ test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause 3.4_sqlite_ # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set +test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.6_sqlite_pysqlite_nocextensions 4262 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_cextensions 4262 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_mysql_mysqldb_nocextensions 4262 test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 2.7_postgresql_psycopg2_cextensions 4262 @@ -139,6 +145,7 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_attribute_set 3.4_sqlite_ # TEST: test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove +test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.6_sqlite_pysqlite_nocextensions 6426 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_cextensions 6426 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_mysql_mysqldb_nocextensions 6426 test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove 2.7_postgresql_psycopg2_cextensions 6426 @@ -160,6 
+167,7 @@ test.aaa_profiling.test_orm.AttributeOverheadTest.test_collection_append_remove # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.6_sqlite_pysqlite_nocextensions 26358 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_cextensions 16194 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_mysql_mysqldb_nocextensions 25197 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 2.7_postgresql_psycopg2_cextensions 28177 @@ -172,15 +180,16 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycop test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_postgresql_psycopg2_nocextensions 26194 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_cextensions 17361 test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.3_sqlite_pysqlite_nocextensions 26364 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_cextensions 129933 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_nocextensions 138936 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 17191 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 26194 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 17361 -test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 26364 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_cextensions 83733 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_mysql_pymysql_nocextensions 92736 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_cextensions 18221 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_postgresql_psycopg2_nocextensions 27224 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_cextensions 18393 +test.aaa_profiling.test_orm.DeferOptionsTest.test_baseline 3.4_sqlite_pysqlite_nocextensions 27396 # TEST: test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.6_sqlite_pysqlite_nocextensions 26282 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_cextensions 22212 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_mysql_mysqldb_nocextensions 25215 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 2.7_postgresql_psycopg2_cextensions 22183 @@ -193,15 +202,16 @@ test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_postgresql_psycopg2_nocextensions 26208 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_cextensions 23309 test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.3_sqlite_pysqlite_nocextensions 26312 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_cextensions 51393 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_nocextensions 54396 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 23205 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 26208 
-test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 23309 -test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 26312 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_cextensions 47353 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_mysql_pymysql_nocextensions 50356 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_cextensions 24215 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_postgresql_psycopg2_nocextensions 27218 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_cextensions 24321 +test.aaa_profiling.test_orm.DeferOptionsTest.test_defer_many_cols 3.4_sqlite_pysqlite_nocextensions 27324 # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.6_sqlite_pysqlite_nocextensions 17988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_cextensions 17988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_mysql_mysqldb_nocextensions 17988 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_identity 2.7_postgresql_psycopg2_cextensions 17988 @@ -223,6 +233,7 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_ # TEST: test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.6_sqlite_pysqlite_nocextensions 161101 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_cextensions 127101 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_mysql_mysqldb_nocextensions 128851 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 2.7_postgresql_psycopg2_cextensions 120101 @@ -235,15 +246,16 @@ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_ test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_postgresql_psycopg2_nocextensions 127306 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_cextensions 165355 test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.3_sqlite_pysqlite_nocextensions 167105 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_cextensions 206806 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_nocextensions 208556 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 125605 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 127306 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 165355 -test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 167105 
+test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_cextensions 187056 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_mysql_pymysql_nocextensions 188855 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_cextensions 128556 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_postgresql_psycopg2_nocextensions 130306 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_cextensions 168806 +test.aaa_profiling.test_orm.LoadManyToOneFromIdentityTest.test_many_to_one_load_no_identity 3.4_sqlite_pysqlite_nocextensions 170556 # TEST: test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.6_sqlite_pysqlite_nocextensions 21505 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_cextensions 19393 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_mysql_mysqldb_nocextensions 19597 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 2.7_postgresql_psycopg2_cextensions 18881 @@ -256,15 +268,16 @@ test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3. test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_postgresql_psycopg2_nocextensions 19644 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_cextensions 22066 test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.3_sqlite_pysqlite_nocextensions 22221 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_cextensions 25092 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_nocextensions 25296 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 19428 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19632 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 22078 -test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 22233 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_cextensions 23716 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_mysql_pymysql_nocextensions 23871 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_cextensions 19552 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_postgresql_psycopg2_nocextensions 19744 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_cextensions 22051 +test.aaa_profiling.test_orm.MergeBackrefsTest.test_merge_pending_with_all_pks 3.4_sqlite_pysqlite_nocextensions 22255 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_load +test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.6_sqlite_pysqlite_nocextensions 1520 test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_mysql_mysqldb_cextensions 1400 test.aaa_profiling.test_orm.MergeTest.test_merge_load 
2.7_mysql_mysqldb_nocextensions 1415 test.aaa_profiling.test_orm.MergeTest.test_merge_load 2.7_postgresql_psycopg2_cextensions 1319 @@ -277,15 +290,16 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_ce test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_postgresql_psycopg2_nocextensions 1365 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_cextensions 1594 test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.3_sqlite_pysqlite_nocextensions 1609 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_cextensions 2289 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_nocextensions 2304 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1350 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1365 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1594 -test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1609 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_cextensions 2038 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_mysql_pymysql_nocextensions 2053 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_cextensions 1335 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_postgresql_psycopg2_nocextensions 1350 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_cextensions 1577 +test.aaa_profiling.test_orm.MergeTest.test_merge_load 3.4_sqlite_pysqlite_nocextensions 1592 # TEST: test.aaa_profiling.test_orm.MergeTest.test_merge_no_load +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.6_sqlite_pysqlite_nocextensions 89,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_cextensions 93,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_mysql_mysqldb_nocextensions 93,19 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 2.7_postgresql_psycopg2_cextensions 93,19 @@ -298,15 +312,16 @@ test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_postgresql_psycopg2_nocextensions 96,20 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_cextensions 96,20 test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.3_sqlite_pysqlite_nocextensions 96,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_cextensions 96,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_nocextensions 96,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 96,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 96,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 96,20 -test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 96,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_cextensions 92,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_mysql_pymysql_nocextensions 92,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_cextensions 92,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_postgresql_psycopg2_nocextensions 92,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_cextensions 
92,20 +test.aaa_profiling.test_orm.MergeTest.test_merge_no_load 3.4_sqlite_pysqlite_nocextensions 92,20 # TEST: test.aaa_profiling.test_orm.QueryTest.test_query_cols +test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.6_sqlite_pysqlite_nocextensions 8064 test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_cextensions 6220 test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_mysql_mysqldb_nocextensions 6750 test.aaa_profiling.test_orm.QueryTest.test_query_cols 2.7_postgresql_psycopg2_cextensions 6790 @@ -319,15 +334,16 @@ test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_postgresql_psycopg2_ce test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_postgresql_psycopg2_nocextensions 6864 test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_sqlite_pysqlite_cextensions 8016 test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.3_sqlite_pysqlite_nocextensions 8546 -test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_cextensions 18304 -test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_nocextensions 18834 -test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_cextensions 6334 -test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6864 -test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_cextensions 8016 -test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocextensions 8546 +test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_cextensions 13744 +test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_mysql_pymysql_nocextensions 14274 +test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_cextensions 6234 +test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_postgresql_psycopg2_nocextensions 6674 +test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_cextensions 7846 +test.aaa_profiling.test_orm.QueryTest.test_query_cols 3.4_sqlite_pysqlite_nocextensions 8376 # TEST: test.aaa_profiling.test_orm.SessionTest.test_expire_lots +test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.6_sqlite_pysqlite_nocextensions 1156 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_cextensions 1145 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_mysql_mysqldb_nocextensions 1148 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 2.7_postgresql_psycopg2_cextensions 1160 @@ -340,15 +356,16 @@ test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_postgresql_psycopg2 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_postgresql_psycopg2_nocextensions 1264 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_cextensions 1264 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.3_sqlite_pysqlite_nocextensions 1255 -test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_cextensions 1262 -test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_nocextensions 1253 -test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_cextensions 1259 -test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1251 -test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_cextensions 1261 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_cextensions 1254 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_mysql_pymysql_nocextensions 
1280 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_cextensions 1247 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_postgresql_psycopg2_nocextensions 1262 +test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_cextensions 1238 test.aaa_profiling.test_orm.SessionTest.test_expire_lots 3.4_sqlite_pysqlite_nocextensions 1272 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.6_sqlite_pysqlite_nocextensions 97 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_cextensions 95 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_mysql_mysqldb_nocextensions 95 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 2.7_postgresql_psycopg2_cextensions 95 @@ -361,15 +378,16 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psy test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_postgresql_psycopg2_nocextensions 82 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_cextensions 82 test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.3_sqlite_pysqlite_nocextensions 82 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_pymysql_cextensions 82 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_pymysql_nocextensions 82 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_cextensions 82 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_nocextensions 82 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_cextensions 82 -test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_nocextensions 82 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_pymysql_cextensions 83 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_mysql_pymysql_nocextensions 83 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_cextensions 83 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_postgresql_psycopg2_nocextensions 83 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_cextensions 83 +test.aaa_profiling.test_pool.QueuePoolTest.test_first_connect 3.4_sqlite_pysqlite_nocextensions 83 # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect +test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.6_sqlite_pysqlite_nocextensions 31 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_cextensions 31 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_mysql_mysqldb_nocextensions 31 test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 2.7_postgresql_psycopg2_cextensions 31 @@ -391,6 +409,7 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_connect 3.4_sqlite_pysqli # TEST: test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect +test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.6_sqlite_pysqlite_nocextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_cextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_mysql_mysqldb_nocextensions 8 test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 2.7_postgresql_psycopg2_cextensions 8 @@ -412,6 
+431,7 @@ test.aaa_profiling.test_pool.QueuePoolTest.test_second_samethread_connect 3.4_sq # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.6_sqlite_pysqlite_nocextensions 45 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_cextensions 43 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_mysql_mysqldb_nocextensions 45 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute 2.7_postgresql_psycopg2_cextensions 43 @@ -433,6 +453,7 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_connection_execute # TEST: test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute +test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.6_sqlite_pysqlite_nocextensions 84 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_cextensions 82 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_mysql_mysqldb_nocextensions 84 test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 2.7_postgresql_psycopg2_cextensions 82 @@ -454,6 +475,7 @@ test.aaa_profiling.test_resultset.ExecutionTest.test_minimal_engine_execute 3.4_ # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile +test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.6_sqlite_pysqlite_nocextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_cextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_mysql_mysqldb_nocextensions 15 test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 2.7_postgresql_psycopg2_cextensions 15 @@ -475,6 +497,7 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_contains_doesnt_compile 3.4 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_string +test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.6_sqlite_pysqlite_nocextensions 15439 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_cextensions 488 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_mysql_mysqldb_nocextensions 15488 test.aaa_profiling.test_resultset.ResultSetTest.test_string 2.7_postgresql_psycopg2_cextensions 20477 @@ -487,15 +510,16 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psyco test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_postgresql_psycopg2_nocextensions 14481 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_cextensions 440 test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.3_sqlite_pysqlite_nocextensions 14440 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_cextensions 159566 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_nocextensions 173566 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 481 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14481 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 440 -test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_nocextensions 14440 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 
3.4_mysql_pymysql_cextensions 87259 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_mysql_pymysql_nocextensions 101259 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_cextensions 501 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_postgresql_psycopg2_nocextensions 14501 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_cextensions 460 +test.aaa_profiling.test_resultset.ResultSetTest.test_string 3.4_sqlite_pysqlite_nocextensions 14460 # TEST: test.aaa_profiling.test_resultset.ResultSetTest.test_unicode +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.6_sqlite_pysqlite_nocextensions 15439 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_cextensions 488 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_mysql_mysqldb_nocextensions 45488 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 2.7_postgresql_psycopg2_cextensions 20477 @@ -508,12 +532,12 @@ test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psyc test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_postgresql_psycopg2_nocextensions 14481 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_cextensions 440 test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.3_sqlite_pysqlite_nocextensions 14440 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_cextensions 159566 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_nocextensions 173566 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 481 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14481 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 440 -test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_nocextensions 14440 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_cextensions 87259 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_mysql_pymysql_nocextensions 101259 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_cextensions 501 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_postgresql_psycopg2_nocextensions 14501 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_cextensions 460 +test.aaa_profiling.test_resultset.ResultSetTest.test_unicode 3.4_sqlite_pysqlite_nocextensions 14460 # TEST: test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation @@ -521,8 +545,8 @@ test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psyco test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 5833,295,3681,12720,1241,1980,2655 test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 5591,277,3569,11458,1134,1924,2489 test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 5613,277,3665,12630,1228,1931,2681 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 5591,277,3569,11458,1134,1924,2489 -test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5613,277,3665,12630,1228,1931,2681 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 
3.4_postgresql_psycopg2_cextensions 5619,277,3705,11902,1144,1966,2532 +test.aaa_profiling.test_zoomark.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 5624,277,3801,13074,1238,1970,2724 # TEST: test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation @@ -530,5 +554,5 @@ test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_p test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 2.7_postgresql_psycopg2_nocextensions 6341,407,6703,18167,1244,2598 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_cextensions 6228,393,6747,17582,1148,2623 test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.3_postgresql_psycopg2_nocextensions 6318,398,6851,18609,1234,2652 -test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6228,393,6747,17582,1148,2623 -test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6313,398,6851,18609,1234,2652 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_cextensions 6257,393,6891,18056,1159,2671 +test.aaa_profiling.test_zoomark_orm.ZooMarkTest.test_invocation 3.4_postgresql_psycopg2_nocextensions 6341,398,6995,19083,1245,2700 diff --git a/test/requirements.py b/test/requirements.py index 3ed6bea4d..db4daca20 100644 --- a/test/requirements.py +++ b/test/requirements.py @@ -130,7 +130,7 @@ class DefaultRequirements(SuiteRequirements): def temporary_tables(self): """target database supports temporary tables""" return skip_if( - ["mssql"], "sql server has some other syntax?" + ["mssql", "firebird"], "not supported (?)" ) @property @@ -669,6 +669,10 @@ class DefaultRequirements(SuiteRequirements): ) @property + def duplicate_key_raises_integrity_error(self): + return fails_on("postgresql+pg8000") + + @property def python2(self): return skip_if( lambda: sys.version_info >= (3,), @@ -723,12 +727,12 @@ class DefaultRequirements(SuiteRequirements): @property def range_types(self): def check_range_types(config): - if not against(config, "postgresql+psycopg2"): + if not against( + config, + ["postgresql+psycopg2", "postgresql+psycopg2cffi"]): return False try: - config.db.execute("select '[1,2)'::int4range;") - # only supported in psycopg 2.5+ - from psycopg2.extras import NumericRange + config.db.scalar("select '[1,2)'::int4range;") return True except: return False @@ -761,6 +765,27 @@ class DefaultRequirements(SuiteRequirements): ) @property + def psycopg2_native_json(self): + return self.psycopg2_compatibility + + @property + def psycopg2_native_hstore(self): + return self.psycopg2_compatibility + + @property + def psycopg2_compatibility(self): + return only_on( + ["postgresql+psycopg2", "postgresql+psycopg2cffi"] + ) + + @property + def psycopg2_or_pg8000_compatibility(self): + return only_on( + ["postgresql+psycopg2", "postgresql+psycopg2cffi", + "postgresql+pg8000"] + ) + + @property def percent_schema_names(self): return skip_if( [ @@ -807,11 +832,14 @@ class DefaultRequirements(SuiteRequirements): ) @property + def no_mssql_freetds(self): + return self.mssql_freetds.not_() + + @property def selectone(self): """target driver must support the literal statement 'select 1'""" return skip_if(["oracle", "firebird"], "non-standard SELECT scalar syntax") - @property def mysql_fully_case_sensitive(self): return only_if(self._has_mysql_fully_case_sensitive) diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py index 4b143c150..04e3171a9 
100644 --- a/test/sql/test_compiler.py +++ b/test/sql/test_compiler.py @@ -260,16 +260,16 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): class MyCompiler(compiler.SQLCompiler): - def get_select_precolumns(self, select): + def get_select_precolumns(self, select, **kw): result = "" if select._limit: result += "FIRST %s " % self.process( literal( - select._limit)) + select._limit), **kw) if select._offset: result += "SKIP %s " % self.process( literal( - select._offset)) + select._offset), **kw) return result def limit_clause(self, select, **kw): @@ -380,7 +380,7 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): # this is native_boolean=False for default dialect self.assert_compile( select([not_(True)], use_labels=True), - "SELECT :param_1 = 0" + "SELECT :param_1 = 0 AS anon_1" ) self.assert_compile( @@ -561,13 +561,13 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): self.assert_compile(exists([table1.c.myid], table1.c.myid == 5).select(), 'SELECT EXISTS (SELECT mytable.myid FROM ' - 'mytable WHERE mytable.myid = :myid_1)', + 'mytable WHERE mytable.myid = :myid_1) AS anon_1', params={'mytable_myid': 5}) self.assert_compile(select([table1, exists([1], from_obj=table2)]), 'SELECT mytable.myid, mytable.name, ' 'mytable.description, EXISTS (SELECT 1 ' - 'FROM myothertable) FROM mytable', + 'FROM myothertable) AS anon_1 FROM mytable', params={}) self.assert_compile(select([table1, exists([1], @@ -961,6 +961,19 @@ class SelectTest(fixtures.TestBase, AssertsCompiledSQL): dialect=dialect ) + def test_no_group_by_labels(self): + lab1 = (table1.c.myid + 12).label('foo') + lab2 = func.somefunc(table1.c.name).label('bar') + dialect = default.DefaultDialect() + + self.assert_compile( + select([lab1, lab2]).group_by(lab1, lab2), + "SELECT mytable.myid + :myid_1 AS foo, somefunc(mytable.name) " + "AS bar FROM mytable GROUP BY mytable.myid + :myid_1, " + "somefunc(mytable.name)", + dialect=dialect + ) + def test_conjunctions(self): a, b, c = text('a'), text('b'), text('c') x = and_(a, b, c) @@ -3523,7 +3536,3 @@ class ResultMapTest(fixtures.TestBase): (table1.c.description, 'description', 'description'), table1.c.description.type)} ) - - - - diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py index d024e1a27..3e8021ebe 100644 --- a/test/sql/test_constraints.py +++ b/test/sql/test_constraints.py @@ -8,8 +8,9 @@ from sqlalchemy.testing import fixtures, AssertsExecutionResults, \ from sqlalchemy import testing from sqlalchemy.engine import default from sqlalchemy.testing import engines +from sqlalchemy.testing.assertions import expect_warnings from sqlalchemy.testing import eq_ -from sqlalchemy.testing.assertsql import AllOf, RegexSQL, CompiledSQL +from sqlalchemy.testing.assertsql import AllOf, RegexSQL, CompiledSQL, DialectSQL from sqlalchemy.sql import table, column @@ -84,9 +85,11 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults): metadata.drop_all, testing.db ) else: - - with self.sql_execution_asserter() as asserter: - metadata.drop_all(testing.db, checkfirst=False) + with expect_warnings( + "Can't sort tables for DROP; an unresolvable " + "foreign key dependency "): + with self.sql_execution_asserter() as asserter: + metadata.drop_all(testing.db, checkfirst=False) asserter.assert_( AllOf( @@ -109,10 +112,11 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults): Column('id', Integer, primary_key=True), Column("aid", Integer), ForeignKeyConstraint(["aid"], ["a.id"], name="bfk")) - 
self._assert_cyclic_constraint(metadata, auto=True) + self._assert_cyclic_constraint( + metadata, auto=True, sqlite_warning=True) @testing.provide_metadata - def test_fk_column_auto_alter_constraint_create(self): + def test_fk_column_auto_alter_inline_constraint_create(self): metadata = self.metadata Table("a", metadata, @@ -125,7 +129,24 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults): ForeignKey("a.id", name="bfk") ), ) - self._assert_cyclic_constraint(metadata, auto=True) + self._assert_cyclic_constraint( + metadata, auto=True, sqlite_warning=True) + + @testing.provide_metadata + def test_fk_column_use_alter_inline_constraint_create(self): + metadata = self.metadata + + Table("a", metadata, + Column('id', Integer, primary_key=True), + Column('bid', Integer, ForeignKey("b.id")), + ) + Table("b", metadata, + Column('id', Integer, primary_key=True), + Column("aid", Integer, + ForeignKey("a.id", name="bfk", use_alter=True) + ), + ) + self._assert_cyclic_constraint(metadata, auto=False) @testing.provide_metadata def test_fk_table_use_alter_constraint_create(self): @@ -137,9 +158,10 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults): ForeignKeyConstraint(["bid"], ["b.id"]) ) Table( - "b", metadata, Column( - 'id', Integer, primary_key=True), Column( - "aid", Integer), ForeignKeyConstraint( + "b", metadata, + Column('id', Integer, primary_key=True), + Column("aid", Integer), + ForeignKeyConstraint( ["aid"], ["a.id"], use_alter=True, name="bfk")) self._assert_cyclic_constraint(metadata) @@ -157,63 +179,42 @@ class ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults): ForeignKey("a.id", use_alter=True, name="bfk") ), ) - self._assert_cyclic_constraint(metadata) + self._assert_cyclic_constraint(metadata, auto=False) + + def _assert_cyclic_constraint( + self, metadata, auto=False, sqlite_warning=False): + if testing.db.dialect.supports_alter: + self._assert_cyclic_constraint_supports_alter(metadata, auto=auto) + else: + self._assert_cyclic_constraint_no_alter( + metadata, auto=auto, sqlite_warning=sqlite_warning) - def _assert_cyclic_constraint(self, metadata, auto=False): + def _assert_cyclic_constraint_supports_alter(self, metadata, auto=False): table_assertions = [] if auto: - if testing.db.dialect.supports_alter: - table_assertions.append( - CompiledSQL('CREATE TABLE b (' - 'id INTEGER NOT NULL, ' - 'aid INTEGER, ' - 'PRIMARY KEY (id)' - ')' - ) - ) - else: - table_assertions.append( - CompiledSQL( - 'CREATE TABLE b (' - 'id INTEGER NOT NULL, ' - 'aid INTEGER, ' - 'PRIMARY KEY (id), ' - 'CONSTRAINT bfk FOREIGN KEY(aid) REFERENCES a (id)' - ')' - ) - ) - - if testing.db.dialect.supports_alter: - table_assertions.append( - CompiledSQL( - 'CREATE TABLE a (' - 'id INTEGER NOT NULL, ' - 'bid INTEGER, ' - 'PRIMARY KEY (id)' - ')' - ) - ) - else: - table_assertions.append( - CompiledSQL( - 'CREATE TABLE a (' - 'id INTEGER NOT NULL, ' - 'bid INTEGER, ' - 'PRIMARY KEY (id), ' - 'FOREIGN KEY(bid) REFERENCES b (id)' - ')' - ) + table_assertions = [ + CompiledSQL('CREATE TABLE b (' + 'id INTEGER NOT NULL, ' + 'aid INTEGER, ' + 'PRIMARY KEY (id)' + ')' + ), + CompiledSQL( + 'CREATE TABLE a (' + 'id INTEGER NOT NULL, ' + 'bid INTEGER, ' + 'PRIMARY KEY (id)' + ')' ) + ] else: - table_assertions.append( + table_assertions = [ CompiledSQL('CREATE TABLE b (' 'id INTEGER NOT NULL, ' 'aid INTEGER, ' 'PRIMARY KEY (id)' ')' - ) - ) - table_assertions.append( + ), CompiledSQL( 'CREATE TABLE a (' 'id INTEGER NOT NULL, ' @@ -222,41 +223,238 @@ class 
ConstraintGenTest(fixtures.TestBase, AssertsExecutionResults): 'FOREIGN KEY(bid) REFERENCES b (id)' ')' ) - ) + ] assertions = [AllOf(*table_assertions)] - if testing.db.dialect.supports_alter: - fk_assertions = [] + fk_assertions = [] + fk_assertions.append( + CompiledSQL('ALTER TABLE b ADD CONSTRAINT bfk ' + 'FOREIGN KEY(aid) REFERENCES a (id)') + ) + if auto: fk_assertions.append( - CompiledSQL('ALTER TABLE b ADD CONSTRAINT bfk ' - 'FOREIGN KEY(aid) REFERENCES a (id)') + CompiledSQL('ALTER TABLE a ADD ' + 'FOREIGN KEY(bid) REFERENCES b (id)') ) - if auto: - fk_assertions.append( - CompiledSQL('ALTER TABLE a ADD ' - 'FOREIGN KEY(bid) REFERENCES b (id)') + assertions.append(AllOf(*fk_assertions)) + + with self.sql_execution_asserter() as asserter: + metadata.create_all(checkfirst=False) + asserter.assert_(*assertions) + + assertions = [ + CompiledSQL('ALTER TABLE b DROP CONSTRAINT bfk'), + CompiledSQL("DROP TABLE a"), + CompiledSQL("DROP TABLE b") + ] + + with self.sql_execution_asserter() as asserter: + metadata.drop_all(checkfirst=False), + asserter.assert_(*assertions) + + def _assert_cyclic_constraint_no_alter( + self, metadata, auto=False, sqlite_warning=False): + table_assertions = [] + if auto: + table_assertions.append( + DialectSQL( + 'CREATE TABLE b (' + 'id INTEGER NOT NULL, ' + 'aid INTEGER, ' + 'PRIMARY KEY (id), ' + 'CONSTRAINT bfk FOREIGN KEY(aid) REFERENCES a (id)' + ')' + ) + ) + table_assertions.append( + DialectSQL( + 'CREATE TABLE a (' + 'id INTEGER NOT NULL, ' + 'bid INTEGER, ' + 'PRIMARY KEY (id), ' + 'FOREIGN KEY(bid) REFERENCES b (id)' + ')' + ) + ) + else: + table_assertions.append( + DialectSQL( + 'CREATE TABLE b (' + 'id INTEGER NOT NULL, ' + 'aid INTEGER, ' + 'PRIMARY KEY (id), ' + 'CONSTRAINT bfk FOREIGN KEY(aid) REFERENCES a (id)' + ')' ) - assertions.append(AllOf(*fk_assertions)) + ) + + table_assertions.append( + DialectSQL( + 'CREATE TABLE a (' + 'id INTEGER NOT NULL, ' + 'bid INTEGER, ' + 'PRIMARY KEY (id), ' + 'FOREIGN KEY(bid) REFERENCES b (id)' + ')' + ) + ) + + assertions = [AllOf(*table_assertions)] with self.sql_execution_asserter() as asserter: metadata.create_all(checkfirst=False) asserter.assert_(*assertions) + assertions = [AllOf( + CompiledSQL("DROP TABLE a"), + CompiledSQL("DROP TABLE b") + )] + + if sqlite_warning: + with expect_warnings("Can't sort tables for DROP; "): + with self.sql_execution_asserter() as asserter: + metadata.drop_all(checkfirst=False), + else: + with self.sql_execution_asserter() as asserter: + metadata.drop_all(checkfirst=False), + asserter.assert_(*assertions) + + @testing.force_drop_names("a", "b") + def test_cycle_unnamed_fks(self): + metadata = MetaData(testing.db) + + Table("a", metadata, + Column('id', Integer, primary_key=True), + Column('bid', Integer, ForeignKey("b.id")), + ) + + Table("b", metadata, + Column('id', Integer, primary_key=True), + Column("aid", Integer, ForeignKey("a.id")), + ) + + assertions = [ + AllOf( + CompiledSQL( + 'CREATE TABLE b (' + 'id INTEGER NOT NULL, ' + 'aid INTEGER, ' + 'PRIMARY KEY (id)' + ')' + ), + CompiledSQL( + 'CREATE TABLE a (' + 'id INTEGER NOT NULL, ' + 'bid INTEGER, ' + 'PRIMARY KEY (id)' + ')' + ) + ), + AllOf( + CompiledSQL('ALTER TABLE b ADD ' + 'FOREIGN KEY(aid) REFERENCES a (id)'), + CompiledSQL('ALTER TABLE a ADD ' + 'FOREIGN KEY(bid) REFERENCES b (id)') + ), + ] + with self.sql_execution_asserter() as asserter: + metadata.create_all(checkfirst=False) + if testing.db.dialect.supports_alter: - assertions = [ - CompiledSQL('ALTER TABLE b DROP CONSTRAINT bfk'), - 
CompiledSQL("DROP TABLE a"), - CompiledSQL("DROP TABLE b") - ] + asserter.assert_(*assertions) + + assert_raises_message( + exc.CircularDependencyError, + "Can't sort tables for DROP; an unresolvable foreign key " + "dependency exists between tables: a, b. " + "Please ensure that the " + "ForeignKey and ForeignKeyConstraint objects involved in the " + "cycle have names so that they can be dropped using " + "DROP CONSTRAINT.", + metadata.drop_all, checkfirst=False + ) else: - assertions = [AllOf( - CompiledSQL("DROP TABLE a"), - CompiledSQL("DROP TABLE b") - )] + with expect_warnings( + "Can't sort tables for DROP; an unresolvable " + "foreign key dependency exists between tables"): + with self.sql_execution_asserter() as asserter: + metadata.drop_all(checkfirst=False) + + asserter.assert_( + AllOf( + CompiledSQL("DROP TABLE b"), + CompiledSQL("DROP TABLE a"), + ) + ) + + @testing.force_drop_names("a", "b") + def test_cycle_named_fks(self): + metadata = MetaData(testing.db) + + Table("a", metadata, + Column('id', Integer, primary_key=True), + Column('bid', Integer, ForeignKey("b.id")), + ) + + Table("b", metadata, + Column('id', Integer, primary_key=True), + Column( + "aid", Integer, + ForeignKey("a.id", use_alter=True, name='aidfk')), + ) + assertions = [ + AllOf( + CompiledSQL( + 'CREATE TABLE b (' + 'id INTEGER NOT NULL, ' + 'aid INTEGER, ' + 'PRIMARY KEY (id)' + ')' + ), + CompiledSQL( + 'CREATE TABLE a (' + 'id INTEGER NOT NULL, ' + 'bid INTEGER, ' + 'PRIMARY KEY (id), ' + 'FOREIGN KEY(bid) REFERENCES b (id)' + ')' + ) + ), + CompiledSQL('ALTER TABLE b ADD CONSTRAINT aidfk ' + 'FOREIGN KEY(aid) REFERENCES a (id)'), + ] with self.sql_execution_asserter() as asserter: - metadata.drop_all(checkfirst=False), - asserter.assert_(*assertions) + metadata.create_all(checkfirst=False) + + if testing.db.dialect.supports_alter: + asserter.assert_(*assertions) + + with self.sql_execution_asserter() as asserter: + metadata.drop_all(checkfirst=False) + + asserter.assert_( + CompiledSQL("ALTER TABLE b DROP CONSTRAINT aidfk"), + AllOf( + CompiledSQL("DROP TABLE b"), + CompiledSQL("DROP TABLE a"), + ) + ) + else: + with self.sql_execution_asserter() as asserter: + metadata.drop_all(checkfirst=False) + + asserter.assert_( + AllOf( + CompiledSQL("DROP TABLE b"), + CompiledSQL("DROP TABLE a"), + ), + ) + + + + + @testing.requires.check_constraints @testing.provide_metadata @@ -1149,6 +1347,65 @@ class ConstraintAPITest(fixtures.TestBase): t2.append_column, c ) + def test_auto_append_uq_on_col_attach_four(self): + """Test that a uniqueconstraint that names Column and string names + won't autoattach using deferred column attachment. + + """ + m = MetaData() + + a = Column('a', Integer) + b = Column('b', Integer) + c = Column('c', Integer) + uq = UniqueConstraint(a, 'b', 'c') + + t = Table('tbl', m, a) + assert uq not in t.constraints + + t.append_column(b) + assert uq not in t.constraints + + t.append_column(c) + + # we don't track events for previously unknown columns + # named 'c' to be attached + assert uq not in t.constraints + + t.append_constraint(uq) + + assert uq in t.constraints + + eq_( + [cn for cn in t.constraints if isinstance(cn, UniqueConstraint)], + [uq] + ) + + def test_auto_append_uq_on_col_attach_five(self): + """Test that a uniqueconstraint that names Column and string names + *will* autoattach if the table has all those names up front. 
+
+        """
+        m = MetaData()
+
+        a = Column('a', Integer)
+        b = Column('b', Integer)
+        c = Column('c', Integer)
+
+        t = Table('tbl', m, a, c, b)
+
+        uq = UniqueConstraint(a, 'b', 'c')
+
+        assert uq in t.constraints
+
+        t.append_constraint(uq)
+
+        assert uq in t.constraints
+
+        eq_(
+            [cn for cn in t.constraints if isinstance(cn, UniqueConstraint)],
+            [uq]
+        )
+
     def test_index_asserts_cols_standalone(self):
         metadata = MetaData()
diff --git a/test/sql/test_cte.py b/test/sql/test_cte.py
index c7906dcb7..b59914afc 100644
--- a/test/sql/test_cte.py
+++ b/test/sql/test_cte.py
@@ -491,4 +491,4 @@ class CTETest(fixtures.TestBase, AssertsCompiledSQL):
             'FROM "order") pg suffix SELECT "order"."order" FROM "order", '
             'regional_sales WHERE "order"."order" > regional_sales."order"',
             dialect='postgresql'
-        )
\ No newline at end of file
+        )
diff --git a/test/sql/test_generative.py b/test/sql/test_generative.py
index 6b86614e6..9cf1ef612 100644
--- a/test/sql/test_generative.py
+++ b/test/sql/test_generative.py
@@ -454,6 +454,27 @@ class ClauseTest(fixtures.TestBase, AssertsCompiledSQL):
             str(f1), str(f2)
         )
 
+    def test_labeled_expression_adapt(self):
+        lbl_x = (t3.c.col1 == 1).label('x')
+        t3_alias = t3.alias()
+
+        adapter = sql_util.ColumnAdapter(t3_alias)
+
+        lblx_adapted = adapter.traverse(lbl_x)
+        is_not_(lblx_adapted._element, lbl_x._element)
+
+        lblx_adapted = adapter.traverse(lbl_x)
+        self.assert_compile(
+            select([lblx_adapted.self_group()]),
+            "SELECT (table3_1.col1 = :col1_1) AS x FROM table3 AS table3_1"
+        )
+
+        self.assert_compile(
+            select([lblx_adapted.is_(True)]),
+            "SELECT (table3_1.col1 = :col1_1) IS 1 AS anon_1 "
+            "FROM table3 AS table3_1"
+        )
+
     def test_text(self):
         clause = text(
             "select * from table where foo=:bar",
@@ -878,7 +899,6 @@ class ColumnAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
             a2_to_a1.columns[t2.c.col2], stmt2.c.col2
         )
-
 
     def test_wrapping_multiple(self):
         """illustrate that wrapping runs both adapters"""
 
@@ -1531,7 +1551,6 @@ class ClauseAdapterTest(fixtures.TestBase, AssertsCompiledSQL):
 
         eq_(l3._allow_label_resolve, False)
 
-
 class SpliceJoinsTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = 'default'
 
diff --git a/test/sql/test_insert.py b/test/sql/test_insert.py
index 8a41d4be7..3c533d75f 100644
--- a/test/sql/test_insert.py
+++ b/test/sql/test_insert.py
@@ -8,6 +8,7 @@ from sqlalchemy.testing import AssertsCompiledSQL,\
     assert_raises_message, fixtures
 from sqlalchemy.sql import crud
 
+
 class _InsertTestBase(object):
 
     @classmethod
@@ -175,6 +176,41 @@ class InsertTest(_InsertTestBase, fixtures.TablesTest, AssertsCompiledSQL):
             checkparams={"name_1": "foo"}
         )
 
+    def test_insert_from_select_cte_one(self):
+        table1 = self.tables.mytable
+
+        cte = select([table1.c.name]).where(table1.c.name == 'bar').cte()
+
+        sel = select([table1.c.myid, table1.c.name]).where(
+            table1.c.name == cte.c.name)
+
+        ins = self.tables.myothertable.insert().\
+            from_select(("otherid", "othername"), sel)
+        self.assert_compile(
+            ins,
+            "INSERT INTO myothertable (otherid, othername) WITH anon_1 AS "
+            "(SELECT mytable.name AS name FROM mytable "
+            "WHERE mytable.name = :name_1) "
+            "SELECT mytable.myid, mytable.name FROM mytable, anon_1 "
+            "WHERE mytable.name = anon_1.name",
+            checkparams={"name_1": "bar"}
+        )
+
+    def test_insert_from_select_cte_two(self):
+        table1 = self.tables.mytable
+
+        cte = table1.select().cte("c")
+        stmt = cte.select()
+        ins = table1.insert().from_select(table1.c, stmt)
+
+        self.assert_compile(
+            ins,
+            "INSERT INTO mytable (myid, name, description) "
+            "WITH c AS (SELECT mytable.myid AS myid, mytable.name AS name, "
+            "mytable.description AS description FROM mytable) "
+            "SELECT c.myid, c.name, c.description FROM c"
+        )
+
     def test_insert_from_select_select_alt_ordering(self):
         table1 = self.tables.mytable
         sel = select([table1.c.name, table1.c.myid]).where(
diff --git a/test/sql/test_labels.py b/test/sql/test_labels.py
index 1792a42d8..7f548eb49 100644
--- a/test/sql/test_labels.py
+++ b/test/sql/test_labels.py
@@ -138,8 +138,9 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
             issuperset(['this_is_the_data_column', s.c.this_is_the_data_column])
 
         assert \
-            set(compiled._create_result_map()['this_is_the_primarykey_column'][1]).\
+            set(compiled._create_result_map()['this_is_the_primarykey__1'][1]).\
             issuperset(['this_is_the_primarykey_column',
+                        'this_is_the_primarykey__1',
                         s.c.this_is_the_primarykey_column])
 
     def test_result_map_anon_alias(self):
@@ -150,29 +151,28 @@ class MaxIdentTest(fixtures.TestBase, AssertsCompiledSQL):
         s = select([q]).apply_labels()
 
         self.assert_compile(
-            s, 'SELECT '
-            'anon_1.this_is_the_primarykey_column '
-            'AS anon_1_this_is_the_prim_1, '
-            'anon_1.this_is_the_data_column '
-            'AS anon_1_this_is_the_data_2 '
-            'FROM ('
-            'SELECT '
-            'some_large_named_table.'
-            'this_is_the_primarykey_column '
-            'AS this_is_the_primarykey_column, '
-            'some_large_named_table.this_is_the_data_column '
-            'AS this_is_the_data_column '
-            'FROM '
-            'some_large_named_table '
-            'WHERE '
-            'some_large_named_table.this_is_the_primarykey_column '
-            '= :this_is_the_primarykey__1'
-            ') '
-            'AS anon_1', dialect=dialect)
+            s,
+            "SELECT "
+            "anon_1.this_is_the_primarykey__2 AS anon_1_this_is_the_prim_1, "
+            "anon_1.this_is_the_data_column AS anon_1_this_is_the_data_3 "
+            "FROM ("
+            "SELECT "
+            "some_large_named_table."
+            "this_is_the_primarykey_column AS this_is_the_primarykey__2, "
+            "some_large_named_table."
+            "this_is_the_data_column AS this_is_the_data_column "
+            "FROM "
+            "some_large_named_table "
+            "WHERE "
+            "some_large_named_table.this_is_the_primarykey_column "
+            "= :this_is_the_primarykey__1"
+            ") "
+            "AS anon_1", dialect=dialect)
+
         compiled = s.compile(dialect=dialect)
-        assert set(compiled._create_result_map()['anon_1_this_is_the_data_2'][1]).\
+        assert set(compiled._create_result_map()['anon_1_this_is_the_data_3'][1]).\
             issuperset([
-                'anon_1_this_is_the_data_2',
+                'anon_1_this_is_the_data_3',
                 q.corresponding_column(
                     table1.c.this_is_the_data_column)
             ])
@@ -542,3 +542,26 @@ class LabelLengthTest(fixtures.TestBase, AssertsCompiledSQL):
         compiled = s.compile(dialect=dialect)
         assert set(compiled._create_result_map()['_1'][1]).issuperset([
             'asdf_abcde', a1.c.abcde, '_1'])
+
+    def test_label_overlap_unlabeled(self):
+        """test that an anon col can't overlap with a fixed name, #3396"""
+
+        table1 = table(
+            "tablename", column('columnname_one'), column('columnn_1'))
+
+        stmt = select([table1]).apply_labels()
+
+        dialect = default.DefaultDialect(label_length=23)
+        self.assert_compile(
+            stmt,
+            "SELECT tablename.columnname_one AS tablename_columnn_1, "
+            "tablename.columnn_1 AS tablename_columnn_2 FROM tablename",
+            dialect=dialect
+        )
+        compiled = stmt.compile(dialect=dialect)
+        eq_(
+            set(compiled._create_result_map()),
+            set(['tablename_columnn_1', 'tablename_columnn_2'])
+        )
diff --git a/test/sql/test_metadata.py b/test/sql/test_metadata.py
index 1eec502e7..2e51b9a91 100644
--- a/test/sql/test_metadata.py
+++ b/test/sql/test_metadata.py
@@ -18,6 +18,7 @@ from sqlalchemy.testing import eq_, is_, mock
 from contextlib import contextmanager
 from sqlalchemy import util
 
+
 class MetaDataTest(fixtures.TestBase, ComparesTables):
 
     def test_metadata_connect(self):
@@ -393,7 +394,6 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
         assert t.c.x.default is s2
         assert m1._sequences['x_seq'] is s2
 
-
     def test_sequence_attach_to_table(self):
         m1 = MetaData()
         s1 = Sequence("s")
@@ -492,6 +492,21 @@ class MetaDataTest(fixtures.TestBase, ComparesTables):
             [d, b, a, c, e]
         )
 
+    def test_deterministic_order(self):
+        meta = MetaData()
+        a = Table('a', meta, Column('foo', Integer))
+        b = Table('b', meta, Column('foo', Integer))
+        c = Table('c', meta, Column('foo', Integer))
+        d = Table('d', meta, Column('foo', Integer))
+        e = Table('e', meta, Column('foo', Integer))
+
+        e.add_is_dependent_on(c)
+        a.add_is_dependent_on(b)
+        eq_(
+            meta.sorted_tables,
+            [b, c, d, a, e]
+        )
+
     def test_nonexistent(self):
         assert_raises(tsa.exc.NoSuchTableError, Table,
                       'fake_table',
@@ -3560,3 +3575,16 @@ class NamingConventionTest(fixtures.TestBase, AssertsCompiledSQL):
         u1.append_constraint(ck1)
 
         eq_(ck1.name, "ck_user_foo")
+
+    def test_pickle_metadata(self):
+        m = MetaData(naming_convention={"pk": "%(table_name)s_pk"})
+
+        m2 = pickle.loads(pickle.dumps(m))
+
+        eq_(m2.naming_convention, {"pk": "%(table_name)s_pk"})
+
+        t2a = Table('t2', m, Column('id', Integer, primary_key=True))
+        t2b = Table('t2', m2, Column('id', Integer, primary_key=True))
+
+        eq_(t2a.primary_key.name, t2b.primary_key.name)
+        eq_(t2b.primary_key.name, "t2_pk")
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 08afc3256..98f375018 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -275,13 +275,20 @@ class QueryTest(fixtures.TestBase):
             r = t6.insert().values(manual_id=id).execute()
             eq_(r.inserted_primary_key, [12, 1])
 
-    def test_implicit_id_insert_select(self):
+    def test_implicit_id_insert_select_columns(self):
         stmt = users.insert().from_select(
             (users.c.user_id, users.c.user_name),
             users.select().where(users.c.user_id == 20))
 
         testing.db.execute(stmt)
 
+    def test_implicit_id_insert_select_keys(self):
+        stmt = users.insert().from_select(
+            ["user_id", "user_name"],
+            users.select().where(users.c.user_id == 20))
+
+        testing.db.execute(stmt)
+
     def test_row_iteration(self):
         users.insert().execute(
             {'user_id': 7, 'user_name': 'jack'},
diff --git a/test/sql/test_selectable.py b/test/sql/test_selectable.py
index 3931f99e4..3390f4a77 100644
--- a/test/sql/test_selectable.py
+++ b/test/sql/test_selectable.py
@@ -2113,7 +2113,7 @@ class WithLabelsTest(fixtures.TestBase):
         self._assert_result_keys(sel, ['t1_a', 't2_b'])
 
 
-class SelectProxyTest(fixtures.TestBase):
+class ResultMapTest(fixtures.TestBase):
 
     def _fixture(self):
         m = MetaData()
@@ -2183,6 +2183,35 @@
         assert l1 in mapping
         assert ta.c.x not in mapping
 
+    def test_column_subquery_exists(self):
+        t = self._fixture()
+        s = exists().where(t.c.x == 5).select()
+        mapping = self._mapping(s)
+        assert t.c.x not in mapping
+        eq_(
+            [type(entry[-1]) for entry in s.compile()._result_columns],
+            [Boolean]
+        )
+
+    def test_column_subquery_plain(self):
+        t = self._fixture()
+        s1 = select([t.c.x]).where(t.c.x > 5).as_scalar()
+        s2 = select([s1])
+        mapping = self._mapping(s2)
+        assert t.c.x not in mapping
+        assert s1 in mapping
+        eq_(
+            [type(entry[-1]) for entry in s2.compile()._result_columns],
+            [Integer]
+        )
+
+    def test_unary_boolean(self):
+
+        s1 = select([not_(True)], use_labels=True)
+        eq_(
+            [type(entry[-1]) for entry in s1.compile()._result_columns],
+            [Boolean]
+        )
 
 class ForUpdateTest(fixtures.TestBase, AssertsCompiledSQL):
     __dialect__ = "default"
diff --git a/test/sql/test_text.py b/test/sql/test_text.py
index 1c3cb0cb4..78c3282ac 100644
--- a/test/sql/test_text.py
+++ b/test/sql/test_text.py
@@ -761,4 +761,3 @@ class OrderByLabelResolutionTest(fixtures.TestBase, AssertsCompiledSQL):
             "mytable_1.name AS t1name, foo(:foo_1) AS x "
             "FROM mytable AS mytable_1 ORDER BY mytable_1.myid, t1name, x"
         )
-
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index 017a176db..2545dec59 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -964,6 +964,14 @@ class UnicodeTest(fixtures.TestBase):
                 testing.db.dialect.returns_unicode_strings,
                 True if util.py3k else "conditional"
            )
+        elif testing.against("mysql+mysqldb"):
+            eq_(
+                testing.db.dialect.returns_unicode_strings,
+                True if util.py3k or util.asbool(
+                    testing.db.url.query.get("use_unicode")
+                )
+                else False
+            )
         else:
             expected = (testing.db.name, testing.db.driver) in \
                 (
diff --git a/tox.ini b/tox.ini
--- a/tox.ini
+++ b/tox.ini
@@ -5,8 +5,21 @@ envlist = full,py26,py27,py33,py34
 
 deps=pytest
     mock
+# -E : ignore PYTHON* environment variables (such as PYTHONPATH)
+# -s : don't add the user site directory to sys.path; also PYTHONNOUSERSITE;
+#      the latter is picked up by conftest.py
+setenv=
+    PYTHONPATH=
+    PYTHONNOUSERSITE=1
+
+# we need this because our CI has all the DBAPIs and such
+# pre-installed in individual site-packages directories.
 sitepackages=True
-usedevelop=True
+
+# always install fully and use that; this way options like
+# DISABLE_SQLALCHEMY_CEXT are honored
+usedevelop=False
+
 commands=
     python -m pytest {posargs}
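Reviewer note: the new test_insert_from_select_cte_one / test_insert_from_select_cte_two tests above pin down how a CTE attached to the SELECT inside Insert.from_select() renders, with the WITH clause appearing between the INSERT column list and the SELECT. A minimal standalone sketch of that usage follows; the src/dst tables and the in-memory SQLite URL are illustrative assumptions, not part of this patch:

    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, create_engine, select)

    metadata = MetaData()

    # hypothetical source/target tables standing in for mytable/myothertable
    src = Table('src', metadata,
                Column('id', Integer, primary_key=True),
                Column('name', String(30)))
    dst = Table('dst', metadata,
                Column('id', Integer, primary_key=True),
                Column('name', String(30)))

    # CTE consumed by the SELECT that feeds INSERT ... FROM SELECT
    cte = select([src.c.name]).where(src.c.name == 'bar').cte()
    sel = select([src.c.id, src.c.name]).where(src.c.name == cte.c.name)
    ins = dst.insert().from_select(['id', 'name'], sel)

    # requires an SQLite build new enough to support WITH (3.8.3+)
    engine = create_engine('sqlite://')  # illustrative in-memory database
    metadata.create_all(engine)
    engine.execute(src.insert(), [{'id': 1, 'name': 'bar'},
                                  {'id': 2, 'name': 'quux'}])
    engine.execute(ins)
    print(engine.execute(select([dst])).fetchall())  # [(1, 'bar')]

Passing string names to from_select() here relies on the same behavior that the new test_implicit_id_insert_select_keys exercises; Column objects work equally well.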
